New Upstream Snapshot - golang-google-appengine

Ready changes

Summary

Merged new upstream version: 2.0.2+git20221116.1.504804f (was: 1.6.7).

Resulting package

Built on 2023-01-17T13:46 (took 6m0s)

The resulting binary package can be installed (if you have the apt repository enabled) by running:

apt install -t fresh-snapshots golang-google-appengine-dev
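
To confirm afterwards which snapshot was actually pulled in, the usual apt/dpkg queries work; for example (assuming the package name above):

apt policy golang-google-appengine-dev
dpkg -s golang-google-appengine-dev | grep '^Version'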

Diff

diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index 6d03f4d..0000000
--- a/.travis.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-language: go
-
-go_import_path: google.golang.org/appengine
-
-install:
-  - ./travis_install.sh
-
-script:
-  - ./travis_test.sh
-
-matrix:
-  include:
-    - go: 1.9.x
-      env: GOAPP=true
-    - go: 1.10.x
-      env: GOAPP=false
-    - go: 1.11.x
-      env: GO111MODULE=on
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index ffc2985..2896936 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -19,14 +19,12 @@
 
 ## Running system tests
 
-Download and install the [Go App Engine SDK](https://cloud.google.com/appengine/docs/go/download). Make sure the `go_appengine` dir is in your `PATH`.
-
 Set the `APPENGINE_DEV_APPSERVER` environment variable to `/path/to/go_appengine/dev_appserver.py`.
 
-Run tests with `goapp test`:
+Run tests with `go test`:
 
 ```
-goapp test -v google.golang.org/appengine/...
+go test -v google.golang.org/appengine/...
 ```
 
 ## Contributor License Agreements
diff --git a/README.md b/README.md
index 9fdbacd..4e3f50f 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # Go App Engine packages
 
-[![Build Status](https://travis-ci.org/golang/appengine.svg)](https://travis-ci.org/golang/appengine)
+[![CI Status](https://github.com/golang/appengine/actions/workflows/ci.yml/badge.svg)](https://github.com/golang/appengine/actions/workflows/ci.yml)
 
 This repository supports the Go runtime on *App Engine standard*.
 It provides APIs for interacting with App Engine services.
@@ -72,7 +72,7 @@ A few APIs were cleaned up, and there are some differences:
 * `appengine/socket` is not required on App Engine flexible environment / Managed VMs.
   Use the standard `net` package instead.
 
-## Key Encode/Decode compatibiltiy to help with datastore library migrations
+## Key Encode/Decode compatibility to help with datastore library migrations
 
 Key compatibility updates have been added to help customers transition from google.golang.org/appengine/datastore to cloud.google.com/go/datastore.
 The `EnableKeyConversion` enables automatic conversion from a key encoded with cloud.google.com/go/datastore to google.golang.org/appengine/datastore key type.
diff --git a/aetest/instance_classic.go b/aetest/instance_classic.go
index fbceaa5..c0a7c09 100644
--- a/aetest/instance_classic.go
+++ b/aetest/instance_classic.go
@@ -1,3 +1,4 @@
+//go:build appengine
 // +build appengine
 
 package aetest
@@ -13,7 +14,7 @@ func NewInstance(opts *Options) (Instance, error) {
 	var aeOpts *aetest.Options
 	if opts != nil {
 		aeOpts = &aetest.Options{
-			AppID: opts.AppID,
+			AppID:                       opts.AppID,
 			StronglyConsistentDatastore: opts.StronglyConsistentDatastore,
 		}
 	}
diff --git a/aetest/instance_vm.go b/aetest/instance_vm.go
index 89ff8b1..e1647fd 100644
--- a/aetest/instance_vm.go
+++ b/aetest/instance_vm.go
@@ -1,3 +1,4 @@
+//go:build !appengine
 // +build !appengine
 
 package aetest
@@ -17,7 +18,6 @@ import (
 	"regexp"
 	"time"
 
-	"golang.org/x/net/context"
 	"google.golang.org/appengine/internal"
 )
 
@@ -60,7 +60,6 @@ type instance struct {
 	appDir         string
 	appID          string
 	startupTimeout time.Duration
-	relFuncs       []func() // funcs to release any associated contexts
 }
 
 // NewRequest returns an *http.Request associated with this instance.
@@ -71,21 +70,11 @@ func (i *instance) NewRequest(method, urlStr string, body io.Reader) (*http.Requ
 	}
 
 	// Associate this request.
-	req, release := internal.RegisterTestRequest(req, i.apiURL, func(ctx context.Context) context.Context {
-		ctx = internal.WithAppIDOverride(ctx, "dev~"+i.appID)
-		return ctx
-	})
-	i.relFuncs = append(i.relFuncs, release)
-
-	return req, nil
+	return internal.RegisterTestRequest(req, i.apiURL, "dev~"+i.appID), nil
 }
 
 // Close kills the child api_server.py process, releasing its resources.
 func (i *instance) Close() (err error) {
-	for _, rel := range i.relFuncs {
-		rel()
-	}
-	i.relFuncs = nil
 	child := i.child
 	if child == nil {
 		return nil
@@ -192,6 +181,11 @@ func (i *instance) startChild() (err error) {
 		return err
 	}
 
+	datastorePath := os.Getenv("APPENGINE_DEV_APPSERVER_DATASTORE_PATH")
+	if len(datastorePath) == 0 {
+		datastorePath = filepath.Join(i.appDir, "datastore")
+	}
+
 	appserverArgs = append(appserverArgs,
 		"--port=0",
 		"--api_port=0",
@@ -200,7 +194,7 @@ func (i *instance) startChild() (err error) {
 		"--skip_sdk_update_check=true",
 		"--clear_datastore=true",
 		"--clear_search_indexes=true",
-		"--datastore_path", filepath.Join(i.appDir, "datastore"),
+		"--datastore_path", datastorePath,
 	)
 	if i.opts != nil && i.opts.StronglyConsistentDatastore {
 		appserverArgs = append(appserverArgs, "--datastore_consistency_policy=consistent")
diff --git a/appengine.go b/appengine.go
index 8c96976..ad8e94b 100644
--- a/appengine.go
+++ b/appengine.go
@@ -35,18 +35,18 @@ import (
 //
 // Main is designed so that the app's main package looks like this:
 //
-//      package main
+//	package main
 //
-//      import (
-//              "google.golang.org/appengine"
+//	import (
+//	        "google.golang.org/appengine"
 //
-//              _ "myapp/package0"
-//              _ "myapp/package1"
-//      )
+//	        _ "myapp/package0"
+//	        _ "myapp/package1"
+//	)
 //
-//      func main() {
-//              appengine.Main()
-//      }
+//	func main() {
+//	        appengine.Main()
+//	}
 //
 // The "myapp/packageX" packages are expected to register HTTP handlers
 // in their init functions.
@@ -54,6 +54,9 @@ func Main() {
 	internal.Main()
 }
 
+// Middleware wraps an http handler so that it can make GAE API calls
+var Middleware func(http.Handler) http.Handler = internal.Middleware
+
 // IsDevAppServer reports whether the App Engine app is running in the
 // development App Server.
 func IsDevAppServer() bool {
diff --git a/appengine_vm.go b/appengine_vm.go
index f4b645a..6e1d041 100644
--- a/appengine_vm.go
+++ b/appengine_vm.go
@@ -2,19 +2,19 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package appengine
 
 import (
-	"golang.org/x/net/context"
-
-	"google.golang.org/appengine/internal"
+	"context"
 )
 
 // BackgroundContext returns a context not associated with a request.
-// This should only be used when not servicing a request.
-// This only works in App Engine "flexible environment".
+//
+// Deprecated: App Engine no longer has a special background context.
+// Just use context.Background().
 func BackgroundContext() context.Context {
-	return internal.BackgroundContext()
+	return context.Background()
 }
diff --git a/capability/capability.go b/capability/capability.go
index 35604d4..a7cc166 100644
--- a/capability/capability.go
+++ b/capability/capability.go
@@ -9,6 +9,7 @@ for specific API capabilities.
 This package does not work in App Engine "flexible environment".
 
 Example:
+
 	if !capability.Enabled(c, "datastore_v3", "write") {
 		// show user a different page
 	}
diff --git a/cloudsql/cloudsql.go b/cloudsql/cloudsql.go
index 7b27e6b..022e600 100644
--- a/cloudsql/cloudsql.go
+++ b/cloudsql/cloudsql.go
@@ -14,19 +14,19 @@ with protocol "cloudsql" and an address of the Cloud SQL instance.
 
 A Go MySQL driver that has been tested to work well with Cloud SQL
 is the go-sql-driver:
+
 	import "database/sql"
 	import _ "github.com/go-sql-driver/mysql"
 
 	db, err := sql.Open("mysql", "user@cloudsql(project-id:instance-name)/dbname")
 
-
 Another driver that works well with Cloud SQL is the mymysql driver:
+
 	import "database/sql"
 	import _ "github.com/ziutek/mymysql/godrv"
 
 	db, err := sql.Open("mymysql", "cloudsql:instance-name*dbname/user/password")
 
-
 Using either of these drivers, you can perform a standard SQL query.
 This example assumes there is a table named 'users' with
 columns 'first_name' and 'last_name':
diff --git a/cloudsql/cloudsql_classic.go b/cloudsql/cloudsql_classic.go
index af62dba..f0b80cf 100644
--- a/cloudsql/cloudsql_classic.go
+++ b/cloudsql/cloudsql_classic.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build appengine
 // +build appengine
 
 package cloudsql
diff --git a/cloudsql/cloudsql_vm.go b/cloudsql/cloudsql_vm.go
index 90fa7b3..c3e4903 100644
--- a/cloudsql/cloudsql_vm.go
+++ b/cloudsql/cloudsql_vm.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package cloudsql
diff --git a/cmd/aebundler/aebundler.go b/cmd/aebundler/aebundler.go
index c66849e..a46994d 100644
--- a/cmd/aebundler/aebundler.go
+++ b/cmd/aebundler/aebundler.go
@@ -8,9 +8,10 @@
 // A main func is synthesized if one does not exist.
 //
 // A sample Dockerfile to be used with this bundler could look like this:
-//     FROM gcr.io/google-appengine/go-compat
-//     ADD . /app
-//     RUN GOPATH=/app/_gopath go build -tags appenginevm -o /app/_ah/exe
+//
+//	FROM gcr.io/google-appengine/go-compat
+//	ADD . /app
+//	RUN GOPATH=/app/_gopath go build -tags appenginevm -o /app/_ah/exe
 package main
 
 import (
diff --git a/datastore/doc.go b/datastore/doc.go
index 85616cf..1ecf518 100644
--- a/datastore/doc.go
+++ b/datastore/doc.go
@@ -5,8 +5,7 @@
 /*
 Package datastore provides a client for App Engine's datastore service.
 
-
-Basic Operations
+# Basic Operations
 
 Entities are the unit of storage and are associated with a key. A key
 consists of an optional parent key, a string application ID, a string kind
@@ -74,8 +73,7 @@ GetMulti, PutMulti and DeleteMulti are batch versions of the Get, Put and
 Delete functions. They take a []*Key instead of a *Key, and may return an
 appengine.MultiError when encountering partial failure.
 
-
-Properties
+# Properties
 
 An entity's contents can be represented by a variety of types. These are
 typically struct pointers, but can also be any type that implements the
@@ -137,8 +135,7 @@ Example code:
 		J int `datastore:",noindex" json:"j"`
 	}
 
-
-Structured Properties
+# Structured Properties
 
 If the struct pointed to contains other structs, then the nested or embedded
 structs are flattened. For example, given these definitions:
@@ -179,8 +176,7 @@ equivalent field would instead be: FooDotZ bool `datastore:"Foo.Z"`.
 If an outer struct is tagged "noindex" then all of its implicit flattened
 fields are effectively "noindex".
 
-
-The PropertyLoadSaver Interface
+# The PropertyLoadSaver Interface
 
 An entity's contents can also be represented by any type that implements the
 PropertyLoadSaver interface. This type may be a struct pointer, but it does
@@ -230,8 +226,7 @@ Example code:
 The *PropertyList type implements PropertyLoadSaver, and can therefore hold an
 arbitrary entity's contents.
 
-
-Queries
+# Queries
 
 Queries retrieve entities based on their properties or key's ancestry. Running
 a query yields an iterator of results: either keys or (key, entity) pairs.
@@ -284,8 +279,7 @@ Example code:
 		io.Copy(w, b)
 	}
 
-
-Transactions
+# Transactions
 
 RunInTransaction runs a function in a transaction.
 
@@ -323,8 +317,7 @@ Example code:
 		fmt.Fprintf(w, "Count=%d", count)
 	}
 
-
-Metadata
+# Metadata
 
 The datastore package provides access to some of App Engine's datastore
 metadata. This metadata includes information about the entity groups,
diff --git a/datastore/metadata.go b/datastore/metadata.go
index 6acacc3..177a1b9 100644
--- a/datastore/metadata.go
+++ b/datastore/metadata.go
@@ -50,13 +50,14 @@ func keyNames(keys []*Key) []string {
 // The properties are returned as a map of property names to a slice of the
 // representation types. The representation types for the supported Go property
 // types are:
-//   "INT64":     signed integers and time.Time
-//   "DOUBLE":    float32 and float64
-//   "BOOLEAN":   bool
-//   "STRING":    string, []byte and ByteString
-//   "POINT":     appengine.GeoPoint
-//   "REFERENCE": *Key
-//   "USER":      (not used in the Go runtime)
+//
+//	"INT64":     signed integers and time.Time
+//	"DOUBLE":    float32 and float64
+//	"BOOLEAN":   bool
+//	"STRING":    string, []byte and ByteString
+//	"POINT":     appengine.GeoPoint
+//	"REFERENCE": *Key
+//	"USER":      (not used in the Go runtime)
 func KindProperties(ctx context.Context, kind string) (map[string][]string, error) {
 	// TODO(djd): Support range queries.
 	kindKey := NewKey(ctx, kindKind, kind, 0, nil)
diff --git a/datastore/query.go b/datastore/query.go
index 4124534..47d8e83 100644
--- a/datastore/query.go
+++ b/datastore/query.go
@@ -476,7 +476,7 @@ func callNext(c context.Context, res *pb.QueryResult, offset, count int32) error
 // The keys returned by GetAll will be in a 1-1 correspondence with the entities
 // added to dst.
 //
-// If q is a ``keys-only'' query, GetAll ignores dst and only returns the keys.
+// If q is a “keys-only” query, GetAll ignores dst and only returns the keys.
 //
 // The running time and number of API calls made by GetAll scale linearly with
 // the sum of the query's offset and limit. Unless the result count is
diff --git a/debian/changelog b/debian/changelog
index db3f527..812ec35 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+golang-google-appengine (2.0.2+git20221116.1.504804f-1) UNRELEASED; urgency=low
+
+  * New upstream snapshot.
+
+ -- Debian Janitor <janitor@jelmer.uk>  Tue, 17 Jan 2023 13:43:39 -0000
+
 golang-google-appengine (1.6.7-2) unstable; urgency=medium
 
   * Team upload
diff --git a/delay/delay.go b/delay/delay.go
index 0a8df62..d02acd9 100644
--- a/delay/delay.go
+++ b/delay/delay.go
@@ -10,14 +10,19 @@ To declare a function that may be executed later, call Func
 in a top-level assignment context, passing it an arbitrary string key
 and a function whose first argument is of type context.Context.
 The key is used to look up the function so it can be called later.
+
 	var laterFunc = delay.Func("key", myFunc)
+
 It is also possible to use a function literal.
+
 	var laterFunc = delay.Func("key", func(c context.Context, x string) {
 		// ...
 	})
 
 To call a function, invoke its Call method.
+
 	laterFunc.Call(c, "something")
+
 A function may be called any number of times. If the function has any
 return arguments, and the last one is of type error, the function may
 return a non-nil error to signal that the function should be retried.
@@ -37,9 +42,9 @@ with the string key that was passed to the Func function. Updating an app
 with pending function invocations should safe as long as the relevant
 functions have the (filename, key) combination preserved. The filename is
 parsed according to these rules:
-  * Paths in package main are shortened to just the file name (github.com/foo/foo.go -> foo.go)
-  * Paths are stripped to just package paths (/go/src/github.com/foo/bar.go -> github.com/foo/bar.go)
-  * Module versions are stripped (/go/pkg/mod/github.com/foo/bar@v0.0.0-20181026220418-f595d03440dc/baz.go -> github.com/foo/bar/baz.go)
+  - Paths in package main are shortened to just the file name (github.com/foo/foo.go -> foo.go)
+  - Paths are stripped to just package paths (/go/src/github.com/foo/bar.go -> github.com/foo/bar.go)
+  - Module versions are stripped (/go/pkg/mod/github.com/foo/bar@v0.0.0-20181026220418-f595d03440dc/baz.go -> github.com/foo/bar/baz.go)
 
 There is some inherent risk of pending function invocations being lost during
 an update that contains large changes. For example, switching from using GOPATH
@@ -208,10 +213,13 @@ type invocation struct {
 }
 
 // Call invokes a delayed function.
-//   err := f.Call(c, ...)
+//
+//	err := f.Call(c, ...)
+//
 // is equivalent to
-//   t, _ := f.Task(...)
-//   _, err := taskqueue.Add(c, t, "")
+//
+//	t, _ := f.Task(...)
+//	_, err := taskqueue.Add(c, t, "")
 func (f *Function) Call(c context.Context, args ...interface{}) error {
 	t, err := f.Task(args...)
 	if err != nil {
diff --git a/demos/guestbook/guestbook.go b/demos/guestbook/guestbook.go
index 04a0432..86df3fe 100644
--- a/demos/guestbook/guestbook.go
+++ b/demos/guestbook/guestbook.go
@@ -3,6 +3,7 @@
 // license that can be found in the LICENSE file.
 
 // This example only works on App Engine "flexible environment".
+//go:build !appengine
 // +build !appengine
 
 package main
diff --git a/demos/helloworld/helloworld.go b/demos/helloworld/helloworld.go
index fbe9f56..d1da952 100644
--- a/demos/helloworld/helloworld.go
+++ b/demos/helloworld/helloworld.go
@@ -3,6 +3,7 @@
 // license that can be found in the LICENSE file.
 
 // This example only works on App Engine "flexible environment".
+//go:build !appengine
 // +build !appengine
 
 package main
diff --git a/go.mod b/go.mod
index 635c34f..85a8987 100644
--- a/go.mod
+++ b/go.mod
@@ -3,7 +3,7 @@ module google.golang.org/appengine
 go 1.11
 
 require (
-	github.com/golang/protobuf v1.3.1
-	golang.org/x/net v0.0.0-20190603091049-60506f45cf65
-	golang.org/x/text v0.3.2
+	github.com/golang/protobuf v1.5.2
+	golang.org/x/net v0.0.0-20210525063256-abc453219eb5
+	golang.org/x/text v0.3.6
 )
diff --git a/go.sum b/go.sum
index ce22f68..d9d27d8 100644
--- a/go.sum
+++ b/go.sum
@@ -1,11 +1,18 @@
-github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
-github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
-golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
-golang.org/x/net v0.0.0-20190603091049-60506f45cf65 h1:+rhAzEzT3f4JtomfC371qB+0Ola2caSKcY69NUBZrRQ=
-golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
-golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
-golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
-golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
-golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
+github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+golang.org/x/net v0.0.0-20210525063256-abc453219eb5 h1:wjuX4b5yYQnEQHzd+CBcrcC6OVR2J1CN6mUy0oSxIPo=
+golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
+google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=
+google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
diff --git a/internal/api.go b/internal/api.go
index 721053c..2339da3 100644
--- a/internal/api.go
+++ b/internal/api.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package internal
@@ -23,8 +24,9 @@ import (
 	"sync/atomic"
 	"time"
 
+	netcontext "context"
+
 	"github.com/golang/protobuf/proto"
-	netcontext "golang.org/x/net/context"
 
 	basepb "google.golang.org/appengine/internal/base"
 	logpb "google.golang.org/appengine/internal/log"
@@ -32,8 +34,7 @@ import (
 )
 
 const (
-	apiPath             = "/rpc_http"
-	defaultTicketSuffix = "/default.20150612t184001.0"
+	apiPath = "/rpc_http"
 )
 
 var (
@@ -65,21 +66,22 @@ var (
 			IdleConnTimeout:     90 * time.Second,
 		},
 	}
-
-	defaultTicketOnce     sync.Once
-	defaultTicket         string
-	backgroundContextOnce sync.Once
-	backgroundContext     netcontext.Context
 )
 
-func apiURL() *url.URL {
+func apiURL(ctx netcontext.Context) *url.URL {
 	host, port := "appengine.googleapis.internal", "10001"
 	if h := os.Getenv("API_HOST"); h != "" {
 		host = h
 	}
+	if hostOverride := ctx.Value(apiHostOverrideKey); hostOverride != nil {
+		host = hostOverride.(string)
+	}
 	if p := os.Getenv("API_PORT"); p != "" {
 		port = p
 	}
+	if portOverride := ctx.Value(apiPortOverrideKey); portOverride != nil {
+		port = portOverride.(string)
+	}
 	return &url.URL{
 		Scheme: "http",
 		Host:   host + ":" + port,
@@ -87,82 +89,97 @@ func apiURL() *url.URL {
 	}
 }
 
-func handleHTTP(w http.ResponseWriter, r *http.Request) {
-	c := &context{
-		req:       r,
-		outHeader: w.Header(),
-		apiURL:    apiURL(),
-	}
-	r = r.WithContext(withContext(r.Context(), c))
-	c.req = r
-
-	stopFlushing := make(chan int)
+// Middleware wraps an http handler so that it can make GAE API calls
+func Middleware(next http.Handler) http.Handler {
+	return handleHTTPMiddleware(executeRequestSafelyMiddleware(next))
+}
 
-	// Patch up RemoteAddr so it looks reasonable.
-	if addr := r.Header.Get(userIPHeader); addr != "" {
-		r.RemoteAddr = addr
-	} else if addr = r.Header.Get(remoteAddrHeader); addr != "" {
-		r.RemoteAddr = addr
-	} else {
-		// Should not normally reach here, but pick a sensible default anyway.
-		r.RemoteAddr = "127.0.0.1"
-	}
-	// The address in the headers will most likely be of these forms:
-	//	123.123.123.123
-	//	2001:db8::1
-	// net/http.Request.RemoteAddr is specified to be in "IP:port" form.
-	if _, _, err := net.SplitHostPort(r.RemoteAddr); err != nil {
-		// Assume the remote address is only a host; add a default port.
-		r.RemoteAddr = net.JoinHostPort(r.RemoteAddr, "80")
-	}
+func handleHTTPMiddleware(next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		c := &context{
+			req:       r,
+			outHeader: w.Header(),
+		}
+		r = r.WithContext(withContext(r.Context(), c))
+		c.req = r
+
+		stopFlushing := make(chan int)
+
+		// Patch up RemoteAddr so it looks reasonable.
+		if addr := r.Header.Get(userIPHeader); addr != "" {
+			r.RemoteAddr = addr
+		} else if addr = r.Header.Get(remoteAddrHeader); addr != "" {
+			r.RemoteAddr = addr
+		} else {
+			// Should not normally reach here, but pick a sensible default anyway.
+			r.RemoteAddr = "127.0.0.1"
+		}
+		// The address in the headers will most likely be of these forms:
+		//	123.123.123.123
+		//	2001:db8::1
+		// net/http.Request.RemoteAddr is specified to be in "IP:port" form.
+		if _, _, err := net.SplitHostPort(r.RemoteAddr); err != nil {
+			// Assume the remote address is only a host; add a default port.
+			r.RemoteAddr = net.JoinHostPort(r.RemoteAddr, "80")
+		}
 
-	// Start goroutine responsible for flushing app logs.
-	// This is done after adding c to ctx.m (and stopped before removing it)
-	// because flushing logs requires making an API call.
-	go c.logFlusher(stopFlushing)
+		if logToLogservice() {
+			// Start goroutine responsible for flushing app logs.
+			// This is done after adding c to ctx.m (and stopped before removing it)
+			// because flushing logs requires making an API call.
+			go c.logFlusher(stopFlushing)
+		}
 
-	executeRequestSafely(c, r)
-	c.outHeader = nil // make sure header changes aren't respected any more
+		next.ServeHTTP(c, r)
+		c.outHeader = nil // make sure header changes aren't respected any more
 
-	stopFlushing <- 1 // any logging beyond this point will be dropped
+		flushed := make(chan struct{})
+		if logToLogservice() {
+			stopFlushing <- 1 // any logging beyond this point will be dropped
 
-	// Flush any pending logs asynchronously.
-	c.pendingLogs.Lock()
-	flushes := c.pendingLogs.flushes
-	if len(c.pendingLogs.lines) > 0 {
-		flushes++
-	}
-	c.pendingLogs.Unlock()
-	flushed := make(chan struct{})
-	go func() {
-		defer close(flushed)
-		// Force a log flush, because with very short requests we
-		// may not ever flush logs.
-		c.flushLog(true)
-	}()
-	w.Header().Set(logFlushHeader, strconv.Itoa(flushes))
+			// Flush any pending logs asynchronously.
+			c.pendingLogs.Lock()
+			flushes := c.pendingLogs.flushes
+			if len(c.pendingLogs.lines) > 0 {
+				flushes++
+			}
+			c.pendingLogs.Unlock()
+			go func() {
+				defer close(flushed)
+				// Force a log flush, because with very short requests we
+				// may not ever flush logs.
+				c.flushLog(true)
+			}()
+			w.Header().Set(logFlushHeader, strconv.Itoa(flushes))
+		}
 
-	// Avoid nil Write call if c.Write is never called.
-	if c.outCode != 0 {
-		w.WriteHeader(c.outCode)
-	}
-	if c.outBody != nil {
-		w.Write(c.outBody)
-	}
-	// Wait for the last flush to complete before returning,
-	// otherwise the security ticket will not be valid.
-	<-flushed
+		// Avoid nil Write call if c.Write is never called.
+		if c.outCode != 0 {
+			w.WriteHeader(c.outCode)
+		}
+		if c.outBody != nil {
+			w.Write(c.outBody)
+		}
+		if logToLogservice() {
+			// Wait for the last flush to complete before returning,
+			// otherwise the security ticket will not be valid.
+			<-flushed
+		}
+	})
 }
 
-func executeRequestSafely(c *context, r *http.Request) {
-	defer func() {
-		if x := recover(); x != nil {
-			logf(c, 4, "%s", renderPanic(x)) // 4 == critical
-			c.outCode = 500
-		}
-	}()
+func executeRequestSafelyMiddleware(next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		defer func() {
+			if x := recover(); x != nil {
+				c := w.(*context)
+				logf(c, 4, "%s", renderPanic(x)) // 4 == critical
+				c.outCode = 500
+			}
+		}()
 
-	http.DefaultServeMux.ServeHTTP(c, r)
+		next.ServeHTTP(w, r)
+	})
 }
 
 func renderPanic(x interface{}) string {
@@ -218,8 +235,6 @@ type context struct {
 		lines   []*logpb.UserAppLogLine
 		flushes int
 	}
-
-	apiURL *url.URL
 }
 
 var contextKey = "holds a *context"
@@ -287,59 +302,19 @@ func WithContext(parent netcontext.Context, req *http.Request) netcontext.Contex
 	}
 }
 
-// DefaultTicket returns a ticket used for background context or dev_appserver.
-func DefaultTicket() string {
-	defaultTicketOnce.Do(func() {
-		if IsDevAppServer() {
-			defaultTicket = "testapp" + defaultTicketSuffix
-			return
-		}
-		appID := partitionlessAppID()
-		escAppID := strings.Replace(strings.Replace(appID, ":", "_", -1), ".", "_", -1)
-		majVersion := VersionID(nil)
-		if i := strings.Index(majVersion, "."); i > 0 {
-			majVersion = majVersion[:i]
-		}
-		defaultTicket = fmt.Sprintf("%s/%s.%s.%s", escAppID, ModuleName(nil), majVersion, InstanceID())
-	})
-	return defaultTicket
-}
-
-func BackgroundContext() netcontext.Context {
-	backgroundContextOnce.Do(func() {
-		// Compute background security ticket.
-		ticket := DefaultTicket()
-
-		c := &context{
-			req: &http.Request{
-				Header: http.Header{
-					ticketHeader: []string{ticket},
-				},
-			},
-			apiURL: apiURL(),
-		}
-		backgroundContext = toContext(c)
-
-		// TODO(dsymonds): Wire up the shutdown handler to do a final flush.
-		go c.logFlusher(make(chan int))
-	})
-
-	return backgroundContext
-}
-
 // RegisterTestRequest registers the HTTP request req for testing, such that
-// any API calls are sent to the provided URL. It returns a closure to delete
-// the registration.
+// any API calls are sent to the provided URL.
 // It should only be used by aetest package.
-func RegisterTestRequest(req *http.Request, apiURL *url.URL, decorate func(netcontext.Context) netcontext.Context) (*http.Request, func()) {
-	c := &context{
-		req:    req,
-		apiURL: apiURL,
-	}
-	ctx := withContext(decorate(req.Context()), c)
-	req = req.WithContext(ctx)
-	c.req = req
-	return req, func() {}
+func RegisterTestRequest(req *http.Request, apiURL *url.URL, appID string) *http.Request {
+	ctx := req.Context()
+	ctx = withAPIHostOverride(ctx, apiURL.Hostname())
+	ctx = withAPIPortOverride(ctx, apiURL.Port())
+	ctx = WithAppIDOverride(ctx, appID)
+
+	// use the unregistered request as a placeholder so that withContext can read the headers
+	c := &context{req: req}
+	c.req = req.WithContext(withContext(ctx, c))
+	return c.req
 }
 
 var errTimeout = &CallError{
@@ -384,10 +359,11 @@ func (c *context) WriteHeader(code int) {
 	c.outCode = code
 }
 
-func (c *context) post(body []byte, timeout time.Duration) (b []byte, err error) {
+func post(ctx netcontext.Context, body []byte, timeout time.Duration) (b []byte, err error) {
+	apiURL := apiURL(ctx)
 	hreq := &http.Request{
 		Method: "POST",
-		URL:    c.apiURL,
+		URL:    apiURL,
 		Header: http.Header{
 			apiEndpointHeader: apiEndpointHeaderValue,
 			apiMethodHeader:   apiMethodHeaderValue,
@@ -396,13 +372,16 @@ func (c *context) post(body []byte, timeout time.Duration) (b []byte, err error)
 		},
 		Body:          ioutil.NopCloser(bytes.NewReader(body)),
 		ContentLength: int64(len(body)),
-		Host:          c.apiURL.Host,
+		Host:          apiURL.Host,
 	}
-	if info := c.req.Header.Get(dapperHeader); info != "" {
-		hreq.Header.Set(dapperHeader, info)
-	}
-	if info := c.req.Header.Get(traceHeader); info != "" {
-		hreq.Header.Set(traceHeader, info)
+	c := fromContext(ctx)
+	if c != nil {
+		if info := c.req.Header.Get(dapperHeader); info != "" {
+			hreq.Header.Set(dapperHeader, info)
+		}
+		if info := c.req.Header.Get(traceHeader); info != "" {
+			hreq.Header.Set(traceHeader, info)
+		}
 	}
 
 	tr := apiHTTPClient.Transport.(*http.Transport)
@@ -463,10 +442,6 @@ func Call(ctx netcontext.Context, service, method string, in, out proto.Message)
 	}
 
 	c := fromContext(ctx)
-	if c == nil {
-		// Give a good error message rather than a panic lower down.
-		return errNotAppEngineContext
-	}
 
 	// Apply transaction modifications if we're in a transaction.
 	if t := transactionFromContext(ctx); t != nil {
@@ -487,20 +462,13 @@ func Call(ctx netcontext.Context, service, method string, in, out proto.Message)
 		return err
 	}
 
-	ticket := c.req.Header.Get(ticketHeader)
-	// Use a test ticket under test environment.
-	if ticket == "" {
-		if appid := ctx.Value(&appIDOverrideKey); appid != nil {
-			ticket = appid.(string) + defaultTicketSuffix
+	ticket := ""
+	if c != nil {
+		ticket = c.req.Header.Get(ticketHeader)
+		if dri := c.req.Header.Get(devRequestIdHeader); IsDevAppServer() && dri != "" {
+			ticket = dri
 		}
 	}
-	// Fall back to use background ticket when the request ticket is not available in Flex or dev_appserver.
-	if ticket == "" {
-		ticket = DefaultTicket()
-	}
-	if dri := c.req.Header.Get(devRequestIdHeader); IsDevAppServer() && dri != "" {
-		ticket = dri
-	}
 	req := &remotepb.Request{
 		ServiceName: &service,
 		Method:      &method,
@@ -512,7 +480,7 @@ func Call(ctx netcontext.Context, service, method string, in, out proto.Message)
 		return err
 	}
 
-	hrespBody, err := c.post(hreqBody, timeout)
+	hrespBody, err := post(ctx, hreqBody, timeout)
 	if err != nil {
 		return err
 	}
@@ -581,12 +549,14 @@ func logf(c *context, level int64, format string, args ...interface{}) {
 	}
 	s := fmt.Sprintf(format, args...)
 	s = strings.TrimRight(s, "\n") // Remove any trailing newline characters.
-	c.addLogLine(&logpb.UserAppLogLine{
-		TimestampUsec: proto.Int64(time.Now().UnixNano() / 1e3),
-		Level:         &level,
-		Message:       &s,
-	})
-	// Only duplicate log to stderr if not running on App Engine second generation
+	if logToLogservice() {
+		c.addLogLine(&logpb.UserAppLogLine{
+			TimestampUsec: proto.Int64(time.Now().UnixNano() / 1e3),
+			Level:         &level,
+			Message:       &s,
+		})
+	}
+	// Log to stdout if not deployed
 	if !IsSecondGen() {
 		log.Print(logLevelName[level] + ": " + s)
 	}
@@ -676,3 +646,9 @@ func (c *context) logFlusher(stop <-chan int) {
 func ContextForTesting(req *http.Request) netcontext.Context {
 	return toContext(&context{req: req})
 }
+
+func logToLogservice() bool {
+	// TODO: replace logservice with json structured logs to $LOG_DIR/app.log.json
+	// where $LOG_DIR is /var/log in prod and some tmpdir in dev
+	return os.Getenv("LOG_TO_LOGSERVICE") != "0"
+}
diff --git a/internal/api_classic.go b/internal/api_classic.go
index f0f40b2..890c84b 100644
--- a/internal/api_classic.go
+++ b/internal/api_classic.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build appengine
 // +build appengine
 
 package internal
@@ -144,8 +145,8 @@ func Call(ctx netcontext.Context, service, method string, in, out proto.Message)
 	return err
 }
 
-func handleHTTP(w http.ResponseWriter, r *http.Request) {
-	panic("handleHTTP called; this should be impossible")
+func Middleware(next http.Handler) http.Handler {
+	panic("Middleware called; this should be impossible")
 }
 
 func logf(c appengine.Context, level int64, format string, args ...interface{}) {
diff --git a/internal/api_common.go b/internal/api_common.go
index e0c0b21..f6101d3 100644
--- a/internal/api_common.go
+++ b/internal/api_common.go
@@ -5,13 +5,19 @@
 package internal
 
 import (
+	netcontext "context"
 	"errors"
 	"os"
 
 	"github.com/golang/protobuf/proto"
-	netcontext "golang.org/x/net/context"
 )
 
+type ctxKey string
+
+func (c ctxKey) String() string {
+	return "appengine context key: " + string(c)
+}
+
 var errNotAppEngineContext = errors.New("not an App Engine context")
 
 type CallOverrideFunc func(ctx netcontext.Context, service, method string, in, out proto.Message) error
@@ -55,6 +61,18 @@ func WithAppIDOverride(ctx netcontext.Context, appID string) netcontext.Context
 	return netcontext.WithValue(ctx, &appIDOverrideKey, appID)
 }
 
+var apiHostOverrideKey = ctxKey("holds a string, being the alternate API_HOST")
+
+func withAPIHostOverride(ctx netcontext.Context, apiHost string) netcontext.Context {
+	return netcontext.WithValue(ctx, apiHostOverrideKey, apiHost)
+}
+
+var apiPortOverrideKey = ctxKey("holds a string, being the alternate API_PORT")
+
+func withAPIPortOverride(ctx netcontext.Context, apiPort string) netcontext.Context {
+	return netcontext.WithValue(ctx, apiPortOverrideKey, apiPort)
+}
+
 var namespaceKey = "holds the namespace string"
 
 func withNamespace(ctx netcontext.Context, ns string) netcontext.Context {
diff --git a/internal/api_race_test.go b/internal/api_race_test.go
index 6cfe906..845d0da 100644
--- a/internal/api_race_test.go
+++ b/internal/api_race_test.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build race
 // +build race
 
 package internal
diff --git a/internal/api_test.go b/internal/api_test.go
index aa36029..c1be17d 100644
--- a/internal/api_test.go
+++ b/internal/api_test.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package internal
@@ -9,6 +10,7 @@ package internal
 import (
 	"bufio"
 	"bytes"
+	netcontext "context"
 	"fmt"
 	"io"
 	"io/ioutil"
@@ -17,19 +19,20 @@ import (
 	"net/url"
 	"os"
 	"os/exec"
+	"runtime"
 	"strings"
 	"sync/atomic"
 	"testing"
 	"time"
 
 	"github.com/golang/protobuf/proto"
-	netcontext "golang.org/x/net/context"
 
 	basepb "google.golang.org/appengine/internal/base"
 	remotepb "google.golang.org/appengine/internal/remote_api"
 )
 
 const testTicketHeader = "X-Magic-Ticket-Header"
+const logserviceEnvVarKey = "LOG_TO_LOGSERVICE"
 
 func init() {
 	ticketHeader = testTicketHeader
@@ -39,6 +42,8 @@ type fakeAPIHandler struct {
 	hang chan int // used for RunSlowly RPC
 
 	LogFlushes int32 // atomic
+
+	allowMissingTicket bool
 }
 
 func (f *fakeAPIHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
@@ -65,7 +70,7 @@ func (f *fakeAPIHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
 		http.Error(w, fmt.Sprintf("Bad encoded API request: %v", err), 500)
 		return
 	}
-	if *apiReq.RequestId != "s3cr3t" && *apiReq.RequestId != DefaultTicket() {
+	if *apiReq.RequestId != "s3cr3t" && !f.allowMissingTicket {
 		writeResponse(&remotepb.Response{
 			RpcError: &remotepb.RpcError{
 				Code:   proto.Int32(int32(remotepb.RpcError_SECURITY_VIOLATION)),
@@ -145,18 +150,36 @@ func setup() (f *fakeAPIHandler, c *context, cleanup func()) {
 	f = &fakeAPIHandler{}
 	srv := httptest.NewServer(f)
 	u, err := url.Parse(srv.URL + apiPath)
+	restoreAPIHost := restoreEnvVar("API_HOST")
+	restoreAPIPort := restoreEnvVar("API_HOST")
+	os.Setenv("API_HOST", u.Hostname())
+	os.Setenv("API_PORT", u.Port())
 	if err != nil {
 		panic(fmt.Sprintf("url.Parse(%q): %v", srv.URL+apiPath, err))
 	}
 	return f, &context{
-		req: &http.Request{
-			Header: http.Header{
-				ticketHeader: []string{"s3cr3t"},
-				dapperHeader: []string{"trace-001"},
+			req: &http.Request{
+				Header: http.Header{
+					ticketHeader: []string{"s3cr3t"},
+					dapperHeader: []string{"trace-001"},
+				},
 			},
-		},
-		apiURL: u,
-	}, srv.Close
+		}, func() {
+			restoreAPIHost()
+			restoreAPIPort()
+			srv.Close()
+		}
+}
+
+func restoreEnvVar(key string) (cleanup func()) {
+	oldval, ok := os.LookupEnv(key)
+	return func() {
+		if ok {
+			os.Setenv(key, oldval)
+		} else {
+			os.Unsetenv(key)
+		}
+	}
 }
 
 func TestAPICall(t *testing.T) {
@@ -179,8 +202,9 @@ func TestAPICall(t *testing.T) {
 func TestAPICallTicketUnavailable(t *testing.T) {
 	resetEnv := SetTestEnv()
 	defer resetEnv()
-	_, c, cleanup := setup()
+	f, c, cleanup := setup()
 	defer cleanup()
+	f.allowMissingTicket = true
 
 	c.req.Header.Set(ticketHeader, "")
 	req := &basepb.StringProto{
@@ -230,13 +254,9 @@ func TestAPICallRPCFailure(t *testing.T) {
 func TestAPICallDialFailure(t *testing.T) {
 	// See what happens if the API host is unresponsive.
 	// This should time out quickly, not hang forever.
-	_, c, cleanup := setup()
-	defer cleanup()
-	// Reset the URL to the production address so that dialing fails.
-	c.apiURL = apiURL()
-
+	// We intentially don't set up the fakeAPIHandler for this test to cause the dail failure.
 	start := time.Now()
-	err := Call(toContext(c), "foo", "bar", &basepb.VoidProto{}, &basepb.VoidProto{})
+	err := Call(netcontext.Background(), "foo", "bar", &basepb.VoidProto{}, &basepb.VoidProto{})
 	const max = 1 * time.Second
 	if taken := time.Since(start); taken > max {
 		t.Errorf("Dial hang took too long: %v > %v", taken, max)
@@ -247,81 +267,114 @@ func TestAPICallDialFailure(t *testing.T) {
 }
 
 func TestDelayedLogFlushing(t *testing.T) {
-	f, c, cleanup := setup()
-	defer cleanup()
-
-	http.HandleFunc("/slow_log", func(w http.ResponseWriter, r *http.Request) {
-		logC := WithContext(netcontext.Background(), r)
-		fromContext(logC).apiURL = c.apiURL // Otherwise it will try to use the default URL.
-		Logf(logC, 1, "It's a lovely day.")
-		w.WriteHeader(200)
-		time.Sleep(1200 * time.Millisecond)
-		w.Write(make([]byte, 100<<10)) // write 100 KB to force HTTP flush
-	})
+	defer restoreEnvVar(logserviceEnvVarKey)()
 
-	r := &http.Request{
-		Method: "GET",
-		URL: &url.URL{
-			Scheme: "http",
-			Path:   "/slow_log",
-		},
-		Header: c.req.Header,
-		Body:   ioutil.NopCloser(bytes.NewReader(nil)),
+	testCases := []struct {
+		logToLogservice    string
+		wantInitialFlushes int32
+		wantHeader         string
+		wantEndFlushes     int32
+	}{
+		{logToLogservice: "", wantHeader: "1", wantInitialFlushes: 1, wantEndFlushes: 2}, // default behavior
+		{logToLogservice: "1", wantHeader: "1", wantInitialFlushes: 1, wantEndFlushes: 2},
+		{logToLogservice: "0", wantHeader: "", wantInitialFlushes: 0, wantEndFlushes: 0},
 	}
-	w := httptest.NewRecorder()
+	for _, tc := range testCases {
+		t.Run(fmt.Sprintf("$%s=%q", logserviceEnvVarKey, tc.logToLogservice), func(t *testing.T) {
+			f, c, cleanup := setup()
+			defer cleanup()
+			os.Setenv(logserviceEnvVarKey, tc.logToLogservice)
+
+			path := "/slow_log_" + tc.logToLogservice
+
+			http.HandleFunc(path, func(w http.ResponseWriter, r *http.Request) {
+				logC := WithContext(netcontext.Background(), r)
+				Logf(logC, 1, "It's a lovely day.")
+				w.WriteHeader(200)
+				time.Sleep(1200 * time.Millisecond)
+				w.Write(make([]byte, 100<<10)) // write 100 KB to force HTTP flush
+			})
 
-	handled := make(chan struct{})
-	go func() {
-		defer close(handled)
-		handleHTTP(w, r)
-	}()
-	// Check that the log flush eventually comes in.
-	time.Sleep(1200 * time.Millisecond)
-	if f := atomic.LoadInt32(&f.LogFlushes); f != 1 {
-		t.Errorf("After 1.2s: f.LogFlushes = %d, want 1", f)
-	}
+			r := &http.Request{
+				Method: "GET",
+				URL: &url.URL{
+					Scheme: "http",
+					Path:   path,
+				},
+				Header: c.req.Header,
+				Body:   ioutil.NopCloser(bytes.NewReader(nil)),
+			}
+			w := httptest.NewRecorder()
+
+			handled := make(chan struct{})
+			go func() {
+				defer close(handled)
+				Middleware(http.DefaultServeMux).ServeHTTP(w, r)
+			}()
+			// Check that the log flush eventually comes in.
+			time.Sleep(1200 * time.Millisecond)
+			if got := atomic.LoadInt32(&f.LogFlushes); got != tc.wantInitialFlushes {
+				t.Errorf("After 1.2s: f.LogFlushes = %d, want %d", got, tc.wantInitialFlushes)
+			}
 
-	<-handled
-	const hdr = "X-AppEngine-Log-Flush-Count"
-	if got, want := w.HeaderMap.Get(hdr), "1"; got != want {
-		t.Errorf("%s header = %q, want %q", hdr, got, want)
-	}
-	if got, want := atomic.LoadInt32(&f.LogFlushes), int32(2); got != want {
-		t.Errorf("After HTTP response: f.LogFlushes = %d, want %d", got, want)
+			<-handled
+			const hdr = "X-AppEngine-Log-Flush-Count"
+			if got := w.HeaderMap.Get(hdr); got != tc.wantHeader {
+				t.Errorf("%s header = %q, want %q", hdr, got, tc.wantHeader)
+			}
+			if got := atomic.LoadInt32(&f.LogFlushes); got != tc.wantEndFlushes {
+				t.Errorf("After HTTP response: f.LogFlushes = %d, want %d", got, tc.wantEndFlushes)
+			}
+		})
 	}
-
 }
 
 func TestLogFlushing(t *testing.T) {
-	f, c, cleanup := setup()
-	defer cleanup()
+	defer restoreEnvVar(logserviceEnvVarKey)()
 
-	http.HandleFunc("/quick_log", func(w http.ResponseWriter, r *http.Request) {
-		logC := WithContext(netcontext.Background(), r)
-		fromContext(logC).apiURL = c.apiURL // Otherwise it will try to use the default URL.
-		Logf(logC, 1, "It's a lovely day.")
-		w.WriteHeader(200)
-		w.Write(make([]byte, 100<<10)) // write 100 KB to force HTTP flush
-	})
-
-	r := &http.Request{
-		Method: "GET",
-		URL: &url.URL{
-			Scheme: "http",
-			Path:   "/quick_log",
-		},
-		Header: c.req.Header,
-		Body:   ioutil.NopCloser(bytes.NewReader(nil)),
+	testCases := []struct {
+		logToLogservice string
+		wantHeader      string
+		wantFlushes     int32
+	}{
+		{logToLogservice: "", wantHeader: "1", wantFlushes: 1}, // default behavior
+		{logToLogservice: "1", wantHeader: "1", wantFlushes: 1},
+		{logToLogservice: "0", wantHeader: "", wantFlushes: 0},
 	}
-	w := httptest.NewRecorder()
+	for _, tc := range testCases {
+		t.Run(fmt.Sprintf("$%s=%q", logserviceEnvVarKey, tc.logToLogservice), func(t *testing.T) {
+			f, c, cleanup := setup()
+			defer cleanup()
+			os.Setenv(logserviceEnvVarKey, tc.logToLogservice)
+
+			path := "/quick_log_" + tc.logToLogservice
+			http.HandleFunc(path, func(w http.ResponseWriter, r *http.Request) {
+				logC := WithContext(netcontext.Background(), r)
+				Logf(logC, 1, "It's a lovely day.")
+				w.WriteHeader(200)
+				w.Write(make([]byte, 100<<10)) // write 100 KB to force HTTP flush
+			})
 
-	handleHTTP(w, r)
-	const hdr = "X-AppEngine-Log-Flush-Count"
-	if got, want := w.HeaderMap.Get(hdr), "1"; got != want {
-		t.Errorf("%s header = %q, want %q", hdr, got, want)
-	}
-	if got, want := atomic.LoadInt32(&f.LogFlushes), int32(1); got != want {
-		t.Errorf("After HTTP response: f.LogFlushes = %d, want %d", got, want)
+			r := &http.Request{
+				Method: "GET",
+				URL: &url.URL{
+					Scheme: "http",
+					Path:   path,
+				},
+				Header: c.req.Header,
+				Body:   ioutil.NopCloser(bytes.NewReader(nil)),
+			}
+			w := httptest.NewRecorder()
+
+			Middleware(http.DefaultServeMux).ServeHTTP(w, r)
+			const hdr = "X-AppEngine-Log-Flush-Count"
+			if got := w.HeaderMap.Get(hdr); got != tc.wantHeader {
+				t.Errorf("%s header = %q, want %q", hdr, got, tc.wantHeader)
+			}
+			if got := atomic.LoadInt32(&f.LogFlushes); got != tc.wantFlushes {
+				t.Errorf("After HTTP response: f.LogFlushes = %d, want %d", got, tc.wantFlushes)
+			}
+		})
 	}
 }
 
@@ -356,7 +409,7 @@ func TestRemoteAddr(t *testing.T) {
 			Header: tc.headers,
 			Body:   ioutil.NopCloser(bytes.NewReader(nil)),
 		}
-		handleHTTP(httptest.NewRecorder(), r)
+		Middleware(http.DefaultServeMux).ServeHTTP(httptest.NewRecorder(), r)
 		if addr != tc.addr {
 			t.Errorf("Header %v, got %q, want %q", tc.headers, addr, tc.addr)
 		}
@@ -373,7 +426,7 @@ func TestPanickingHandler(t *testing.T) {
 		Body:   ioutil.NopCloser(bytes.NewReader(nil)),
 	}
 	rec := httptest.NewRecorder()
-	handleHTTP(rec, r)
+	Middleware(http.DefaultServeMux).ServeHTTP(rec, r)
 	if rec.Code != 500 {
 		t.Errorf("Panicking handler returned HTTP %d, want HTTP %d", rec.Code, 500)
 	}
@@ -387,7 +440,8 @@ func TestAPICallAllocations(t *testing.T) {
 	}
 
 	// Run the test API server in a subprocess so we aren't counting its allocations.
-	u, cleanup := launchHelperProcess(t)
+	cleanup := launchHelperProcess(t)
+
 	defer cleanup()
 	c := &context{
 		req: &http.Request{
@@ -396,7 +450,6 @@ func TestAPICallAllocations(t *testing.T) {
 				dapperHeader: []string{"trace-001"},
 			},
 		},
-		apiURL: u,
 	}
 
 	req := &basepb.StringProto{
@@ -415,13 +468,18 @@ func TestAPICallAllocations(t *testing.T) {
 	}
 
 	// Lots of room for improvement...
-	const min, max float64 = 60, 86
+	var min, max float64 = 60, 86
+	if strings.HasPrefix(runtime.Version(), "go1.11.") || strings.HasPrefix(runtime.Version(), "go1.12.") {
+		// add a bit more overhead for versions before go1.13
+		// see https://go.dev/doc/go1.13#compilers
+		max = 90
+	}
 	if avg < min || max < avg {
 		t.Errorf("Allocations per API call = %g, want in [%g,%g]", avg, min, max)
 	}
 }
 
-func launchHelperProcess(t *testing.T) (apiURL *url.URL, cleanup func()) {
+func launchHelperProcess(t *testing.T) (cleanup func()) {
 	cmd := exec.Command(os.Args[0], "-test.run=TestHelperProcess")
 	cmd.Env = []string{"GO_WANT_HELPER_PROCESS=1"}
 	stdin, err := cmd.StdinPipe()
@@ -456,7 +514,13 @@ func launchHelperProcess(t *testing.T) (apiURL *url.URL, cleanup func()) {
 		t.Fatal("Helper process never reported")
 	}
 
-	return u, func() {
+	restoreAPIHost := restoreEnvVar("API_HOST")
+	restoreAPIPort := restoreEnvVar("API_HOST")
+	os.Setenv("API_HOST", u.Hostname())
+	os.Setenv("API_PORT", u.Port())
+	return func() {
+		restoreAPIHost()
+		restoreAPIPort()
 		stdin.Close()
 		if err := cmd.Wait(); err != nil {
 			t.Errorf("Helper process did not exit cleanly: %v", err)
@@ -481,20 +545,3 @@ func TestHelperProcess(*testing.T) {
 	// Wait for stdin to be closed.
 	io.Copy(ioutil.Discard, os.Stdin)
 }
-
-func TestBackgroundContext(t *testing.T) {
-	resetEnv := SetTestEnv()
-	defer resetEnv()
-
-	ctx, key := fromContext(BackgroundContext()), "X-Magic-Ticket-Header"
-	if g, w := ctx.req.Header.Get(key), "my-app-id/default.20150612t184001.0"; g != w {
-		t.Errorf("%v = %q, want %q", key, g, w)
-	}
-
-	// Check that using the background context doesn't panic.
-	req := &basepb.StringProto{
-		Value: proto.String("Doctor Who"),
-	}
-	res := &basepb.StringProto{}
-	Call(BackgroundContext(), "actordb", "LookupActor", req, res) // expected to fail
-}
diff --git a/internal/identity_classic.go b/internal/identity_classic.go
index 4e979f4..77fb7cc 100644
--- a/internal/identity_classic.go
+++ b/internal/identity_classic.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build appengine
 // +build appengine
 
 package internal
diff --git a/internal/identity_flex.go b/internal/identity_flex.go
index d5e2e7b..4201b6b 100644
--- a/internal/identity_flex.go
+++ b/internal/identity_flex.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build appenginevm
 // +build appenginevm
 
 package internal
diff --git a/internal/identity_vm.go b/internal/identity_vm.go
index 5d80672..0a32cc3 100644
--- a/internal/identity_vm.go
+++ b/internal/identity_vm.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package internal
@@ -130,5 +131,5 @@ func fullyQualifiedAppID(_ netcontext.Context) string {
 }
 
 func IsDevAppServer() bool {
-	return os.Getenv("RUN_WITH_DEVAPPSERVER") != ""
+	return os.Getenv("RUN_WITH_DEVAPPSERVER") != "" || os.Getenv("GAE_ENV") == "localdev"
 }
diff --git a/internal/internal_vm_test.go b/internal/internal_vm_test.go
index f809761..2ec0367 100644
--- a/internal/internal_vm_test.go
+++ b/internal/internal_vm_test.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package internal
diff --git a/internal/main.go b/internal/main.go
index 1e76531..afd0ae8 100644
--- a/internal/main.go
+++ b/internal/main.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build appengine
 // +build appengine
 
 package internal
diff --git a/internal/main_test.go b/internal/main_test.go
index 17308e0..24f6d64 100644
--- a/internal/main_test.go
+++ b/internal/main_test.go
@@ -1,3 +1,4 @@
+//go:build !appengine
 // +build !appengine
 
 package internal
diff --git a/internal/main_vm.go b/internal/main_vm.go
index ddb79a3..86a8caf 100644
--- a/internal/main_vm.go
+++ b/internal/main_vm.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package internal
@@ -29,7 +30,7 @@ func Main() {
 	if IsDevAppServer() {
 		host = "127.0.0.1"
 	}
-	if err := http.ListenAndServe(host+":"+port, http.HandlerFunc(handleHTTP)); err != nil {
+	if err := http.ListenAndServe(host+":"+port, Middleware(http.DefaultServeMux)); err != nil {
 		log.Fatalf("http.ListenAndServe: %v", err)
 	}
 }
diff --git a/internal/net_test.go b/internal/net_test.go
index 24da8bb..3952526 100644
--- a/internal/net_test.go
+++ b/internal/net_test.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package internal
diff --git a/internal/user/user_service.pb.go b/internal/user/user_service.pb.go
index 8090a4e..c32f726 100644
--- a/internal/user/user_service.pb.go
+++ b/internal/user/user_service.pb.go
@@ -38,7 +38,7 @@ var UserServiceError_ErrorCode_name = map[int32]string{
 	5: "OAUTH_ERROR",
 }
 var UserServiceError_ErrorCode_value = map[string]int32{
-	"OK": 0,
+	"OK":                    0,
 	"REDIRECT_URL_TOO_LONG": 1,
 	"NOT_ALLOWED":           2,
 	"OAUTH_INVALID_TOKEN":   3,
diff --git a/log/log.go b/log/log.go
index 731ad8c..cb900eb 100644
--- a/log/log.go
+++ b/log/log.go
@@ -7,6 +7,7 @@ Package log provides the means of writing and querying an application's logs
 from within an App Engine application.
 
 Example:
+
 	c := appengine.NewContext(r)
 	query := &log.Query{
 		AppLogs:  true,
diff --git a/mail/mail.go b/mail/mail.go
index 1ce1e87..52f474f 100644
--- a/mail/mail.go
+++ b/mail/mail.go
@@ -7,6 +7,7 @@ Package mail provides the means of sending email from an
 App Engine application.
 
 Example:
+
 	msg := &mail.Message{
 		Sender:  "romeo@montague.com",
 		To:      []string{"Juliet <juliet@capulet.org>"},
diff --git a/search/doc.go b/search/doc.go
index 5208f18..2a030e3 100644
--- a/search/doc.go
+++ b/search/doc.go
@@ -5,8 +5,7 @@
 /*
 Package search provides a client for App Engine's search service.
 
-
-Basic Operations
+# Basic Operations
 
 Indexes contain documents. Each index is identified by its name: a
 human-readable ASCII string.
@@ -54,8 +53,7 @@ to Get to hold the resulting document.
 		return err
 	}
 
-
-Search and Listing Documents
+# Search and Listing Documents
 
 Indexes have two methods for retrieving multiple documents at once: Search and
 List.
@@ -98,8 +96,7 @@ Call List to iterate over all documents in an index.
 		fmt.Fprintf(w, "%s -> %#v\n", id, doc)
 	}
 
-
-Fields and Facets
+# Fields and Facets
 
 A document's contents can be represented by a variety of types. These are
 typically struct pointers, but they can also be represented by any type
@@ -145,8 +142,7 @@ Example code:
 		I float64 `search:",facet" json:"i"`
 	}
 
-
-The FieldLoadSaver Interface
+# The FieldLoadSaver Interface
 
 A document's contents can also be represented by any type that implements the
 FieldLoadSaver interface. This type may be a struct pointer, but it
diff --git a/socket/socket_classic.go b/socket/socket_classic.go
index 0ad50e2..2139b66 100644
--- a/socket/socket_classic.go
+++ b/socket/socket_classic.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build appengine
 // +build appengine
 
 package socket
diff --git a/socket/socket_vm.go b/socket/socket_vm.go
index c804169..5f5a538 100644
--- a/socket/socket_vm.go
+++ b/socket/socket_vm.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package socket
diff --git a/travis_install.sh b/travis_install.sh
deleted file mode 100755
index 785b62f..0000000
--- a/travis_install.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-set -e
-
-if [[ $GO111MODULE == "on" ]]; then
-  go get .
-else
-  go get -u -v $(go list -f '{{join .Imports "\n"}}{{"\n"}}{{join .TestImports "\n"}}' ./... | sort | uniq | grep -v appengine)
-fi
-
-if [[ $GOAPP == "true" ]]; then
-  mkdir /tmp/sdk
-  curl -o /tmp/sdk.zip "https://storage.googleapis.com/appengine-sdks/featured/go_appengine_sdk_linux_amd64-1.9.68.zip"
-  unzip -q /tmp/sdk.zip -d /tmp/sdk
-  # NOTE: Set the following env vars in the test script:
-  # export PATH="$PATH:/tmp/sdk/go_appengine"
-  # export APPENGINE_DEV_APPSERVER=/tmp/sdk/go_appengine/dev_appserver.py
-fi
-
diff --git a/travis_test.sh b/travis_test.sh
deleted file mode 100755
index d4390f0..0000000
--- a/travis_test.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-set -e
-
-go version
-go test -v google.golang.org/appengine/...
-go test -v -race google.golang.org/appengine/...
-if [[ $GOAPP == "true" ]]; then
-  export PATH="$PATH:/tmp/sdk/go_appengine"
-  export APPENGINE_DEV_APPSERVER=/tmp/sdk/go_appengine/dev_appserver.py
-  goapp version
-  goapp test -v google.golang.org/appengine/...
-fi
diff --git a/urlfetch/urlfetch.go b/urlfetch/urlfetch.go
index 6ffe1e6..8d44bfe 100644
--- a/urlfetch/urlfetch.go
+++ b/urlfetch/urlfetch.go
@@ -44,11 +44,10 @@ type Transport struct {
 var _ http.RoundTripper = (*Transport)(nil)
 
 // Client returns an *http.Client using a default urlfetch Transport. This
-// client will have the default deadline of 5 seconds, and will check the
-// validity of SSL certificates.
+// client will check the validity of SSL certificates.
 //
-// Any deadline of the provided context will be used for requests through this client;
-// if the client does not have a deadline then a 5 second default is used.
+// Any deadline of the provided context will be used for requests through this client.
+// If the provided context does not have a deadline, then an App Engine default of 60 seconds is used.
 func Client(ctx context.Context) *http.Client {
 	return &http.Client{
 		Transport: &Transport{
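The reworded deadline behaviour above is easiest to see with an explicit context deadline. A minimal sketch (not part of the upstream changes; the handler name and URL are chosen for the example) of overriding the 60-second default:

```go
package app

import (
	"context"
	"fmt"
	"net/http"
	"time"

	"google.golang.org/appengine"
	"google.golang.org/appengine/urlfetch"
)

// fetchHandler issues an outbound request with a 10-second deadline instead
// of relying on the App Engine default.
func fetchHandler(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()

	client := urlfetch.Client(ctx) // the deadline on ctx applies to this client's requests
	resp, err := client.Get("https://www.example.com/")
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadGateway)
		return
	}
	defer resp.Body.Close()
	fmt.Fprintf(w, "fetched with status %s", resp.Status)
}
```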
diff --git a/user/user_classic.go b/user/user_classic.go
index 8131509..45572fc 100644
--- a/user/user_classic.go
+++ b/user/user_classic.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build appengine
 // +build appengine
 
 package user
diff --git a/user/user_test.go b/user/user_test.go
index 5fc5957..78fd2fd 100644
--- a/user/user_test.go
+++ b/user/user_test.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package user
diff --git a/user/user_vm.go b/user/user_vm.go
index 8dc672e..8198c5e 100644
--- a/user/user_vm.go
+++ b/user/user_vm.go
@@ -2,6 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
 
+//go:build !appengine
 // +build !appengine
 
 package user
diff --git a/v2/CONTRIBUTING.md b/v2/CONTRIBUTING.md
new file mode 100644
index 0000000..6c9f13b
--- /dev/null
+++ b/v2/CONTRIBUTING.md
@@ -0,0 +1,88 @@
+# Contributing
+
+1. Sign one of the contributor license agreements below.
+1. Get the package:
+
+    `go get -d google.golang.org/appengine`
+1. Change into the checked out source:
+
+    `cd $GOPATH/src/google.golang.org/appengine`
+1. Fork the repo.
+1. Set your fork as a remote:
+
+    `git remote add fork git@github.com:GITHUB_USERNAME/appengine.git`
+1. Make changes, commit to your fork.
+1. Send a pull request with your changes. 
+   The first line of your commit message is conventionally a one-line summary of the change, prefixed by the primary affected package, and is used as the title of your pull request.
+
+# Testing
+
+## Running system tests
+
+Set the `APPENGINE_DEV_APPSERVER` environment variable to `/path/to/go_appengine/dev_appserver.py`.
+
+Run tests with `go test`:
+
+```
+go test -v google.golang.org/appengine/v2/...
+```
+
+## Contributor License Agreements
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+intellectual property**, then you'll need to sign an [individual CLA][indvcla].
+- **If you work for a company that wants to allow you to contribute your work**,
+then you'll need to sign a [corporate CLA][corpcla].
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
+
+## Contributor Code of Conduct
+
+As contributors and maintainers of this project,
+and in the interest of fostering an open and welcoming community,
+we pledge to respect all people who contribute through reporting issues,
+posting feature requests, updating documentation,
+submitting pull requests or patches, and other activities.
+
+We are committed to making participation in this project
+a harassment-free experience for everyone,
+regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance,
+body size, race, ethnicity, age, religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery
+* Personal attacks
+* Trolling or insulting/derogatory comments
+* Public or private harassment
+* Publishing other's private information,
+such as physical or electronic
+addresses, without explicit permission
+* Other unethical or unprofessional conduct.
+
+Project maintainers have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct.
+By adopting this Code of Conduct,
+project maintainers commit themselves to fairly and consistently
+applying these principles to every aspect of managing this project.
+Project maintainers who do not follow or enforce the Code of Conduct
+may be permanently removed from the project team.
+
+This code of conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior
+may be reported by opening an issue
+or contacting one or more of the project maintainers.
+
+This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
+available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
+
+[indvcla]: https://developers.google.com/open-source/cla/individual
+[corpcla]: https://developers.google.com/open-source/cla/corporate
diff --git a/v2/LICENSE b/v2/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/v2/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/v2/README.md b/v2/README.md
new file mode 100644
index 0000000..2aef8ed
--- /dev/null
+++ b/v2/README.md
@@ -0,0 +1,105 @@
+# Go App Engine packages
+
+[![Build Status](https://travis-ci.org/golang/appengine.svg)](https://travis-ci.org/golang/appengine)
+
+This repository supports the Go runtime on *App Engine standard*.
+It provides APIs for interacting with App Engine services.
+Its canonical import path is `google.golang.org/appengine`.
+
+See https://cloud.google.com/appengine/docs/go/
+for more information.
+
+File issue reports and feature requests on the [GitHub's issue
+tracker](https://github.com/golang/appengine/issues).
+
+## Upgrading an App Engine app to the flexible environment
+
+This package does not work on *App Engine flexible*.
+
+There are many differences between the App Engine standard environment and
+the flexible environment.
+
+See the [documentation on upgrading to the flexible environment](https://cloud.google.com/appengine/docs/flexible/go/upgrading).
+
+## Directory structure
+
+The top level directory of this repository is the `appengine` package. It
+contains the
+basic APIs (e.g. `appengine.NewContext`) that apply across APIs. Specific API
+packages are in subdirectories (e.g. `datastore`).
+
+There is an `internal` subdirectory that contains service protocol buffers,
+plus packages required for connectivity to make API calls. App Engine apps
+should not directly import any package under `internal`.
+
+## Updating from legacy (`import "appengine"`) packages
+
+If you're currently using the bare `appengine` packages
+(that is, not these ones, imported via `google.golang.org/appengine`),
+then you can use the `aefix` tool to help automate an upgrade to these packages.
+
+Run `go get google.golang.org/appengine/cmd/aefix` to install it.
+
+### 1. Update import paths
+
+The import paths for App Engine packages are now fully qualified, based at `google.golang.org/appengine`.
+You will need to update your code to use import paths starting with that; for instance,
+code importing `appengine/datastore` will now need to import `google.golang.org/appengine/datastore`.
+
+### 2. Update code using deprecated, removed or modified APIs
+
+Most App Engine services are available with exactly the same API.
+A few APIs were cleaned up, and there are some differences:
+
+* `appengine.Context` has been replaced with the `Context` type from `golang.org/x/net/context`.
+* Logging methods that were on `appengine.Context` are now functions in `google.golang.org/appengine/log`.
+* `appengine.Timeout` has been removed. Use `context.WithTimeout` instead.
+* `appengine.Datacenter` now takes a `context.Context` argument.
+* `datastore.PropertyLoadSaver` has been simplified to use slices in place of channels.
+* `delay.Call` now returns an error.
+* `search.FieldLoadSaver` now handles document metadata.
+* `urlfetch.Transport` no longer has a Deadline field; set a deadline on the
+  `context.Context` instead.
+* `aetest` no longer declares its own Context type, and uses the standard one instead.
+* `taskqueue.QueueStats` no longer takes a maxTasks argument. That argument has been
+  deprecated and unused for a long time.
+* `appengine.BackendHostname` and `appengine.BackendInstance` were for the deprecated backends feature.
+  Use `appengine.ModuleHostname` and `appengine.ModuleName` instead.
+* Most of `appengine/file` and parts of `appengine/blobstore` are deprecated.
+  Use [Google Cloud Storage](https://godoc.org/cloud.google.com/go/storage) if the
+  feature you require is not present in the new
+  [blobstore package](https://google.golang.org/appengine/blobstore).
+* `appengine/socket` is not required on App Engine flexible environment / Managed VMs.
+  Use the standard `net` package instead.
+
+## Key Encode/Decode compatibility to help with datastore library migrations
+
+Key compatibility updates have been added to help customers transition from google.golang.org/appengine/datastore to cloud.google.com/go/datastore.
+The `EnableKeyConversion` enables automatic conversion from a key encoded with cloud.google.com/go/datastore to google.golang.org/appengine/datastore key type.
+
+### Enabling key conversion
+
+Enable key conversion by calling `EnableKeyConversion(ctx)` in the `/_ah/start` handler for basic and manual scaling or any handler in automatic scaling.
+
+#### 1. Basic or manual scaling
+
+This start handler will enable key conversion for all handlers in the service.
+
+```
+http.HandleFunc("/_ah/start", func(w http.ResponseWriter, r *http.Request) {
+    datastore.EnableKeyConversion(appengine.NewContext(r))
+})
+```
+
+#### 2. Automatic scaling
+
+`/_ah/start` is not supported for automatic scaling and `/_ah/warmup` is not guaranteed to run, so you must call `datastore.EnableKeyConversion(appengine.NewContext(r))`
+before you use code that needs key conversion.
+
+You may want to add this to each of your handlers, or introduce middleware where it's called.
+`EnableKeyConversion` is safe for concurrent use. Any call to it after the first is ignored.
+
+## QA
+
+Googlers, [integration tests](http://go/appengine-go-integration) run on the QA branch. You should first merge to QA
+and verify the integration tests pass before cutting a new release.
\ No newline at end of file
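The automatic-scaling note in the new v2 README above suggests enabling key conversion via middleware. A minimal sketch of that approach (not taken from the upstream changes; the `withKeyConversion` helper name is invented for this example):

```go
package main

import (
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/datastore"
)

// withKeyConversion enables key conversion before the wrapped handler runs.
// EnableKeyConversion is safe for concurrent use and calls after the first
// are ignored, so doing this on every request is cheap.
func withKeyConversion(h http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		datastore.EnableKeyConversion(appengine.NewContext(r))
		h.ServeHTTP(w, r)
	})
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok"))
	})
	http.Handle("/", withKeyConversion(mux))
	appengine.Main()
}
```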
diff --git a/v2/aetest/doc.go b/v2/aetest/doc.go
new file mode 100644
index 0000000..3933e48
--- /dev/null
+++ b/v2/aetest/doc.go
@@ -0,0 +1,42 @@
+/*
+Package aetest provides an API for running dev_appserver for use in tests.
+
+An example test file:
+
+	package foo_test
+
+	import (
+		"testing"
+
+		"google.golang.org/appengine/v2/memcache"
+		"google.golang.org/appengine/v2/aetest"
+	)
+
+	func TestFoo(t *testing.T) {
+		ctx, done, err := aetest.NewContext()
+		if err != nil {
+			t.Fatal(err)
+		}
+		defer done()
+
+		it := &memcache.Item{
+			Key:   "some-key",
+			Value: []byte("some-value"),
+		}
+		err = memcache.Set(ctx, it)
+		if err != nil {
+			t.Fatalf("Set err: %v", err)
+		}
+		it, err = memcache.Get(ctx, "some-key")
+		if err != nil {
+			t.Fatalf("Get err: %v; want no error", err)
+		}
+		if g, w := string(it.Value), "some-value"; g != w {
+			t.Errorf("retrieved Item.Value = %q, want %q", g, w)
+		}
+	}
+
+The environment variable APPENGINE_DEV_APPSERVER specifies the location of the
+dev_appserver.py executable to use. If unset, the system PATH is consulted.
+*/
+package aetest
diff --git a/v2/aetest/instance.go b/v2/aetest/instance.go
new file mode 100644
index 0000000..994bec6
--- /dev/null
+++ b/v2/aetest/instance.go
@@ -0,0 +1,339 @@
+package aetest
+
+import (
+	"bufio"
+	"context"
+	"crypto/rand"
+	"errors"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"net/http"
+	"net/url"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"regexp"
+	"time"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal"
+)
+
+// Instance represents a running instance of the development API Server.
+type Instance interface {
+	// Close kills the child api_server.py process, releasing its resources.
+	io.Closer
+	// NewRequest returns an *http.Request associated with this instance.
+	NewRequest(method, urlStr string, body io.Reader) (*http.Request, error)
+}
+
+// Options is used to specify options when creating an Instance.
+type Options struct {
+	// AppID specifies the App ID to use during tests.
+	// By default, "testapp".
+	AppID string
+	// StronglyConsistentDatastore is whether the local datastore should be
+	// strongly consistent. This will diverge from production behaviour.
+	StronglyConsistentDatastore bool
+	// SupportDatastoreEmulator is whether to use the Cloud Datastore Emulator or
+	// the old SQLite-based Datastore backend; if nil, the default setting is used.
+	SupportDatastoreEmulator *bool
+	// SuppressDevAppServerLog is whether to suppress the log output of the
+	// dev_appserver running in tests.
+	SuppressDevAppServerLog bool
+	// StartupTimeout is a duration to wait for instance startup.
+	// By default, 15 seconds.
+	StartupTimeout time.Duration
+}
+
+// NewContext starts an instance of the development API server, and returns
+// a context that will route all API calls to that server, as well as a
+// closure that must be called when the Context is no longer required.
+func NewContext() (context.Context, func(), error) {
+	inst, err := NewInstance(nil)
+	if err != nil {
+		return nil, nil, err
+	}
+	req, err := inst.NewRequest("GET", "/", nil)
+	if err != nil {
+		inst.Close()
+		return nil, nil, err
+	}
+	ctx := appengine.NewContext(req)
+	return ctx, func() {
+		inst.Close()
+	}, nil
+}
+
+// PrepareDevAppserver is a hook which, if set, will be called before the
+// dev_appserver.py is started, each time it is started. If aetest.NewContext
+// is invoked from the goapp test tool, this hook is unnecessary.
+var PrepareDevAppserver func() error
+
+// NewInstance launches a running instance of api_server.py which can be used
+// for multiple test Contexts that delegate all App Engine API calls to that
+// instance.
+// If opts is nil the default values are used.
+func NewInstance(opts *Options) (Instance, error) {
+	i := &instance{
+		opts:           opts,
+		appID:          "testapp",
+		startupTimeout: 15 * time.Second,
+	}
+	if opts != nil {
+		if opts.AppID != "" {
+			i.appID = opts.AppID
+		}
+		if opts.StartupTimeout > 0 {
+			i.startupTimeout = opts.StartupTimeout
+		}
+	}
+	if err := i.startChild(); err != nil {
+		return nil, err
+	}
+	return i, nil
+}
+
+func newSessionID() string {
+	var buf [16]byte
+	io.ReadFull(rand.Reader, buf[:])
+	return fmt.Sprintf("%x", buf[:])
+}
+
+// instance implements the Instance interface.
+type instance struct {
+	opts           *Options
+	child          *exec.Cmd
+	apiURL         *url.URL // base URL of API HTTP server
+	adminURL       string   // base URL of admin HTTP server
+	appDir         string
+	appID          string
+	startupTimeout time.Duration
+}
+
+// NewRequest returns an *http.Request associated with this instance.
+func (i *instance) NewRequest(method, urlStr string, body io.Reader) (*http.Request, error) {
+	req, err := http.NewRequest(method, urlStr, body)
+	if err != nil {
+		return nil, err
+	}
+
+	// Associate this request.
+	return internal.RegisterTestRequest(req, i.apiURL, "dev~"+i.appID), nil
+}
+
+// Close kills the child api_server.py process, releasing its resources.
+func (i *instance) Close() (err error) {
+	child := i.child
+	if child == nil {
+		return nil
+	}
+	defer func() {
+		i.child = nil
+		err1 := os.RemoveAll(i.appDir)
+		if err == nil {
+			err = err1
+		}
+	}()
+
+	if p := child.Process; p != nil {
+		errc := make(chan error, 1)
+		go func() {
+			errc <- child.Wait()
+		}()
+
+		// Call the quit handler on the admin server.
+		res, err := http.Get(i.adminURL + "/quit")
+		if err != nil {
+			p.Kill()
+			return fmt.Errorf("unable to call /quit handler: %v", err)
+		}
+		res.Body.Close()
+		select {
+		case <-time.After(15 * time.Second):
+			p.Kill()
+			return errors.New("timeout killing child process")
+		case err = <-errc:
+			// Do nothing.
+		}
+	}
+	return
+}
+
+func fileExists(path string) bool {
+	_, err := os.Stat(path)
+	return err == nil
+}
+
+func findPython() (path string, err error) {
+	for _, name := range []string{"python2.7", "python"} {
+		path, err = exec.LookPath(name)
+		if err == nil {
+			return
+		}
+	}
+	return
+}
+
+func findDevAppserver() (string, error) {
+	if p := os.Getenv("APPENGINE_DEV_APPSERVER"); p != "" {
+		if fileExists(p) {
+			return p, nil
+		}
+		return "", fmt.Errorf("invalid APPENGINE_DEV_APPSERVER environment variable; path %q doesn't exist", p)
+	}
+	return exec.LookPath("dev_appserver.py")
+}
+
+var apiServerAddrRE = regexp.MustCompile(`Starting API server at: (\S+)`)
+var adminServerAddrRE = regexp.MustCompile(`Starting admin server at: (\S+)`)
+
+func (i *instance) startChild() (err error) {
+	if PrepareDevAppserver != nil {
+		if err := PrepareDevAppserver(); err != nil {
+			return err
+		}
+	}
+	executable := os.Getenv("APPENGINE_DEV_APPSERVER_BINARY")
+	var appserverArgs []string
+	if len(executable) == 0 {
+		executable, err = findPython()
+		if err != nil {
+			return fmt.Errorf("Could not find python interpreter: %v", err)
+		}
+		devAppserver, err := findDevAppserver()
+		if err != nil {
+			return fmt.Errorf("Could not find dev_appserver.py: %v", err)
+		}
+		appserverArgs = append(appserverArgs, devAppserver)
+	}
+
+	i.appDir, err = ioutil.TempDir("", "appengine-aetest")
+	if err != nil {
+		return err
+	}
+	defer func() {
+		if err != nil {
+			os.RemoveAll(i.appDir)
+		}
+	}()
+	err = os.Mkdir(filepath.Join(i.appDir, "app"), 0755)
+	if err != nil {
+		return err
+	}
+	err = ioutil.WriteFile(filepath.Join(i.appDir, "app", "app.yaml"), []byte(i.appYAML()), 0644)
+	if err != nil {
+		return err
+	}
+	err = ioutil.WriteFile(filepath.Join(i.appDir, "app", "stubapp.go"), []byte(appSource), 0644)
+	if err != nil {
+		return err
+	}
+
+	datastorePath := os.Getenv("APPENGINE_DEV_APPSERVER_DATASTORE_PATH")
+	if len(datastorePath) == 0 {
+		datastorePath = filepath.Join(i.appDir, "datastore")
+	}
+
+	appserverArgs = append(appserverArgs,
+		"--port=0",
+		"--api_port=0",
+		"--admin_port=0",
+		"--automatic_restart=false",
+		"--skip_sdk_update_check=true",
+		"--clear_datastore=true",
+		"--clear_search_indexes=true",
+		"--datastore_path", datastorePath,
+	)
+	if i.opts != nil && i.opts.StronglyConsistentDatastore {
+		appserverArgs = append(appserverArgs, "--datastore_consistency_policy=consistent")
+	}
+	if i.opts != nil && i.opts.SupportDatastoreEmulator != nil {
+		appserverArgs = append(appserverArgs, fmt.Sprintf("--support_datastore_emulator=%t", *i.opts.SupportDatastoreEmulator))
+	}
+	appserverArgs = append(appserverArgs, filepath.Join(i.appDir, "app"))
+
+	i.child = exec.Command(executable, appserverArgs...)
+
+	i.child.Stdout = os.Stdout
+	var stderr io.Reader
+	stderr, err = i.child.StderrPipe()
+	if err != nil {
+		return err
+	}
+
+	if err = i.child.Start(); err != nil {
+		return err
+	}
+
+	// Read stderr until we have read the URLs of the API server and admin interface.
+	errc := make(chan error, 1)
+	go func() {
+		s := bufio.NewScanner(stderr)
+		for s.Scan() {
+			// Pass stderr along as we go so the user can see it.
+			if !(i.opts != nil && i.opts.SuppressDevAppServerLog) {
+				fmt.Fprintln(os.Stderr, s.Text())
+			}
+			if match := apiServerAddrRE.FindStringSubmatch(s.Text()); match != nil {
+				u, err := url.Parse(match[1])
+				if err != nil {
+					errc <- fmt.Errorf("failed to parse API URL %q: %v", match[1], err)
+					return
+				}
+				i.apiURL = u
+			}
+			if match := adminServerAddrRE.FindStringSubmatch(s.Text()); match != nil {
+				i.adminURL = match[1]
+			}
+			if i.adminURL != "" && i.apiURL != nil {
+				// Pass along stderr to the user after we're done with it.
+				if !(i.opts != nil && i.opts.SuppressDevAppServerLog) {
+					go io.Copy(os.Stderr, stderr)
+				}
+				break
+			}
+		}
+		errc <- s.Err()
+	}()
+
+	select {
+	case <-time.After(i.startupTimeout):
+		if p := i.child.Process; p != nil {
+			p.Kill()
+		}
+		return errors.New("timeout starting child process")
+	case err := <-errc:
+		if err != nil {
+			return fmt.Errorf("error reading child process stderr: %v", err)
+		}
+	}
+	if i.adminURL == "" {
+		return errors.New("unable to find admin server URL")
+	}
+	if i.apiURL == nil {
+		return errors.New("unable to find API server URL")
+	}
+	return nil
+}
+
+func (i *instance) appYAML() string {
+	return fmt.Sprintf(appYAMLTemplate, i.appID)
+}
+
+const appYAMLTemplate = `
+application: %s
+version: 1
+runtime: go111
+
+handlers:
+- url: /.*
+  script: _go_app
+`
+
+const appSource = `
+package main
+import "google.golang.org/appengine/v2"
+func main() { appengine.Main() }
+`
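The Options struct defined in this file is easiest to see in use. An illustrative test sketch (not from the upstream changes; it assumes APPENGINE_DEV_APPSERVER points at a dev_appserver.py) that requests a strongly consistent datastore:

```go
package myapp_test

import (
	"testing"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/aetest"
	"google.golang.org/appengine/v2/datastore"
)

func TestWithStronglyConsistentDatastore(t *testing.T) {
	// Request a strongly consistent local datastore so writes are
	// immediately visible to queries within the test.
	inst, err := aetest.NewInstance(&aetest.Options{StronglyConsistentDatastore: true})
	if err != nil {
		t.Fatal(err)
	}
	defer inst.Close()

	req, err := inst.NewRequest("GET", "/", nil)
	if err != nil {
		t.Fatal(err)
	}
	ctx := appengine.NewContext(req)

	type Entity struct{ Value string }
	if _, err := datastore.Put(ctx, datastore.NewIncompleteKey(ctx, "Entity", nil), &Entity{Value: "hello"}); err != nil {
		t.Fatal(err)
	}
	// Strong consistency means the freshly written entity is already queryable.
	n, err := datastore.NewQuery("Entity").Count(ctx)
	if err != nil {
		t.Fatal(err)
	}
	if n != 1 {
		t.Errorf("got %d entities, want 1", n)
	}
}
```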
diff --git a/v2/aetest/instance_test.go b/v2/aetest/instance_test.go
new file mode 100644
index 0000000..afb51f9
--- /dev/null
+++ b/v2/aetest/instance_test.go
@@ -0,0 +1,119 @@
+package aetest
+
+import (
+	"os"
+	"testing"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/datastore"
+	"google.golang.org/appengine/v2/internal"
+	"google.golang.org/appengine/v2/memcache"
+	"google.golang.org/appengine/v2/user"
+)
+
+func TestBasicAPICalls(t *testing.T) {
+	// Only run the test if APPENGINE_DEV_APPSERVER is explicitly set.
+	if os.Getenv("APPENGINE_DEV_APPSERVER") == "" {
+		t.Skip("APPENGINE_DEV_APPSERVER not set")
+	}
+	resetEnv := internal.SetTestEnv()
+	defer resetEnv()
+
+	inst, err := NewInstance(nil)
+	if err != nil {
+		t.Fatalf("NewInstance: %v", err)
+	}
+	defer inst.Close()
+
+	req, err := inst.NewRequest("GET", "http://example.com/page", nil)
+	if err != nil {
+		t.Fatalf("NewRequest: %v", err)
+	}
+	ctx := appengine.NewContext(req)
+
+	it := &memcache.Item{
+		Key:   "some-key",
+		Value: []byte("some-value"),
+	}
+	err = memcache.Set(ctx, it)
+	if err != nil {
+		t.Fatalf("Set err: %v", err)
+	}
+	it, err = memcache.Get(ctx, "some-key")
+	if err != nil {
+		t.Fatalf("Get err: %v; want no error", err)
+	}
+	if g, w := string(it.Value), "some-value"; g != w {
+		t.Errorf("retrieved Item.Value = %q, want %q", g, w)
+	}
+
+	type Entity struct{ Value string }
+	e := &Entity{Value: "foo"}
+	k := datastore.NewIncompleteKey(ctx, "Entity", nil)
+	k, err = datastore.Put(ctx, k, e)
+	if err != nil {
+		t.Fatalf("datastore.Put: %v", err)
+	}
+	e = new(Entity)
+	if err := datastore.Get(ctx, k, e); err != nil {
+		t.Fatalf("datastore.Get: %v", err)
+	}
+	if g, w := e.Value, "foo"; g != w {
+		t.Errorf("retrieved Entity.Value = %q, want %q", g, w)
+	}
+}
+
+func TestContext(t *testing.T) {
+	// Only run the test if APPENGINE_DEV_APPSERVER is explicitly set.
+	if os.Getenv("APPENGINE_DEV_APPSERVER") == "" {
+		t.Skip("APPENGINE_DEV_APPSERVER not set")
+	}
+
+	// Check that the context methods work.
+	_, done, err := NewContext()
+	if err != nil {
+		t.Fatalf("NewContext: %v", err)
+	}
+	done()
+}
+
+func TestUsers(t *testing.T) {
+	// Only run the test if APPENGINE_DEV_APPSERVER is explicitly set.
+	if os.Getenv("APPENGINE_DEV_APPSERVER") == "" {
+		t.Skip("APPENGINE_DEV_APPSERVER not set")
+	}
+
+	inst, err := NewInstance(nil)
+	if err != nil {
+		t.Fatalf("NewInstance: %v", err)
+	}
+	defer inst.Close()
+
+	req, err := inst.NewRequest("GET", "http://example.com/page", nil)
+	if err != nil {
+		t.Fatalf("NewRequest: %v", err)
+	}
+	ctx := appengine.NewContext(req)
+
+	if user := user.Current(ctx); user != nil {
+		t.Errorf("user.Current initially %v, want nil", user)
+	}
+
+	u := &user.User{
+		Email: "gopher@example.com",
+		Admin: true,
+	}
+	Login(u, req)
+
+	if got := user.Current(ctx); got.Email != u.Email {
+		t.Errorf("user.Current: %v, want %v", got, u)
+	}
+	if admin := user.IsAdmin(ctx); !admin {
+		t.Errorf("user.IsAdmin: %t, want true", admin)
+	}
+
+	Logout(req)
+	if user := user.Current(ctx); user != nil {
+		t.Errorf("user.Current after logout %v, want nil", user)
+	}
+}
diff --git a/v2/aetest/user.go b/v2/aetest/user.go
new file mode 100644
index 0000000..62bd412
--- /dev/null
+++ b/v2/aetest/user.go
@@ -0,0 +1,42 @@
+package aetest
+
+import (
+	"hash/crc32"
+	"net/http"
+	"strconv"
+
+	"google.golang.org/appengine/v2/user"
+)
+
+// Login causes the provided Request to act as though issued by the given user.
+func Login(u *user.User, req *http.Request) {
+	req.Header.Set("X-AppEngine-User-Email", u.Email)
+	id := u.ID
+	if id == "" {
+		id = strconv.Itoa(int(crc32.Checksum([]byte(u.Email), crc32.IEEETable)))
+	}
+	req.Header.Set("X-AppEngine-User-Id", id)
+	req.Header.Set("X-AppEngine-Federated-Identity", u.FederatedIdentity)
+	req.Header.Set("X-AppEngine-Federated-Provider", u.FederatedProvider)
+	// NOTE: the following two headers are wrong, but are preserved to not break legacy tests.
+	req.Header.Set("X-AppEngine-User-Federated-Identity", u.Email)
+	req.Header.Set("X-AppEngine-User-Federated-Provider", u.FederatedProvider)
+	if u.Admin {
+		req.Header.Set("X-AppEngine-User-Is-Admin", "1")
+	} else {
+		req.Header.Set("X-AppEngine-User-Is-Admin", "0")
+	}
+}
+
+// Logout causes the provided Request to act as though issued by a logged-out
+// user.
+func Logout(req *http.Request) {
+	req.Header.Del("X-AppEngine-User-Email")
+	req.Header.Del("X-AppEngine-User-Id")
+	req.Header.Del("X-AppEngine-User-Is-Admin")
+	req.Header.Del("X-AppEngine-Federated-Identity")
+	req.Header.Del("X-AppEngine-Federated-Provider")
+	// NOTE: the following two headers are wrong, but are preserved to not break legacy tests.
+	req.Header.Del("X-AppEngine-User-Federated-Identity")
+	req.Header.Del("X-AppEngine-User-Federated-Provider")
+}
diff --git a/v2/appengine.go b/v2/appengine.go
new file mode 100644
index 0000000..1941d36
--- /dev/null
+++ b/v2/appengine.go
@@ -0,0 +1,146 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package appengine provides basic functionality for Google App Engine.
+//
+// For more information on how to write Go apps for Google App Engine, see:
+// https://cloud.google.com/appengine/docs/go/
+package appengine // import "google.golang.org/appengine/v2"
+
+import (
+	"context"
+	"net/http"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal"
+)
+
+// The gophers party all night; the rabbits provide the beats.
+
+// Main is the principal entry point for an app running in App Engine.
+//
+// On App Engine Flexible it installs a trivial health checker if one isn't
+// already registered, and starts listening on port 8080 (overridden by the
+// $PORT environment variable).
+//
+// See https://cloud.google.com/appengine/docs/flexible/custom-runtimes#health_check_requests
+// for details on how to do your own health checking.
+//
+// On App Engine Standard it ensures the server has started and is prepared to
+// receive requests.
+//
+// Main never returns.
+//
+// Main is designed so that the app's main package looks like this:
+//
+//	package main
+//
+//	import (
+//	        "google.golang.org/appengine/v2"
+//
+//	        _ "myapp/package0"
+//	        _ "myapp/package1"
+//	)
+//
+//	func main() {
+//	        appengine.Main()
+//	}
+//
+// The "myapp/packageX" packages are expected to register HTTP handlers
+// in their init functions.
+func Main() {
+	internal.Main()
+}
+
+// Middleware wraps an http handler so that it can make GAE API calls
+var Middleware func(http.Handler) http.Handler = internal.Middleware
+
+// IsDevAppServer reports whether the App Engine app is running in the
+// development App Server.
+func IsDevAppServer() bool {
+	return internal.IsDevAppServer()
+}
+
+// IsStandard reports whether the App Engine app is running in the standard
+// environment. This includes both the first generation runtimes (<= Go 1.9)
+// and the second generation runtimes (>= Go 1.11).
+func IsStandard() bool {
+	return internal.IsStandard()
+}
+
+// IsFlex reports whether the App Engine app is running in the flexible environment.
+func IsFlex() bool {
+	return internal.IsFlex()
+}
+
+// IsAppEngine reports whether the App Engine app is running on App Engine, in either
+// the standard or flexible environment.
+func IsAppEngine() bool {
+	return internal.IsAppEngine()
+}
+
+// IsSecondGen reports whether the App Engine app is running on the second generation
+// runtimes (>= Go 1.11).
+func IsSecondGen() bool {
+	return internal.IsSecondGen()
+}
+
+// NewContext returns a context for an in-flight HTTP request.
+// This function is cheap.
+func NewContext(req *http.Request) context.Context {
+	return internal.ReqContext(req)
+}
+
+// WithContext returns a copy of the parent context
+// and associates it with an in-flight HTTP request.
+// This function is cheap.
+func WithContext(parent context.Context, req *http.Request) context.Context {
+	return internal.WithContext(parent, req)
+}
+
+// BlobKey is a key for a blobstore blob.
+//
+// Conceptually, this type belongs in the blobstore package, but it lives in
+// the appengine package to avoid a circular dependency: blobstore depends on
+// datastore, and datastore needs to refer to the BlobKey type.
+type BlobKey string
+
+// GeoPoint represents a location as latitude/longitude in degrees.
+type GeoPoint struct {
+	Lat, Lng float64
+}
+
+// Valid returns whether a GeoPoint is within [-90, 90] latitude and [-180, 180] longitude.
+func (g GeoPoint) Valid() bool {
+	return -90 <= g.Lat && g.Lat <= 90 && -180 <= g.Lng && g.Lng <= 180
+}
+
+// APICallFunc defines a function type for handling an API call.
+// See WithAPICallFunc.
+type APICallFunc func(ctx context.Context, service, method string, in, out proto.Message) error
+
+// WithAPICallFunc returns a copy of the parent context
+// that will cause API calls to invoke f instead of their normal operation.
+//
+// This is intended for advanced users only.
+func WithAPICallFunc(ctx context.Context, f APICallFunc) context.Context {
+	return internal.WithCallOverride(ctx, internal.CallOverrideFunc(f))
+}
+
+// APICall performs an API call.
+//
+// This is not intended for general use; it is exported for use in conjunction
+// with WithAPICallFunc.
+func APICall(ctx context.Context, service, method string, in, out proto.Message) error {
+	return internal.Call(ctx, service, method, in, out)
+}
+
+// BackgroundContext returns a context not associated with a request.
+//
+// Deprecated: App Engine no longer has a special background context.
+// Just use context.Background().
+func BackgroundContext() context.Context {
+	return context.Background()
+}
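WithAPICallFunc and APICall above are described only abstractly. A hedged sketch of how an override might be used to stub out service calls in a test, assuming the override intercepts calls before any real App Engine request context is required (which is its documented purpose):

```go
package myapp_test

import (
	"context"
	"errors"
	"testing"

	"github.com/golang/protobuf/proto"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/memcache"
)

func TestStubbedAPICalls(t *testing.T) {
	// Every App Engine API call made through ctx is routed to this stub,
	// so the test needs no dev_appserver or real backend.
	ctx := appengine.WithAPICallFunc(context.Background(),
		func(ctx context.Context, service, method string, in, out proto.Message) error {
			return errors.New("stubbed call to " + service + "." + method)
		})

	err := memcache.Set(ctx, &memcache.Item{Key: "k", Value: []byte("v")})
	if err == nil {
		t.Fatal("expected the stubbed memcache.Set call to fail")
	}
}
```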
diff --git a/v2/appengine_test.go b/v2/appengine_test.go
new file mode 100644
index 0000000..f1cf0a1
--- /dev/null
+++ b/v2/appengine_test.go
@@ -0,0 +1,49 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package appengine
+
+import (
+	"testing"
+)
+
+func TestValidGeoPoint(t *testing.T) {
+	testCases := []struct {
+		desc string
+		pt   GeoPoint
+		want bool
+	}{
+		{
+			"valid",
+			GeoPoint{67.21, 13.37},
+			true,
+		},
+		{
+			"high lat",
+			GeoPoint{-90.01, 13.37},
+			false,
+		},
+		{
+			"low lat",
+			GeoPoint{90.01, 13.37},
+			false,
+		},
+		{
+			"high lng",
+			GeoPoint{67.21, 182},
+			false,
+		},
+		{
+			"low lng",
+			GeoPoint{67.21, -181},
+			false,
+		},
+	}
+
+	for _, tc := range testCases {
+		if got := tc.pt.Valid(); got != tc.want {
+			t.Errorf("%s: got %v, want %v", tc.desc, got, tc.want)
+		}
+	}
+}
diff --git a/v2/blobstore/blobstore.go b/v2/blobstore/blobstore.go
new file mode 100644
index 0000000..ee1ba89
--- /dev/null
+++ b/v2/blobstore/blobstore.go
@@ -0,0 +1,306 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package blobstore provides a client for App Engine's persistent blob
+// storage service.
+package blobstore // import "google.golang.org/appengine/v2/blobstore"
+
+import (
+	"bufio"
+	"bytes"
+	"context"
+	"encoding/base64"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"mime"
+	"mime/multipart"
+	"net/http"
+	"net/textproto"
+	"net/url"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/golang/protobuf/proto"
+	"golang.org/x/text/encoding/htmlindex"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/datastore"
+	"google.golang.org/appengine/v2/internal"
+
+	basepb "google.golang.org/appengine/v2/internal/base"
+	blobpb "google.golang.org/appengine/v2/internal/blobstore"
+)
+
+const (
+	blobInfoKind      = "__BlobInfo__"
+	blobFileIndexKind = "__BlobFileIndex__"
+	zeroKey           = appengine.BlobKey("")
+)
+
+// BlobInfo is the blob metadata that is stored in the datastore.
+// Filename may be empty.
+type BlobInfo struct {
+	BlobKey      appengine.BlobKey
+	ContentType  string    `datastore:"content_type"`
+	CreationTime time.Time `datastore:"creation"`
+	Filename     string    `datastore:"filename"`
+	Size         int64     `datastore:"size"`
+	MD5          string    `datastore:"md5_hash"`
+
+	// ObjectName is the Google Cloud Storage name for this blob.
+	ObjectName string `datastore:"gs_object_name"`
+}
+
+// isErrFieldMismatch returns whether err is a datastore.ErrFieldMismatch.
+//
+// The blobstore stores blob metadata in the datastore. When loading that
+// metadata, it may contain fields that we don't care about. datastore.Get will
+// return datastore.ErrFieldMismatch in that case, so we ignore that specific
+// error.
+func isErrFieldMismatch(err error) bool {
+	_, ok := err.(*datastore.ErrFieldMismatch)
+	return ok
+}
+
+// Stat returns the BlobInfo for a provided blobKey. If no blob was found for
+// that key, Stat returns datastore.ErrNoSuchEntity.
+func Stat(c context.Context, blobKey appengine.BlobKey) (*BlobInfo, error) {
+	c, _ = appengine.Namespace(c, "") // Blobstore is always in the empty string namespace
+	dskey := datastore.NewKey(c, blobInfoKind, string(blobKey), 0, nil)
+	bi := &BlobInfo{
+		BlobKey: blobKey,
+	}
+	if err := datastore.Get(c, dskey, bi); err != nil && !isErrFieldMismatch(err) {
+		return nil, err
+	}
+	return bi, nil
+}
+
+// Send sets the headers on response to instruct App Engine to send a blob as
+// the response body. This is more efficient than reading and writing it out
+// manually and isn't subject to normal response size limits.
+func Send(response http.ResponseWriter, blobKey appengine.BlobKey) {
+	hdr := response.Header()
+	hdr.Set("X-AppEngine-BlobKey", string(blobKey))
+
+	if hdr.Get("Content-Type") == "" {
+		// This value is known to dev_appserver to mean automatic.
+		// In production this is remapped to the empty value which
+		// means automatic.
+		hdr.Set("Content-Type", "application/vnd.google.appengine.auto")
+	}
+}
+
+// UploadURL creates an upload URL for the form that the user will
+// fill out, passing the application path to load when the POST of the
+// form is completed. These URLs expire and should not be reused. The
+// opts parameter may be nil.
+func UploadURL(c context.Context, successPath string, opts *UploadURLOptions) (*url.URL, error) {
+	req := &blobpb.CreateUploadURLRequest{
+		SuccessPath: proto.String(successPath),
+	}
+	if opts != nil {
+		if n := opts.MaxUploadBytes; n != 0 {
+			req.MaxUploadSizeBytes = &n
+		}
+		if n := opts.MaxUploadBytesPerBlob; n != 0 {
+			req.MaxUploadSizePerBlobBytes = &n
+		}
+		if s := opts.StorageBucket; s != "" {
+			req.GsBucketName = &s
+		}
+	}
+	res := &blobpb.CreateUploadURLResponse{}
+	if err := internal.Call(c, "blobstore", "CreateUploadURL", req, res); err != nil {
+		return nil, err
+	}
+	return url.Parse(*res.Url)
+}
+
+// UploadURLOptions are the options to create an upload URL.
+type UploadURLOptions struct {
+	MaxUploadBytes        int64 // optional
+	MaxUploadBytesPerBlob int64 // optional
+
+	// StorageBucket specifies the Google Cloud Storage bucket in which
+	// to store the blob.
+	// This is required if you use Cloud Storage instead of Blobstore.
+	// Your application must have permission to write to the bucket.
+	// You may optionally specify a bucket name and path in the format
+	// "bucket_name/path", in which case the included path will be the
+	// prefix of the uploaded object's name.
+	StorageBucket string
+}
+
+// Delete deletes a blob.
+func Delete(c context.Context, blobKey appengine.BlobKey) error {
+	return DeleteMulti(c, []appengine.BlobKey{blobKey})
+}
+
+// DeleteMulti deletes multiple blobs.
+func DeleteMulti(c context.Context, blobKey []appengine.BlobKey) error {
+	s := make([]string, len(blobKey))
+	for i, b := range blobKey {
+		s[i] = string(b)
+	}
+	req := &blobpb.DeleteBlobRequest{
+		BlobKey: s,
+	}
+	res := &basepb.VoidProto{}
+	if err := internal.Call(c, "blobstore", "DeleteBlob", req, res); err != nil {
+		return err
+	}
+	return nil
+}
+
+func errorf(format string, args ...interface{}) error {
+	return fmt.Errorf("blobstore: "+format, args...)
+}
+
+// ParseUpload parses the synthetic POST request that your app gets from
+// App Engine after a user's successful upload of blobs. Given the request,
+// ParseUpload returns a map of the blobs received (keyed by HTML form
+// element name) and other non-blob POST parameters.
+func ParseUpload(req *http.Request) (blobs map[string][]*BlobInfo, other url.Values, err error) {
+	_, params, err := mime.ParseMediaType(req.Header.Get("Content-Type"))
+	if err != nil {
+		return nil, nil, err
+	}
+	boundary := params["boundary"]
+	if boundary == "" {
+		return nil, nil, errorf("did not find MIME multipart boundary")
+	}
+
+	blobs = make(map[string][]*BlobInfo)
+	other = make(url.Values)
+
+	mreader := multipart.NewReader(io.MultiReader(req.Body, strings.NewReader("\r\n\r\n")), boundary)
+	for {
+		part, perr := mreader.NextPart()
+		if perr == io.EOF {
+			break
+		}
+		if perr != nil {
+			return nil, nil, errorf("error reading next mime part with boundary %q (len=%d): %v",
+				boundary, len(boundary), perr)
+		}
+
+		bi := &BlobInfo{}
+		ctype, params, err := mime.ParseMediaType(part.Header.Get("Content-Disposition"))
+		if err != nil {
+			return nil, nil, err
+		}
+		bi.Filename = params["filename"]
+		formKey := params["name"]
+
+		ctype, params, err = mime.ParseMediaType(part.Header.Get("Content-Type"))
+		if err != nil {
+			return nil, nil, err
+		}
+		bi.BlobKey = appengine.BlobKey(params["blob-key"])
+		charset := params["charset"]
+
+		if ctype != "message/external-body" || bi.BlobKey == "" {
+			if formKey != "" {
+				slurp, serr := ioutil.ReadAll(part)
+				if serr != nil {
+					return nil, nil, errorf("error reading %q MIME part", formKey)
+				}
+
+				// Handle base64 content transfer encoding. multipart.Part transparently
+				// handles quoted-printable, and no special handling is required for
+				// 7bit, 8bit, or binary.
+				ctype, params, err = mime.ParseMediaType(part.Header.Get("Content-Transfer-Encoding"))
+				if err == nil && ctype == "base64" {
+					slurp, serr = ioutil.ReadAll(base64.NewDecoder(
+						base64.StdEncoding, bytes.NewReader(slurp)))
+					if serr != nil {
+						return nil, nil, errorf("error %s decoding %q MIME part", ctype, formKey)
+					}
+				}
+
+				// Handle charset
+				if charset != "" {
+					encoding, err := htmlindex.Get(charset)
+					if err != nil {
+						return nil, nil, errorf("error getting decoder for charset %q", charset)
+					}
+
+					slurp, err = encoding.NewDecoder().Bytes(slurp)
+					if err != nil {
+						return nil, nil, errorf("error decoding from charset %q", charset)
+					}
+				}
+
+				other[formKey] = append(other[formKey], string(slurp))
+			}
+			continue
+		}
+
+		// App Engine sends a MIME header as the body of each MIME part.
+		tp := textproto.NewReader(bufio.NewReader(part))
+		header, mimeerr := tp.ReadMIMEHeader()
+		if mimeerr != nil {
+			return nil, nil, mimeerr
+		}
+		bi.Size, err = strconv.ParseInt(header.Get("Content-Length"), 10, 64)
+		if err != nil {
+			return nil, nil, err
+		}
+		bi.ContentType = header.Get("Content-Type")
+
+		// Parse the time from the MIME header like:
+		// X-AppEngine-Upload-Creation: 2011-03-15 21:38:34.712136
+		createDate := header.Get("X-AppEngine-Upload-Creation")
+		if createDate == "" {
+			return nil, nil, errorf("expected to find an X-AppEngine-Upload-Creation header")
+		}
+		bi.CreationTime, err = time.Parse("2006-01-02 15:04:05.000000", createDate)
+		if err != nil {
+			return nil, nil, errorf("error parsing X-AppEngine-Upload-Creation: %s", err)
+		}
+
+		if hdr := header.Get("Content-MD5"); hdr != "" {
+			md5, err := base64.URLEncoding.DecodeString(hdr)
+			if err != nil {
+				return nil, nil, errorf("bad Content-MD5 %q: %v", hdr, err)
+			}
+			bi.MD5 = string(md5)
+		}
+
+		// If the GCS object name was provided, record it.
+		bi.ObjectName = header.Get("X-AppEngine-Cloud-Storage-Object")
+
+		blobs[formKey] = append(blobs[formKey], bi)
+	}
+	return
+}
+
+// Reader is a blob reader.
+type Reader interface {
+	io.Reader
+	io.ReaderAt
+	io.Seeker
+}
+
+// NewReader returns a reader for a blob. It always succeeds; if the blob does
+// not exist then an error will be reported upon first read.
+func NewReader(c context.Context, blobKey appengine.BlobKey) Reader {
+	return openBlob(c, blobKey)
+}
+
+// BlobKeyForFile returns a BlobKey for a Google Storage file.
+// The filename should be of the form "/gs/bucket_name/object_name".
+func BlobKeyForFile(c context.Context, filename string) (appengine.BlobKey, error) {
+	req := &blobpb.CreateEncodedGoogleStorageKeyRequest{
+		Filename: &filename,
+	}
+	res := &blobpb.CreateEncodedGoogleStorageKeyResponse{}
+	if err := internal.Call(c, "blobstore", "CreateEncodedGoogleStorageKey", req, res); err != nil {
+		return "", err
+	}
+	return appengine.BlobKey(*res.BlobKey), nil
+}
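UploadURL, ParseUpload, and Send above combine into the usual blobstore upload round trip. An illustrative sketch (not part of the upstream changes; the paths and the "file" form field name are chosen for the example):

```go
package main

import (
	"html/template"
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/blobstore"
)

var uploadForm = template.Must(template.New("form").Parse(
	`<form action="{{.}}" method="POST" enctype="multipart/form-data">
<input type="file" name="file"><input type="submit" value="Upload">
</form>`))

// serveForm obtains a one-shot upload URL and renders a form posting to it.
// After the upload, App Engine rewrites the request and delivers it to /upload.
func serveForm(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	uploadURL, err := blobstore.UploadURL(ctx, "/upload", nil)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	uploadForm.Execute(w, uploadURL)
}

// handleUpload parses the synthetic POST and serves the stored blob back.
func handleUpload(w http.ResponseWriter, r *http.Request) {
	blobs, _, err := blobstore.ParseUpload(r)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	files := blobs["file"]
	if len(files) == 0 {
		http.Error(w, "no file uploaded", http.StatusBadRequest)
		return
	}
	blobstore.Send(w, files[0].BlobKey)
}

func main() {
	http.HandleFunc("/", serveForm)
	http.HandleFunc("/upload", handleUpload)
	appengine.Main()
}
```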
diff --git a/v2/blobstore/blobstore_test.go b/v2/blobstore/blobstore_test.go
new file mode 100644
index 0000000..6a9304b
--- /dev/null
+++ b/v2/blobstore/blobstore_test.go
@@ -0,0 +1,289 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package blobstore
+
+import (
+	"bytes"
+	"encoding/base64"
+	"fmt"
+	"io"
+	"mime/multipart"
+	"mime/quotedprintable"
+	"net/http"
+	"net/textproto"
+	"os"
+	"strconv"
+	"strings"
+	"testing"
+
+	"golang.org/x/text/encoding/htmlindex"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal/aetesting"
+
+	pb "google.golang.org/appengine/v2/internal/blobstore"
+)
+
+const rbs = readBufferSize
+
+const charsetUTF8 = "utf-8"
+const charsetISO2022JP = "iso-2022-jp"
+const nonASCIIStr = "Hello, 世界"
+
+func min(x, y int) int {
+	if x < y {
+		return x
+	}
+	return y
+}
+
+func fakeFetchData(req *pb.FetchDataRequest, res *pb.FetchDataResponse) error {
+	i0 := int(*req.StartIndex)
+	i1 := int(*req.EndIndex + 1) // Blobstore's end-indices are inclusive; Go's are exclusive.
+	bk := *req.BlobKey
+	if i := strings.Index(bk, "."); i != -1 {
+		// Strip everything past the ".".
+		bk = bk[:i]
+	}
+	switch bk {
+	case "a14p":
+		const s = "abcdefghijklmnop"
+		i0 := min(len(s), i0)
+		i1 := min(len(s), i1)
+		res.Data = []byte(s[i0:i1])
+	case "longBlob":
+		res.Data = make([]byte, i1-i0)
+		for i := range res.Data {
+			res.Data[i] = 'A' + uint8(i0/rbs)
+			i0++
+		}
+	}
+	return nil
+}
+
+// step is one step of a readerTest.
+// It consists of a Reader method to call, the method arguments
+// (lenp, offset, whence) and the expected results.
+type step struct {
+	method  string
+	lenp    int
+	offset  int64
+	whence  int
+	want    string
+	wantErr error
+}
+
+var readerTest = []struct {
+	blobKey string
+	step    []step
+}{
+	{"noSuchBlobKey", []step{
+		{"Read", 8, 0, 0, "", io.EOF},
+	}},
+	{"a14p.0", []step{
+		// Test basic reads.
+		{"Read", 1, 0, 0, "a", nil},
+		{"Read", 3, 0, 0, "bcd", nil},
+		{"Read", 1, 0, 0, "e", nil},
+		{"Read", 2, 0, 0, "fg", nil},
+		// Test Seek.
+		{"Seek", 0, 2, os.SEEK_SET, "2", nil},
+		{"Read", 5, 0, 0, "cdefg", nil},
+		{"Seek", 0, 2, os.SEEK_CUR, "9", nil},
+		{"Read", 1, 0, 0, "j", nil},
+		// Test reads up to and past EOF.
+		{"Read", 5, 0, 0, "klmno", nil},
+		{"Read", 5, 0, 0, "p", nil},
+		{"Read", 5, 0, 0, "", io.EOF},
+		// Test ReadAt.
+		{"ReadAt", 4, 0, 0, "abcd", nil},
+		{"ReadAt", 4, 3, 0, "defg", nil},
+		{"ReadAt", 4, 12, 0, "mnop", nil},
+		{"ReadAt", 4, 13, 0, "nop", io.EOF},
+		{"ReadAt", 4, 99, 0, "", io.EOF},
+	}},
+	{"a14p.1", []step{
+		// Test Seek before any reads.
+		{"Seek", 0, 2, os.SEEK_SET, "2", nil},
+		{"Read", 1, 0, 0, "c", nil},
+		// Test that ReadAt doesn't affect the Read offset.
+		{"ReadAt", 3, 9, 0, "jkl", nil},
+		{"Read", 3, 0, 0, "def", nil},
+	}},
+	{"a14p.2", []step{
+		// Test ReadAt before any reads or seeks.
+		{"ReadAt", 2, 14, 0, "op", nil},
+	}},
+	{"longBlob.0", []step{
+		// Test basic read.
+		{"Read", 1, 0, 0, "A", nil},
+		// Test that Read returns early when the buffer is exhausted.
+		{"Seek", 0, rbs - 2, os.SEEK_SET, strconv.Itoa(rbs - 2), nil},
+		{"Read", 5, 0, 0, "AA", nil},
+		{"Read", 3, 0, 0, "BBB", nil},
+		// Test that what we just read is still in the buffer.
+		{"Seek", 0, rbs - 2, os.SEEK_SET, strconv.Itoa(rbs - 2), nil},
+		{"Read", 5, 0, 0, "AABBB", nil},
+		// Test ReadAt.
+		{"ReadAt", 3, rbs - 4, 0, "AAA", nil},
+		{"ReadAt", 6, rbs - 4, 0, "AAAABB", nil},
+		{"ReadAt", 8, rbs - 4, 0, "AAAABBBB", nil},
+		{"ReadAt", 5, rbs - 4, 0, "AAAAB", nil},
+		{"ReadAt", 2, rbs - 4, 0, "AA", nil},
+		// Test seeking backwards from the Read offset.
+		{"Seek", 0, 2*rbs - 8, os.SEEK_SET, strconv.Itoa(2*rbs - 8), nil},
+		{"Read", 1, 0, 0, "B", nil},
+		{"Read", 1, 0, 0, "B", nil},
+		{"Read", 1, 0, 0, "B", nil},
+		{"Read", 1, 0, 0, "B", nil},
+		{"Read", 8, 0, 0, "BBBBCCCC", nil},
+	}},
+	{"longBlob.1", []step{
+		// Test ReadAt with a slice larger than the buffer size.
+		{"LargeReadAt", 2*rbs - 2, 0, 0, strconv.Itoa(2*rbs - 2), nil},
+		{"LargeReadAt", 2*rbs - 1, 0, 0, strconv.Itoa(2*rbs - 1), nil},
+		{"LargeReadAt", 2*rbs + 0, 0, 0, strconv.Itoa(2*rbs + 0), nil},
+		{"LargeReadAt", 2*rbs + 1, 0, 0, strconv.Itoa(2*rbs + 1), nil},
+		{"LargeReadAt", 2*rbs + 2, 0, 0, strconv.Itoa(2*rbs + 2), nil},
+		{"LargeReadAt", 2*rbs - 2, 1, 0, strconv.Itoa(2*rbs - 2), nil},
+		{"LargeReadAt", 2*rbs - 1, 1, 0, strconv.Itoa(2*rbs - 1), nil},
+		{"LargeReadAt", 2*rbs + 0, 1, 0, strconv.Itoa(2*rbs + 0), nil},
+		{"LargeReadAt", 2*rbs + 1, 1, 0, strconv.Itoa(2*rbs + 1), nil},
+		{"LargeReadAt", 2*rbs + 2, 1, 0, strconv.Itoa(2*rbs + 2), nil},
+	}},
+}
+
+func TestReader(t *testing.T) {
+	for _, rt := range readerTest {
+		c := aetesting.FakeSingleContext(t, "blobstore", "FetchData", fakeFetchData)
+		r := NewReader(c, appengine.BlobKey(rt.blobKey))
+		for i, step := range rt.step {
+			var (
+				got    string
+				gotErr error
+				n      int
+				offset int64
+			)
+			switch step.method {
+			case "LargeReadAt":
+				p := make([]byte, step.lenp)
+				n, gotErr = r.ReadAt(p, step.offset)
+				got = strconv.Itoa(n)
+			case "Read":
+				p := make([]byte, step.lenp)
+				n, gotErr = r.Read(p)
+				got = string(p[:n])
+			case "ReadAt":
+				p := make([]byte, step.lenp)
+				n, gotErr = r.ReadAt(p, step.offset)
+				got = string(p[:n])
+			case "Seek":
+				offset, gotErr = r.Seek(step.offset, step.whence)
+				got = strconv.FormatInt(offset, 10)
+			default:
+				t.Fatalf("unknown method: %s", step.method)
+			}
+			if gotErr != step.wantErr {
+				t.Fatalf("%s step %d: got error %v want %v", rt.blobKey, i, gotErr, step.wantErr)
+			}
+			if got != step.want {
+				t.Fatalf("%s step %d: got %q want %q", rt.blobKey, i, got, step.want)
+			}
+		}
+	}
+}
+
+// doPlainTextParseUploadTest tests ParseUpload's decoding of non-file form fields.
+// It ensures that MIME multipart parts with Content-Type not equal to
+// "message/external-body" (i.e. form fields that are not file uploads) are decoded
+// correctly according to the value of their Content-Transfer-Encoding header field.
+// If charset is not the empty string it will be set in the request's Content-Type
+// header field, and if encoding is not the empty string then the Content-Transfer-Encoding
+// header field will be set.
+func doPlainTextParseUploadTest(t *testing.T, charset string, encoding string,
+	rawContent string, encodedContent string) {
+	bodyBuf := &bytes.Buffer{}
+	w := multipart.NewWriter(bodyBuf)
+
+	fieldName := "foo"
+	hdr := textproto.MIMEHeader{}
+	hdr.Set("Content-Disposition", fmt.Sprintf("form-data; name=%q", fieldName))
+
+	if charset != "" {
+		hdr.Set("Content-Type", fmt.Sprintf("text/plain; charset=%q", charset))
+	} else {
+		hdr.Set("Content-Type", "text/plain")
+	}
+
+	if encoding != "" {
+		hdr.Set("Content-Transfer-Encoding", encoding)
+	}
+
+	pw, err := w.CreatePart(hdr)
+	if err != nil {
+		t.Fatalf("error creating part: %v", err)
+	}
+	pw.Write([]byte(encodedContent))
+
+	if err := w.Close(); err != nil {
+		t.Fatalf("error closing multipart writer: %v\n", err)
+	}
+
+	req, err := http.NewRequest("POST", "/upload", bodyBuf)
+	if err != nil {
+		t.Fatalf("error creating request: %v", err)
+	}
+
+	req.Header.Set("Content-Type", w.FormDataContentType())
+	_, other, err := ParseUpload(req)
+	if err != nil {
+		t.Fatalf("error parsing upload: %v", err)
+	}
+
+	if other[fieldName][0] != rawContent {
+		t.Errorf("got %q expected %q", other[fieldName][0], rawContent)
+	}
+}
+
+func TestParseUploadUTF8Base64Encoding(t *testing.T) {
+	encoded := base64.StdEncoding.EncodeToString([]byte(nonASCIIStr))
+	doPlainTextParseUploadTest(t, charsetUTF8, "base64", nonASCIIStr, encoded)
+}
+
+func TestParseUploadUTF8Base64EncodingMultiline(t *testing.T) {
+	testStr := "words words words words words words words words words words words words"
+	encoded := "d29yZHMgd29yZHMgd29yZHMgd29yZHMgd29yZHMgd29yZHMgd29yZHMgd29yZHMgd29yZHMgd29y\r\nZHMgd29yZHMgd29yZHM="
+	doPlainTextParseUploadTest(t, charsetUTF8, "base64", testStr, encoded)
+}
+
+func TestParseUploadUTF8QuotedPrintableEncoding(t *testing.T) {
+	var encoded bytes.Buffer
+	writer := quotedprintable.NewWriter(&encoded)
+	writer.Write([]byte(nonASCIIStr))
+	writer.Close()
+
+	doPlainTextParseUploadTest(t, charsetUTF8, "quoted-printable", nonASCIIStr,
+		encoded.String())
+}
+
+func TestParseUploadISO2022JPBase64Encoding(t *testing.T) {
+	testStr := "こんにちは"
+	encoding, err := htmlindex.Get(charsetISO2022JP)
+	if err != nil {
+		t.Fatalf("error getting encoding: %v", err)
+	}
+
+	charsetEncoded, err := encoding.NewEncoder().String(testStr)
+	if err != nil {
+		t.Fatalf("error encoding string: %v", err)
+	}
+
+	base64Encoded := base64.StdEncoding.EncodeToString([]byte(charsetEncoded))
+	doPlainTextParseUploadTest(t, charsetISO2022JP, "base64", testStr, base64Encoded)
+}
+
+func TestParseUploadNoEncoding(t *testing.T) {
+	doPlainTextParseUploadTest(t, "", "", "Hello", "Hello")
+}
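
The tests above pin down how `ParseUpload` decodes non-file form fields (base64, quoted-printable, and declared charsets). As a minimal sketch of how a handler might consume those decoded values, assuming a hypothetical `/upload` route and a made-up `note` field name (neither is part of the upstream code):

```go
package main

import (
	"fmt"
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/blobstore"
)

func handleUpload(w http.ResponseWriter, r *http.Request) {
	blobs, other, err := blobstore.ParseUpload(r)
	if err != nil {
		http.Error(w, "bad upload", http.StatusBadRequest)
		return
	}
	// Non-file fields arrive already decoded (base64 / quoted-printable plus
	// any declared charset), which is exactly what the tests above verify.
	var note string
	if v := other["note"]; len(v) > 0 { // "note" is an assumed field name
		note = v[0]
	}
	fmt.Fprintf(w, "got %d file field(s), note=%q\n", len(blobs), note)
}

func main() {
	http.HandleFunc("/upload", handleUpload)
	appengine.Main()
}
```
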
diff --git a/v2/blobstore/read.go b/v2/blobstore/read.go
new file mode 100644
index 0000000..93524f2
--- /dev/null
+++ b/v2/blobstore/read.go
@@ -0,0 +1,160 @@
+// Copyright 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package blobstore
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"io"
+	"os"
+	"sync"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal"
+
+	blobpb "google.golang.org/appengine/v2/internal/blobstore"
+)
+
+// openBlob returns a reader for a blob. It always succeeds; if the blob does
+// not exist then an error will be reported upon first read.
+func openBlob(c context.Context, blobKey appengine.BlobKey) Reader {
+	return &reader{
+		c:       c,
+		blobKey: blobKey,
+	}
+}
+
+const readBufferSize = 256 * 1024
+
+// reader is a blob reader. It implements the Reader interface.
+type reader struct {
+	c context.Context
+
+	// Either blobKey or filename is set:
+	blobKey  appengine.BlobKey
+	filename string
+
+	closeFunc func() // is nil if unavailable or already closed.
+
+	// buf is the read buffer. r is how much of buf has been read.
+	// off is the offset of buf[0] relative to the start of the blob.
+	// An invariant is 0 <= r && r <= len(buf).
+	// Reads that don't require an RPC call will increment r but not off.
+	// Seeks may modify r without discarding the buffer, but only if the
+	// invariant can be maintained.
+	mu  sync.Mutex
+	buf []byte
+	r   int
+	off int64
+}
+
+func (r *reader) Close() error {
+	if f := r.closeFunc; f != nil {
+		f()
+	}
+	r.closeFunc = nil
+	return nil
+}
+
+func (r *reader) Read(p []byte) (int, error) {
+	if len(p) == 0 {
+		return 0, nil
+	}
+	r.mu.Lock()
+	defer r.mu.Unlock()
+	if r.r == len(r.buf) {
+		if err := r.fetch(r.off + int64(r.r)); err != nil {
+			return 0, err
+		}
+	}
+	n := copy(p, r.buf[r.r:])
+	r.r += n
+	return n, nil
+}
+
+func (r *reader) ReadAt(p []byte, off int64) (int, error) {
+	if len(p) == 0 {
+		return 0, nil
+	}
+	r.mu.Lock()
+	defer r.mu.Unlock()
+	// Convert relative offsets to absolute offsets.
+	ab0 := r.off + int64(r.r)
+	ab1 := r.off + int64(len(r.buf))
+	ap0 := off
+	ap1 := off + int64(len(p))
+	// Check if we can satisfy the read entirely out of the existing buffer.
+	if r.off <= ap0 && ap1 <= ab1 {
+		// Convert off from an absolute offset to a relative offset.
+		rp0 := int(ap0 - r.off)
+		return copy(p, r.buf[rp0:]), nil
+	}
+	// Restore the original Read/Seek offset after ReadAt completes.
+	defer r.seek(ab0)
+	// Repeatedly fetch and copy until we have filled p.
+	n := 0
+	for len(p) > 0 {
+		if err := r.fetch(off + int64(n)); err != nil {
+			return n, err
+		}
+		r.r = copy(p, r.buf)
+		n += r.r
+		p = p[r.r:]
+	}
+	return n, nil
+}
+
+func (r *reader) Seek(offset int64, whence int) (ret int64, err error) {
+	r.mu.Lock()
+	defer r.mu.Unlock()
+	switch whence {
+	case os.SEEK_SET:
+		ret = offset
+	case os.SEEK_CUR:
+		ret = r.off + int64(r.r) + offset
+	case os.SEEK_END:
+		return 0, errors.New("seeking relative to the end of a blob isn't supported")
+	default:
+		return 0, fmt.Errorf("invalid Seek whence value: %d", whence)
+	}
+	if ret < 0 {
+		return 0, errors.New("negative Seek offset")
+	}
+	return r.seek(ret)
+}
+
+// fetch fetches readBufferSize bytes starting at the given offset. On success,
+// the data is saved as r.buf.
+func (r *reader) fetch(off int64) error {
+	req := &blobpb.FetchDataRequest{
+		BlobKey:    proto.String(string(r.blobKey)),
+		StartIndex: proto.Int64(off),
+		EndIndex:   proto.Int64(off + readBufferSize - 1), // EndIndex is inclusive.
+	}
+	res := &blobpb.FetchDataResponse{}
+	if err := internal.Call(r.c, "blobstore", "FetchData", req, res); err != nil {
+		return err
+	}
+	if len(res.Data) == 0 {
+		return io.EOF
+	}
+	r.buf, r.r, r.off = res.Data, 0, off
+	return nil
+}
+
+// seek seeks to the given offset with an effective whence equal to SEEK_SET.
+// It discards the read buffer if the invariant cannot be maintained.
+func (r *reader) seek(off int64) (int64, error) {
+	delta := off - r.off
+	if delta >= 0 && delta < int64(len(r.buf)) {
+		r.r = int(delta)
+		return off, nil
+	}
+	r.buf, r.r, r.off = nil, 0, off
+	return off, nil
+}
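
The reader above keeps a 256 KiB buffer and only issues a blobstore `FetchData` RPC when a `Read`, `ReadAt`, or `Seek` lands outside it. A minimal sketch of driving that interface from a handler, assuming a hypothetical route and a blob key passed as a query parameter (both illustrative, not upstream code):

```go
package main

import (
	"io"
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/blobstore"
)

// serveBlobPrefix streams part of a blob. The blob key comes from a query
// parameter purely for illustration.
func serveBlobPrefix(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	key := appengine.BlobKey(r.FormValue("key")) // assumed request parameter

	// NewReader always succeeds; a bad key surfaces as an error on first read.
	br := blobstore.NewReader(ctx, key)

	// Seeking before the first read only records the offset; the first Read
	// then issues a single FetchData RPC for up to readBufferSize (256 KiB).
	if _, err := br.Seek(16, io.SeekStart); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	if _, err := io.CopyN(w, br, 1024); err != nil && err != io.EOF {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}

func main() {
	http.HandleFunc("/blobprefix", serveBlobPrefix)
	appengine.Main()
}
```
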
diff --git a/v2/capability/capability.go b/v2/capability/capability.go
new file mode 100644
index 0000000..dbf1b5d
--- /dev/null
+++ b/v2/capability/capability.go
@@ -0,0 +1,48 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+/*
+Package capability exposes information about outages and scheduled downtime
+for specific API capabilities.
+
+This package does not work in App Engine "flexible environment".
+
+Example:
+
+	if !capability.Enabled(c, "datastore_v3", "write") {
+		// show user a different page
+	}
+*/
+package capability // import "google.golang.org/appengine/v2/capability"
+
+import (
+	"context"
+
+	"google.golang.org/appengine/v2/internal"
+	"google.golang.org/appengine/v2/log"
+
+	pb "google.golang.org/appengine/v2/internal/capability"
+)
+
+// Enabled returns whether an API's capabilities are enabled.
+// The wildcard "*" capability matches every capability of an API.
+// If the underlying RPC fails (if the package is unknown, for example),
+// false is returned and information is written to the application log.
+func Enabled(ctx context.Context, api, capability string) bool {
+	// Only the datastore_v3 "write" capability can actually be disabled; every
+	// other API/capability pair is reported as enabled without making an RPC.
+	if !(api == "datastore_v3" && capability == "write") {
+		return true
+	}
+
+	req := &pb.IsEnabledRequest{
+		Package:    &api,
+		Capability: []string{capability},
+	}
+	res := &pb.IsEnabledResponse{}
+	if err := internal.Call(ctx, "capability_service", "IsEnabled", req, res); err != nil {
+		log.Warningf(ctx, "capability.Enabled: RPC failed: %v", err)
+		return false
+	}
+	return *res.SummaryStatus == pb.IsEnabledResponse_ENABLED
+}
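
Because `Enabled` short-circuits everything except `datastore_v3`/"write", a handler only pays the capability_service RPC when it is about to write to the datastore. A hedged sketch along the lines of the package example, with an assumed handler and error message (not upstream code):

```go
package main

import (
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/capability"
)

// saveHandler refuses writes while the datastore write capability is disabled.
func saveHandler(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	if !capability.Enabled(ctx, "datastore_v3", "write") {
		// Scheduled maintenance or an outage: show a read-only page instead.
		http.Error(w, "saving is temporarily unavailable", http.StatusServiceUnavailable)
		return
	}
	// ... perform the datastore write here ...
	w.Write([]byte("saved"))
}

func main() {
	http.HandleFunc("/save", saveHandler)
	appengine.Main()
}
```
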
diff --git a/v2/cmd/aebundler/aebundler.go b/v2/cmd/aebundler/aebundler.go
new file mode 100644
index 0000000..3ad57cc
--- /dev/null
+++ b/v2/cmd/aebundler/aebundler.go
@@ -0,0 +1,343 @@
+// Copyright 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Program aebundler turns a Go app into a fully self-contained tar file.
+// The app and its subdirectories (if any) are placed under "."
+// and the dependencies from $GOPATH are placed under ./_gopath/src.
+// A main func is synthesized if one does not exist.
+//
+// A sample Dockerfile to be used with this bundler could look like this:
+//
+//	FROM gcr.io/google-appengine/go-compat
+//	ADD . /app
+//	RUN GOPATH=/app/_gopath go build -tags appenginevm -o /app/_ah/exe
+package main
+
+import (
+	"archive/tar"
+	"flag"
+	"fmt"
+	"go/ast"
+	"go/build"
+	"go/parser"
+	"go/token"
+	"io"
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"strings"
+)
+
+var (
+	output  = flag.String("o", "", "name of output tar file or '-' for stdout")
+	rootDir = flag.String("root", ".", "directory name of application root")
+	vm      = flag.Bool("vm", true, `bundle an app for App Engine "flexible environment"`)
+
+	skipFiles = map[string]bool{
+		".git":        true,
+		".gitconfig":  true,
+		".hg":         true,
+		".travis.yml": true,
+	}
+)
+
+const (
+	newMain = `package main
+import "google.golang.org/appengine/v2"
+func main() {
+	appengine.Main()
+}
+`
+)
+
+func usage() {
+	fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0])
+	fmt.Fprintf(os.Stderr, "\t%s -o <file.tar|->\tBundle app to named tar file or stdout\n", os.Args[0])
+	fmt.Fprintf(os.Stderr, "\noptional arguments:\n")
+	flag.PrintDefaults()
+}
+
+func main() {
+	flag.Usage = usage
+	flag.Parse()
+
+	var tags []string
+	if *vm {
+		tags = append(tags, "appenginevm")
+	} else {
+		tags = append(tags, "appengine")
+	}
+
+	tarFile := *output
+	if tarFile == "" {
+		usage()
+		errorf("Required -o flag not specified.")
+	}
+
+	app, err := analyze(tags)
+	if err != nil {
+		errorf("Error analyzing app: %v", err)
+	}
+	if err := app.bundle(tarFile); err != nil {
+		errorf("Unable to bundle app: %v", err)
+	}
+}
+
+// errorf prints the error message and exits.
+func errorf(format string, a ...interface{}) {
+	fmt.Fprintf(os.Stderr, "aebundler: "+format+"\n", a...)
+	os.Exit(1)
+}
+
+type app struct {
+	hasMain  bool
+	appFiles []string
+	imports  map[string]string
+}
+
+// analyze checks the app for building with the given build tags and returns hasMain,
+// app files, and a map of full directory import names to original import names.
+func analyze(tags []string) (*app, error) {
+	ctxt := buildContext(tags)
+	hasMain, appFiles, err := checkMain(ctxt)
+	if err != nil {
+		return nil, err
+	}
+	gopath := filepath.SplitList(ctxt.GOPATH)
+	im, err := imports(ctxt, *rootDir, gopath)
+	return &app{
+		hasMain:  hasMain,
+		appFiles: appFiles,
+		imports:  im,
+	}, err
+}
+
+// buildContext returns the context for building the source.
+func buildContext(tags []string) *build.Context {
+	return &build.Context{
+		GOARCH:    build.Default.GOARCH,
+		GOOS:      build.Default.GOOS,
+		GOROOT:    build.Default.GOROOT,
+		GOPATH:    build.Default.GOPATH,
+		Compiler:  build.Default.Compiler,
+		BuildTags: append(build.Default.BuildTags, tags...),
+	}
+}
+
+// bundle bundles the app into the named tarFile ("-"==stdout).
+func (s *app) bundle(tarFile string) (err error) {
+	var out io.Writer
+	if tarFile == "-" {
+		out = os.Stdout
+	} else {
+		f, err := os.Create(tarFile)
+		if err != nil {
+			return err
+		}
+		defer func() {
+			if cerr := f.Close(); err == nil {
+				err = cerr
+			}
+		}()
+		out = f
+	}
+	tw := tar.NewWriter(out)
+
+	for srcDir, importName := range s.imports {
+		dstDir := "_gopath/src/" + importName
+		if err = copyTree(tw, dstDir, srcDir); err != nil {
+			return fmt.Errorf("unable to copy directory %v to %v: %v", srcDir, dstDir, err)
+		}
+	}
+	if err := copyTree(tw, ".", *rootDir); err != nil {
+		return fmt.Errorf("unable to copy root directory to /app: %v", err)
+	}
+	if !s.hasMain {
+		if err := synthesizeMain(tw, s.appFiles); err != nil {
+			return fmt.Errorf("unable to synthesize new main func: %v", err)
+		}
+	}
+
+	if err := tw.Close(); err != nil {
+		return fmt.Errorf("unable to close tar file %v: %v", tarFile, err)
+	}
+	return nil
+}
+
+// synthesizeMain generates a new main func and writes it to the tarball.
+func synthesizeMain(tw *tar.Writer, appFiles []string) error {
+	appMap := make(map[string]bool)
+	for _, f := range appFiles {
+		appMap[f] = true
+	}
+	var f string
+	for i := 0; i < 100; i++ {
+		f = fmt.Sprintf("app_main%d.go", i)
+		if !appMap[filepath.Join(*rootDir, f)] {
+			break
+		}
+	}
+	if appMap[filepath.Join(*rootDir, f)] {
+		return fmt.Errorf("unable to find unique name for %v", f)
+	}
+	hdr := &tar.Header{
+		Name: f,
+		Mode: 0644,
+		Size: int64(len(newMain)),
+	}
+	if err := tw.WriteHeader(hdr); err != nil {
+		return fmt.Errorf("unable to write header for %v: %v", f, err)
+	}
+	if _, err := tw.Write([]byte(newMain)); err != nil {
+		return fmt.Errorf("unable to write %v to tar file: %v", f, err)
+	}
+	return nil
+}
+
+// imports returns a map of all import directories (recursively) used by the app.
+// The return value maps full directory names to original import names.
+func imports(ctxt *build.Context, srcDir string, gopath []string) (map[string]string, error) {
+	pkg, err := ctxt.ImportDir(srcDir, 0)
+	if err != nil {
+		return nil, fmt.Errorf("unable to analyze source: %v", err)
+	}
+
+	// Resolve all non-standard-library imports
+	result := make(map[string]string)
+	for _, v := range pkg.Imports {
+		if !strings.Contains(v, ".") {
+			continue
+		}
+		src, err := findInGopath(v, gopath)
+		if err != nil {
+			return nil, fmt.Errorf("unable to find import %v in gopath %v: %v", v, gopath, err)
+		}
+		result[src] = v
+		im, err := imports(ctxt, src, gopath)
+		if err != nil {
+			return nil, fmt.Errorf("unable to parse package %v: %v", src, err)
+		}
+		for k, v := range im {
+			result[k] = v
+		}
+	}
+	return result, nil
+}
+
+// findInGopath searches the gopath for the named import directory.
+func findInGopath(dir string, gopath []string) (string, error) {
+	for _, v := range gopath {
+		dst := filepath.Join(v, "src", dir)
+		if _, err := os.Stat(dst); err == nil {
+			return dst, nil
+		}
+	}
+	return "", fmt.Errorf("unable to find package %v in gopath %v", dir, gopath)
+}
+
+// copyTree copies srcDir to tar file dstDir, ignoring skipFiles.
+func copyTree(tw *tar.Writer, dstDir, srcDir string) error {
+	entries, err := ioutil.ReadDir(srcDir)
+	if err != nil {
+		return fmt.Errorf("unable to read dir %v: %v", srcDir, err)
+	}
+	for _, entry := range entries {
+		n := entry.Name()
+		if skipFiles[n] {
+			continue
+		}
+		s := filepath.Join(srcDir, n)
+		d := filepath.Join(dstDir, n)
+		if entry.IsDir() {
+			if err := copyTree(tw, d, s); err != nil {
+				return fmt.Errorf("unable to copy dir %v to %v: %v", s, d, err)
+			}
+			continue
+		}
+		if err := copyFile(tw, d, s); err != nil {
+			return fmt.Errorf("unable to copy dir %v to %v: %v", s, d, err)
+		}
+	}
+	return nil
+}
+
+// copyFile copies src to tar file dst.
+func copyFile(tw *tar.Writer, dst, src string) error {
+	s, err := os.Open(src)
+	if err != nil {
+		return fmt.Errorf("unable to open %v: %v", src, err)
+	}
+	defer s.Close()
+	fi, err := s.Stat()
+	if err != nil {
+		return fmt.Errorf("unable to stat %v: %v", src, err)
+	}
+
+	hdr, err := tar.FileInfoHeader(fi, dst)
+	if err != nil {
+		return fmt.Errorf("unable to create tar header for %v: %v", dst, err)
+	}
+	hdr.Name = dst
+	if err := tw.WriteHeader(hdr); err != nil {
+		return fmt.Errorf("unable to write header for %v: %v", dst, err)
+	}
+	_, err = io.Copy(tw, s)
+	if err != nil {
+		return fmt.Errorf("unable to copy %v to %v: %v", src, dst, err)
+	}
+	return nil
+}
+
+// checkMain verifies that there is a single "main" function.
+// It also returns a list of all Go source files in the app.
+func checkMain(ctxt *build.Context) (bool, []string, error) {
+	pkg, err := ctxt.ImportDir(*rootDir, 0)
+	if err != nil {
+		return false, nil, fmt.Errorf("unable to analyze source: %v", err)
+	}
+	if !pkg.IsCommand() {
+		errorf("Your app's package needs to be changed from %q to \"main\".\n", pkg.Name)
+	}
+	// Search for a "func main"
+	var hasMain bool
+	var appFiles []string
+	for _, f := range pkg.GoFiles {
+		n := filepath.Join(*rootDir, f)
+		appFiles = append(appFiles, n)
+		if hasMain, err = readFile(n); err != nil {
+			return false, nil, fmt.Errorf("error parsing %q: %v", n, err)
+		}
+	}
+	return hasMain, appFiles, nil
+}
+
+// isMain returns whether the given function declaration is a main function.
+// Such a function must be called "main", not have a receiver, and have no arguments or return types.
+func isMain(f *ast.FuncDecl) bool {
+	ft := f.Type
+	return f.Name.Name == "main" && f.Recv == nil && ft.Params.NumFields() == 0 && ft.Results.NumFields() == 0
+}
+
+// readFile reads and parses the Go source code file and returns whether it has a main function.
+func readFile(filename string) (hasMain bool, err error) {
+	var src []byte
+	src, err = ioutil.ReadFile(filename)
+	if err != nil {
+		return
+	}
+	fset := token.NewFileSet()
+	file, err := parser.ParseFile(fset, filename, src, 0)
+	for _, decl := range file.Decls {
+		funcDecl, ok := decl.(*ast.FuncDecl)
+		if !ok {
+			continue
+		}
+		if !isMain(funcDecl) {
+			continue
+		}
+		hasMain = true
+		break
+	}
+	return
+}
diff --git a/v2/cmd/aefix/ae.go b/v2/cmd/aefix/ae.go
new file mode 100644
index 0000000..6883b88
--- /dev/null
+++ b/v2/cmd/aefix/ae.go
@@ -0,0 +1,180 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"go/ast"
+	"strconv"
+	"strings"
+)
+
+const (
+	ctxPackage = "golang.org/x/net/context"
+
+	newPackageBase = "google.golang.org/"
+)
+
+func init() {
+	register(fix{
+		"ae",
+		"2016-04-15",
+		aeFn,
+		`Update old App Engine APIs to new App Engine APIs`,
+	})
+}
+
+// logMethod is the set of methods on appengine.Context used for logging.
+var logMethod = map[string]bool{
+	"Debugf":    true,
+	"Infof":     true,
+	"Warningf":  true,
+	"Errorf":    true,
+	"Criticalf": true,
+}
+
+// mapPackage turns "appengine" into "google.golang.org/appengine/v2", etc.
+func mapPackage(s string) string {
+	return newPackageBase + strings.Replace(s, "appengine", "appengine/v2", 1)
+}
+
+func aeFn(f *ast.File) bool {
+	// During the walk, we track the last thing seen that looks like
+	// an appengine.Context, and reset it once the walk leaves a func.
+	var lastContext *ast.Ident
+
+	fixed := false
+
+	// Update imports.
+	mainImp := "appengine"
+	for _, imp := range f.Imports {
+		pth, _ := strconv.Unquote(imp.Path.Value)
+		if pth == "appengine" || strings.HasPrefix(pth, "appengine/") {
+			newPth := mapPackage(pth)
+			imp.Path.Value = strconv.Quote(newPth)
+			fixed = true
+
+			if pth == "appengine" {
+				mainImp = newPth
+			}
+		}
+	}
+
+	// Update any API changes.
+	walk(f, func(n interface{}) {
+		if ft, ok := n.(*ast.FuncType); ok && ft.Params != nil {
+			// See if this func has an `appengine.Context` arg.
+			// If so, remember its identifier.
+			for _, param := range ft.Params.List {
+				if !isPkgDot(param.Type, "appengine", "Context") {
+					continue
+				}
+				if len(param.Names) == 1 {
+					lastContext = param.Names[0]
+					break
+				}
+			}
+			return
+		}
+
+		if as, ok := n.(*ast.AssignStmt); ok {
+			if len(as.Lhs) == 1 && len(as.Rhs) == 1 {
+				// If this node is an assignment from an appengine.NewContext invocation,
+				// remember the identifier on the LHS.
+				if isCall(as.Rhs[0], "appengine", "NewContext") {
+					if ident, ok := as.Lhs[0].(*ast.Ident); ok {
+						lastContext = ident
+						return
+					}
+				}
+				// x (=|:=) appengine.Timeout(y, z)
+				//   should become
+				// x, _ (=|:=) context.WithTimeout(y, z)
+				if isCall(as.Rhs[0], "appengine", "Timeout") {
+					addImport(f, ctxPackage)
+					as.Lhs = append(as.Lhs, ast.NewIdent("_"))
+					// isCall already did the type checking.
+					sel := as.Rhs[0].(*ast.CallExpr).Fun.(*ast.SelectorExpr)
+					sel.X = ast.NewIdent("context")
+					sel.Sel = ast.NewIdent("WithTimeout")
+					fixed = true
+					return
+				}
+			}
+			return
+		}
+
+		// If this node is a FuncDecl, we've finished the function, so reset lastContext.
+		if _, ok := n.(*ast.FuncDecl); ok {
+			lastContext = nil
+			return
+		}
+
+		if call, ok := n.(*ast.CallExpr); ok {
+			if isPkgDot(call.Fun, "appengine", "Datacenter") && len(call.Args) == 0 {
+				insertContext(f, call, lastContext)
+				fixed = true
+				return
+			}
+			if isPkgDot(call.Fun, "taskqueue", "QueueStats") && len(call.Args) == 3 {
+				call.Args = call.Args[:2] // drop last arg
+				fixed = true
+				return
+			}
+
+			sel, ok := call.Fun.(*ast.SelectorExpr)
+			if !ok {
+				return
+			}
+			if lastContext != nil && refersTo(sel.X, lastContext) && logMethod[sel.Sel.Name] {
+				// c.Errorf(...)
+				//   should become
+				// log.Errorf(c, ...)
+				addImport(f, mapPackage("appengine/log"))
+				sel.X = &ast.Ident{ // ast.NewIdent doesn't preserve the position.
+					NamePos: sel.X.Pos(),
+					Name:    "log",
+				}
+				insertContext(f, call, lastContext)
+				fixed = true
+				return
+			}
+		}
+	})
+
+	// Change any `appengine.Context` to `context.Context`.
+	// Do this in a separate walk because the previous walk
+	// wants to identify "appengine.Context".
+	walk(f, func(n interface{}) {
+		expr, ok := n.(ast.Expr)
+		if ok && isPkgDot(expr, "appengine", "Context") {
+			addImport(f, ctxPackage)
+			// isPkgDot did the type checking.
+			n.(*ast.SelectorExpr).X.(*ast.Ident).Name = "context"
+			fixed = true
+			return
+		}
+	})
+
+	// The changes above might remove the need to import "appengine".
+	// Check if it's used, and drop it if it isn't.
+	if fixed && !usesImport(f, mainImp) {
+		deleteImport(f, mainImp)
+	}
+
+	return fixed
+}
+
+// ctx may be nil.
+func insertContext(f *ast.File, call *ast.CallExpr, ctx *ast.Ident) {
+	if ctx == nil {
+		// context is unknown, so use a plain "ctx".
+		ctx = ast.NewIdent("ctx")
+	} else {
+		// Create a fresh *ast.Ident so we drop the position information.
+		ctx = ast.NewIdent(ctx.Name)
+	}
+
+	call.Args = append([]ast.Expr{ctx}, call.Args...)
+}
diff --git a/v2/cmd/aefix/ae_test.go b/v2/cmd/aefix/ae_test.go
new file mode 100644
index 0000000..9a1cd2b
--- /dev/null
+++ b/v2/cmd/aefix/ae_test.go
@@ -0,0 +1,144 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package main
+
+func init() {
+	addTestCases(aeTests, nil)
+}
+
+var aeTests = []testCase{
+	// Collection of fixes:
+	//	- imports
+	//	- appengine.Timeout -> context.WithTimeout
+	//	- add ctx arg to appengine.Datacenter
+	//	- logging API
+	{
+		Name: "ae.0",
+		In: `package foo
+
+import (
+	"net/http"
+	"time"
+
+	"appengine"
+	"appengine/datastore"
+)
+
+func f(w http.ResponseWriter, r *http.Request) {
+	c := appengine.NewContext(r)
+
+	c = appengine.Timeout(c, 5*time.Second)
+	err := datastore.ErrNoSuchEntity
+	c.Errorf("Something interesting happened: %v", err)
+	_ = appengine.Datacenter()
+}
+`,
+		Out: `package foo
+
+import (
+	"net/http"
+	"time"
+
+	"golang.org/x/net/context"
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/datastore"
+	"google.golang.org/appengine/v2/log"
+)
+
+func f(w http.ResponseWriter, r *http.Request) {
+	c := appengine.NewContext(r)
+
+	c, _ = context.WithTimeout(c, 5*time.Second)
+	err := datastore.ErrNoSuchEntity
+	log.Errorf(c, "Something interesting happened: %v", err)
+	_ = appengine.Datacenter(c)
+}
+`,
+	},
+
+	// Updating a function that takes an appengine.Context arg.
+	{
+		Name: "ae.1",
+		In: `package foo
+
+import (
+	"appengine"
+)
+
+func LogSomething(c2 appengine.Context) {
+	c2.Warningf("Stand back! I'm going to try science!")
+}
+`,
+		Out: `package foo
+
+import (
+	"golang.org/x/net/context"
+	"google.golang.org/appengine/v2/log"
+)
+
+func LogSomething(c2 context.Context) {
+	log.Warningf(c2, "Stand back! I'm going to try science!")
+}
+`,
+	},
+
+	// Less widely used API changes:
+	//	- drop maxTasks arg to taskqueue.QueueStats
+	{
+		Name: "ae.2",
+		In: `package foo
+
+import (
+	"appengine"
+	"appengine/taskqueue"
+)
+
+func f(ctx appengine.Context) {
+	stats, err := taskqueue.QueueStats(ctx, []string{"one", "two"}, 0)
+}
+`,
+		Out: `package foo
+
+import (
+	"golang.org/x/net/context"
+	"google.golang.org/appengine/v2/taskqueue"
+)
+
+func f(ctx context.Context) {
+	stats, err := taskqueue.QueueStats(ctx, []string{"one", "two"})
+}
+`,
+	},
+
+	// Check that the main "appengine" import will not be dropped
+	// if an appengine.Context -> context.Context change happens
+	// but the appengine package is still referenced.
+	{
+		Name: "ae.3",
+		In: `package foo
+
+import (
+	"appengine"
+	"io"
+)
+
+func f(ctx appengine.Context, w io.Writer) {
+	_ = appengine.IsDevAppServer()
+}
+`,
+		Out: `package foo
+
+import (
+	"golang.org/x/net/context"
+	"google.golang.org/appengine/v2"
+	"io"
+)
+
+func f(ctx context.Context, w io.Writer) {
+	_ = appengine.IsDevAppServer()
+}
+`,
+	},
+}
diff --git a/v2/cmd/aefix/fix.go b/v2/cmd/aefix/fix.go
new file mode 100644
index 0000000..23d073f
--- /dev/null
+++ b/v2/cmd/aefix/fix.go
@@ -0,0 +1,851 @@
+// Copyright 2011 The Go Authors.  All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"fmt"
+	"go/ast"
+	"go/parser"
+	"go/token"
+	"os"
+	"path"
+	"reflect"
+	"strconv"
+	"strings"
+)
+
+type fix struct {
+	name string
+	date string // date that fix was introduced, in YYYY-MM-DD format
+	f    func(*ast.File) bool
+	desc string
+}
+
+// main runs sort.Sort(byName(fixes)) before printing list of fixes.
+type byName []fix
+
+func (f byName) Len() int           { return len(f) }
+func (f byName) Swap(i, j int)      { f[i], f[j] = f[j], f[i] }
+func (f byName) Less(i, j int) bool { return f[i].name < f[j].name }
+
+// main runs sort.Sort(byDate(fixes)) before applying fixes.
+type byDate []fix
+
+func (f byDate) Len() int           { return len(f) }
+func (f byDate) Swap(i, j int)      { f[i], f[j] = f[j], f[i] }
+func (f byDate) Less(i, j int) bool { return f[i].date < f[j].date }
+
+var fixes []fix
+
+func register(f fix) {
+	fixes = append(fixes, f)
+}
+
+// walk traverses the AST x, calling visit(y) for each node y in the tree but
+// also with a pointer to each ast.Expr, ast.Stmt, and *ast.BlockStmt,
+// in a bottom-up traversal.
+func walk(x interface{}, visit func(interface{})) {
+	walkBeforeAfter(x, nop, visit)
+}
+
+func nop(interface{}) {}
+
+// walkBeforeAfter is like walk but calls before(x) before traversing
+// x's children and after(x) afterward.
+func walkBeforeAfter(x interface{}, before, after func(interface{})) {
+	before(x)
+
+	switch n := x.(type) {
+	default:
+		panic(fmt.Errorf("unexpected type %T in walkBeforeAfter", x))
+
+	case nil:
+
+	// pointers to interfaces
+	case *ast.Decl:
+		walkBeforeAfter(*n, before, after)
+	case *ast.Expr:
+		walkBeforeAfter(*n, before, after)
+	case *ast.Spec:
+		walkBeforeAfter(*n, before, after)
+	case *ast.Stmt:
+		walkBeforeAfter(*n, before, after)
+
+	// pointers to struct pointers
+	case **ast.BlockStmt:
+		walkBeforeAfter(*n, before, after)
+	case **ast.CallExpr:
+		walkBeforeAfter(*n, before, after)
+	case **ast.FieldList:
+		walkBeforeAfter(*n, before, after)
+	case **ast.FuncType:
+		walkBeforeAfter(*n, before, after)
+	case **ast.Ident:
+		walkBeforeAfter(*n, before, after)
+	case **ast.BasicLit:
+		walkBeforeAfter(*n, before, after)
+
+	// pointers to slices
+	case *[]ast.Decl:
+		walkBeforeAfter(*n, before, after)
+	case *[]ast.Expr:
+		walkBeforeAfter(*n, before, after)
+	case *[]*ast.File:
+		walkBeforeAfter(*n, before, after)
+	case *[]*ast.Ident:
+		walkBeforeAfter(*n, before, after)
+	case *[]ast.Spec:
+		walkBeforeAfter(*n, before, after)
+	case *[]ast.Stmt:
+		walkBeforeAfter(*n, before, after)
+
+	// These are ordered and grouped to match ../../pkg/go/ast/ast.go
+	case *ast.Field:
+		walkBeforeAfter(&n.Names, before, after)
+		walkBeforeAfter(&n.Type, before, after)
+		walkBeforeAfter(&n.Tag, before, after)
+	case *ast.FieldList:
+		for _, field := range n.List {
+			walkBeforeAfter(field, before, after)
+		}
+	case *ast.BadExpr:
+	case *ast.Ident:
+	case *ast.Ellipsis:
+		walkBeforeAfter(&n.Elt, before, after)
+	case *ast.BasicLit:
+	case *ast.FuncLit:
+		walkBeforeAfter(&n.Type, before, after)
+		walkBeforeAfter(&n.Body, before, after)
+	case *ast.CompositeLit:
+		walkBeforeAfter(&n.Type, before, after)
+		walkBeforeAfter(&n.Elts, before, after)
+	case *ast.ParenExpr:
+		walkBeforeAfter(&n.X, before, after)
+	case *ast.SelectorExpr:
+		walkBeforeAfter(&n.X, before, after)
+	case *ast.IndexExpr:
+		walkBeforeAfter(&n.X, before, after)
+		walkBeforeAfter(&n.Index, before, after)
+	case *ast.SliceExpr:
+		walkBeforeAfter(&n.X, before, after)
+		if n.Low != nil {
+			walkBeforeAfter(&n.Low, before, after)
+		}
+		if n.High != nil {
+			walkBeforeAfter(&n.High, before, after)
+		}
+	case *ast.TypeAssertExpr:
+		walkBeforeAfter(&n.X, before, after)
+		walkBeforeAfter(&n.Type, before, after)
+	case *ast.CallExpr:
+		walkBeforeAfter(&n.Fun, before, after)
+		walkBeforeAfter(&n.Args, before, after)
+	case *ast.StarExpr:
+		walkBeforeAfter(&n.X, before, after)
+	case *ast.UnaryExpr:
+		walkBeforeAfter(&n.X, before, after)
+	case *ast.BinaryExpr:
+		walkBeforeAfter(&n.X, before, after)
+		walkBeforeAfter(&n.Y, before, after)
+	case *ast.KeyValueExpr:
+		walkBeforeAfter(&n.Key, before, after)
+		walkBeforeAfter(&n.Value, before, after)
+
+	case *ast.ArrayType:
+		walkBeforeAfter(&n.Len, before, after)
+		walkBeforeAfter(&n.Elt, before, after)
+	case *ast.StructType:
+		walkBeforeAfter(&n.Fields, before, after)
+	case *ast.FuncType:
+		walkBeforeAfter(&n.Params, before, after)
+		if n.Results != nil {
+			walkBeforeAfter(&n.Results, before, after)
+		}
+	case *ast.InterfaceType:
+		walkBeforeAfter(&n.Methods, before, after)
+	case *ast.MapType:
+		walkBeforeAfter(&n.Key, before, after)
+		walkBeforeAfter(&n.Value, before, after)
+	case *ast.ChanType:
+		walkBeforeAfter(&n.Value, before, after)
+
+	case *ast.BadStmt:
+	case *ast.DeclStmt:
+		walkBeforeAfter(&n.Decl, before, after)
+	case *ast.EmptyStmt:
+	case *ast.LabeledStmt:
+		walkBeforeAfter(&n.Stmt, before, after)
+	case *ast.ExprStmt:
+		walkBeforeAfter(&n.X, before, after)
+	case *ast.SendStmt:
+		walkBeforeAfter(&n.Chan, before, after)
+		walkBeforeAfter(&n.Value, before, after)
+	case *ast.IncDecStmt:
+		walkBeforeAfter(&n.X, before, after)
+	case *ast.AssignStmt:
+		walkBeforeAfter(&n.Lhs, before, after)
+		walkBeforeAfter(&n.Rhs, before, after)
+	case *ast.GoStmt:
+		walkBeforeAfter(&n.Call, before, after)
+	case *ast.DeferStmt:
+		walkBeforeAfter(&n.Call, before, after)
+	case *ast.ReturnStmt:
+		walkBeforeAfter(&n.Results, before, after)
+	case *ast.BranchStmt:
+	case *ast.BlockStmt:
+		walkBeforeAfter(&n.List, before, after)
+	case *ast.IfStmt:
+		walkBeforeAfter(&n.Init, before, after)
+		walkBeforeAfter(&n.Cond, before, after)
+		walkBeforeAfter(&n.Body, before, after)
+		walkBeforeAfter(&n.Else, before, after)
+	case *ast.CaseClause:
+		walkBeforeAfter(&n.List, before, after)
+		walkBeforeAfter(&n.Body, before, after)
+	case *ast.SwitchStmt:
+		walkBeforeAfter(&n.Init, before, after)
+		walkBeforeAfter(&n.Tag, before, after)
+		walkBeforeAfter(&n.Body, before, after)
+	case *ast.TypeSwitchStmt:
+		walkBeforeAfter(&n.Init, before, after)
+		walkBeforeAfter(&n.Assign, before, after)
+		walkBeforeAfter(&n.Body, before, after)
+	case *ast.CommClause:
+		walkBeforeAfter(&n.Comm, before, after)
+		walkBeforeAfter(&n.Body, before, after)
+	case *ast.SelectStmt:
+		walkBeforeAfter(&n.Body, before, after)
+	case *ast.ForStmt:
+		walkBeforeAfter(&n.Init, before, after)
+		walkBeforeAfter(&n.Cond, before, after)
+		walkBeforeAfter(&n.Post, before, after)
+		walkBeforeAfter(&n.Body, before, after)
+	case *ast.RangeStmt:
+		walkBeforeAfter(&n.Key, before, after)
+		walkBeforeAfter(&n.Value, before, after)
+		walkBeforeAfter(&n.X, before, after)
+		walkBeforeAfter(&n.Body, before, after)
+
+	case *ast.ImportSpec:
+	case *ast.ValueSpec:
+		walkBeforeAfter(&n.Type, before, after)
+		walkBeforeAfter(&n.Values, before, after)
+		walkBeforeAfter(&n.Names, before, after)
+	case *ast.TypeSpec:
+		walkBeforeAfter(&n.Type, before, after)
+
+	case *ast.BadDecl:
+	case *ast.GenDecl:
+		walkBeforeAfter(&n.Specs, before, after)
+	case *ast.FuncDecl:
+		if n.Recv != nil {
+			walkBeforeAfter(&n.Recv, before, after)
+		}
+		walkBeforeAfter(&n.Type, before, after)
+		if n.Body != nil {
+			walkBeforeAfter(&n.Body, before, after)
+		}
+
+	case *ast.File:
+		walkBeforeAfter(&n.Decls, before, after)
+
+	case *ast.Package:
+		walkBeforeAfter(&n.Files, before, after)
+
+	case []*ast.File:
+		for i := range n {
+			walkBeforeAfter(&n[i], before, after)
+		}
+	case []ast.Decl:
+		for i := range n {
+			walkBeforeAfter(&n[i], before, after)
+		}
+	case []ast.Expr:
+		for i := range n {
+			walkBeforeAfter(&n[i], before, after)
+		}
+	case []*ast.Ident:
+		for i := range n {
+			walkBeforeAfter(&n[i], before, after)
+		}
+	case []ast.Stmt:
+		for i := range n {
+			walkBeforeAfter(&n[i], before, after)
+		}
+	case []ast.Spec:
+		for i := range n {
+			walkBeforeAfter(&n[i], before, after)
+		}
+	}
+	after(x)
+}
+
+// imports returns true if f imports path.
+func imports(f *ast.File, path string) bool {
+	return importSpec(f, path) != nil
+}
+
+// importSpec returns the import spec if f imports path,
+// or nil otherwise.
+func importSpec(f *ast.File, path string) *ast.ImportSpec {
+	for _, s := range f.Imports {
+		if importPath(s) == path {
+			return s
+		}
+	}
+	return nil
+}
+
+// importPath returns the unquoted import path of s,
+// or "" if the path is not properly quoted.
+func importPath(s *ast.ImportSpec) string {
+	t, err := strconv.Unquote(s.Path.Value)
+	if err == nil {
+		return t
+	}
+	return ""
+}
+
+// declImports reports whether gen contains an import of path.
+func declImports(gen *ast.GenDecl, path string) bool {
+	if gen.Tok != token.IMPORT {
+		return false
+	}
+	for _, spec := range gen.Specs {
+		impspec := spec.(*ast.ImportSpec)
+		if importPath(impspec) == path {
+			return true
+		}
+	}
+	return false
+}
+
+// isPkgDot returns true if t is the expression "pkg.name"
+// where pkg is an imported identifier.
+func isPkgDot(t ast.Expr, pkg, name string) bool {
+	sel, ok := t.(*ast.SelectorExpr)
+	return ok && isTopName(sel.X, pkg) && sel.Sel.String() == name
+}
+
+// isPtrPkgDot returns true if f is the expression "*pkg.name"
+// where pkg is an imported identifier.
+func isPtrPkgDot(t ast.Expr, pkg, name string) bool {
+	ptr, ok := t.(*ast.StarExpr)
+	return ok && isPkgDot(ptr.X, pkg, name)
+}
+
+// isTopName returns true if n is a top-level unresolved identifier with the given name.
+func isTopName(n ast.Expr, name string) bool {
+	id, ok := n.(*ast.Ident)
+	return ok && id.Name == name && id.Obj == nil
+}
+
+// isName returns true if n is an identifier with the given name.
+func isName(n ast.Expr, name string) bool {
+	id, ok := n.(*ast.Ident)
+	return ok && id.String() == name
+}
+
+// isCall returns true if t is a call to pkg.name.
+func isCall(t ast.Expr, pkg, name string) bool {
+	call, ok := t.(*ast.CallExpr)
+	return ok && isPkgDot(call.Fun, pkg, name)
+}
+
+// If n is an *ast.Ident, isIdent returns it; otherwise isIdent returns nil.
+func isIdent(n interface{}) *ast.Ident {
+	id, _ := n.(*ast.Ident)
+	return id
+}
+
+// refersTo returns true if n is a reference to the same object as x.
+func refersTo(n ast.Node, x *ast.Ident) bool {
+	id, ok := n.(*ast.Ident)
+	// The test of id.Name == x.Name handles top-level unresolved
+	// identifiers, which all have Obj == nil.
+	return ok && id.Obj == x.Obj && id.Name == x.Name
+}
+
+// isBlank returns true if n is the blank identifier.
+func isBlank(n ast.Expr) bool {
+	return isName(n, "_")
+}
+
+// isEmptyString returns true if n is an empty string literal.
+func isEmptyString(n ast.Expr) bool {
+	lit, ok := n.(*ast.BasicLit)
+	return ok && lit.Kind == token.STRING && len(lit.Value) == 2
+}
+
+func warn(pos token.Pos, msg string, args ...interface{}) {
+	if pos.IsValid() {
+		msg = "%s: " + msg
+		arg1 := []interface{}{fset.Position(pos).String()}
+		args = append(arg1, args...)
+	}
+	fmt.Fprintf(os.Stderr, msg+"\n", args...)
+}
+
+// countUses returns the number of uses of the identifier x in scope.
+func countUses(x *ast.Ident, scope []ast.Stmt) int {
+	count := 0
+	ff := func(n interface{}) {
+		if n, ok := n.(ast.Node); ok && refersTo(n, x) {
+			count++
+		}
+	}
+	for _, n := range scope {
+		walk(n, ff)
+	}
+	return count
+}
+
+// rewriteUses replaces all uses of the identifier x and !x in scope
+// with f(x.Pos()) and fnot(x.Pos()).
+func rewriteUses(x *ast.Ident, f, fnot func(token.Pos) ast.Expr, scope []ast.Stmt) {
+	var lastF ast.Expr
+	ff := func(n interface{}) {
+		ptr, ok := n.(*ast.Expr)
+		if !ok {
+			return
+		}
+		nn := *ptr
+
+		// The child node was just walked and possibly replaced.
+		// If it was replaced and this is a negation, replace with fnot(p).
+		not, ok := nn.(*ast.UnaryExpr)
+		if ok && not.Op == token.NOT && not.X == lastF {
+			*ptr = fnot(nn.Pos())
+			return
+		}
+		if refersTo(nn, x) {
+			lastF = f(nn.Pos())
+			*ptr = lastF
+		}
+	}
+	for _, n := range scope {
+		walk(n, ff)
+	}
+}
+
+// assignsTo returns true if any of the code in scope assigns to or takes the address of x.
+func assignsTo(x *ast.Ident, scope []ast.Stmt) bool {
+	assigned := false
+	ff := func(n interface{}) {
+		if assigned {
+			return
+		}
+		switch n := n.(type) {
+		case *ast.UnaryExpr:
+			// use of &x
+			if n.Op == token.AND && refersTo(n.X, x) {
+				assigned = true
+				return
+			}
+		case *ast.AssignStmt:
+			for _, l := range n.Lhs {
+				if refersTo(l, x) {
+					assigned = true
+					return
+				}
+			}
+		}
+	}
+	for _, n := range scope {
+		if assigned {
+			break
+		}
+		walk(n, ff)
+	}
+	return assigned
+}
+
+// newPkgDot returns an ast.Expr referring to "pkg.name" at position pos.
+func newPkgDot(pos token.Pos, pkg, name string) ast.Expr {
+	return &ast.SelectorExpr{
+		X: &ast.Ident{
+			NamePos: pos,
+			Name:    pkg,
+		},
+		Sel: &ast.Ident{
+			NamePos: pos,
+			Name:    name,
+		},
+	}
+}
+
+// renameTop renames all references to the top-level name old.
+// It returns true if it makes any changes.
+func renameTop(f *ast.File, old, new string) bool {
+	var fixed bool
+
+	// Rename any conflicting imports
+	// (assuming package name is last element of path).
+	for _, s := range f.Imports {
+		if s.Name != nil {
+			if s.Name.Name == old {
+				s.Name.Name = new
+				fixed = true
+			}
+		} else {
+			_, thisName := path.Split(importPath(s))
+			if thisName == old {
+				s.Name = ast.NewIdent(new)
+				fixed = true
+			}
+		}
+	}
+
+	// Rename any top-level declarations.
+	for _, d := range f.Decls {
+		switch d := d.(type) {
+		case *ast.FuncDecl:
+			if d.Recv == nil && d.Name.Name == old {
+				d.Name.Name = new
+				d.Name.Obj.Name = new
+				fixed = true
+			}
+		case *ast.GenDecl:
+			for _, s := range d.Specs {
+				switch s := s.(type) {
+				case *ast.TypeSpec:
+					if s.Name.Name == old {
+						s.Name.Name = new
+						s.Name.Obj.Name = new
+						fixed = true
+					}
+				case *ast.ValueSpec:
+					for _, n := range s.Names {
+						if n.Name == old {
+							n.Name = new
+							n.Obj.Name = new
+							fixed = true
+						}
+					}
+				}
+			}
+		}
+	}
+
+	// Rename top-level old to new, both unresolved names
+	// (probably defined in another file) and names that resolve
+	// to a declaration we renamed.
+	walk(f, func(n interface{}) {
+		id, ok := n.(*ast.Ident)
+		if ok && isTopName(id, old) {
+			id.Name = new
+			fixed = true
+		}
+		if ok && id.Obj != nil && id.Name == old && id.Obj.Name == new {
+			id.Name = id.Obj.Name
+			fixed = true
+		}
+	})
+
+	return fixed
+}
+
+// matchLen returns the length of the longest prefix shared by x and y.
+func matchLen(x, y string) int {
+	i := 0
+	for i < len(x) && i < len(y) && x[i] == y[i] {
+		i++
+	}
+	return i
+}
+
+// addImport adds the import path to the file f, if absent.
+func addImport(f *ast.File, ipath string) (added bool) {
+	if imports(f, ipath) {
+		return false
+	}
+
+	// Determine name of import.
+	// Assume added imports follow convention of using last element.
+	_, name := path.Split(ipath)
+
+	// Rename any conflicting top-level references from name to name_.
+	renameTop(f, name, name+"_")
+
+	newImport := &ast.ImportSpec{
+		Path: &ast.BasicLit{
+			Kind:  token.STRING,
+			Value: strconv.Quote(ipath),
+		},
+	}
+
+	// Find an import decl to add to.
+	var (
+		bestMatch  = -1
+		lastImport = -1
+		impDecl    *ast.GenDecl
+		impIndex   = -1
+	)
+	for i, decl := range f.Decls {
+		gen, ok := decl.(*ast.GenDecl)
+		if ok && gen.Tok == token.IMPORT {
+			lastImport = i
+			// Do not add to import "C", to avoid disrupting the
+			// association with its doc comment, breaking cgo.
+			if declImports(gen, "C") {
+				continue
+			}
+
+			// Compute longest shared prefix with imports in this block.
+			for j, spec := range gen.Specs {
+				impspec := spec.(*ast.ImportSpec)
+				n := matchLen(importPath(impspec), ipath)
+				if n > bestMatch {
+					bestMatch = n
+					impDecl = gen
+					impIndex = j
+				}
+			}
+		}
+	}
+
+	// If no import decl found, add one after the last import.
+	if impDecl == nil {
+		impDecl = &ast.GenDecl{
+			Tok: token.IMPORT,
+		}
+		f.Decls = append(f.Decls, nil)
+		copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:])
+		f.Decls[lastImport+1] = impDecl
+	}
+
+	// Ensure the import decl has parentheses, if needed.
+	if len(impDecl.Specs) > 0 && !impDecl.Lparen.IsValid() {
+		impDecl.Lparen = impDecl.Pos()
+	}
+
+	insertAt := impIndex + 1
+	if insertAt == 0 {
+		insertAt = len(impDecl.Specs)
+	}
+	impDecl.Specs = append(impDecl.Specs, nil)
+	copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:])
+	impDecl.Specs[insertAt] = newImport
+	if insertAt > 0 {
+		// Assign same position as the previous import,
+		// so that the sorter sees it as being in the same block.
+		prev := impDecl.Specs[insertAt-1]
+		newImport.Path.ValuePos = prev.Pos()
+		newImport.EndPos = prev.Pos()
+	}
+
+	f.Imports = append(f.Imports, newImport)
+	return true
+}
+
+// deleteImport deletes the import path from the file f, if present.
+func deleteImport(f *ast.File, path string) (deleted bool) {
+	oldImport := importSpec(f, path)
+
+	// Find the import node that imports path, if any.
+	for i, decl := range f.Decls {
+		gen, ok := decl.(*ast.GenDecl)
+		if !ok || gen.Tok != token.IMPORT {
+			continue
+		}
+		for j, spec := range gen.Specs {
+			impspec := spec.(*ast.ImportSpec)
+			if oldImport != impspec {
+				continue
+			}
+
+			// We found an import spec that imports path.
+			// Delete it.
+			deleted = true
+			copy(gen.Specs[j:], gen.Specs[j+1:])
+			gen.Specs = gen.Specs[:len(gen.Specs)-1]
+
+			// If this was the last import spec in this decl,
+			// delete the decl, too.
+			if len(gen.Specs) == 0 {
+				copy(f.Decls[i:], f.Decls[i+1:])
+				f.Decls = f.Decls[:len(f.Decls)-1]
+			} else if len(gen.Specs) == 1 {
+				gen.Lparen = token.NoPos // drop parens
+			}
+			if j > 0 {
+				// We deleted an entry but now there will be
+				// a blank line-sized hole where the import was.
+				// Close the hole by making the previous
+				// import appear to "end" where this one did.
+				gen.Specs[j-1].(*ast.ImportSpec).EndPos = impspec.End()
+			}
+			break
+		}
+	}
+
+	// Delete it from f.Imports.
+	for i, imp := range f.Imports {
+		if imp == oldImport {
+			copy(f.Imports[i:], f.Imports[i+1:])
+			f.Imports = f.Imports[:len(f.Imports)-1]
+			break
+		}
+	}
+
+	return
+}
+
+// rewriteImport rewrites any import of path oldPath to path newPath.
+func rewriteImport(f *ast.File, oldPath, newPath string) (rewrote bool) {
+	for _, imp := range f.Imports {
+		if importPath(imp) == oldPath {
+			rewrote = true
+			// record old End, because the default is to compute
+			// it using the length of imp.Path.Value.
+			imp.EndPos = imp.End()
+			imp.Path.Value = strconv.Quote(newPath)
+		}
+	}
+	return
+}
+
+func usesImport(f *ast.File, path string) (used bool) {
+	spec := importSpec(f, path)
+	if spec == nil {
+		return
+	}
+
+	name := spec.Name.String()
+	switch name {
+	case "<nil>":
+		// If the package name is not explicitly specified,
+		// make an educated guess. This is not guaranteed to be correct.
+		if strings.HasSuffix(path, "/v2") {
+			path = strings.TrimRight(path, "/v2")
+		}
+		lastSlash := strings.LastIndex(path, "/")
+		if lastSlash == -1 {
+			name = path
+		} else {
+			name = path[lastSlash+1:]
+		}
+	case "_", ".":
+		// Not sure if this import is used - err on the side of caution.
+		return true
+	}
+
+	walk(f, func(n interface{}) {
+		sel, ok := n.(*ast.SelectorExpr)
+		if ok && isTopName(sel.X, name) {
+			used = true
+		}
+	})
+
+	return
+}
+
+func expr(s string) ast.Expr {
+	x, err := parser.ParseExpr(s)
+	if err != nil {
+		panic("parsing " + s + ": " + err.Error())
+	}
+	// Remove position information to avoid spurious newlines.
+	killPos(reflect.ValueOf(x))
+	return x
+}
+
+var posType = reflect.TypeOf(token.Pos(0))
+
+func killPos(v reflect.Value) {
+	switch v.Kind() {
+	case reflect.Ptr, reflect.Interface:
+		if !v.IsNil() {
+			killPos(v.Elem())
+		}
+	case reflect.Slice:
+		n := v.Len()
+		for i := 0; i < n; i++ {
+			killPos(v.Index(i))
+		}
+	case reflect.Struct:
+		n := v.NumField()
+		for i := 0; i < n; i++ {
+			f := v.Field(i)
+			if f.Type() == posType {
+				f.SetInt(0)
+				continue
+			}
+			killPos(f)
+		}
+	}
+}
+
+// A Rename describes a single renaming.
+type rename struct {
+	OldImport string // only apply rename if this import is present
+	NewImport string // add this import during rewrite
+	Old       string // old name: p.T or *p.T
+	New       string // new name: p.T or *p.T
+}
+
+func renameFix(tab []rename) func(*ast.File) bool {
+	return func(f *ast.File) bool {
+		return renameFixTab(f, tab)
+	}
+}
+
+func parseName(s string) (ptr bool, pkg, nam string) {
+	i := strings.Index(s, ".")
+	if i < 0 {
+		panic("parseName: invalid name " + s)
+	}
+	if strings.HasPrefix(s, "*") {
+		ptr = true
+		s = s[1:]
+		i--
+	}
+	pkg = s[:i]
+	nam = s[i+1:]
+	return
+}
+
+func renameFixTab(f *ast.File, tab []rename) bool {
+	fixed := false
+	added := map[string]bool{}
+	check := map[string]bool{}
+	for _, t := range tab {
+		if !imports(f, t.OldImport) {
+			continue
+		}
+		optr, opkg, onam := parseName(t.Old)
+		walk(f, func(n interface{}) {
+			np, ok := n.(*ast.Expr)
+			if !ok {
+				return
+			}
+			x := *np
+			if optr {
+				p, ok := x.(*ast.StarExpr)
+				if !ok {
+					return
+				}
+				x = p.X
+			}
+			if !isPkgDot(x, opkg, onam) {
+				return
+			}
+			if t.NewImport != "" && !added[t.NewImport] {
+				addImport(f, t.NewImport)
+				added[t.NewImport] = true
+			}
+			*np = expr(t.New)
+			check[t.OldImport] = true
+			fixed = true
+		})
+	}
+
+	for ipath := range check {
+		if !usesImport(f, ipath) {
+			deleteImport(f, ipath)
+		}
+	}
+	return fixed
+}
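
Besides hand-written fixers like `aeFn`, the helpers above support table-driven rewrites: `renameFix` turns a `[]rename` into a fixer that swaps qualified names and maintains the corresponding imports. As a purely illustrative sketch (the fix name and date are made up; it would live in the same aefix package), the `appengine.Context` → `context.Context` rewrite that `aeFn` performs by hand could in principle be expressed as a rename table:

```go
// Hypothetical, illustrative fixer using register/renameFix from fix.go.
// Not part of the upstream code; names and date are invented.
func init() {
	register(fix{
		"examplectx",
		"2016-01-02",
		renameFix([]rename{{
			OldImport: "appengine",
			NewImport: "golang.org/x/net/context",
			Old:       "appengine.Context",
			New:       "context.Context",
		}}),
		`Illustrative rename-table fix: appengine.Context -> context.Context`,
	})
}
```
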
diff --git a/v2/cmd/aefix/main.go b/v2/cmd/aefix/main.go
new file mode 100644
index 0000000..8e193a6
--- /dev/null
+++ b/v2/cmd/aefix/main.go
@@ -0,0 +1,258 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"bytes"
+	"flag"
+	"fmt"
+	"go/ast"
+	"go/format"
+	"go/parser"
+	"go/scanner"
+	"go/token"
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"path/filepath"
+	"sort"
+	"strings"
+)
+
+var (
+	fset     = token.NewFileSet()
+	exitCode = 0
+)
+
+var allowedRewrites = flag.String("r", "",
+	"restrict the rewrites to this comma-separated list")
+
+var forceRewrites = flag.String("force", "",
+	"force these fixes to run even if the code looks updated")
+
+var allowed, force map[string]bool
+
+var doDiff = flag.Bool("diff", false, "display diffs instead of rewriting files")
+
+// enable for debugging fix failures
+const debug = false // display incorrectly reformatted source and exit
+
+func usage() {
+	fmt.Fprintf(os.Stderr, "usage: aefix [-diff] [-r fixname,...] [-force fixname,...] [path ...]\n")
+	flag.PrintDefaults()
+	fmt.Fprintf(os.Stderr, "\nAvailable rewrites are:\n")
+	sort.Sort(byName(fixes))
+	for _, f := range fixes {
+		fmt.Fprintf(os.Stderr, "\n%s\n", f.name)
+		desc := strings.TrimSpace(f.desc)
+		desc = strings.Replace(desc, "\n", "\n\t", -1)
+		fmt.Fprintf(os.Stderr, "\t%s\n", desc)
+	}
+	os.Exit(2)
+}
+
+func main() {
+	flag.Usage = usage
+	flag.Parse()
+
+	sort.Sort(byDate(fixes))
+
+	if *allowedRewrites != "" {
+		allowed = make(map[string]bool)
+		for _, f := range strings.Split(*allowedRewrites, ",") {
+			allowed[f] = true
+		}
+	}
+
+	if *forceRewrites != "" {
+		force = make(map[string]bool)
+		for _, f := range strings.Split(*forceRewrites, ",") {
+			force[f] = true
+		}
+	}
+
+	if flag.NArg() == 0 {
+		if err := processFile("standard input", true); err != nil {
+			report(err)
+		}
+		os.Exit(exitCode)
+	}
+
+	for i := 0; i < flag.NArg(); i++ {
+		path := flag.Arg(i)
+		switch dir, err := os.Stat(path); {
+		case err != nil:
+			report(err)
+		case dir.IsDir():
+			walkDir(path)
+		default:
+			if err := processFile(path, false); err != nil {
+				report(err)
+			}
+		}
+	}
+
+	os.Exit(exitCode)
+}
+
+const parserMode = parser.ParseComments
+
+func gofmtFile(f *ast.File) ([]byte, error) {
+	var buf bytes.Buffer
+	if err := format.Node(&buf, fset, f); err != nil {
+		return nil, err
+	}
+	return buf.Bytes(), nil
+}
+
+func processFile(filename string, useStdin bool) error {
+	var f *os.File
+	var err error
+	var fixlog bytes.Buffer
+
+	if useStdin {
+		f = os.Stdin
+	} else {
+		f, err = os.Open(filename)
+		if err != nil {
+			return err
+		}
+		defer f.Close()
+	}
+
+	src, err := ioutil.ReadAll(f)
+	if err != nil {
+		return err
+	}
+
+	file, err := parser.ParseFile(fset, filename, src, parserMode)
+	if err != nil {
+		return err
+	}
+
+	// Apply all fixes to file.
+	newFile := file
+	fixed := false
+	for _, fix := range fixes {
+		if allowed != nil && !allowed[fix.name] {
+			continue
+		}
+		if fix.f(newFile) {
+			fixed = true
+			fmt.Fprintf(&fixlog, " %s", fix.name)
+
+			// AST changed.
+			// Print and parse, to update any missing scoping
+			// or position information for subsequent fixers.
+			newSrc, err := gofmtFile(newFile)
+			if err != nil {
+				return err
+			}
+			newFile, err = parser.ParseFile(fset, filename, newSrc, parserMode)
+			if err != nil {
+				if debug {
+					fmt.Printf("%s", newSrc)
+					report(err)
+					os.Exit(exitCode)
+				}
+				return err
+			}
+		}
+	}
+	if !fixed {
+		return nil
+	}
+	fmt.Fprintf(os.Stderr, "%s: fixed %s\n", filename, fixlog.String()[1:])
+
+	// Print AST.  We did that after each fix, so this appears
+	// redundant, but it is necessary to generate gofmt-compatible
+	// source code in a few cases.  The official gofmt style is the
+	// output of the printer run on a standard AST generated by the parser,
+	// but the source we generated inside the loop above is the
+	// output of the printer run on a mangled AST generated by a fixer.
+	newSrc, err := gofmtFile(newFile)
+	if err != nil {
+		return err
+	}
+
+	if *doDiff {
+		data, err := diff(src, newSrc)
+		if err != nil {
+			return fmt.Errorf("computing diff: %s", err)
+		}
+		fmt.Printf("diff %s fixed/%s\n", filename, filename)
+		os.Stdout.Write(data)
+		return nil
+	}
+
+	if useStdin {
+		os.Stdout.Write(newSrc)
+		return nil
+	}
+
+	return ioutil.WriteFile(f.Name(), newSrc, 0)
+}
+
+var gofmtBuf bytes.Buffer
+
+func gofmt(n interface{}) string {
+	gofmtBuf.Reset()
+	if err := format.Node(&gofmtBuf, fset, n); err != nil {
+		return "<" + err.Error() + ">"
+	}
+	return gofmtBuf.String()
+}
+
+func report(err error) {
+	scanner.PrintError(os.Stderr, err)
+	exitCode = 2
+}
+
+func walkDir(path string) {
+	filepath.Walk(path, visitFile)
+}
+
+func visitFile(path string, f os.FileInfo, err error) error {
+	if err == nil && isGoFile(f) {
+		err = processFile(path, false)
+	}
+	if err != nil {
+		report(err)
+	}
+	return nil
+}
+
+func isGoFile(f os.FileInfo) bool {
+	// ignore non-Go files
+	name := f.Name()
+	return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go")
+}
+
+func diff(b1, b2 []byte) (data []byte, err error) {
+	f1, err := ioutil.TempFile("", "go-fix")
+	if err != nil {
+		return nil, err
+	}
+	defer os.Remove(f1.Name())
+	defer f1.Close()
+
+	f2, err := ioutil.TempFile("", "go-fix")
+	if err != nil {
+		return nil, err
+	}
+	defer os.Remove(f2.Name())
+	defer f2.Close()
+
+	f1.Write(b1)
+	f2.Write(b2)
+
+	data, err = exec.Command("diff", "-u", f1.Name(), f2.Name()).CombinedOutput()
+	if len(data) > 0 {
+		// diff exits with a non-zero status when the files don't match.
+		// Ignore that failure as long as we get output.
+		err = nil
+	}
+	return
+}
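
Note on the fixer loop in main.go above: after every fix that reports a change, processFile prints the AST with gofmt and parses it again, so later fixers see fresh position and scope information and the final output matches official gofmt style. Below is a standalone sketch of that print-and-reparse round trip using only the standard library; the renamed identifier stands in for a real fix and is not part of aefix:

```go
package main

import (
	"bytes"
	"fmt"
	"go/ast"
	"go/format"
	"go/parser"
	"go/token"
)

func main() {
	fset := token.NewFileSet()
	const src = "package demo\n\nvar answer = 41\n"

	f, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
	if err != nil {
		panic(err)
	}

	// Mutate the AST, standing in for a fix function returning true.
	ast.Inspect(f, func(n ast.Node) bool {
		if id, ok := n.(*ast.Ident); ok && id.Name == "answer" {
			id.Name = "renamed"
		}
		return true
	})

	// Print and re-parse so the next pass works on a consistent tree.
	var buf bytes.Buffer
	if err := format.Node(&buf, fset, f); err != nil {
		panic(err)
	}
	if _, err := parser.ParseFile(fset, "demo.go", buf.Bytes(), parser.ParseComments); err != nil {
		panic(err)
	}
	fmt.Print(buf.String())
}
```
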
diff --git a/v2/cmd/aefix/main_test.go b/v2/cmd/aefix/main_test.go
new file mode 100644
index 0000000..2151bf2
--- /dev/null
+++ b/v2/cmd/aefix/main_test.go
@@ -0,0 +1,129 @@
+// Copyright 2011 The Go Authors.  All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"go/ast"
+	"go/parser"
+	"strings"
+	"testing"
+)
+
+type testCase struct {
+	Name string
+	Fn   func(*ast.File) bool
+	In   string
+	Out  string
+}
+
+var testCases []testCase
+
+func addTestCases(t []testCase, fn func(*ast.File) bool) {
+	// Fill in fn to avoid repetition in definitions.
+	if fn != nil {
+		for i := range t {
+			if t[i].Fn == nil {
+				t[i].Fn = fn
+			}
+		}
+	}
+	testCases = append(testCases, t...)
+}
+
+func fnop(*ast.File) bool { return false }
+
+func parseFixPrint(t *testing.T, fn func(*ast.File) bool, desc, in string, mustBeGofmt bool) (out string, fixed, ok bool) {
+	file, err := parser.ParseFile(fset, desc, in, parserMode)
+	if err != nil {
+		t.Errorf("%s: parsing: %v", desc, err)
+		return
+	}
+
+	outb, err := gofmtFile(file)
+	if err != nil {
+		t.Errorf("%s: printing: %v", desc, err)
+		return
+	}
+	if s := string(outb); in != s && mustBeGofmt {
+		t.Errorf("%s: not gofmt-formatted.\n--- %s\n%s\n--- %s | gofmt\n%s",
+			desc, desc, in, desc, s)
+		tdiff(t, in, s)
+		return
+	}
+
+	if fn == nil {
+		for _, fix := range fixes {
+			if fix.f(file) {
+				fixed = true
+			}
+		}
+	} else {
+		fixed = fn(file)
+	}
+
+	outb, err = gofmtFile(file)
+	if err != nil {
+		t.Errorf("%s: printing: %v", desc, err)
+		return
+	}
+
+	return string(outb), fixed, true
+}
+
+func TestRewrite(t *testing.T) {
+	for _, tt := range testCases {
+		// Apply fix: should get tt.Out.
+		out, fixed, ok := parseFixPrint(t, tt.Fn, tt.Name, tt.In, true)
+		if !ok {
+			continue
+		}
+
+		// reformat to get printing right
+		out, _, ok = parseFixPrint(t, fnop, tt.Name, out, false)
+		if !ok {
+			continue
+		}
+
+		if out != tt.Out {
+			t.Errorf("%s: incorrect output.\n", tt.Name)
+			if !strings.HasPrefix(tt.Name, "testdata/") {
+				t.Errorf("--- have\n%s\n--- want\n%s", out, tt.Out)
+			}
+			tdiff(t, out, tt.Out)
+			continue
+		}
+
+		if changed := out != tt.In; changed != fixed {
+			t.Errorf("%s: changed=%v != fixed=%v", tt.Name, changed, fixed)
+			continue
+		}
+
+		// Should not change if run again.
+		out2, fixed2, ok := parseFixPrint(t, tt.Fn, tt.Name+" output", out, true)
+		if !ok {
+			continue
+		}
+
+		if fixed2 {
+			t.Errorf("%s: applied fixes during second round", tt.Name)
+			continue
+		}
+
+		if out2 != out {
+			t.Errorf("%s: changed output after second round of fixes.\n--- output after first round\n%s\n--- output after second round\n%s",
+				tt.Name, out, out2)
+			tdiff(t, out, out2)
+		}
+	}
+}
+
+func tdiff(t *testing.T, a, b string) {
+	data, err := diff([]byte(a), []byte(b))
+	if err != nil {
+		t.Error(err)
+		return
+	}
+	t.Error(string(data))
+}
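
The harness above is table driven: addTestCases registers cases into testCases, and TestRewrite checks that each fix produces the expected output, that the output is gofmt-clean, and that a second application changes nothing. A hypothetical companion file sketching how a fix's test data would plug in; demoFix and the OldName/NewName contents are illustrative only:

```go
// demofix_test.go (hypothetical)
package main

import "go/ast"

// demoFix renames the identifier OldName to NewName wherever it appears.
func demoFix(f *ast.File) bool {
	fixed := false
	ast.Inspect(f, func(n ast.Node) bool {
		if id, ok := n.(*ast.Ident); ok && id.Name == "OldName" {
			id.Name = "NewName"
			fixed = true
		}
		return true
	})
	return fixed
}

func init() {
	addTestCases([]testCase{{
		Name: "demo.0",
		In:   "package main\n\nvar OldName int\n",
		Out:  "package main\n\nvar NewName int\n",
	}}, demoFix)
}
```
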
diff --git a/v2/cmd/aefix/typecheck.go b/v2/cmd/aefix/typecheck.go
new file mode 100644
index 0000000..d54d375
--- /dev/null
+++ b/v2/cmd/aefix/typecheck.go
@@ -0,0 +1,673 @@
+// Copyright 2011 The Go Authors.  All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package main
+
+import (
+	"fmt"
+	"go/ast"
+	"go/token"
+	"os"
+	"reflect"
+	"strings"
+)
+
+// Partial type checker.
+//
+// The fact that it is partial is very important: the input is
+// an AST and a description of some type information to
+// assume about one or more packages, but not all the
+// packages that the program imports.  The checker is
+// expected to do as much as it can with what it has been
+// given.  There is not enough information supplied to do
+// a full type check, but the type checker is expected to
+// apply information that can be derived from variable
+// declarations, function and method returns, and type switches
+// as far as it can, so that the caller can still tell the types
+// of expression relevant to a particular fix.
+//
+// TODO(rsc,gri): Replace with go/typechecker.
+// Doing that could be an interesting test case for go/typechecker:
+// the constraints about working with partial information will
+// likely exercise it in interesting ways.  The ideal interface would
+// be to pass typecheck a map from importpath to package API text
+// (Go source code), but for now we use data structures (TypeConfig, Type).
+//
+// The strings mostly use gofmt form.
+//
+// A Field or FieldList has as its type a comma-separated list
+// of the types of the fields.  For example, the field list
+//	x, y, z int
+// has type "int, int, int".
+
+// The prefix "type " is the type of a type.
+// For example, given
+//	var x int
+//	type T int
+// x's type is "int" but T's type is "type int".
+// mkType inserts the "type " prefix.
+// getType removes it.
+// isType tests for it.
+
+func mkType(t string) string {
+	return "type " + t
+}
+
+func getType(t string) string {
+	if !isType(t) {
+		return ""
+	}
+	return t[len("type "):]
+}
+
+func isType(t string) bool {
+	return strings.HasPrefix(t, "type ")
+}
+
+// TypeConfig describes the universe of relevant types.
+// For ease of creation, the types are all referred to by string
+// name (e.g., "reflect.Value").  TypeByName is the only place
+// where the strings are resolved.
+
+type TypeConfig struct {
+	Type map[string]*Type
+	Var  map[string]string
+	Func map[string]string
+}
+
+// typeof returns the type of the given name, which may be of
+// the form "x" or "p.X".
+func (cfg *TypeConfig) typeof(name string) string {
+	if cfg.Var != nil {
+		if t := cfg.Var[name]; t != "" {
+			return t
+		}
+	}
+	if cfg.Func != nil {
+		if t := cfg.Func[name]; t != "" {
+			return "func()" + t
+		}
+	}
+	return ""
+}
+
+// Type describes the Fields and Methods of a type.
+// If the field or method cannot be found there, it is next
+// looked for in the Embed list.
+type Type struct {
+	Field  map[string]string // map field name to type
+	Method map[string]string // map method name to comma-separated return types (should start with "func ")
+	Embed  []string          // list of types this type embeds (for extra methods)
+	Def    string            // definition of named type
+}
+
+// dot returns the type of "typ.name", making its decision
+// using the type information in cfg.
+func (typ *Type) dot(cfg *TypeConfig, name string) string {
+	if typ.Field != nil {
+		if t := typ.Field[name]; t != "" {
+			return t
+		}
+	}
+	if typ.Method != nil {
+		if t := typ.Method[name]; t != "" {
+			return t
+		}
+	}
+
+	for _, e := range typ.Embed {
+		etyp := cfg.Type[e]
+		if etyp != nil {
+			if t := etyp.dot(cfg, name); t != "" {
+				return t
+			}
+		}
+	}
+
+	return ""
+}
+
+// typecheck type checks the AST f assuming the information in cfg.
+// It returns two maps with type information:
+// typeof maps AST nodes to type information in gofmt string form.
+// assign maps type strings to lists of expressions that were assigned
+// to values of another type that were assigned to that type.
+func typecheck(cfg *TypeConfig, f *ast.File) (typeof map[interface{}]string, assign map[string][]interface{}) {
+	typeof = make(map[interface{}]string)
+	assign = make(map[string][]interface{})
+	cfg1 := &TypeConfig{}
+	*cfg1 = *cfg // make copy so we can add locally
+	copied := false
+
+	// gather function declarations
+	for _, decl := range f.Decls {
+		fn, ok := decl.(*ast.FuncDecl)
+		if !ok {
+			continue
+		}
+		typecheck1(cfg, fn.Type, typeof, assign)
+		t := typeof[fn.Type]
+		if fn.Recv != nil {
+			// The receiver must be a type.
+			rcvr := typeof[fn.Recv]
+			if !isType(rcvr) {
+				if len(fn.Recv.List) != 1 {
+					continue
+				}
+				rcvr = mkType(gofmt(fn.Recv.List[0].Type))
+				typeof[fn.Recv.List[0].Type] = rcvr
+			}
+			rcvr = getType(rcvr)
+			if rcvr != "" && rcvr[0] == '*' {
+				rcvr = rcvr[1:]
+			}
+			typeof[rcvr+"."+fn.Name.Name] = t
+		} else {
+			if isType(t) {
+				t = getType(t)
+			} else {
+				t = gofmt(fn.Type)
+			}
+			typeof[fn.Name] = t
+
+			// Record typeof[fn.Name.Obj] for future references to fn.Name.
+			typeof[fn.Name.Obj] = t
+		}
+	}
+
+	// gather struct declarations
+	for _, decl := range f.Decls {
+		d, ok := decl.(*ast.GenDecl)
+		if ok {
+			for _, s := range d.Specs {
+				switch s := s.(type) {
+				case *ast.TypeSpec:
+					if cfg1.Type[s.Name.Name] != nil {
+						break
+					}
+					if !copied {
+						copied = true
+						// Copy map lazily: it's time.
+						cfg1.Type = make(map[string]*Type)
+						for k, v := range cfg.Type {
+							cfg1.Type[k] = v
+						}
+					}
+					t := &Type{Field: map[string]string{}}
+					cfg1.Type[s.Name.Name] = t
+					switch st := s.Type.(type) {
+					case *ast.StructType:
+						for _, f := range st.Fields.List {
+							for _, n := range f.Names {
+								t.Field[n.Name] = gofmt(f.Type)
+							}
+						}
+					case *ast.ArrayType, *ast.StarExpr, *ast.MapType:
+						t.Def = gofmt(st)
+					}
+				}
+			}
+		}
+	}
+
+	typecheck1(cfg1, f, typeof, assign)
+	return typeof, assign
+}
+
+func makeExprList(a []*ast.Ident) []ast.Expr {
+	var b []ast.Expr
+	for _, x := range a {
+		b = append(b, x)
+	}
+	return b
+}
+
+// typecheck1 is the recursive form of typecheck.
+// It is like typecheck but adds to the information in typeof
+// instead of allocating a new map.
+func typecheck1(cfg *TypeConfig, f interface{}, typeof map[interface{}]string, assign map[string][]interface{}) {
+	// set sets the type of n to typ.
+	// If isDecl is true, n is being declared.
+	set := func(n ast.Expr, typ string, isDecl bool) {
+		if typeof[n] != "" || typ == "" {
+			if typeof[n] != typ {
+				assign[typ] = append(assign[typ], n)
+			}
+			return
+		}
+		typeof[n] = typ
+
+		// If we obtained typ from the declaration of x
+		// propagate the type to all the uses.
+		// The !isDecl case is a cheat here, but it makes
+		// up in some cases for not paying attention to
+		// struct fields.  The real type checker will be
+		// more accurate so we won't need the cheat.
+		if id, ok := n.(*ast.Ident); ok && id.Obj != nil && (isDecl || typeof[id.Obj] == "") {
+			typeof[id.Obj] = typ
+		}
+	}
+
+	// Type-check an assignment lhs = rhs.
+	// If isDecl is true, this is := so we can update
+	// the types of the objects that lhs refers to.
+	typecheckAssign := func(lhs, rhs []ast.Expr, isDecl bool) {
+		if len(lhs) > 1 && len(rhs) == 1 {
+			if _, ok := rhs[0].(*ast.CallExpr); ok {
+				t := split(typeof[rhs[0]])
+				// Lists should have same length but may not; pair what can be paired.
+				for i := 0; i < len(lhs) && i < len(t); i++ {
+					set(lhs[i], t[i], isDecl)
+				}
+				return
+			}
+		}
+		if len(lhs) == 1 && len(rhs) == 2 {
+			// x = y, ok
+			rhs = rhs[:1]
+		} else if len(lhs) == 2 && len(rhs) == 1 {
+			// x, ok = y
+			lhs = lhs[:1]
+		}
+
+		// Match as much as we can.
+		for i := 0; i < len(lhs) && i < len(rhs); i++ {
+			x, y := lhs[i], rhs[i]
+			if typeof[y] != "" {
+				set(x, typeof[y], isDecl)
+			} else {
+				set(y, typeof[x], false)
+			}
+		}
+	}
+
+	expand := func(s string) string {
+		typ := cfg.Type[s]
+		if typ != nil && typ.Def != "" {
+			return typ.Def
+		}
+		return s
+	}
+
+	// The main type check is a recursive algorithm implemented
+	// by walkBeforeAfter(n, before, after).
+	// Most of it is bottom-up, but in a few places we need
+	// to know the type of the function we are checking.
+	// The before function records that information on
+	// the curfn stack.
+	var curfn []*ast.FuncType
+
+	before := func(n interface{}) {
+		// push function type on stack
+		switch n := n.(type) {
+		case *ast.FuncDecl:
+			curfn = append(curfn, n.Type)
+		case *ast.FuncLit:
+			curfn = append(curfn, n.Type)
+		}
+	}
+
+	// After is the real type checker.
+	after := func(n interface{}) {
+		if n == nil {
+			return
+		}
+		if false && reflect.TypeOf(n).Kind() == reflect.Ptr { // debugging trace
+			defer func() {
+				if t := typeof[n]; t != "" {
+					pos := fset.Position(n.(ast.Node).Pos())
+					fmt.Fprintf(os.Stderr, "%s: typeof[%s] = %s\n", pos, gofmt(n), t)
+				}
+			}()
+		}
+
+		switch n := n.(type) {
+		case *ast.FuncDecl, *ast.FuncLit:
+			// pop function type off stack
+			curfn = curfn[:len(curfn)-1]
+
+		case *ast.FuncType:
+			typeof[n] = mkType(joinFunc(split(typeof[n.Params]), split(typeof[n.Results])))
+
+		case *ast.FieldList:
+			// Field list is concatenation of sub-lists.
+			t := ""
+			for _, field := range n.List {
+				if t != "" {
+					t += ", "
+				}
+				t += typeof[field]
+			}
+			typeof[n] = t
+
+		case *ast.Field:
+			// Field is one instance of the type per name.
+			all := ""
+			t := typeof[n.Type]
+			if !isType(t) {
+				// Create a type, because it is typically *T or *p.T
+				// and we might care about that type.
+				t = mkType(gofmt(n.Type))
+				typeof[n.Type] = t
+			}
+			t = getType(t)
+			if len(n.Names) == 0 {
+				all = t
+			} else {
+				for _, id := range n.Names {
+					if all != "" {
+						all += ", "
+					}
+					all += t
+					typeof[id.Obj] = t
+					typeof[id] = t
+				}
+			}
+			typeof[n] = all
+
+		case *ast.ValueSpec:
+			// var declaration.  Use type if present.
+			if n.Type != nil {
+				t := typeof[n.Type]
+				if !isType(t) {
+					t = mkType(gofmt(n.Type))
+					typeof[n.Type] = t
+				}
+				t = getType(t)
+				for _, id := range n.Names {
+					set(id, t, true)
+				}
+			}
+			// Now treat same as assignment.
+			typecheckAssign(makeExprList(n.Names), n.Values, true)
+
+		case *ast.AssignStmt:
+			typecheckAssign(n.Lhs, n.Rhs, n.Tok == token.DEFINE)
+
+		case *ast.Ident:
+			// Identifier can take its type from underlying object.
+			if t := typeof[n.Obj]; t != "" {
+				typeof[n] = t
+			}
+
+		case *ast.SelectorExpr:
+			// Field or method.
+			name := n.Sel.Name
+			if t := typeof[n.X]; t != "" {
+				if strings.HasPrefix(t, "*") {
+					t = t[1:] // implicit *
+				}
+				if typ := cfg.Type[t]; typ != nil {
+					if t := typ.dot(cfg, name); t != "" {
+						typeof[n] = t
+						return
+					}
+				}
+				tt := typeof[t+"."+name]
+				if isType(tt) {
+					typeof[n] = getType(tt)
+					return
+				}
+			}
+			// Package selector.
+			if x, ok := n.X.(*ast.Ident); ok && x.Obj == nil {
+				str := x.Name + "." + name
+				if cfg.Type[str] != nil {
+					typeof[n] = mkType(str)
+					return
+				}
+				if t := cfg.typeof(x.Name + "." + name); t != "" {
+					typeof[n] = t
+					return
+				}
+			}
+
+		case *ast.CallExpr:
+			// make(T) has type T.
+			if isTopName(n.Fun, "make") && len(n.Args) >= 1 {
+				typeof[n] = gofmt(n.Args[0])
+				return
+			}
+			// new(T) has type *T
+			if isTopName(n.Fun, "new") && len(n.Args) == 1 {
+				typeof[n] = "*" + gofmt(n.Args[0])
+				return
+			}
+			// Otherwise, use type of function to determine arguments.
+			t := typeof[n.Fun]
+			in, out := splitFunc(t)
+			if in == nil && out == nil {
+				return
+			}
+			typeof[n] = join(out)
+			for i, arg := range n.Args {
+				if i >= len(in) {
+					break
+				}
+				if typeof[arg] == "" {
+					typeof[arg] = in[i]
+				}
+			}
+
+		case *ast.TypeAssertExpr:
+			// x.(type) has type of x.
+			if n.Type == nil {
+				typeof[n] = typeof[n.X]
+				return
+			}
+			// x.(T) has type T.
+			if t := typeof[n.Type]; isType(t) {
+				typeof[n] = getType(t)
+			} else {
+				typeof[n] = gofmt(n.Type)
+			}
+
+		case *ast.SliceExpr:
+			// x[i:j] has type of x.
+			typeof[n] = typeof[n.X]
+
+		case *ast.IndexExpr:
+			// x[i] has key type of x's type.
+			t := expand(typeof[n.X])
+			if strings.HasPrefix(t, "[") || strings.HasPrefix(t, "map[") {
+				// Lazy: assume there are no nested [] in the array
+				// length or map key type.
+				if i := strings.Index(t, "]"); i >= 0 {
+					typeof[n] = t[i+1:]
+				}
+			}
+
+		case *ast.StarExpr:
+			// *x for x of type *T has type T when x is an expr.
+			// We don't use the result when *x is a type, but
+			// compute it anyway.
+			t := expand(typeof[n.X])
+			if isType(t) {
+				typeof[n] = "type *" + getType(t)
+			} else if strings.HasPrefix(t, "*") {
+				typeof[n] = t[len("*"):]
+			}
+
+		case *ast.UnaryExpr:
+			// &x for x of type T has type *T.
+			t := typeof[n.X]
+			if t != "" && n.Op == token.AND {
+				typeof[n] = "*" + t
+			}
+
+		case *ast.CompositeLit:
+			// T{...} has type T.
+			typeof[n] = gofmt(n.Type)
+
+		case *ast.ParenExpr:
+			// (x) has type of x.
+			typeof[n] = typeof[n.X]
+
+		case *ast.RangeStmt:
+			t := expand(typeof[n.X])
+			if t == "" {
+				return
+			}
+			var key, value string
+			if t == "string" {
+				key, value = "int", "rune"
+			} else if strings.HasPrefix(t, "[") {
+				key = "int"
+				if i := strings.Index(t, "]"); i >= 0 {
+					value = t[i+1:]
+				}
+			} else if strings.HasPrefix(t, "map[") {
+				if i := strings.Index(t, "]"); i >= 0 {
+					key, value = t[4:i], t[i+1:]
+				}
+			}
+			changed := false
+			if n.Key != nil && key != "" {
+				changed = true
+				set(n.Key, key, n.Tok == token.DEFINE)
+			}
+			if n.Value != nil && value != "" {
+				changed = true
+				set(n.Value, value, n.Tok == token.DEFINE)
+			}
+			// Ugly failure of vision: already type-checked body.
+			// Do it again now that we have that type info.
+			if changed {
+				typecheck1(cfg, n.Body, typeof, assign)
+			}
+
+		case *ast.TypeSwitchStmt:
+			// Type of variable changes for each case in type switch,
+			// but go/parser generates just one variable.
+			// Repeat type check for each case with more precise
+			// type information.
+			as, ok := n.Assign.(*ast.AssignStmt)
+			if !ok {
+				return
+			}
+			varx, ok := as.Lhs[0].(*ast.Ident)
+			if !ok {
+				return
+			}
+			t := typeof[varx]
+			for _, cas := range n.Body.List {
+				cas := cas.(*ast.CaseClause)
+				if len(cas.List) == 1 {
+					// Variable has specific type only when there is
+					// exactly one type in the case list.
+					if tt := typeof[cas.List[0]]; isType(tt) {
+						tt = getType(tt)
+						typeof[varx] = tt
+						typeof[varx.Obj] = tt
+						typecheck1(cfg, cas.Body, typeof, assign)
+					}
+				}
+			}
+			// Restore t.
+			typeof[varx] = t
+			typeof[varx.Obj] = t
+
+		case *ast.ReturnStmt:
+			if len(curfn) == 0 {
+				// Probably can't happen.
+				return
+			}
+			f := curfn[len(curfn)-1]
+			res := n.Results
+			if f.Results != nil {
+				t := split(typeof[f.Results])
+				for i := 0; i < len(res) && i < len(t); i++ {
+					set(res[i], t[i], false)
+				}
+			}
+		}
+	}
+	walkBeforeAfter(f, before, after)
+}
+
+// Convert between function type strings and lists of types.
+// Using strings makes this a little harder, but it makes
+// a lot of the rest of the code easier.  This will all go away
+// when we can use go/typechecker directly.
+
+// splitFunc splits "func(x,y,z) (a,b,c)" into ["x", "y", "z"] and ["a", "b", "c"].
+func splitFunc(s string) (in, out []string) {
+	if !strings.HasPrefix(s, "func(") {
+		return nil, nil
+	}
+
+	i := len("func(") // index of beginning of 'in' arguments
+	nparen := 0
+	for j := i; j < len(s); j++ {
+		switch s[j] {
+		case '(':
+			nparen++
+		case ')':
+			nparen--
+			if nparen < 0 {
+				// found end of parameter list
+				out := strings.TrimSpace(s[j+1:])
+				if len(out) >= 2 && out[0] == '(' && out[len(out)-1] == ')' {
+					out = out[1 : len(out)-1]
+				}
+				return split(s[i:j]), split(out)
+			}
+		}
+	}
+	return nil, nil
+}
+
+// joinFunc is the inverse of splitFunc.
+func joinFunc(in, out []string) string {
+	outs := ""
+	if len(out) == 1 {
+		outs = " " + out[0]
+	} else if len(out) > 1 {
+		outs = " (" + join(out) + ")"
+	}
+	return "func(" + join(in) + ")" + outs
+}
+
+// split splits "int, float" into ["int", "float"] and splits "" into [].
+func split(s string) []string {
+	out := []string{}
+	i := 0 // current type being scanned is s[i:j].
+	nparen := 0
+	for j := 0; j < len(s); j++ {
+		switch s[j] {
+		case ' ':
+			if i == j {
+				i++
+			}
+		case '(':
+			nparen++
+		case ')':
+			nparen--
+			if nparen < 0 {
+				// probably can't happen
+				return nil
+			}
+		case ',':
+			if nparen == 0 {
+				if i < j {
+					out = append(out, s[i:j])
+				}
+				i = j + 1
+			}
+		}
+	}
+	if nparen != 0 {
+		// probably can't happen
+		return nil
+	}
+	if i < len(s) {
+		out = append(out, s[i:])
+	}
+	return out
+}
+
+// join is the inverse of split.
+func join(x []string) string {
+	return strings.Join(x, ", ")
+}
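
The type checker above carries all type information as gofmt-style strings, and function types in particular are taken apart and rebuilt with splitFunc and joinFunc. A hypothetical test in the same package illustrating the expected round trip (the test name and file are illustrative):

```go
// typecheck_example_test.go (hypothetical)
package main

import (
	"strings"
	"testing"
)

func TestSplitJoinFuncExample(t *testing.T) {
	// splitFunc separates parameter types from result types.
	in, out := splitFunc("func(*ast.File, string) (bool, error)")
	if got, want := strings.Join(in, "|"), "*ast.File|string"; got != want {
		t.Errorf("in = %q, want %q", got, want)
	}
	if got, want := strings.Join(out, "|"), "bool|error"; got != want {
		t.Errorf("out = %q, want %q", got, want)
	}
	// joinFunc is the inverse.
	if got, want := joinFunc(in, out), "func(*ast.File, string) (bool, error)"; got != want {
		t.Errorf("joinFunc = %q, want %q", got, want)
	}
}
```
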
diff --git a/v2/datastore/datastore.go b/v2/datastore/datastore.go
new file mode 100644
index 0000000..47c95f1
--- /dev/null
+++ b/v2/datastore/datastore.go
@@ -0,0 +1,407 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"reflect"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/datastore"
+)
+
+var (
+	// ErrInvalidEntityType is returned when functions like Get or Next are
+	// passed a dst or src argument of invalid type.
+	ErrInvalidEntityType = errors.New("datastore: invalid entity type")
+	// ErrInvalidKey is returned when an invalid key is presented.
+	ErrInvalidKey = errors.New("datastore: invalid key")
+	// ErrNoSuchEntity is returned when no entity was found for a given key.
+	ErrNoSuchEntity = errors.New("datastore: no such entity")
+)
+
+// ErrFieldMismatch is returned when a field is to be loaded into a different
+// type than the one it was stored from, or when a field is missing or
+// unexported in the destination struct.
+// StructType is the type of the struct pointed to by the destination argument
+// passed to Get or to Iterator.Next.
+type ErrFieldMismatch struct {
+	StructType reflect.Type
+	FieldName  string
+	Reason     string
+}
+
+func (e *ErrFieldMismatch) Error() string {
+	return fmt.Sprintf("datastore: cannot load field %q into a %q: %s",
+		e.FieldName, e.StructType, e.Reason)
+}
+
+// protoToKey converts a Reference proto to a *Key. If the key is invalid,
+// protoToKey will return the invalid key along with ErrInvalidKey.
+func protoToKey(r *pb.Reference) (k *Key, err error) {
+	appID := r.GetApp()
+	namespace := r.GetNameSpace()
+	for _, e := range r.Path.Element {
+		k = &Key{
+			kind:      e.GetType(),
+			stringID:  e.GetName(),
+			intID:     e.GetId(),
+			parent:    k,
+			appID:     appID,
+			namespace: namespace,
+		}
+		if !k.valid() {
+			return k, ErrInvalidKey
+		}
+	}
+	return
+}
+
+// keyToProto converts a *Key to a Reference proto.
+func keyToProto(defaultAppID string, k *Key) *pb.Reference {
+	appID := k.appID
+	if appID == "" {
+		appID = defaultAppID
+	}
+	n := 0
+	for i := k; i != nil; i = i.parent {
+		n++
+	}
+	e := make([]*pb.Path_Element, n)
+	for i := k; i != nil; i = i.parent {
+		n--
+		e[n] = &pb.Path_Element{
+			Type: &i.kind,
+		}
+		// At most one of {Name,Id} should be set.
+		// Neither will be set for incomplete keys.
+		if i.stringID != "" {
+			e[n].Name = &i.stringID
+		} else if i.intID != 0 {
+			e[n].Id = &i.intID
+		}
+	}
+	var namespace *string
+	if k.namespace != "" {
+		namespace = proto.String(k.namespace)
+	}
+	return &pb.Reference{
+		App:       proto.String(appID),
+		NameSpace: namespace,
+		Path: &pb.Path{
+			Element: e,
+		},
+	}
+}
+
+// multiKeyToProto is a batch version of keyToProto.
+func multiKeyToProto(appID string, key []*Key) []*pb.Reference {
+	ret := make([]*pb.Reference, len(key))
+	for i, k := range key {
+		ret[i] = keyToProto(appID, k)
+	}
+	return ret
+}
+
+// multiValid is a batch version of Key.valid. It returns an error, not a
+// []bool.
+func multiValid(key []*Key) error {
+	invalid := false
+	for _, k := range key {
+		if !k.valid() {
+			invalid = true
+			break
+		}
+	}
+	if !invalid {
+		return nil
+	}
+	err := make(appengine.MultiError, len(key))
+	for i, k := range key {
+		if !k.valid() {
+			err[i] = ErrInvalidKey
+		}
+	}
+	return err
+}
+
+// It's unfortunate that the two semantically equivalent concepts pb.Reference
+// and pb.PropertyValue_ReferenceValue aren't the same type. For example, the
+// two have different protobuf field numbers.
+
+// referenceValueToKey is the same as protoToKey except the input is a
+// PropertyValue_ReferenceValue instead of a Reference.
+func referenceValueToKey(r *pb.PropertyValue_ReferenceValue) (k *Key, err error) {
+	appID := r.GetApp()
+	namespace := r.GetNameSpace()
+	for _, e := range r.Pathelement {
+		k = &Key{
+			kind:      e.GetType(),
+			stringID:  e.GetName(),
+			intID:     e.GetId(),
+			parent:    k,
+			appID:     appID,
+			namespace: namespace,
+		}
+		if !k.valid() {
+			return nil, ErrInvalidKey
+		}
+	}
+	return
+}
+
+// keyToReferenceValue is the same as keyToProto except the output is a
+// PropertyValue_ReferenceValue instead of a Reference.
+func keyToReferenceValue(defaultAppID string, k *Key) *pb.PropertyValue_ReferenceValue {
+	ref := keyToProto(defaultAppID, k)
+	pe := make([]*pb.PropertyValue_ReferenceValue_PathElement, len(ref.Path.Element))
+	for i, e := range ref.Path.Element {
+		pe[i] = &pb.PropertyValue_ReferenceValue_PathElement{
+			Type: e.Type,
+			Id:   e.Id,
+			Name: e.Name,
+		}
+	}
+	return &pb.PropertyValue_ReferenceValue{
+		App:         ref.App,
+		NameSpace:   ref.NameSpace,
+		Pathelement: pe,
+	}
+}
+
+type multiArgType int
+
+const (
+	multiArgTypeInvalid multiArgType = iota
+	multiArgTypePropertyLoadSaver
+	multiArgTypeStruct
+	multiArgTypeStructPtr
+	multiArgTypeInterface
+)
+
+// checkMultiArg checks that v has type []S, []*S, []I, or []P, for some struct
+// type S, for some interface type I, or some non-interface non-pointer type P
+// such that P or *P implements PropertyLoadSaver.
+//
+// It returns what category the slice's elements are, and the reflect.Type
+// that represents S, I or P.
+//
+// As a special case, PropertyList is an invalid type for v.
+func checkMultiArg(v reflect.Value) (m multiArgType, elemType reflect.Type) {
+	if v.Kind() != reflect.Slice {
+		return multiArgTypeInvalid, nil
+	}
+	if v.Type() == typeOfPropertyList {
+		return multiArgTypeInvalid, nil
+	}
+	elemType = v.Type().Elem()
+	if reflect.PtrTo(elemType).Implements(typeOfPropertyLoadSaver) {
+		return multiArgTypePropertyLoadSaver, elemType
+	}
+	switch elemType.Kind() {
+	case reflect.Struct:
+		return multiArgTypeStruct, elemType
+	case reflect.Interface:
+		return multiArgTypeInterface, elemType
+	case reflect.Ptr:
+		elemType = elemType.Elem()
+		if elemType.Kind() == reflect.Struct {
+			return multiArgTypeStructPtr, elemType
+		}
+	}
+	return multiArgTypeInvalid, nil
+}
+
+// Get loads the entity stored for k into dst, which must be a struct pointer
+// or implement PropertyLoadSaver. If there is no such entity for the key, Get
+// returns ErrNoSuchEntity.
+//
+// The values of dst's unmatched struct fields are not modified, and matching
+// slice-typed fields are not reset before appending to them. In particular, it
+// is recommended to pass a pointer to a zero valued struct on each Get call.
+//
+// ErrFieldMismatch is returned when a field is to be loaded into a different
+// type than the one it was stored from, or when a field is missing or
+// unexported in the destination struct. ErrFieldMismatch is only returned if
+// dst is a struct pointer.
+func Get(c context.Context, key *Key, dst interface{}) error {
+	if dst == nil { // GetMulti catches nil interface; we need to catch nil ptr here
+		return ErrInvalidEntityType
+	}
+	err := GetMulti(c, []*Key{key}, []interface{}{dst})
+	if me, ok := err.(appengine.MultiError); ok {
+		return me[0]
+	}
+	return err
+}
+
+// GetMulti is a batch version of Get.
+//
+// dst must be a []S, []*S, []I or []P, for some struct type S, some interface
+// type I, or some non-interface non-pointer type P such that P or *P
+// implements PropertyLoadSaver. If an []I, each element must be a valid dst
+// for Get: it must be a struct pointer or implement PropertyLoadSaver.
+//
+// As a special case, PropertyList is an invalid type for dst, even though a
+// PropertyList is a slice of structs. It is treated as invalid to avoid being
+// mistakenly passed when []PropertyList was intended.
+func GetMulti(c context.Context, key []*Key, dst interface{}) error {
+	v := reflect.ValueOf(dst)
+	multiArgType, _ := checkMultiArg(v)
+	if multiArgType == multiArgTypeInvalid {
+		return errors.New("datastore: dst has invalid type")
+	}
+	if len(key) != v.Len() {
+		return errors.New("datastore: key and dst slices have different length")
+	}
+	if len(key) == 0 {
+		return nil
+	}
+	if err := multiValid(key); err != nil {
+		return err
+	}
+	req := &pb.GetRequest{
+		Key: multiKeyToProto(internal.FullyQualifiedAppID(c), key),
+	}
+	res := &pb.GetResponse{}
+	if err := internal.Call(c, "datastore_v3", "Get", req, res); err != nil {
+		return err
+	}
+	if len(key) != len(res.Entity) {
+		return errors.New("datastore: internal error: server returned the wrong number of entities")
+	}
+	multiErr, any := make(appengine.MultiError, len(key)), false
+	for i, e := range res.Entity {
+		if e.Entity == nil {
+			multiErr[i] = ErrNoSuchEntity
+		} else {
+			elem := v.Index(i)
+			if multiArgType == multiArgTypePropertyLoadSaver || multiArgType == multiArgTypeStruct {
+				elem = elem.Addr()
+			}
+			if multiArgType == multiArgTypeStructPtr && elem.IsNil() {
+				elem.Set(reflect.New(elem.Type().Elem()))
+			}
+			multiErr[i] = loadEntity(elem.Interface(), e.Entity)
+		}
+		if multiErr[i] != nil {
+			any = true
+		}
+	}
+	if any {
+		return multiErr
+	}
+	return nil
+}
+
+// Put saves the entity src into the datastore with key k. src must be a struct
+// pointer or implement PropertyLoadSaver; if a struct pointer then any
+// unexported fields of that struct will be skipped. If k is an incomplete key,
+// the returned key will be a unique key generated by the datastore.
+func Put(c context.Context, key *Key, src interface{}) (*Key, error) {
+	k, err := PutMulti(c, []*Key{key}, []interface{}{src})
+	if err != nil {
+		if me, ok := err.(appengine.MultiError); ok {
+			return nil, me[0]
+		}
+		return nil, err
+	}
+	return k[0], nil
+}
+
+// PutMulti is a batch version of Put.
+//
+// src must satisfy the same conditions as the dst argument to GetMulti.
+func PutMulti(c context.Context, key []*Key, src interface{}) ([]*Key, error) {
+	v := reflect.ValueOf(src)
+	multiArgType, _ := checkMultiArg(v)
+	if multiArgType == multiArgTypeInvalid {
+		return nil, errors.New("datastore: src has invalid type")
+	}
+	if len(key) != v.Len() {
+		return nil, errors.New("datastore: key and src slices have different length")
+	}
+	if len(key) == 0 {
+		return nil, nil
+	}
+	appID := internal.FullyQualifiedAppID(c)
+	if err := multiValid(key); err != nil {
+		return nil, err
+	}
+	req := &pb.PutRequest{}
+	for i := range key {
+		elem := v.Index(i)
+		if multiArgType == multiArgTypePropertyLoadSaver || multiArgType == multiArgTypeStruct {
+			elem = elem.Addr()
+		}
+		sProto, err := saveEntity(appID, key[i], elem.Interface())
+		if err != nil {
+			return nil, err
+		}
+		req.Entity = append(req.Entity, sProto)
+	}
+	res := &pb.PutResponse{}
+	if err := internal.Call(c, "datastore_v3", "Put", req, res); err != nil {
+		return nil, err
+	}
+	if len(key) != len(res.Key) {
+		return nil, errors.New("datastore: internal error: server returned the wrong number of keys")
+	}
+	ret := make([]*Key, len(key))
+	for i := range ret {
+		var err error
+		ret[i], err = protoToKey(res.Key[i])
+		if err != nil || ret[i].Incomplete() {
+			return nil, errors.New("datastore: internal error: server returned an invalid key")
+		}
+	}
+	return ret, nil
+}
+
+// Delete deletes the entity for the given key.
+func Delete(c context.Context, key *Key) error {
+	err := DeleteMulti(c, []*Key{key})
+	if me, ok := err.(appengine.MultiError); ok {
+		return me[0]
+	}
+	return err
+}
+
+// DeleteMulti is a batch version of Delete.
+func DeleteMulti(c context.Context, key []*Key) error {
+	if len(key) == 0 {
+		return nil
+	}
+	if err := multiValid(key); err != nil {
+		return err
+	}
+	req := &pb.DeleteRequest{
+		Key: multiKeyToProto(internal.FullyQualifiedAppID(c), key),
+	}
+	res := &pb.DeleteResponse{}
+	return internal.Call(c, "datastore_v3", "Delete", req, res)
+}
+
+func namespaceMod(m proto.Message, namespace string) {
+	// pb.Query is the only type that has a name_space field.
+	// All other namespace support in datastore is in the keys.
+	switch m := m.(type) {
+	case *pb.Query:
+		if m.NameSpace == nil {
+			m.NameSpace = &namespace
+		}
+	}
+}
+
+func init() {
+	internal.NamespaceMods["datastore_v3"] = namespaceMod
+	internal.RegisterErrorCodeMap("datastore_v3", pb.Error_ErrorCode_name)
+	internal.RegisterTimeoutErrorCode("datastore_v3", int32(pb.Error_TIMEOUT))
+}
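
The file above defines the context-based Get/GetMulti/Put/PutMulti/Delete API for the v2 datastore package. A minimal usage sketch, assuming an App Engine standard HTTP handler; the Greeting kind and the handler itself are illustrative, not part of the package:

```go
package guestbook

import (
	"fmt"
	"net/http"
	"time"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/datastore"
)

type Greeting struct {
	Author  string
	Content string
	Date    time.Time
}

func handle(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)

	// Put with an incomplete key; the datastore assigns the numeric ID.
	g := &Greeting{Author: "gopher", Content: "hello", Date: time.Now()}
	key, err := datastore.Put(ctx, datastore.NewIncompleteKey(ctx, "Greeting", nil), g)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	// Get back into a zero-valued struct, as the Get documentation recommends.
	var got Greeting
	if err := datastore.Get(ctx, key, &got); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	fmt.Fprintf(w, "%s: %s\n", got.Author, got.Content)
}
```
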
diff --git a/v2/datastore/datastore_test.go b/v2/datastore/datastore_test.go
new file mode 100644
index 0000000..c546698
--- /dev/null
+++ b/v2/datastore/datastore_test.go
@@ -0,0 +1,1750 @@
+// Copyright 2011 Google Inc. All Rights Reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+	"os"
+	"reflect"
+	"sort"
+	"strings"
+	"testing"
+	"time"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal/aetesting"
+	pb "google.golang.org/appengine/v2/internal/datastore"
+)
+
+const testAppID = "testApp"
+
+type (
+	myBlob   []byte
+	myByte   byte
+	myString string
+)
+
+func makeMyByteSlice(n int) []myByte {
+	b := make([]myByte, n)
+	for i := range b {
+		b[i] = myByte(i)
+	}
+	return b
+}
+
+func makeInt8Slice(n int) []int8 {
+	b := make([]int8, n)
+	for i := range b {
+		b[i] = int8(i)
+	}
+	return b
+}
+
+func makeUint8Slice(n int) []uint8 {
+	b := make([]uint8, n)
+	for i := range b {
+		b[i] = uint8(i)
+	}
+	return b
+}
+
+func newKey(stringID string, parent *Key) *Key {
+	return &Key{
+		kind:     "kind",
+		stringID: stringID,
+		intID:    0,
+		parent:   parent,
+		appID:    testAppID,
+	}
+}
+
+var (
+	testKey0     = newKey("name0", nil)
+	testKey1a    = newKey("name1", nil)
+	testKey1b    = newKey("name1", nil)
+	testKey2a    = newKey("name2", testKey0)
+	testKey2b    = newKey("name2", testKey0)
+	testGeoPt0   = appengine.GeoPoint{Lat: 1.2, Lng: 3.4}
+	testGeoPt1   = appengine.GeoPoint{Lat: 5, Lng: 10}
+	testBadGeoPt = appengine.GeoPoint{Lat: 1000, Lng: 34}
+
+	now = time.Unix(1e9, 0).UTC()
+)
+
+type B0 struct {
+	B []byte
+}
+
+type B1 struct {
+	B []int8
+}
+
+type B2 struct {
+	B myBlob
+}
+
+type B3 struct {
+	B []myByte
+}
+
+type B4 struct {
+	B [][]byte
+}
+
+type B5 struct {
+	B ByteString
+}
+
+type C0 struct {
+	I int
+	C chan int
+}
+
+type C1 struct {
+	I int
+	C *chan int
+}
+
+type C2 struct {
+	I int
+	C []chan int
+}
+
+type C3 struct {
+	C string
+}
+
+type E struct{}
+
+type G0 struct {
+	G appengine.GeoPoint
+}
+
+type G1 struct {
+	G []appengine.GeoPoint
+}
+
+type K0 struct {
+	K *Key
+}
+
+type K1 struct {
+	K []*Key
+}
+
+type S struct {
+	St string
+}
+
+type NoOmit struct {
+	A string
+	B int  `datastore:"Bb"`
+	C bool `datastore:",noindex"`
+}
+
+type OmitAll struct {
+	A string    `datastore:",omitempty"`
+	B int       `datastore:"Bb,omitempty"`
+	C bool      `datastore:",omitempty,noindex"`
+	D time.Time `datastore:",omitempty"`
+	F []int     `datastore:",omitempty"`
+}
+
+type Omit struct {
+	A string    `datastore:",omitempty"`
+	B int       `datastore:"Bb,omitempty"`
+	C bool      `datastore:",omitempty,noindex"`
+	D time.Time `datastore:",omitempty"`
+	F []int     `datastore:",omitempty"`
+	S `datastore:",omitempty"`
+}
+
+type NoOmits struct {
+	No []NoOmit `datastore:",omitempty"`
+	S  `datastore:",omitempty"`
+	Ss S `datastore:",omitempty"`
+}
+
+type N0 struct {
+	X0
+	Nonymous X0
+	Ignore   string `datastore:"-"`
+	Other    string
+}
+
+type N1 struct {
+	X0
+	Nonymous []X0
+	Ignore   string `datastore:"-"`
+	Other    string
+}
+
+type N2 struct {
+	N1    `datastore:"red"`
+	Green N1 `datastore:"green"`
+	Blue  N1
+	White N1 `datastore:"-"`
+}
+
+type O0 struct {
+	I int64
+}
+
+type O1 struct {
+	I int32
+}
+
+type U0 struct {
+	U uint
+}
+
+type U1 struct {
+	U string
+}
+
+type T struct {
+	T time.Time
+}
+
+type X0 struct {
+	S string
+	I int
+	i int
+}
+
+type X1 struct {
+	S myString
+	I int32
+	J int64
+}
+
+type X2 struct {
+	Z string
+	i int
+}
+
+type X3 struct {
+	S bool
+	I int
+}
+
+type Y0 struct {
+	B bool
+	F []float64
+	G []float64
+}
+
+type Y1 struct {
+	B bool
+	F float64
+}
+
+type Y2 struct {
+	B bool
+	F []int64
+}
+
+type Tagged struct {
+	A int   `datastore:"a,noindex"`
+	B []int `datastore:"b"`
+	C int   `datastore:",noindex"`
+	D int   `datastore:""`
+	E int
+	// The "flatten" option is parsed but ignored for now.
+	F int `datastore:",noindex,flatten"`
+	G int `datastore:",flatten"`
+	I int `datastore:"-"`
+	J int `datastore:",noindex" json:"j"`
+
+	Y0 `datastore:"-"`
+	Z  chan int `datastore:"-,"`
+}
+
+type InvalidTagged1 struct {
+	I int `datastore:"\t"`
+}
+
+type InvalidTagged2 struct {
+	I int
+	J int `datastore:"I"`
+}
+
+type Inner1 struct {
+	W int32
+	X string
+}
+
+type Inner2 struct {
+	Y float64
+}
+
+type Inner3 struct {
+	Z bool
+}
+
+type Outer struct {
+	A int16
+	I []Inner1
+	J Inner2
+	Inner3
+}
+
+type OuterEquivalent struct {
+	A     int16
+	IDotW []int32  `datastore:"I.W"`
+	IDotX []string `datastore:"I.X"`
+	JDotY float64  `datastore:"J.Y"`
+	Z     bool
+}
+
+type Dotted struct {
+	A DottedA `datastore:"A0.A1.A2"`
+}
+
+type DottedA struct {
+	B DottedB `datastore:"B3"`
+}
+
+type DottedB struct {
+	C int `datastore:"C4.C5"`
+}
+
+type SliceOfSlices struct {
+	I int
+	S []struct {
+		J int
+		F []float64
+	}
+}
+
+type Recursive struct {
+	I int
+	R []Recursive
+}
+
+type MutuallyRecursive0 struct {
+	I int
+	R []MutuallyRecursive1
+}
+
+type MutuallyRecursive1 struct {
+	I int
+	R []MutuallyRecursive0
+}
+
+type Doubler struct {
+	S string
+	I int64
+	B bool
+}
+
+type Repeat struct {
+	Key   string
+	Value []byte
+}
+
+type Repeated struct {
+	Repeats []Repeat
+}
+
+func (d *Doubler) Load(props []Property) error {
+	return LoadStruct(d, props)
+}
+
+type EmbeddedTime struct {
+	time.Time
+}
+
+type SpecialTime struct {
+	MyTime EmbeddedTime
+}
+
+func (d *Doubler) Save() ([]Property, error) {
+	// Save the default Property slice to an in-memory buffer (a PropertyList).
+	props, err := SaveStruct(d)
+	if err != nil {
+		return nil, err
+	}
+	var list PropertyList
+	if err := list.Load(props); err != nil {
+		return nil, err
+	}
+
+	// Edit that PropertyList, and send it on.
+	for i := range list {
+		switch v := list[i].Value.(type) {
+		case string:
+			// + means string concatenation.
+			list[i].Value = v + v
+		case int64:
+			// + means integer addition.
+			list[i].Value = v + v
+		}
+	}
+	return list.Save()
+}
+
+var _ PropertyLoadSaver = (*Doubler)(nil)
+
+type Deriver struct {
+	S, Derived, Ignored string
+}
+
+func (e *Deriver) Load(props []Property) error {
+	for _, p := range props {
+		if p.Name != "S" {
+			continue
+		}
+		e.S = p.Value.(string)
+		e.Derived = "derived+" + e.S
+	}
+	return nil
+}
+
+func (e *Deriver) Save() ([]Property, error) {
+	return []Property{
+		{
+			Name:  "S",
+			Value: e.S,
+		},
+	}, nil
+}
+
+var _ PropertyLoadSaver = (*Deriver)(nil)
+
+type BadMultiPropEntity struct{}
+
+func (e *BadMultiPropEntity) Load(props []Property) error {
+	return errors.New("unimplemented")
+}
+
+func (e *BadMultiPropEntity) Save() ([]Property, error) {
+	// Write multiple properties with the same name "I", but Multiple is false.
+	var props []Property
+	for i := 0; i < 3; i++ {
+		props = append(props, Property{
+			Name:  "I",
+			Value: int64(i),
+		})
+	}
+	return props, nil
+}
+
+var _ PropertyLoadSaver = (*BadMultiPropEntity)(nil)
+
+type BK struct {
+	Key appengine.BlobKey
+}
+
+type testCase struct {
+	desc   string
+	src    interface{}
+	want   interface{}
+	putErr string
+	getErr string
+}
+
+var testCases = []testCase{
+	{
+		"chan save fails",
+		&C0{I: -1},
+		&E{},
+		"unsupported struct field",
+		"",
+	},
+	{
+		"*chan save fails",
+		&C1{I: -1},
+		&E{},
+		"unsupported struct field",
+		"",
+	},
+	{
+		"[]chan save fails",
+		&C2{I: -1, C: make([]chan int, 8)},
+		&E{},
+		"unsupported struct field",
+		"",
+	},
+	{
+		"chan load fails",
+		&C3{C: "not a chan"},
+		&C0{},
+		"",
+		"type mismatch",
+	},
+	{
+		"*chan load fails",
+		&C3{C: "not a *chan"},
+		&C1{},
+		"",
+		"type mismatch",
+	},
+	{
+		"[]chan load fails",
+		&C3{C: "not a []chan"},
+		&C2{},
+		"",
+		"type mismatch",
+	},
+	{
+		"empty struct",
+		&E{},
+		&E{},
+		"",
+		"",
+	},
+	{
+		"geopoint",
+		&G0{G: testGeoPt0},
+		&G0{G: testGeoPt0},
+		"",
+		"",
+	},
+	{
+		"geopoint invalid",
+		&G0{G: testBadGeoPt},
+		&G0{},
+		"invalid GeoPoint value",
+		"",
+	},
+	{
+		"geopoint as props",
+		&G0{G: testGeoPt0},
+		&PropertyList{
+			Property{Name: "G", Value: testGeoPt0, NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"geopoint slice",
+		&G1{G: []appengine.GeoPoint{testGeoPt0, testGeoPt1}},
+		&G1{G: []appengine.GeoPoint{testGeoPt0, testGeoPt1}},
+		"",
+		"",
+	},
+	{
+		"omit empty, all",
+		&OmitAll{},
+		new(PropertyList),
+		"",
+		"",
+	},
+	{
+		"omit empty",
+		&Omit{},
+		&PropertyList{
+			Property{Name: "St", Value: "", NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"omit empty, fields populated",
+		&Omit{
+			A: "a",
+			B: 10,
+			C: true,
+			D: now,
+			F: []int{11},
+		},
+		&PropertyList{
+			Property{Name: "A", Value: "a", NoIndex: false, Multiple: false},
+			Property{Name: "Bb", Value: int64(10), NoIndex: false, Multiple: false},
+			Property{Name: "C", Value: true, NoIndex: true, Multiple: false},
+			Property{Name: "D", Value: now, NoIndex: false, Multiple: false},
+			Property{Name: "F", Value: int64(11), NoIndex: false, Multiple: true},
+			Property{Name: "St", Value: "", NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"omit empty, fields populated",
+		&Omit{
+			A: "a",
+			B: 10,
+			C: true,
+			D: now,
+			F: []int{11},
+			S: S{St: "string"},
+		},
+		&PropertyList{
+			Property{Name: "A", Value: "a", NoIndex: false, Multiple: false},
+			Property{Name: "Bb", Value: int64(10), NoIndex: false, Multiple: false},
+			Property{Name: "C", Value: true, NoIndex: true, Multiple: false},
+			Property{Name: "D", Value: now, NoIndex: false, Multiple: false},
+			Property{Name: "F", Value: int64(11), NoIndex: false, Multiple: true},
+			Property{Name: "St", Value: "string", NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"omit empty does not propagate",
+		&NoOmits{
+			No: []NoOmit{
+				NoOmit{},
+			},
+			S:  S{},
+			Ss: S{},
+		},
+		&PropertyList{
+			Property{Name: "No.A", Value: "", NoIndex: false, Multiple: true},
+			Property{Name: "No.Bb", Value: int64(0), NoIndex: false, Multiple: true},
+			Property{Name: "No.C", Value: false, NoIndex: true, Multiple: true},
+			Property{Name: "Ss.St", Value: "", NoIndex: false, Multiple: false},
+			Property{Name: "St", Value: "", NoIndex: false, Multiple: false}},
+		"",
+		"",
+	},
+	{
+		"key",
+		&K0{K: testKey1a},
+		&K0{K: testKey1b},
+		"",
+		"",
+	},
+	{
+		"key with parent",
+		&K0{K: testKey2a},
+		&K0{K: testKey2b},
+		"",
+		"",
+	},
+	{
+		"nil key",
+		&K0{},
+		&K0{},
+		"",
+		"",
+	},
+	{
+		"all nil keys in slice",
+		&K1{[]*Key{nil, nil}},
+		&K1{[]*Key{nil, nil}},
+		"",
+		"",
+	},
+	{
+		"some nil keys in slice",
+		&K1{[]*Key{testKey1a, nil, testKey2a}},
+		&K1{[]*Key{testKey1b, nil, testKey2b}},
+		"",
+		"",
+	},
+	{
+		"overflow",
+		&O0{I: 1 << 48},
+		&O1{},
+		"",
+		"overflow",
+	},
+	{
+		"time",
+		&T{T: time.Unix(1e9, 0)},
+		&T{T: time.Unix(1e9, 0)},
+		"",
+		"",
+	},
+	{
+		"time as props",
+		&T{T: time.Unix(1e9, 0)},
+		&PropertyList{
+			Property{Name: "T", Value: time.Unix(1e9, 0).UTC(), NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"uint save",
+		&U0{U: 1},
+		&U0{},
+		"unsupported struct field",
+		"",
+	},
+	{
+		"uint load",
+		&U1{U: "not a uint"},
+		&U0{},
+		"",
+		"type mismatch",
+	},
+	{
+		"zero",
+		&X0{},
+		&X0{},
+		"",
+		"",
+	},
+	{
+		"basic",
+		&X0{S: "one", I: 2, i: 3},
+		&X0{S: "one", I: 2},
+		"",
+		"",
+	},
+	{
+		"save string/int load myString/int32",
+		&X0{S: "one", I: 2, i: 3},
+		&X1{S: "one", I: 2},
+		"",
+		"",
+	},
+	{
+		"missing fields",
+		&X0{S: "one", I: 2, i: 3},
+		&X2{},
+		"",
+		"no such struct field",
+	},
+	{
+		"save string load bool",
+		&X0{S: "one", I: 2, i: 3},
+		&X3{I: 2},
+		"",
+		"type mismatch",
+	},
+	{
+		"basic slice",
+		&Y0{B: true, F: []float64{7, 8, 9}},
+		&Y0{B: true, F: []float64{7, 8, 9}},
+		"",
+		"",
+	},
+	{
+		"save []float64 load float64",
+		&Y0{B: true, F: []float64{7, 8, 9}},
+		&Y1{B: true},
+		"",
+		"requires a slice",
+	},
+	{
+		"save []float64 load []int64",
+		&Y0{B: true, F: []float64{7, 8, 9}},
+		&Y2{B: true},
+		"",
+		"type mismatch",
+	},
+	{
+		"single slice is too long",
+		&Y0{F: make([]float64, maxIndexedProperties+1)},
+		&Y0{},
+		"too many indexed properties",
+		"",
+	},
+	{
+		"two slices are too long",
+		&Y0{F: make([]float64, maxIndexedProperties), G: make([]float64, maxIndexedProperties)},
+		&Y0{},
+		"too many indexed properties",
+		"",
+	},
+	{
+		"one slice and one scalar are too long",
+		&Y0{F: make([]float64, maxIndexedProperties), B: true},
+		&Y0{},
+		"too many indexed properties",
+		"",
+	},
+	{
+		"slice of slices of bytes",
+		&Repeated{
+			Repeats: []Repeat{
+				{
+					Key:   "key 1",
+					Value: []byte("value 1"),
+				},
+				{
+					Key:   "key 2",
+					Value: []byte("value 2"),
+				},
+			},
+		},
+		&Repeated{
+			Repeats: []Repeat{
+				{
+					Key:   "key 1",
+					Value: []byte("value 1"),
+				},
+				{
+					Key:   "key 2",
+					Value: []byte("value 2"),
+				},
+			},
+		},
+		"",
+		"",
+	},
+	{
+		"long blob",
+		&B0{B: makeUint8Slice(maxIndexedProperties + 1)},
+		&B0{B: makeUint8Slice(maxIndexedProperties + 1)},
+		"",
+		"",
+	},
+	{
+		"long []int8 is too long",
+		&B1{B: makeInt8Slice(maxIndexedProperties + 1)},
+		&B1{},
+		"too many indexed properties",
+		"",
+	},
+	{
+		"short []int8",
+		&B1{B: makeInt8Slice(3)},
+		&B1{B: makeInt8Slice(3)},
+		"",
+		"",
+	},
+	{
+		"long myBlob",
+		&B2{B: makeUint8Slice(maxIndexedProperties + 1)},
+		&B2{B: makeUint8Slice(maxIndexedProperties + 1)},
+		"",
+		"",
+	},
+	{
+		"short myBlob",
+		&B2{B: makeUint8Slice(3)},
+		&B2{B: makeUint8Slice(3)},
+		"",
+		"",
+	},
+	{
+		"long []myByte",
+		&B3{B: makeMyByteSlice(maxIndexedProperties + 1)},
+		&B3{B: makeMyByteSlice(maxIndexedProperties + 1)},
+		"",
+		"",
+	},
+	{
+		"short []myByte",
+		&B3{B: makeMyByteSlice(3)},
+		&B3{B: makeMyByteSlice(3)},
+		"",
+		"",
+	},
+	{
+		"slice of blobs",
+		&B4{B: [][]byte{
+			makeUint8Slice(3),
+			makeUint8Slice(4),
+			makeUint8Slice(5),
+		}},
+		&B4{B: [][]byte{
+			makeUint8Slice(3),
+			makeUint8Slice(4),
+			makeUint8Slice(5),
+		}},
+		"",
+		"",
+	},
+	{
+		"short ByteString",
+		&B5{B: ByteString(makeUint8Slice(3))},
+		&B5{B: ByteString(makeUint8Slice(3))},
+		"",
+		"",
+	},
+	{
+		"short ByteString as props",
+		&B5{B: ByteString(makeUint8Slice(3))},
+		&PropertyList{
+			Property{Name: "B", Value: ByteString(makeUint8Slice(3)), NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"short ByteString into string",
+		&B5{B: ByteString("legacy")},
+		&struct{ B string }{"legacy"},
+		"",
+		"",
+	},
+	{
+		"[]byte must be noindex",
+		&PropertyList{
+			Property{Name: "B", Value: makeUint8Slice(3), NoIndex: false},
+		},
+		nil,
+		"cannot index a []byte valued Property",
+		"",
+	},
+	{
+		"save tagged load props",
+		&Tagged{A: 1, B: []int{21, 22, 23}, C: 3, D: 4, E: 5, F: 6, G: 7, I: 8, J: 9},
+		&PropertyList{
+			// A and B are renamed to a and b; A and C are noindex, I is ignored.
+			// Indexed properties are loaded before raw properties. Thus, the
+			// result is: b, b, b, D, E, a, c.
+			Property{Name: "C", Value: int64(3), NoIndex: true, Multiple: false},
+			Property{Name: "D", Value: int64(4), NoIndex: false, Multiple: false},
+			Property{Name: "E", Value: int64(5), NoIndex: false, Multiple: false},
+			Property{Name: "F", Value: int64(6), NoIndex: true, Multiple: false},
+			Property{Name: "G", Value: int64(7), NoIndex: false, Multiple: false},
+			Property{Name: "J", Value: int64(9), NoIndex: true, Multiple: false},
+			Property{Name: "a", Value: int64(1), NoIndex: true, Multiple: false},
+			Property{Name: "b", Value: int64(21), NoIndex: false, Multiple: true},
+			Property{Name: "b", Value: int64(22), NoIndex: false, Multiple: true},
+			Property{Name: "b", Value: int64(23), NoIndex: false, Multiple: true},
+		},
+		"",
+		"",
+	},
+	{
+		"save tagged load tagged",
+		&Tagged{A: 1, B: []int{21, 22, 23}, C: 3, D: 4, E: 5, I: 6, J: 7},
+		&Tagged{A: 1, B: []int{21, 22, 23}, C: 3, D: 4, E: 5, J: 7},
+		"",
+		"",
+	},
+	{
+		"save props load tagged",
+		&PropertyList{
+			Property{Name: "A", Value: int64(11), NoIndex: true, Multiple: false},
+			Property{Name: "a", Value: int64(12), NoIndex: true, Multiple: false},
+		},
+		&Tagged{A: 12},
+		"",
+		`cannot load field "A"`,
+	},
+	{
+		"invalid tagged1",
+		&InvalidTagged1{I: 1},
+		&InvalidTagged1{},
+		"struct tag has invalid property name",
+		"",
+	},
+	{
+		"invalid tagged2",
+		&InvalidTagged2{I: 1, J: 2},
+		&InvalidTagged2{},
+		"struct tag has repeated property name",
+		"",
+	},
+	{
+		"doubler",
+		&Doubler{S: "s", I: 1, B: true},
+		&Doubler{S: "ss", I: 2, B: true},
+		"",
+		"",
+	},
+	{
+		"save struct load props",
+		&X0{S: "s", I: 1},
+		&PropertyList{
+			Property{Name: "I", Value: int64(1), NoIndex: false, Multiple: false},
+			Property{Name: "S", Value: "s", NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"save props load struct",
+		&PropertyList{
+			Property{Name: "S", Value: "s", NoIndex: false, Multiple: false},
+			Property{Name: "I", Value: int64(1), NoIndex: false, Multiple: false},
+		},
+		&X0{S: "s", I: 1},
+		"",
+		"",
+	},
+	{
+		"nil-value props",
+		&PropertyList{
+			Property{Name: "I", Value: nil, NoIndex: false, Multiple: false},
+			Property{Name: "B", Value: nil, NoIndex: false, Multiple: false},
+			Property{Name: "S", Value: nil, NoIndex: false, Multiple: false},
+			Property{Name: "F", Value: nil, NoIndex: false, Multiple: false},
+			Property{Name: "K", Value: nil, NoIndex: false, Multiple: false},
+			Property{Name: "T", Value: nil, NoIndex: false, Multiple: false},
+			Property{Name: "J", Value: nil, NoIndex: false, Multiple: true},
+			Property{Name: "J", Value: int64(7), NoIndex: false, Multiple: true},
+			Property{Name: "J", Value: nil, NoIndex: false, Multiple: true},
+		},
+		&struct {
+			I int64
+			B bool
+			S string
+			F float64
+			K *Key
+			T time.Time
+			J []int64
+		}{
+			J: []int64{0, 7, 0},
+		},
+		"",
+		"",
+	},
+	{
+		"save outer load props",
+		&Outer{
+			A: 1,
+			I: []Inner1{
+				{10, "ten"},
+				{20, "twenty"},
+				{30, "thirty"},
+			},
+			J: Inner2{
+				Y: 3.14,
+			},
+			Inner3: Inner3{
+				Z: true,
+			},
+		},
+		&PropertyList{
+			Property{Name: "A", Value: int64(1), NoIndex: false, Multiple: false},
+			Property{Name: "I.W", Value: int64(10), NoIndex: false, Multiple: true},
+			Property{Name: "I.W", Value: int64(20), NoIndex: false, Multiple: true},
+			Property{Name: "I.W", Value: int64(30), NoIndex: false, Multiple: true},
+			Property{Name: "I.X", Value: "ten", NoIndex: false, Multiple: true},
+			Property{Name: "I.X", Value: "twenty", NoIndex: false, Multiple: true},
+			Property{Name: "I.X", Value: "thirty", NoIndex: false, Multiple: true},
+			Property{Name: "J.Y", Value: float64(3.14), NoIndex: false, Multiple: false},
+			Property{Name: "Z", Value: true, NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"save props load outer-equivalent",
+		&PropertyList{
+			Property{Name: "A", Value: int64(1), NoIndex: false, Multiple: false},
+			Property{Name: "I.W", Value: int64(10), NoIndex: false, Multiple: true},
+			Property{Name: "I.X", Value: "ten", NoIndex: false, Multiple: true},
+			Property{Name: "I.W", Value: int64(20), NoIndex: false, Multiple: true},
+			Property{Name: "I.X", Value: "twenty", NoIndex: false, Multiple: true},
+			Property{Name: "I.W", Value: int64(30), NoIndex: false, Multiple: true},
+			Property{Name: "I.X", Value: "thirty", NoIndex: false, Multiple: true},
+			Property{Name: "J.Y", Value: float64(3.14), NoIndex: false, Multiple: false},
+			Property{Name: "Z", Value: true, NoIndex: false, Multiple: false},
+		},
+		&OuterEquivalent{
+			A:     1,
+			IDotW: []int32{10, 20, 30},
+			IDotX: []string{"ten", "twenty", "thirty"},
+			JDotY: 3.14,
+			Z:     true,
+		},
+		"",
+		"",
+	},
+	{
+		"save outer-equivalent load outer",
+		&OuterEquivalent{
+			A:     1,
+			IDotW: []int32{10, 20, 30},
+			IDotX: []string{"ten", "twenty", "thirty"},
+			JDotY: 3.14,
+			Z:     true,
+		},
+		&Outer{
+			A: 1,
+			I: []Inner1{
+				{10, "ten"},
+				{20, "twenty"},
+				{30, "thirty"},
+			},
+			J: Inner2{
+				Y: 3.14,
+			},
+			Inner3: Inner3{
+				Z: true,
+			},
+		},
+		"",
+		"",
+	},
+	{
+		"dotted names save",
+		&Dotted{A: DottedA{B: DottedB{C: 88}}},
+		&PropertyList{
+			Property{Name: "A0.A1.A2.B3.C4.C5", Value: int64(88), NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"dotted names load",
+		&PropertyList{
+			Property{Name: "A0.A1.A2.B3.C4.C5", Value: int64(99), NoIndex: false, Multiple: false},
+		},
+		&Dotted{A: DottedA{B: DottedB{C: 99}}},
+		"",
+		"",
+	},
+	{
+		"save struct load deriver",
+		&X0{S: "s", I: 1},
+		&Deriver{S: "s", Derived: "derived+s"},
+		"",
+		"",
+	},
+	{
+		"save deriver load struct",
+		&Deriver{S: "s", Derived: "derived+s", Ignored: "ignored"},
+		&X0{S: "s"},
+		"",
+		"",
+	},
+	{
+		"bad multi-prop entity",
+		&BadMultiPropEntity{},
+		&BadMultiPropEntity{},
+		"Multiple is false",
+		"",
+	},
+	// Regression: CL 25062824 broke handling of appengine.BlobKey fields.
+	{
+		"appengine.BlobKey",
+		&BK{Key: "blah"},
+		&BK{Key: "blah"},
+		"",
+		"",
+	},
+	{
+		"zero time.Time",
+		&T{T: time.Time{}},
+		&T{T: time.Time{}},
+		"",
+		"",
+	},
+	{
+		"time.Time near Unix zero time",
+		&T{T: time.Unix(0, 4e3)},
+		&T{T: time.Unix(0, 4e3)},
+		"",
+		"",
+	},
+	{
+		"time.Time, far in the future",
+		&T{T: time.Date(99999, 1, 1, 0, 0, 0, 0, time.UTC)},
+		&T{T: time.Date(99999, 1, 1, 0, 0, 0, 0, time.UTC)},
+		"",
+		"",
+	},
+	{
+		"time.Time, very far in the past",
+		&T{T: time.Date(-300000, 1, 1, 0, 0, 0, 0, time.UTC)},
+		&T{},
+		"time value out of range",
+		"",
+	},
+	{
+		"time.Time, very far in the future",
+		&T{T: time.Date(294248, 1, 1, 0, 0, 0, 0, time.UTC)},
+		&T{},
+		"time value out of range",
+		"",
+	},
+	{
+		"structs",
+		&N0{
+			X0:       X0{S: "one", I: 2, i: 3},
+			Nonymous: X0{S: "four", I: 5, i: 6},
+			Ignore:   "ignore",
+			Other:    "other",
+		},
+		&N0{
+			X0:       X0{S: "one", I: 2},
+			Nonymous: X0{S: "four", I: 5},
+			Other:    "other",
+		},
+		"",
+		"",
+	},
+	{
+		"slice of structs",
+		&N1{
+			X0: X0{S: "one", I: 2, i: 3},
+			Nonymous: []X0{
+				{S: "four", I: 5, i: 6},
+				{S: "seven", I: 8, i: 9},
+				{S: "ten", I: 11, i: 12},
+				{S: "thirteen", I: 14, i: 15},
+			},
+			Ignore: "ignore",
+			Other:  "other",
+		},
+		&N1{
+			X0: X0{S: "one", I: 2},
+			Nonymous: []X0{
+				{S: "four", I: 5},
+				{S: "seven", I: 8},
+				{S: "ten", I: 11},
+				{S: "thirteen", I: 14},
+			},
+			Other: "other",
+		},
+		"",
+		"",
+	},
+	{
+		"structs with slices of structs",
+		&N2{
+			N1: N1{
+				X0: X0{S: "rouge"},
+				Nonymous: []X0{
+					{S: "rosso0"},
+					{S: "rosso1"},
+				},
+			},
+			Green: N1{
+				X0: X0{S: "vert"},
+				Nonymous: []X0{
+					{S: "verde0"},
+					{S: "verde1"},
+					{S: "verde2"},
+				},
+			},
+			Blue: N1{
+				X0: X0{S: "bleu"},
+				Nonymous: []X0{
+					{S: "blu0"},
+					{S: "blu1"},
+					{S: "blu2"},
+					{S: "blu3"},
+				},
+			},
+		},
+		&N2{
+			N1: N1{
+				X0: X0{S: "rouge"},
+				Nonymous: []X0{
+					{S: "rosso0"},
+					{S: "rosso1"},
+				},
+			},
+			Green: N1{
+				X0: X0{S: "vert"},
+				Nonymous: []X0{
+					{S: "verde0"},
+					{S: "verde1"},
+					{S: "verde2"},
+				},
+			},
+			Blue: N1{
+				X0: X0{S: "bleu"},
+				Nonymous: []X0{
+					{S: "blu0"},
+					{S: "blu1"},
+					{S: "blu2"},
+					{S: "blu3"},
+				},
+			},
+		},
+		"",
+		"",
+	},
+	{
+		"save structs load props",
+		&N2{
+			N1: N1{
+				X0: X0{S: "rouge"},
+				Nonymous: []X0{
+					{S: "rosso0"},
+					{S: "rosso1"},
+				},
+			},
+			Green: N1{
+				X0: X0{S: "vert"},
+				Nonymous: []X0{
+					{S: "verde0"},
+					{S: "verde1"},
+					{S: "verde2"},
+				},
+			},
+			Blue: N1{
+				X0: X0{S: "bleu"},
+				Nonymous: []X0{
+					{S: "blu0"},
+					{S: "blu1"},
+					{S: "blu2"},
+					{S: "blu3"},
+				},
+			},
+		},
+		&PropertyList{
+			Property{Name: "Blue.I", Value: int64(0), NoIndex: false, Multiple: false},
+			Property{Name: "Blue.Nonymous.I", Value: int64(0), NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.I", Value: int64(0), NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.I", Value: int64(0), NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.I", Value: int64(0), NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.S", Value: "blu0", NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.S", Value: "blu1", NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.S", Value: "blu2", NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.S", Value: "blu3", NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Other", Value: "", NoIndex: false, Multiple: false},
+			Property{Name: "Blue.S", Value: "bleu", NoIndex: false, Multiple: false},
+			Property{Name: "green.I", Value: int64(0), NoIndex: false, Multiple: false},
+			Property{Name: "green.Nonymous.I", Value: int64(0), NoIndex: false, Multiple: true},
+			Property{Name: "green.Nonymous.I", Value: int64(0), NoIndex: false, Multiple: true},
+			Property{Name: "green.Nonymous.I", Value: int64(0), NoIndex: false, Multiple: true},
+			Property{Name: "green.Nonymous.S", Value: "verde0", NoIndex: false, Multiple: true},
+			Property{Name: "green.Nonymous.S", Value: "verde1", NoIndex: false, Multiple: true},
+			Property{Name: "green.Nonymous.S", Value: "verde2", NoIndex: false, Multiple: true},
+			Property{Name: "green.Other", Value: "", NoIndex: false, Multiple: false},
+			Property{Name: "green.S", Value: "vert", NoIndex: false, Multiple: false},
+			Property{Name: "red.I", Value: int64(0), NoIndex: false, Multiple: false},
+			Property{Name: "red.Nonymous.I", Value: int64(0), NoIndex: false, Multiple: true},
+			Property{Name: "red.Nonymous.I", Value: int64(0), NoIndex: false, Multiple: true},
+			Property{Name: "red.Nonymous.S", Value: "rosso0", NoIndex: false, Multiple: true},
+			Property{Name: "red.Nonymous.S", Value: "rosso1", NoIndex: false, Multiple: true},
+			Property{Name: "red.Other", Value: "", NoIndex: false, Multiple: false},
+			Property{Name: "red.S", Value: "rouge", NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"save props load structs with ragged fields",
+		&PropertyList{
+			Property{Name: "red.S", Value: "rot", NoIndex: false, Multiple: false},
+			Property{Name: "green.Nonymous.I", Value: int64(10), NoIndex: false, Multiple: true},
+			Property{Name: "green.Nonymous.I", Value: int64(11), NoIndex: false, Multiple: true},
+			Property{Name: "green.Nonymous.I", Value: int64(12), NoIndex: false, Multiple: true},
+			Property{Name: "green.Nonymous.I", Value: int64(13), NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.S", Value: "blau0", NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.I", Value: int64(20), NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.S", Value: "blau1", NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.I", Value: int64(21), NoIndex: false, Multiple: true},
+			Property{Name: "Blue.Nonymous.S", Value: "blau2", NoIndex: false, Multiple: true},
+		},
+		&N2{
+			N1: N1{
+				X0: X0{S: "rot"},
+			},
+			Green: N1{
+				Nonymous: []X0{
+					{I: 10},
+					{I: 11},
+					{I: 12},
+					{I: 13},
+				},
+			},
+			Blue: N1{
+				Nonymous: []X0{
+					{S: "blau0", I: 20},
+					{S: "blau1", I: 21},
+					{S: "blau2"},
+				},
+			},
+		},
+		"",
+		"",
+	},
+	{
+		"save structs with noindex tags",
+		&struct {
+			A struct {
+				X string `datastore:",noindex"`
+				Y string
+			} `datastore:",noindex"`
+			B struct {
+				X string `datastore:",noindex"`
+				Y string
+			}
+		}{},
+		&PropertyList{
+			Property{Name: "A.X", Value: "", NoIndex: true, Multiple: false},
+			Property{Name: "A.Y", Value: "", NoIndex: true, Multiple: false},
+			Property{Name: "B.X", Value: "", NoIndex: true, Multiple: false},
+			Property{Name: "B.Y", Value: "", NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"embedded struct with name override",
+		&struct {
+			Inner1 `datastore:"foo"`
+		}{},
+		&PropertyList{
+			Property{Name: "foo.W", Value: int64(0), NoIndex: false, Multiple: false},
+			Property{Name: "foo.X", Value: "", NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"slice of slices",
+		&SliceOfSlices{},
+		nil,
+		"flattening nested structs leads to a slice of slices",
+		"",
+	},
+	{
+		"recursive struct",
+		&Recursive{},
+		nil,
+		"recursive struct",
+		"",
+	},
+	{
+		"mutually recursive struct",
+		&MutuallyRecursive0{},
+		nil,
+		"recursive struct",
+		"",
+	},
+	{
+		"non-exported struct fields",
+		&struct {
+			i, J int64
+		}{i: 1, J: 2},
+		&PropertyList{
+			Property{Name: "J", Value: int64(2), NoIndex: false, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"json.RawMessage",
+		&struct {
+			J json.RawMessage
+		}{
+			J: json.RawMessage("rawr"),
+		},
+		&PropertyList{
+			Property{Name: "J", Value: []byte("rawr"), NoIndex: true, Multiple: false},
+		},
+		"",
+		"",
+	},
+	{
+		"json.RawMessage to myBlob",
+		&struct {
+			B json.RawMessage
+		}{
+			B: json.RawMessage("rawr"),
+		},
+		&B2{B: myBlob("rawr")},
+		"",
+		"",
+	},
+	{
+		"embedded time field",
+		&SpecialTime{MyTime: EmbeddedTime{now}},
+		&SpecialTime{MyTime: EmbeddedTime{now}},
+		"",
+		"",
+	},
+	{
+		"embedded time load",
+		&PropertyList{
+			Property{Name: "MyTime.", Value: now, NoIndex: false, Multiple: false},
+		},
+		&SpecialTime{MyTime: EmbeddedTime{now}},
+		"",
+		"",
+	},
+}
+
+// checkErr returns the empty string if either both want and err are zero,
+// or if want is a non-empty substring of err's string representation.
+func checkErr(want string, err error) string {
+	if err != nil {
+		got := err.Error()
+		if want == "" || strings.Index(got, want) == -1 {
+			return got
+		}
+	} else if want != "" {
+		return fmt.Sprintf("want error %q", want)
+	}
+	return ""
+}
+
+func TestRoundTrip(t *testing.T) {
+	for _, tc := range testCases {
+		p, err := saveEntity(testAppID, testKey0, tc.src)
+		if s := checkErr(tc.putErr, err); s != "" {
+			t.Errorf("%s: save: %s", tc.desc, s)
+			continue
+		}
+		if p == nil {
+			continue
+		}
+		var got interface{}
+		if _, ok := tc.want.(*PropertyList); ok {
+			got = new(PropertyList)
+		} else {
+			got = reflect.New(reflect.TypeOf(tc.want).Elem()).Interface()
+		}
+		err = loadEntity(got, p)
+		if s := checkErr(tc.getErr, err); s != "" {
+			t.Errorf("%s: load: %s", tc.desc, s)
+			continue
+		}
+		if pl, ok := got.(*PropertyList); ok {
+			// Sort by name to make sure we have a deterministic order.
+			sort.Stable(byName(*pl))
+		}
+		equal := false
+		if gotT, ok := got.(*T); ok {
+			// Round tripping a time.Time can result in a different time.Location: Local instead of UTC.
+			// We therefore test equality explicitly, instead of relying on reflect.DeepEqual.
+			equal = gotT.T.Equal(tc.want.(*T).T)
+		} else {
+			equal = reflect.DeepEqual(got, tc.want)
+		}
+		if !equal {
+			t.Errorf("%s: compare: got %v want %v", tc.desc, got, tc.want)
+			continue
+		}
+	}
+}
+
+type byName PropertyList
+
+func (s byName) Len() int           { return len(s) }
+func (s byName) Less(i, j int) bool { return s[i].Name < s[j].Name }
+func (s byName) Swap(i, j int)      { s[i], s[j] = s[j], s[i] }
+
+func TestQueryConstruction(t *testing.T) {
+	tests := []struct {
+		q, exp *Query
+		err    string
+	}{
+		{
+			q: NewQuery("Foo"),
+			exp: &Query{
+				kind:  "Foo",
+				limit: -1,
+			},
+		},
+		{
+			// Regular filtered query with standard spacing.
+			q: NewQuery("Foo").Filter("foo >", 7),
+			exp: &Query{
+				kind: "Foo",
+				filter: []filter{
+					{
+						FieldName: "foo",
+						Op:        greaterThan,
+						Value:     7,
+					},
+				},
+				limit: -1,
+			},
+		},
+		{
+			// Filtered query with no spacing.
+			q: NewQuery("Foo").Filter("foo=", 6),
+			exp: &Query{
+				kind: "Foo",
+				filter: []filter{
+					{
+						FieldName: "foo",
+						Op:        equal,
+						Value:     6,
+					},
+				},
+				limit: -1,
+			},
+		},
+		{
+			// Filtered query with funky spacing.
+			q: NewQuery("Foo").Filter(" foo< ", 8),
+			exp: &Query{
+				kind: "Foo",
+				filter: []filter{
+					{
+						FieldName: "foo",
+						Op:        lessThan,
+						Value:     8,
+					},
+				},
+				limit: -1,
+			},
+		},
+		{
+			// Filtered query with multicharacter op.
+			q: NewQuery("Foo").Filter("foo >=", 9),
+			exp: &Query{
+				kind: "Foo",
+				filter: []filter{
+					{
+						FieldName: "foo",
+						Op:        greaterEq,
+						Value:     9,
+					},
+				},
+				limit: -1,
+			},
+		},
+		{
+			// Query with ordering.
+			q: NewQuery("Foo").Order("bar"),
+			exp: &Query{
+				kind: "Foo",
+				order: []order{
+					{
+						FieldName: "bar",
+						Direction: ascending,
+					},
+				},
+				limit: -1,
+			},
+		},
+		{
+			// Query with reverse ordering, and funky spacing.
+			q: NewQuery("Foo").Order(" - bar"),
+			exp: &Query{
+				kind: "Foo",
+				order: []order{
+					{
+						FieldName: "bar",
+						Direction: descending,
+					},
+				},
+				limit: -1,
+			},
+		},
+		{
+			// Query with an empty ordering.
+			q:   NewQuery("Foo").Order(""),
+			err: "empty order",
+		},
+		{
+			// Query with a + ordering.
+			q:   NewQuery("Foo").Order("+bar"),
+			err: "invalid order",
+		},
+	}
+	for i, test := range tests {
+		if test.q.err != nil {
+			got := test.q.err.Error()
+			if !strings.Contains(got, test.err) {
+				t.Errorf("%d: error mismatch: got %q want something containing %q", i, got, test.err)
+			}
+			continue
+		}
+		if !reflect.DeepEqual(test.q, test.exp) {
+			t.Errorf("%d: mismatch: got %v want %v", i, test.q, test.exp)
+		}
+	}
+}
+
+func TestStringMeaning(t *testing.T) {
+	var xx [4]interface{}
+	xx[0] = &struct {
+		X string
+	}{"xx0"}
+	xx[1] = &struct {
+		X string `datastore:",noindex"`
+	}{"xx1"}
+	xx[2] = &struct {
+		X []byte
+	}{[]byte("xx2")}
+	xx[3] = &struct {
+		X []byte `datastore:",noindex"`
+	}{[]byte("xx3")}
+
+	indexed := [4]bool{
+		true,
+		false,
+		false, // A []byte is always no-index.
+		false,
+	}
+	want := [4]pb.Property_Meaning{
+		pb.Property_NO_MEANING,
+		pb.Property_TEXT,
+		pb.Property_BLOB,
+		pb.Property_BLOB,
+	}
+
+	for i, x := range xx {
+		props, err := SaveStruct(x)
+		if err != nil {
+			t.Errorf("i=%d: SaveStruct: %v", i, err)
+			continue
+		}
+		e, err := propertiesToProto("appID", testKey0, props)
+		if err != nil {
+			t.Errorf("i=%d: propertiesToProto: %v", i, err)
+			continue
+		}
+		var p *pb.Property
+		switch {
+		case indexed[i] && len(e.Property) == 1:
+			p = e.Property[0]
+		case !indexed[i] && len(e.RawProperty) == 1:
+			p = e.RawProperty[0]
+		default:
+			t.Errorf("i=%d: EntityProto did not have expected property slice", i)
+			continue
+		}
+		if got := p.GetMeaning(); got != want[i] {
+			t.Errorf("i=%d: meaning: got %v, want %v", i, got, want[i])
+			continue
+		}
+	}
+}
+
+func TestNamespaceResetting(t *testing.T) {
+	// These environment variables are necessary because *Query.Run will
+	// call internal.FullyQualifiedAppID which checks these variables or falls
+	// back to the Metadata service that is not available in tests.
+	environ := []struct {
+		key, value string
+	}{
+		{"GAE_LONG_APP_ID", "my-app-id"},
+		{"GAE_PARTITION", "1"},
+	}
+	for i, v := range environ {
+		old := os.Getenv(v.key)
+		os.Setenv(v.key, v.value)
+		environ[i].value = old // write back so the deferred restore sees the original value
+	}
+	defer func() { // Restore old environment after the test completes.
+		for _, v := range environ {
+			if v.value == "" {
+				os.Unsetenv(v.key)
+				continue
+			}
+			os.Setenv(v.key, v.value)
+		}
+	}()
+
+	namec := make(chan *string, 1)
+	c0 := aetesting.FakeSingleContext(t, "datastore_v3", "RunQuery", func(req *pb.Query, res *pb.QueryResult) error {
+		namec <- req.NameSpace
+		return fmt.Errorf("RPC error")
+	})
+
+	// Check that wrapping c0 in a namespace twice works correctly.
+	c1, err := appengine.Namespace(c0, "A")
+	if err != nil {
+		t.Fatalf("appengine.Namespace: %v", err)
+	}
+	c2, err := appengine.Namespace(c1, "") // should act as the original context
+	if err != nil {
+		t.Fatalf("appengine.Namespace: %v", err)
+	}
+
+	q := NewQuery("SomeKind")
+
+	q.Run(c0)
+	if ns := <-namec; ns != nil {
+		t.Errorf(`RunQuery with c0: ns = %q, want nil`, *ns)
+	}
+
+	q.Run(c1)
+	if ns := <-namec; ns == nil {
+		t.Error(`RunQuery with c1: ns = nil, want "A"`)
+	} else if *ns != "A" {
+		t.Errorf(`RunQuery with c1: ns = %q, want "A"`, *ns)
+	}
+
+	q.Run(c2)
+	if ns := <-namec; ns != nil {
+		t.Errorf(`RunQuery with c2: ns = %q, want nil`, *ns)
+	}
+}
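
The namespace-resetting test above relies on appengine.Namespace to scope query RPCs; a minimal sketch of the same pattern in application code (the handler name and "Widget" kind are illustrative assumptions, not part of this diff) might look like:

	// Sketch only: scope datastore reads to one namespace before running a query.
	func listWidgets(w http.ResponseWriter, r *http.Request) {
		ctx := appengine.NewContext(r)
		nsCtx, err := appengine.Namespace(ctx, "customer-a")
		if err != nil {
			http.Error(w, err.Error(), 500)
			return
		}
		q := datastore.NewQuery("Widget").Limit(10)
		for t := q.Run(nsCtx); ; {
			var x struct{ Description string }
			if _, err := t.Next(&x); err != nil {
				break // datastore.Done when iteration is finished
			}
			fmt.Fprintln(w, x.Description)
		}
	}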
diff --git a/v2/datastore/doc.go b/v2/datastore/doc.go
new file mode 100644
index 0000000..42b1f33
--- /dev/null
+++ b/v2/datastore/doc.go
@@ -0,0 +1,354 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+/*
+Package datastore provides a client for App Engine's datastore service.
+
+# Basic Operations
+
+Entities are the unit of storage and are associated with a key. A key
+consists of an optional parent key, a string application ID, a string kind
+(also known as an entity type), and either a StringID or an IntID. A
+StringID is also known as an entity name or key name.
+
+It is valid to create a key with a zero StringID and a zero IntID; this is
+called an incomplete key, and does not refer to any saved entity. Putting an
+entity into the datastore under an incomplete key will cause a unique key
+to be generated for that entity, with a non-zero IntID.
+
+An entity's contents are a mapping from case-sensitive field names to values.
+Valid value types are:
+  - signed integers (int, int8, int16, int32 and int64),
+  - bool,
+  - string,
+  - float32 and float64,
+  - []byte (up to 1 megabyte in length),
+  - any type whose underlying type is one of the above predeclared types,
+  - ByteString,
+  - *Key,
+  - time.Time (stored with microsecond precision),
+  - appengine.BlobKey,
+  - appengine.GeoPoint,
+  - structs whose fields are all valid value types,
+  - slices of any of the above.
+
+Slices of structs are valid, as are structs that contain slices. However, if
+one struct contains another, then at most one of those can be repeated. This
+disqualifies recursively defined struct types: any struct T that (directly or
+indirectly) contains a []T.
+
+The Get and Put functions load and save an entity's contents. An entity's
+contents are typically represented by a struct pointer.
+
+Example code:
+
+	type Entity struct {
+		Value string
+	}
+
+	func handle(w http.ResponseWriter, r *http.Request) {
+		ctx := appengine.NewContext(r)
+
+		k := datastore.NewKey(ctx, "Entity", "stringID", 0, nil)
+		e := new(Entity)
+		if err := datastore.Get(ctx, k, e); err != nil {
+			http.Error(w, err.Error(), 500)
+			return
+		}
+
+		old := e.Value
+		e.Value = r.URL.Path
+
+		if _, err := datastore.Put(ctx, k, e); err != nil {
+			http.Error(w, err.Error(), 500)
+			return
+		}
+
+		w.Header().Set("Content-Type", "text/plain; charset=utf-8")
+		fmt.Fprintf(w, "old=%q\nnew=%q\n", old, e.Value)
+	}
+
+GetMulti, PutMulti and DeleteMulti are batch versions of the Get, Put and
+Delete functions. They take a []*Key instead of a *Key, and may return an
+appengine.MultiError when encountering partial failure.
+
+# Properties
+
+An entity's contents can be represented by a variety of types. These are
+typically struct pointers, but can also be any type that implements the
+PropertyLoadSaver interface. If using a struct pointer, you do not have to
+explicitly implement the PropertyLoadSaver interface; the datastore will
+automatically convert via reflection. If a struct pointer does implement that
+interface then those methods will be used in preference to the default
+behavior for struct pointers. Struct pointers are more strongly typed and are
+easier to use; PropertyLoadSavers are more flexible.
+
+The actual types passed do not have to match between Get and Put calls or even
+across different calls to datastore. It is valid to put a *PropertyList and
+get that same entity as a *myStruct, or put a *myStruct0 and get a *myStruct1.
+Conceptually, any entity is saved as a sequence of properties, and is loaded
+into the destination value on a property-by-property basis. When loading into
+a struct pointer, an entity that cannot be completely represented (such as a
+missing field) will result in an ErrFieldMismatch error but it is up to the
+caller whether this error is fatal, recoverable or ignorable.
+
+By default, for struct pointers, all properties are potentially indexed, and
+the property name is the same as the field name (and hence must start with an
+upper case letter).
+
+Fields may have a `datastore:"name,options"` tag. The tag name is the
+property name, which must be one or more valid Go identifiers joined by ".",
+but may start with a lower case letter. An empty tag name means to just use the
+field name. A "-" tag name means that the datastore will ignore that field.
+
+The only valid options are "omitempty" and "noindex".
+
+If the options include "omitempty" and the value of the field is empty, then the field will be omitted on Save.
+The empty values are false, 0, any nil interface value, and any array, slice, map, or string of length zero.
+Struct field values will never be empty.
+
+If options include "noindex" then the field will not be indexed. All fields are indexed
+by default. Strings or byte slices longer than 1500 bytes cannot be indexed;
+fields used to store long strings and byte slices must be tagged with "noindex"
+or they will cause Put operations to fail.
+
+To use multiple options together, separate them by a comma.
+The order does not matter.
+
+If the options string is "" then the comma may be omitted.
+
+Example code:
+
+	// A and B are renamed to a and b.
+	// A, C and J are not indexed.
+	// D's tag is equivalent to having no tag at all (E).
+	// I is ignored entirely by the datastore.
+	// J has tag information for both the datastore and json packages.
+	type TaggedStruct struct {
+		A int `datastore:"a,noindex"`
+		B int `datastore:"b"`
+		C int `datastore:",noindex"`
+		D int `datastore:""`
+		E int
+		I int `datastore:"-"`
+		J int `datastore:",noindex" json:"j"`
+	}
+
+# Structured Properties
+
+If the struct pointed to contains other structs, then the nested or embedded
+structs are flattened. For example, given these definitions:
+
+	type Inner1 struct {
+		W int32
+		X string
+	}
+
+	type Inner2 struct {
+		Y float64
+	}
+
+	type Inner3 struct {
+		Z bool
+	}
+
+	type Outer struct {
+		A int16
+		I []Inner1
+		J Inner2
+		Inner3
+	}
+
+then an Outer's properties would be equivalent to those of:
+
+	type OuterEquivalent struct {
+		A     int16
+		IDotW []int32  `datastore:"I.W"`
+		IDotX []string `datastore:"I.X"`
+		JDotY float64  `datastore:"J.Y"`
+		Z     bool
+	}
+
+If Outer's embedded Inner3 field was tagged as `datastore:"Foo"` then the
+equivalent field would instead be: FooDotZ bool `datastore:"Foo.Z"`.
+
+If an outer struct is tagged "noindex" then all of its implicit flattened
+fields are effectively "noindex".
+
+# The PropertyLoadSaver Interface
+
+An entity's contents can also be represented by any type that implements the
+PropertyLoadSaver interface. This type may be a struct pointer, but it does
+not have to be. The datastore package will call Load when getting the entity's
+contents, and Save when putting the entity's contents.
+Possible uses include deriving non-stored fields, verifying fields, or indexing
+a field only if its value is positive.
+
+Example code:
+
+	type CustomPropsExample struct {
+		I, J int
+		// Sum is not stored, but should always be equal to I + J.
+		Sum int `datastore:"-"`
+	}
+
+	func (x *CustomPropsExample) Load(ps []datastore.Property) error {
+		// Load I and J as usual.
+		if err := datastore.LoadStruct(x, ps); err != nil {
+			return err
+		}
+		// Derive the Sum field.
+		x.Sum = x.I + x.J
+		return nil
+	}
+
+	func (x *CustomPropsExample) Save() ([]datastore.Property, error) {
+		// Validate the Sum field.
+		if x.Sum != x.I + x.J {
+			return nil, errors.New("CustomPropsExample has inconsistent sum")
+		}
+		// Save I and J as usual. The code below is equivalent to calling
+		// "return datastore.SaveStruct(x)", but is done manually for
+		// demonstration purposes.
+		return []datastore.Property{
+			{
+				Name:  "I",
+				Value: int64(x.I),
+			},
+			{
+				Name:  "J",
+				Value: int64(x.J),
+			},
+		}, nil
+	}
+
+The *PropertyList type implements PropertyLoadSaver, and can therefore hold an
+arbitrary entity's contents.
+
+# Queries
+
+Queries retrieve entities based on their properties or key's ancestry. Running
+a query yields an iterator of results: either keys or (key, entity) pairs.
+Queries are re-usable and it is safe to call Query.Run from concurrent
+goroutines. Iterators are not safe for concurrent use.
+
+Queries are immutable, and are either created by calling NewQuery, or derived
+from an existing query by calling a method like Filter or Order that returns a
+new query value. A query is typically constructed by calling NewQuery followed
+by a chain of zero or more such methods. These methods are:
+  - Ancestor and Filter constrain the entities returned by running a query.
+  - Order affects the order in which they are returned.
+  - Project constrains the fields returned.
+  - Distinct de-duplicates projected entities.
+  - KeysOnly makes the iterator return only keys, not (key, entity) pairs.
+  - Start, End, Offset and Limit define which sub-sequence of matching entities
+    to return. Start and End take cursors, Offset and Limit take integers. Start
+    and Offset affect the first result, End and Limit affect the last result.
+    If both Start and Offset are set, then the offset is relative to Start.
+    If both End and Limit are set, then the earliest constraint wins. Limit is
+    relative to Start+Offset, not relative to End. As a special case, a
+    negative limit means unlimited.
+
+Example code:
+
+	type Widget struct {
+		Description string
+		Price       int
+	}
+
+	func handle(w http.ResponseWriter, r *http.Request) {
+		ctx := appengine.NewContext(r)
+		q := datastore.NewQuery("Widget").
+			Filter("Price <", 1000).
+			Order("-Price")
+		b := new(bytes.Buffer)
+		for t := q.Run(ctx); ; {
+			var x Widget
+			key, err := t.Next(&x)
+			if err == datastore.Done {
+				break
+			}
+			if err != nil {
+				serveError(ctx, w, err)
+				return
+			}
+			fmt.Fprintf(b, "Key=%v\nWidget=%#v\n\n", key, x)
+		}
+		w.Header().Set("Content-Type", "text/plain; charset=utf-8")
+		io.Copy(w, b)
+	}
+
+# Transactions
+
+RunInTransaction runs a function in a transaction.
+
+Example code:
+
+	type Counter struct {
+		Count int
+	}
+
+	func inc(ctx context.Context, key *datastore.Key) (int, error) {
+		var x Counter
+		if err := datastore.Get(ctx, key, &x); err != nil && err != datastore.ErrNoSuchEntity {
+			return 0, err
+		}
+		x.Count++
+		if _, err := datastore.Put(ctx, key, &x); err != nil {
+			return 0, err
+		}
+		return x.Count, nil
+	}
+
+	func handle(w http.ResponseWriter, r *http.Request) {
+		ctx := appengine.NewContext(r)
+		var count int
+		err := datastore.RunInTransaction(ctx, func(ctx context.Context) error {
+			var err1 error
+			count, err1 = inc(ctx, datastore.NewKey(ctx, "Counter", "singleton", 0, nil))
+			return err1
+		}, nil)
+		if err != nil {
+			serveError(ctx, w, err)
+			return
+		}
+		w.Header().Set("Content-Type", "text/plain; charset=utf-8")
+		fmt.Fprintf(w, "Count=%d", count)
+	}
+
+# Metadata
+
+The datastore package provides access to some of App Engine's datastore
+metadata. This metadata includes information about the entity groups,
+namespaces, entity kinds, and properties in the datastore, as well as the
+property representations for each property.
+
+Example code:
+
+	func handle(w http.ResponseWriter, r *http.Request) {
+		// Print all the kinds in the datastore, with all the indexed
+		// properties (and their representations) for each.
+		ctx := appengine.NewContext(r)
+
+		kinds, err := datastore.Kinds(ctx)
+		if err != nil {
+			serveError(ctx, w, err)
+			return
+		}
+
+		w.Header().Set("Content-Type", "text/plain; charset=utf-8")
+		for _, kind := range kinds {
+			fmt.Fprintf(w, "%s:\n", kind)
+			props, err := datastore.KindProperties(ctx, kind)
+			if err != nil {
+				fmt.Fprintln(w, "\t(unable to retrieve properties)")
+				continue
+			}
+			for p, rep := range props {
+				fmt.Fprintf(w, "\t-%s (%s)\n", p, strings.Join(rep, ", "))
+			}
+		}
+	}
+*/
+package datastore // import "google.golang.org/appengine/v2/datastore"
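
The doc comment above states, but does not illustrate, that *PropertyList implements PropertyLoadSaver. A rough sketch of loading an entity of unknown shape into a PropertyList (the kind and key name are assumptions) could be:

	// Sketch only: dump an arbitrary entity's properties without a typed struct.
	func dumpEntity(ctx context.Context, w io.Writer) error {
		k := datastore.NewKey(ctx, "SomeKind", "some-name", 0, nil)
		var props datastore.PropertyList
		if err := datastore.Get(ctx, k, &props); err != nil {
			return err
		}
		for _, p := range props {
			fmt.Fprintf(w, "%s = %v (noindex=%t, multiple=%t)\n", p.Name, p.Value, p.NoIndex, p.Multiple)
		}
		return nil
	}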
diff --git a/v2/datastore/internal/cloudkey/cloudkey.go b/v2/datastore/internal/cloudkey/cloudkey.go
new file mode 100644
index 0000000..9e75237
--- /dev/null
+++ b/v2/datastore/internal/cloudkey/cloudkey.go
@@ -0,0 +1,120 @@
+// Copyright 2019 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package cloudkey contains a subset of types and functions copied from cloud.google.com/go/datastore.
+//
+// They are copied here so that keys encoded by the cloud.google.com/go/datastore package can be decoded.
+package cloudkey
+
+import (
+	"encoding/base64"
+	"errors"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+	cloudpb "google.golang.org/appengine/v2/datastore/internal/cloudpb"
+)
+
+/////////////////////////////////////////////////////////////////////
+// Code below is copied from https://github.com/googleapis/google-cloud-go/blob/master/datastore/datastore.go
+/////////////////////////////////////////////////////////////////////
+
+var (
+	// ErrInvalidKey is returned when an invalid key is presented.
+	ErrInvalidKey = errors.New("datastore: invalid key")
+)
+
+/////////////////////////////////////////////////////////////////////
+// Code below is copied from https://github.com/googleapis/google-cloud-go/blob/master/datastore/key.go
+/////////////////////////////////////////////////////////////////////
+
+// Key represents the datastore key for a stored entity.
+type Key struct {
+	// Kind cannot be empty.
+	Kind string
+	// Either ID or Name must be zero for the Key to be valid.
+	// If both are zero, the Key is incomplete.
+	ID   int64
+	Name string
+	// Parent must either be a complete Key or nil.
+	Parent *Key
+
+	// Namespace provides the ability to partition your data for multiple
+	// tenants. In most cases, it is not necessary to specify a namespace.
+	// See docs on datastore multitenancy for details:
+	// https://cloud.google.com/datastore/docs/concepts/multitenancy
+	Namespace string
+}
+
+// DecodeKey decodes a key from the opaque representation returned by Encode.
+func DecodeKey(encoded string) (*Key, error) {
+	// Re-add padding.
+	if m := len(encoded) % 4; m != 0 {
+		encoded += strings.Repeat("=", 4-m)
+	}
+
+	b, err := base64.URLEncoding.DecodeString(encoded)
+	if err != nil {
+		return nil, err
+	}
+
+	pKey := new(cloudpb.Key)
+	if err := proto.Unmarshal(b, pKey); err != nil {
+		return nil, err
+	}
+	return protoToKey(pKey)
+}
+
+// valid returns whether the key is valid.
+func (k *Key) valid() bool {
+	if k == nil {
+		return false
+	}
+	for ; k != nil; k = k.Parent {
+		if k.Kind == "" {
+			return false
+		}
+		if k.Name != "" && k.ID != 0 {
+			return false
+		}
+		if k.Parent != nil {
+			if k.Parent.Incomplete() {
+				return false
+			}
+			if k.Parent.Namespace != k.Namespace {
+				return false
+			}
+		}
+	}
+	return true
+}
+
+// Incomplete reports whether the key does not refer to a stored entity.
+func (k *Key) Incomplete() bool {
+	return k.Name == "" && k.ID == 0
+}
+
+// protoToKey decodes a protocol buffer representation of a key into an
+// equivalent *Key object. If the key is invalid, protoToKey will return the
+// invalid key along with ErrInvalidKey.
+func protoToKey(p *cloudpb.Key) (*Key, error) {
+	var key *Key
+	var namespace string
+	if partition := p.PartitionId; partition != nil {
+		namespace = partition.NamespaceId
+	}
+	for _, el := range p.Path {
+		key = &Key{
+			Namespace: namespace,
+			Kind:      el.Kind,
+			ID:        el.GetId(),
+			Name:      el.GetName(),
+			Parent:    key,
+		}
+	}
+	if !key.valid() { // Also detects key == nil.
+		return key, ErrInvalidKey
+	}
+	return key, nil
+}
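
The main package's DecodeKey (see key.go later in this diff) falls back to cloud-format decoding when a key string does not unmarshal as an App Engine reference. Since this package is internal, it can only be called from within the module; a sketch of decoding a cloud-format key directly (the encoded string is a placeholder) would be:

	// Sketch only: decode a key string produced by cloud.google.com/go/datastore.
	func inspectCloudKey(encoded string) error {
		k, err := cloudkey.DecodeKey(encoded)
		if err != nil {
			return err
		}
		log.Printf("kind=%s id=%d name=%q namespace=%q", k.Kind, k.ID, k.Name, k.Namespace)
		return nil
	}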
diff --git a/v2/datastore/internal/cloudpb/entity.pb.go b/v2/datastore/internal/cloudpb/entity.pb.go
new file mode 100644
index 0000000..af8195f
--- /dev/null
+++ b/v2/datastore/internal/cloudpb/entity.pb.go
@@ -0,0 +1,344 @@
+// Copyright 2019 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package cloudpb contains a subset of protobufs copied from google.golang.org/genproto/googleapis/datastore/v1.
+//
+// They are copied here so that keys encoded by the cloud.google.com/go/datastore package can be decoded.
+package cloudpb
+
+import (
+	"fmt"
+
+	"github.com/golang/protobuf/proto"
+)
+
+// A partition ID identifies a grouping of entities. The grouping is always
+// by project and namespace, however the namespace ID may be empty.
+//
+// A partition ID contains several dimensions:
+// project ID and namespace ID.
+//
+// Partition dimensions:
+//
+// - May be `""`.
+// - Must be valid UTF-8 bytes.
+// - Must have values that match regex `[A-Za-z\d\.\-_]{1,100}`
+// If the value of any dimension matches regex `__.*__`, the partition is
+// reserved/read-only.
+// A reserved/read-only partition ID is forbidden in certain documented
+// contexts.
+//
+// Foreign partition IDs (in which the project ID does
+// not match the context project ID ) are discouraged.
+// Reads and writes of foreign partition IDs may fail if the project is not in
+// an active state.
+type PartitionId struct {
+	// The ID of the project to which the entities belong.
+	ProjectId string `protobuf:"bytes,2,opt,name=project_id,json=projectId,proto3" json:"project_id,omitempty"`
+	// If not empty, the ID of the namespace to which the entities belong.
+	NamespaceId          string   `protobuf:"bytes,4,opt,name=namespace_id,json=namespaceId,proto3" json:"namespace_id,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *PartitionId) Reset()         { *m = PartitionId{} }
+func (m *PartitionId) String() string { return proto.CompactTextString(m) }
+func (*PartitionId) ProtoMessage()    {}
+func (*PartitionId) Descriptor() ([]byte, []int) {
+	return fileDescriptor_entity_096a297364b049a5, []int{0}
+}
+func (m *PartitionId) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_PartitionId.Unmarshal(m, b)
+}
+func (m *PartitionId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_PartitionId.Marshal(b, m, deterministic)
+}
+func (dst *PartitionId) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_PartitionId.Merge(dst, src)
+}
+func (m *PartitionId) XXX_Size() int {
+	return xxx_messageInfo_PartitionId.Size(m)
+}
+func (m *PartitionId) XXX_DiscardUnknown() {
+	xxx_messageInfo_PartitionId.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PartitionId proto.InternalMessageInfo
+
+func (m *PartitionId) GetProjectId() string {
+	if m != nil {
+		return m.ProjectId
+	}
+	return ""
+}
+
+func (m *PartitionId) GetNamespaceId() string {
+	if m != nil {
+		return m.NamespaceId
+	}
+	return ""
+}
+
+// A unique identifier for an entity.
+// If a key's partition ID or any of its path kinds or names are
+// reserved/read-only, the key is reserved/read-only.
+// A reserved/read-only key is forbidden in certain documented contexts.
+type Key struct {
+	// Entities are partitioned into subsets, currently identified by a project
+	// ID and namespace ID.
+	// Queries are scoped to a single partition.
+	PartitionId *PartitionId `protobuf:"bytes,1,opt,name=partition_id,json=partitionId,proto3" json:"partition_id,omitempty"`
+	// The entity path.
+	// An entity path consists of one or more elements composed of a kind and a
+	// string or numerical identifier, which identify entities. The first
+	// element identifies a _root entity_, the second element identifies
+	// a _child_ of the root entity, the third element identifies a child of the
+	// second entity, and so forth. The entities identified by all prefixes of
+	// the path are called the element's _ancestors_.
+	//
+	// An entity path is always fully complete: *all* of the entity's ancestors
+	// are required to be in the path along with the entity identifier itself.
+	// The only exception is that in some documented cases, the identifier in the
+	// last path element (for the entity) itself may be omitted. For example,
+	// the last path element of the key of `Mutation.insert` may have no
+	// identifier.
+	//
+	// A path can never be empty, and a path can have at most 100 elements.
+	Path                 []*Key_PathElement `protobuf:"bytes,2,rep,name=path,proto3" json:"path,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}           `json:"-"`
+	XXX_unrecognized     []byte             `json:"-"`
+	XXX_sizecache        int32              `json:"-"`
+}
+
+func (m *Key) Reset()         { *m = Key{} }
+func (m *Key) String() string { return proto.CompactTextString(m) }
+func (*Key) ProtoMessage()    {}
+func (*Key) Descriptor() ([]byte, []int) {
+	return fileDescriptor_entity_096a297364b049a5, []int{1}
+}
+func (m *Key) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Key.Unmarshal(m, b)
+}
+func (m *Key) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Key.Marshal(b, m, deterministic)
+}
+func (dst *Key) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Key.Merge(dst, src)
+}
+func (m *Key) XXX_Size() int {
+	return xxx_messageInfo_Key.Size(m)
+}
+func (m *Key) XXX_DiscardUnknown() {
+	xxx_messageInfo_Key.DiscardUnknown(m)
+}
+
+// A (kind, ID/name) pair used to construct a key path.
+//
+// If either name or ID is set, the element is complete.
+// If neither is set, the element is incomplete.
+type Key_PathElement struct {
+	// The kind of the entity.
+	// A kind matching regex `__.*__` is reserved/read-only.
+	// A kind must not contain more than 1500 bytes when UTF-8 encoded.
+	// Cannot be `""`.
+	Kind string `protobuf:"bytes,1,opt,name=kind,proto3" json:"kind,omitempty"`
+	// The type of ID.
+	//
+	// Types that are valid to be assigned to IdType:
+	//	*Key_PathElement_Id
+	//	*Key_PathElement_Name
+	IdType               isKey_PathElement_IdType `protobuf_oneof:"id_type"`
+	XXX_NoUnkeyedLiteral struct{}                 `json:"-"`
+	XXX_unrecognized     []byte                   `json:"-"`
+	XXX_sizecache        int32                    `json:"-"`
+}
+
+func (m *Key_PathElement) Reset()         { *m = Key_PathElement{} }
+func (m *Key_PathElement) String() string { return proto.CompactTextString(m) }
+func (*Key_PathElement) ProtoMessage()    {}
+func (*Key_PathElement) Descriptor() ([]byte, []int) {
+	return fileDescriptor_entity_096a297364b049a5, []int{1, 0}
+}
+func (m *Key_PathElement) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Key_PathElement.Unmarshal(m, b)
+}
+func (m *Key_PathElement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Key_PathElement.Marshal(b, m, deterministic)
+}
+func (dst *Key_PathElement) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Key_PathElement.Merge(dst, src)
+}
+func (m *Key_PathElement) XXX_Size() int {
+	return xxx_messageInfo_Key_PathElement.Size(m)
+}
+func (m *Key_PathElement) XXX_DiscardUnknown() {
+	xxx_messageInfo_Key_PathElement.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Key_PathElement proto.InternalMessageInfo
+
+func (m *Key_PathElement) GetKind() string {
+	if m != nil {
+		return m.Kind
+	}
+	return ""
+}
+
+type isKey_PathElement_IdType interface {
+	isKey_PathElement_IdType()
+}
+
+type Key_PathElement_Id struct {
+	Id int64 `protobuf:"varint,2,opt,name=id,proto3,oneof"`
+}
+
+type Key_PathElement_Name struct {
+	Name string `protobuf:"bytes,3,opt,name=name,proto3,oneof"`
+}
+
+func (*Key_PathElement_Id) isKey_PathElement_IdType() {}
+
+func (*Key_PathElement_Name) isKey_PathElement_IdType() {}
+
+func (m *Key_PathElement) GetIdType() isKey_PathElement_IdType {
+	if m != nil {
+		return m.IdType
+	}
+	return nil
+}
+
+func (m *Key_PathElement) GetId() int64 {
+	if x, ok := m.GetIdType().(*Key_PathElement_Id); ok {
+		return x.Id
+	}
+	return 0
+}
+
+func (m *Key_PathElement) GetName() string {
+	if x, ok := m.GetIdType().(*Key_PathElement_Name); ok {
+		return x.Name
+	}
+	return ""
+}
+
+// XXX_OneofFuncs is for the internal use of the proto package.
+func (*Key_PathElement) XXX_OneofFuncs() (func(msg proto.Message, b *proto.Buffer) error, func(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error), func(msg proto.Message) (n int), []interface{}) {
+	return _Key_PathElement_OneofMarshaler, _Key_PathElement_OneofUnmarshaler, _Key_PathElement_OneofSizer, []interface{}{
+		(*Key_PathElement_Id)(nil),
+		(*Key_PathElement_Name)(nil),
+	}
+}
+
+func _Key_PathElement_OneofMarshaler(msg proto.Message, b *proto.Buffer) error {
+	m := msg.(*Key_PathElement)
+	// id_type
+	switch x := m.IdType.(type) {
+	case *Key_PathElement_Id:
+		b.EncodeVarint(2<<3 | proto.WireVarint)
+		b.EncodeVarint(uint64(x.Id))
+	case *Key_PathElement_Name:
+		b.EncodeVarint(3<<3 | proto.WireBytes)
+		b.EncodeStringBytes(x.Name)
+	case nil:
+	default:
+		return fmt.Errorf("Key_PathElement.IdType has unexpected type %T", x)
+	}
+	return nil
+}
+
+func _Key_PathElement_OneofUnmarshaler(msg proto.Message, tag, wire int, b *proto.Buffer) (bool, error) {
+	m := msg.(*Key_PathElement)
+	switch tag {
+	case 2: // id_type.id
+		if wire != proto.WireVarint {
+			return true, proto.ErrInternalBadWireType
+		}
+		x, err := b.DecodeVarint()
+		m.IdType = &Key_PathElement_Id{int64(x)}
+		return true, err
+	case 3: // id_type.name
+		if wire != proto.WireBytes {
+			return true, proto.ErrInternalBadWireType
+		}
+		x, err := b.DecodeStringBytes()
+		m.IdType = &Key_PathElement_Name{x}
+		return true, err
+	default:
+		return false, nil
+	}
+}
+
+func _Key_PathElement_OneofSizer(msg proto.Message) (n int) {
+	m := msg.(*Key_PathElement)
+	// id_type
+	switch x := m.IdType.(type) {
+	case *Key_PathElement_Id:
+		n += 1 // tag and wire
+		n += proto.SizeVarint(uint64(x.Id))
+	case *Key_PathElement_Name:
+		n += 1 // tag and wire
+		n += proto.SizeVarint(uint64(len(x.Name)))
+		n += len(x.Name)
+	case nil:
+	default:
+		panic(fmt.Sprintf("proto: unexpected type %T in oneof", x))
+	}
+	return n
+}
+
+var fileDescriptor_entity_096a297364b049a5 = []byte{
+	// 780 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x94, 0xff, 0x6e, 0xdc, 0x44,
+	0x10, 0xc7, 0xed, 0xbb, 0x5c, 0x1a, 0x8f, 0xdd, 0xa4, 0x6c, 0x2a, 0x61, 0x02, 0x28, 0x26, 0x80,
+	0x74, 0x02, 0xc9, 0x6e, 0xc2, 0x1f, 0x54, 0x14, 0xa4, 0x72, 0x25, 0xe0, 0x28, 0x15, 0x9c, 0x56,
+	0x55, 0x24, 0x50, 0xa4, 0xd3, 0xde, 0x79, 0xeb, 0x2e, 0x67, 0xef, 0x5a, 0xf6, 0x3a, 0xaa, 0xdf,
+	0x05, 0xf1, 0x00, 0x3c, 0x0a, 0x8f, 0x80, 0x78, 0x18, 0xb4, 0x3f, 0xec, 0x0b, 0xed, 0x35, 0xff,
+	0x79, 0x67, 0x3e, 0xdf, 0xd9, 0xef, 0xec, 0xce, 0x1a, 0xa2, 0x5c, 0x88, 0xbc, 0xa0, 0x49, 0x46,
+	0x24, 0x69, 0xa4, 0xa8, 0x69, 0x72, 0x73, 0x9a, 0x50, 0x2e, 0x99, 0xec, 0xe2, 0xaa, 0x16, 0x52,
+	0xa0, 0x43, 0x43, 0xc4, 0x03, 0x11, 0xdf, 0x9c, 0x1e, 0x7d, 0x64, 0x65, 0xa4, 0x62, 0x09, 0xe1,
+	0x5c, 0x48, 0x22, 0x99, 0xe0, 0x8d, 0x91, 0x0c, 0x59, 0xbd, 0x5a, 0xb6, 0x2f, 0x93, 0x46, 0xd6,
+	0xed, 0x4a, 0xda, 0xec, 0xf1, 0x9b, 0x59, 0xc9, 0x4a, 0xda, 0x48, 0x52, 0x56, 0x16, 0x08, 0x2d,
+	0x20, 0xbb, 0x8a, 0x26, 0x05, 0x91, 0x05, 0xcf, 0x4d, 0xe6, 0xe4, 0x17, 0xf0, 0xe7, 0xa4, 0x96,
+	0x4c, 0x6d, 0x76, 0x91, 0xa1, 0x8f, 0x01, 0xaa, 0x5a, 0xfc, 0x4e, 0x57, 0x72, 0xc1, 0xb2, 0x70,
+	0x14, 0xb9, 0x53, 0x0f, 0x7b, 0x36, 0x72, 0x91, 0xa1, 0x4f, 0x20, 0xe0, 0xa4, 0xa4, 0x4d, 0x45,
+	0x56, 0x54, 0x01, 0x3b, 0x1a, 0xf0, 0x87, 0xd8, 0x45, 0x76, 0xf2, 0x8f, 0x0b, 0xe3, 0x4b, 0xda,
+	0xa1, 0x67, 0x10, 0x54, 0x7d, 0x61, 0x85, 0xba, 0x91, 0x3b, 0xf5, 0xcf, 0xa2, 0x78, 0x4b, 0xef,
+	0xf1, 0x2d, 0x07, 0xd8, 0xaf, 0x6e, 0xd9, 0x79, 0x0c, 0x3b, 0x15, 0x91, 0xaf, 0xc2, 0x51, 0x34,
+	0x9e, 0xfa, 0x67, 0x9f, 0x6d, 0x15, 0x5f, 0xd2, 0x2e, 0x9e, 0x13, 0xf9, 0xea, 0xbc, 0xa0, 0x25,
+	0xe5, 0x12, 0x6b, 0xc5, 0xd1, 0x0b, 0xd5, 0xd7, 0x10, 0x44, 0x08, 0x76, 0xd6, 0x8c, 0x1b, 0x17,
+	0x1e, 0xd6, 0xdf, 0xe8, 0x01, 0x8c, 0x6c, 0x8f, 0xe3, 0xd4, 0xc1, 0x23, 0x96, 0xa1, 0x87, 0xb0,
+	0xa3, 0x5a, 0x09, 0xc7, 0x8a, 0x4a, 0x1d, 0xac, 0x57, 0x33, 0x0f, 0xee, 0xb1, 0x6c, 0xa1, 0x8e,
+	0xee, 0xe4, 0x29, 0xc0, 0xf7, 0x75, 0x4d, 0xba, 0x2b, 0x52, 0xb4, 0x14, 0x9d, 0xc1, 0xee, 0x8d,
+	0xfa, 0x68, 0x42, 0x57, 0xfb, 0x3b, 0xda, 0xea, 0x4f, 0xb3, 0xd8, 0x92, 0x27, 0x7f, 0x4c, 0x60,
+	0x62, 0xd4, 0x4f, 0x00, 0x78, 0x5b, 0x14, 0x0b, 0x9d, 0x08, 0xfd, 0xc8, 0x9d, 0xee, 0x6f, 0x2a,
+	0xf4, 0x37, 0x19, 0xff, 0xdc, 0x16, 0x85, 0xe6, 0x53, 0x07, 0x7b, 0xbc, 0x5f, 0xa0, 0xcf, 0xe1,
+	0xfe, 0x52, 0x88, 0x82, 0x12, 0x6e, 0xf5, 0xaa, 0xb1, 0xbd, 0xd4, 0xc1, 0x81, 0x0d, 0x0f, 0x18,
+	0xe3, 0x92, 0xe6, 0xb4, 0xb6, 0x58, 0xdf, 0x6d, 0x60, 0xc3, 0x06, 0xfb, 0x14, 0x82, 0x4c, 0xb4,
+	0xcb, 0x82, 0x5a, 0x4a, 0xf5, 0xef, 0xa6, 0x0e, 0xf6, 0x4d, 0xd4, 0x40, 0xe7, 0x70, 0x30, 0x8c,
+	0x95, 0xe5, 0x40, 0xdf, 0xe9, 0xdb, 0xa6, 0x5f, 0xf4, 0x5c, 0xea, 0xe0, 0xfd, 0x41, 0x64, 0xca,
+	0x7c, 0x0d, 0xde, 0x9a, 0x76, 0xb6, 0xc0, 0x44, 0x17, 0x08, 0xdf, 0x75, 0xaf, 0xa9, 0x83, 0xf7,
+	0xd6, 0xb4, 0x1b, 0x4c, 0x36, 0xb2, 0x66, 0x3c, 0xb7, 0xda, 0xf7, 0xec, 0x25, 0xf9, 0x26, 0x6a,
+	0xa0, 0x63, 0x80, 0x65, 0x21, 0x96, 0x16, 0x41, 0x91, 0x3b, 0x0d, 0xd4, 0xc1, 0xa9, 0x98, 0x01,
+	0xbe, 0x83, 0x83, 0x9c, 0x8a, 0x45, 0x25, 0x18, 0x97, 0x96, 0xda, 0xd3, 0x26, 0x0e, 0x7b, 0x13,
+	0xea, 0xa2, 0xe3, 0xe7, 0x44, 0x3e, 0xe7, 0x79, 0xea, 0xe0, 0xfb, 0x39, 0x15, 0x73, 0x05, 0x1b,
+	0xf9, 0x53, 0x08, 0xcc, 0x53, 0xb6, 0xda, 0x5d, 0xad, 0xfd, 0x70, 0x6b, 0x03, 0xe7, 0x1a, 0x54,
+	0x0e, 0x8d, 0xc4, 0x54, 0x98, 0x81, 0x4f, 0xd4, 0x08, 0xd9, 0x02, 0x9e, 0x2e, 0x70, 0xbc, 0xb5,
+	0xc0, 0x66, 0xd4, 0x52, 0x07, 0x03, 0xd9, 0x0c, 0x5e, 0x08, 0xf7, 0x4a, 0x4a, 0x38, 0xe3, 0x79,
+	0xb8, 0x1f, 0xb9, 0xd3, 0x09, 0xee, 0x97, 0xe8, 0x11, 0x3c, 0xa4, 0xaf, 0x57, 0x45, 0x9b, 0xd1,
+	0xc5, 0xcb, 0x5a, 0x94, 0x0b, 0xc6, 0x33, 0xfa, 0x9a, 0x36, 0xe1, 0xa1, 0x1a, 0x0f, 0x8c, 0x6c,
+	0xee, 0xc7, 0x5a, 0x94, 0x17, 0x26, 0x33, 0x0b, 0x00, 0xb4, 0x13, 0x33, 0xe0, 0xff, 0xba, 0xb0,
+	0x6b, 0x7c, 0xa3, 0x2f, 0x60, 0xbc, 0xa6, 0x9d, 0x7d, 0xb7, 0xef, 0xbc, 0x22, 0xac, 0x20, 0x74,
+	0xa9, 0x7f, 0x1b, 0x15, 0xad, 0x25, 0xa3, 0x4d, 0x38, 0xd6, 0xaf, 0xe1, 0xcb, 0x3b, 0x0e, 0x25,
+	0x9e, 0x0f, 0xf4, 0x39, 0x97, 0x75, 0x87, 0x6f, 0xc9, 0x8f, 0x7e, 0x85, 0x83, 0x37, 0xd2, 0xe8,
+	0xc1, 0xc6, 0x8b, 0x67, 0x76, 0x7c, 0x04, 0x93, 0xcd, 0x44, 0xdf, 0xfd, 0xf4, 0x0c, 0xf8, 0xcd,
+	0xe8, 0xb1, 0x3b, 0xfb, 0xd3, 0x85, 0xf7, 0x57, 0xa2, 0xdc, 0x06, 0xcf, 0x7c, 0x63, 0x6d, 0xae,
+	0x86, 0x78, 0xee, 0xfe, 0xf6, 0xad, 0x65, 0x72, 0x51, 0x10, 0x9e, 0xc7, 0xa2, 0xce, 0x93, 0x9c,
+	0x72, 0x3d, 0xe2, 0x89, 0x49, 0x91, 0x8a, 0x35, 0xff, 0xfb, 0xcb, 0x3f, 0x19, 0x16, 0x7f, 0x8d,
+	0x3e, 0xf8, 0xc9, 0xc8, 0x9f, 0x15, 0xa2, 0xcd, 0xe2, 0x1f, 0x86, 0x8d, 0xae, 0x4e, 0xff, 0xee,
+	0x73, 0xd7, 0x3a, 0x77, 0x3d, 0xe4, 0xae, 0xaf, 0x4e, 0x97, 0xbb, 0x7a, 0x83, 0xaf, 0xfe, 0x0b,
+	0x00, 0x00, 0xff, 0xff, 0xf3, 0xdd, 0x11, 0x96, 0x45, 0x06, 0x00, 0x00,
+}
+
+var xxx_messageInfo_Key proto.InternalMessageInfo
diff --git a/v2/datastore/key.go b/v2/datastore/key.go
new file mode 100644
index 0000000..3018786
--- /dev/null
+++ b/v2/datastore/key.go
@@ -0,0 +1,400 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"bytes"
+	"context"
+	"encoding/base64"
+	"encoding/gob"
+	"errors"
+	"fmt"
+	"strconv"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/datastore"
+)
+
+type KeyRangeCollisionError struct {
+	start int64
+	end   int64
+}
+
+func (e *KeyRangeCollisionError) Error() string {
+	return fmt.Sprintf("datastore: Collision when attempting to allocate range [%d, %d]",
+		e.start, e.end)
+}
+
+type KeyRangeContentionError struct {
+	start int64
+	end   int64
+}
+
+func (e *KeyRangeContentionError) Error() string {
+	return fmt.Sprintf("datastore: Contention when attempting to allocate range [%d, %d]",
+		e.start, e.end)
+}
+
+// Key represents the datastore key for a stored entity, and is immutable.
+type Key struct {
+	kind      string
+	stringID  string
+	intID     int64
+	parent    *Key
+	appID     string
+	namespace string
+}
+
+// Kind returns the key's kind (also known as entity type).
+func (k *Key) Kind() string {
+	return k.kind
+}
+
+// StringID returns the key's string ID (also known as an entity name or key
+// name), which may be "".
+func (k *Key) StringID() string {
+	return k.stringID
+}
+
+// IntID returns the key's integer ID, which may be 0.
+func (k *Key) IntID() int64 {
+	return k.intID
+}
+
+// Parent returns the key's parent key, which may be nil.
+func (k *Key) Parent() *Key {
+	return k.parent
+}
+
+// AppID returns the key's application ID.
+func (k *Key) AppID() string {
+	return k.appID
+}
+
+// Namespace returns the key's namespace.
+func (k *Key) Namespace() string {
+	return k.namespace
+}
+
+// Incomplete returns whether the key does not refer to a stored entity.
+// In particular, whether the key has a zero StringID and a zero IntID.
+func (k *Key) Incomplete() bool {
+	return k.stringID == "" && k.intID == 0
+}
+
+// valid returns whether the key is valid.
+func (k *Key) valid() bool {
+	if k == nil {
+		return false
+	}
+	for ; k != nil; k = k.parent {
+		if k.kind == "" || k.appID == "" {
+			return false
+		}
+		if k.stringID != "" && k.intID != 0 {
+			return false
+		}
+		if k.parent != nil {
+			if k.parent.Incomplete() {
+				return false
+			}
+			if k.parent.appID != k.appID || k.parent.namespace != k.namespace {
+				return false
+			}
+		}
+	}
+	return true
+}
+
+// Equal returns whether two keys are equal.
+func (k *Key) Equal(o *Key) bool {
+	for k != nil && o != nil {
+		if k.kind != o.kind || k.stringID != o.stringID || k.intID != o.intID || k.appID != o.appID || k.namespace != o.namespace {
+			return false
+		}
+		k, o = k.parent, o.parent
+	}
+	return k == o
+}
+
+// root returns the furthest ancestor of a key, which may be itself.
+func (k *Key) root() *Key {
+	for k.parent != nil {
+		k = k.parent
+	}
+	return k
+}
+
+// marshal marshals the key's string representation to the buffer.
+func (k *Key) marshal(b *bytes.Buffer) {
+	if k.parent != nil {
+		k.parent.marshal(b)
+	}
+	b.WriteByte('/')
+	b.WriteString(k.kind)
+	b.WriteByte(',')
+	if k.stringID != "" {
+		b.WriteString(k.stringID)
+	} else {
+		b.WriteString(strconv.FormatInt(k.intID, 10))
+	}
+}
+
+// String returns a string representation of the key.
+func (k *Key) String() string {
+	if k == nil {
+		return ""
+	}
+	b := bytes.NewBuffer(make([]byte, 0, 512))
+	k.marshal(b)
+	return b.String()
+}
+
+type gobKey struct {
+	Kind      string
+	StringID  string
+	IntID     int64
+	Parent    *gobKey
+	AppID     string
+	Namespace string
+}
+
+func keyToGobKey(k *Key) *gobKey {
+	if k == nil {
+		return nil
+	}
+	return &gobKey{
+		Kind:      k.kind,
+		StringID:  k.stringID,
+		IntID:     k.intID,
+		Parent:    keyToGobKey(k.parent),
+		AppID:     k.appID,
+		Namespace: k.namespace,
+	}
+}
+
+func gobKeyToKey(gk *gobKey) *Key {
+	if gk == nil {
+		return nil
+	}
+	return &Key{
+		kind:      gk.Kind,
+		stringID:  gk.StringID,
+		intID:     gk.IntID,
+		parent:    gobKeyToKey(gk.Parent),
+		appID:     gk.AppID,
+		namespace: gk.Namespace,
+	}
+}
+
+func (k *Key) GobEncode() ([]byte, error) {
+	buf := new(bytes.Buffer)
+	if err := gob.NewEncoder(buf).Encode(keyToGobKey(k)); err != nil {
+		return nil, err
+	}
+	return buf.Bytes(), nil
+}
+
+func (k *Key) GobDecode(buf []byte) error {
+	gk := new(gobKey)
+	if err := gob.NewDecoder(bytes.NewBuffer(buf)).Decode(gk); err != nil {
+		return err
+	}
+	*k = *gobKeyToKey(gk)
+	return nil
+}
+
+func (k *Key) MarshalJSON() ([]byte, error) {
+	return []byte(`"` + k.Encode() + `"`), nil
+}
+
+func (k *Key) UnmarshalJSON(buf []byte) error {
+	if len(buf) < 2 || buf[0] != '"' || buf[len(buf)-1] != '"' {
+		return errors.New("datastore: bad JSON key")
+	}
+	k2, err := DecodeKey(string(buf[1 : len(buf)-1]))
+	if err != nil {
+		return err
+	}
+	*k = *k2
+	return nil
+}
+
+// Encode returns an opaque representation of the key
+// suitable for use in HTML and URLs.
+// This is compatible with the Python and Java runtimes.
+func (k *Key) Encode() string {
+	ref := keyToProto("", k)
+
+	b, err := proto.Marshal(ref)
+	if err != nil {
+		panic(err)
+	}
+
+	// Trailing padding is stripped.
+	return strings.TrimRight(base64.URLEncoding.EncodeToString(b), "=")
+}
+
+// DecodeKey decodes a key from the opaque representation returned by Encode.
+func DecodeKey(encoded string) (*Key, error) {
+	// Re-add padding.
+	if m := len(encoded) % 4; m != 0 {
+		encoded += strings.Repeat("=", 4-m)
+	}
+
+	b, err := base64.URLEncoding.DecodeString(encoded)
+	if err != nil {
+		return nil, err
+	}
+
+	ref := new(pb.Reference)
+	if err := proto.Unmarshal(b, ref); err != nil {
+		// Couldn't decode it as an App Engine key, try decoding it as a key encoded by cloud.google.com/go/datastore.
+		if k := decodeCloudKey(encoded); k != nil {
+			return k, nil
+		}
+		return nil, err
+	}
+
+	return protoToKey(ref)
+}
+
+// NewIncompleteKey creates a new incomplete key.
+// kind cannot be empty.
+func NewIncompleteKey(c context.Context, kind string, parent *Key) *Key {
+	return NewKey(c, kind, "", 0, parent)
+}
+
+// NewKey creates a new key.
+// kind cannot be empty.
+// Either one or both of stringID and intID must be zero. If both are zero,
+// the key returned is incomplete.
+// parent must either be a complete key or nil.
+func NewKey(c context.Context, kind, stringID string, intID int64, parent *Key) *Key {
+	// If there's a parent key, use its namespace.
+	// Otherwise, use any namespace attached to the context.
+	var namespace string
+	if parent != nil {
+		namespace = parent.namespace
+	} else {
+		namespace = internal.NamespaceFromContext(c)
+	}
+
+	return &Key{
+		kind:      kind,
+		stringID:  stringID,
+		intID:     intID,
+		parent:    parent,
+		appID:     internal.FullyQualifiedAppID(c),
+		namespace: namespace,
+	}
+}
+
+// AllocateIDs returns a range of n integer IDs with the given kind and parent
+// combination. kind cannot be empty; parent may be nil. The IDs in the range
+// returned will not be used by the datastore's automatic ID sequence generator
+// and may be used with NewKey without conflict.
+//
+// The range is inclusive at the low end and exclusive at the high end. In
+// other words, valid intIDs x satisfy low <= x && x < high.
+//
+// If no error is returned, low + n == high.
+func AllocateIDs(c context.Context, kind string, parent *Key, n int) (low, high int64, err error) {
+	if kind == "" {
+		return 0, 0, errors.New("datastore: AllocateIDs given an empty kind")
+	}
+	if n < 0 {
+		return 0, 0, fmt.Errorf("datastore: AllocateIDs given a negative count: %d", n)
+	}
+	if n == 0 {
+		return 0, 0, nil
+	}
+	req := &pb.AllocateIdsRequest{
+		ModelKey: keyToProto("", NewIncompleteKey(c, kind, parent)),
+		Size:     proto.Int64(int64(n)),
+	}
+	res := &pb.AllocateIdsResponse{}
+	if err := internal.Call(c, "datastore_v3", "AllocateIds", req, res); err != nil {
+		return 0, 0, err
+	}
+	// The protobuf is inclusive at both ends. Idiomatic Go (e.g. slices, for loops)
+	// is inclusive at the low end and exclusive at the high end, so we add 1.
+	low = res.GetStart()
+	high = res.GetEnd() + 1
+	if low+int64(n) != high {
+		return 0, 0, fmt.Errorf("datastore: internal error: could not allocate %d IDs", n)
+	}
+	return low, high, nil
+}
+
+// AllocateIDRange allocates a range of IDs with specific endpoints.
+// The range is inclusive at both the low and high end. Once these IDs have been
+// allocated, you can manually assign them to newly created entities.
+//
+// The Datastore's automatic ID allocator never assigns a key that has already
+// been allocated (either through automatic ID allocation or through an explicit
+// AllocateIDs call). As a result, entities written to the given key range will
+// never be overwritten. However, writing entities with manually assigned keys in
+// this range may overwrite existing entities (or new entities written by a separate
+// request), depending on the error returned.
+//
+// Use this only if you have an existing numeric ID range that you want to reserve
+// (for example, bulk loading entities that already have IDs). If you don't care
+// about which IDs you receive, use AllocateIDs instead.
+//
+// AllocateIDRange returns nil if the range is successfully allocated. If one or more
+// entities with an ID in the given range already exist, it returns a KeyRangeCollisionError.
+// If the Datastore has already cached IDs in this range (e.g. from a previous call to
+// AllocateIDRange), it returns a KeyRangeContentionError. Errors of other types indicate
+// problems with arguments or an error returned directly from the Datastore.
+func AllocateIDRange(c context.Context, kind string, parent *Key, start, end int64) (err error) {
+	if kind == "" {
+		return errors.New("datastore: AllocateIDRange given an empty kind")
+	}
+
+	if start < 1 || end < 1 {
+		return errors.New("datastore: AllocateIDRange start and end must both be greater than 0")
+	}
+
+	if start > end {
+		return errors.New("datastore: AllocateIDRange start must be before end")
+	}
+
+	req := &pb.AllocateIdsRequest{
+		ModelKey: keyToProto("", NewIncompleteKey(c, kind, parent)),
+		Max:      proto.Int64(end),
+	}
+	res := &pb.AllocateIdsResponse{}
+	if err := internal.Call(c, "datastore_v3", "AllocateIds", req, res); err != nil {
+		return err
+	}
+
+	// Check for collisions, i.e. existing entities with IDs in this range.
+	// We could do this before the allocation, but we'd still have to do it
+	// afterward as well to catch the race condition where an entity is inserted
+	// after that initial check but before the allocation. Skip the up-front check
+	// and just do it once.
+	q := NewQuery(kind).Filter("__key__ >=", NewKey(c, kind, "", start, parent)).
+		Filter("__key__ <=", NewKey(c, kind, "", end, parent)).KeysOnly().Limit(1)
+
+	keys, err := q.GetAll(c, nil)
+	if err != nil {
+		return err
+	}
+	if len(keys) != 0 {
+		return &KeyRangeCollisionError{start: start, end: end}
+	}
+
+	// Check for a race condition, i.e. cases where the datastore may have
+	// cached ID batches that contain IDs in this range.
+	if start < res.GetStart() {
+		return &KeyRangeContentionError{start: start, end: end}
+	}
+
+	return nil
+}
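
As a quick illustration of the AllocateIDs contract documented above (low inclusive, high exclusive), a sketch that reserves a block of IDs and turns them into complete keys (the "Task" kind is an assumption) might read:

	// Sketch only: reserve count IDs and build complete keys from them.
	func makeTaskKeys(ctx context.Context, count int) ([]*datastore.Key, error) {
		low, high, err := datastore.AllocateIDs(ctx, "Task", nil, count)
		if err != nil {
			return nil, err
		}
		keys := make([]*datastore.Key, 0, count)
		for id := low; id < high; id++ {
			keys = append(keys, datastore.NewKey(ctx, "Task", "", id, nil))
		}
		return keys, nil
	}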
diff --git a/v2/datastore/key_test.go b/v2/datastore/key_test.go
new file mode 100644
index 0000000..236e97d
--- /dev/null
+++ b/v2/datastore/key_test.go
@@ -0,0 +1,203 @@
+// Copyright 2011 Google Inc. All Rights Reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"bytes"
+	"context"
+	"encoding/gob"
+	"encoding/json"
+	"testing"
+
+	"google.golang.org/appengine/v2/internal"
+)
+
+func TestKeyEncoding(t *testing.T) {
+	testCases := []struct {
+		desc string
+		key  *Key
+		exp  string
+	}{
+		{
+			desc: "A simple key with an int ID",
+			key: &Key{
+				kind:  "Person",
+				intID: 1,
+				appID: "glibrary",
+			},
+			exp: "aghnbGlicmFyeXIMCxIGUGVyc29uGAEM",
+		},
+		{
+			desc: "A simple key with a string ID",
+			key: &Key{
+				kind:     "Graph",
+				stringID: "graph:7-day-active",
+				appID:    "glibrary",
+			},
+			exp: "aghnbGlicmFyeXIdCxIFR3JhcGgiEmdyYXBoOjctZGF5LWFjdGl2ZQw",
+		},
+		{
+			desc: "A key with a parent",
+			key: &Key{
+				kind:  "WordIndex",
+				intID: 1033,
+				parent: &Key{
+					kind:  "WordIndex",
+					intID: 1020032,
+					appID: "glibrary",
+				},
+				appID: "glibrary",
+			},
+			exp: "aghnbGlicmFyeXIhCxIJV29yZEluZGV4GIChPgwLEglXb3JkSW5kZXgYiQgM",
+		},
+	}
+	for _, tc := range testCases {
+		enc := tc.key.Encode()
+		if enc != tc.exp {
+			t.Errorf("%s: got %q, want %q", tc.desc, enc, tc.exp)
+		}
+
+		key, err := DecodeKey(tc.exp)
+		if err != nil {
+			t.Errorf("%s: failed decoding key: %v", tc.desc, err)
+			continue
+		}
+		if !key.Equal(tc.key) {
+			t.Errorf("%s: decoded key %v, want %v", tc.desc, key, tc.key)
+		}
+	}
+}
+
+func TestKeyGob(t *testing.T) {
+	k := &Key{
+		kind:  "Gopher",
+		intID: 3,
+		parent: &Key{
+			kind:     "Mom",
+			stringID: "narwhal",
+			appID:    "gopher-con",
+		},
+		appID: "gopher-con",
+	}
+
+	buf := new(bytes.Buffer)
+	if err := gob.NewEncoder(buf).Encode(k); err != nil {
+		t.Fatalf("gob encode failed: %v", err)
+	}
+
+	k2 := new(Key)
+	if err := gob.NewDecoder(buf).Decode(k2); err != nil {
+		t.Fatalf("gob decode failed: %v", err)
+	}
+	if !k2.Equal(k) {
+		t.Errorf("gob round trip of %v produced %v", k, k2)
+	}
+}
+
+func TestNilKeyGob(t *testing.T) {
+	type S struct {
+		Key *Key
+	}
+	s1 := new(S)
+
+	buf := new(bytes.Buffer)
+	if err := gob.NewEncoder(buf).Encode(s1); err != nil {
+		t.Fatalf("gob encode failed: %v", err)
+	}
+
+	s2 := new(S)
+	if err := gob.NewDecoder(buf).Decode(s2); err != nil {
+		t.Fatalf("gob decode failed: %v", err)
+	}
+	if s2.Key != nil {
+		t.Errorf("gob round trip of nil key produced %v", s2.Key)
+	}
+}
+
+func TestKeyJSON(t *testing.T) {
+	k := &Key{
+		kind:  "Gopher",
+		intID: 2,
+		parent: &Key{
+			kind:     "Mom",
+			stringID: "narwhal",
+			appID:    "gopher-con",
+		},
+		appID: "gopher-con",
+	}
+	exp := `"` + k.Encode() + `"`
+
+	buf, err := json.Marshal(k)
+	if err != nil {
+		t.Fatalf("json.Marshal failed: %v", err)
+	}
+	if s := string(buf); s != exp {
+		t.Errorf("JSON encoding of key %v: got %q, want %q", k, s, exp)
+	}
+
+	k2 := new(Key)
+	if err := json.Unmarshal(buf, k2); err != nil {
+		t.Fatalf("json.Unmarshal failed: %v", err)
+	}
+	if !k2.Equal(k) {
+		t.Errorf("JSON round trip of %v produced %v", k, k2)
+	}
+}
+
+func TestNilKeyJSON(t *testing.T) {
+	type S struct {
+		Key *Key
+	}
+	s1 := new(S)
+
+	buf, err := json.Marshal(s1)
+	if err != nil {
+		t.Fatalf("json.Marshal failed: %v", err)
+	}
+
+	s2 := new(S)
+	if err := json.Unmarshal(buf, s2); err != nil {
+		t.Fatalf("json.Unmarshal failed: %v", err)
+	}
+	if s2.Key != nil {
+		t.Errorf("JSON round trip of nil key produced %v", s2.Key)
+	}
+}
+
+func TestIncompleteKeyWithParent(t *testing.T) {
+	c := internal.WithAppIDOverride(context.Background(), "s~some-app")
+
+	// fadduh is a complete key.
+	fadduh := NewKey(c, "Person", "", 1, nil)
+	if fadduh.Incomplete() {
+		t.Fatalf("fadduh is incomplete")
+	}
+
+	// robert is an incomplete key with fadduh as a parent.
+	robert := NewIncompleteKey(c, "Person", fadduh)
+	if !robert.Incomplete() {
+		t.Fatalf("robert is complete")
+	}
+
+	// Both should be valid keys.
+	if !fadduh.valid() {
+		t.Errorf("fadduh is invalid: %v", fadduh)
+	}
+	if !robert.valid() {
+		t.Errorf("robert is invalid: %v", robert)
+	}
+}
+
+func TestNamespace(t *testing.T) {
+	key := &Key{
+		kind:      "Person",
+		intID:     1,
+		appID:     "s~some-app",
+		namespace: "mynamespace",
+	}
+	if g, w := key.Namespace(), "mynamespace"; g != w {
+		t.Errorf("key.Namespace() = %q, want %q", g, w)
+	}
+}
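
As a usage note on the encode/decode round trip these tests exercise, a small sketch follows; the kind, the numeric ID, and the assumption that `ctx` is an App Engine context are illustrative only:

```go
package example

import (
	"context"
	"fmt"

	"google.golang.org/appengine/v2/datastore"
)

// encodeDecode round-trips a key through its web-safe encoded string form,
// the same form exercised by the tests above.
func encodeDecode(ctx context.Context) error {
	k := datastore.NewKey(ctx, "Person", "", 42, nil)
	enc := k.Encode() // opaque, URL-safe string
	k2, err := datastore.DecodeKey(enc)
	if err != nil {
		return err
	}
	if !k2.Equal(k) {
		return fmt.Errorf("round trip changed the key: %v != %v", k, k2)
	}
	return nil
}
```
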
diff --git a/v2/datastore/keycompat.go b/v2/datastore/keycompat.go
new file mode 100644
index 0000000..4a8f17d
--- /dev/null
+++ b/v2/datastore/keycompat.go
@@ -0,0 +1,88 @@
+// Copyright 2019 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"context"
+	"sync"
+
+	"google.golang.org/appengine/v2/datastore/internal/cloudkey"
+	"google.golang.org/appengine/v2/internal"
+)
+
+var keyConversion struct {
+	mu    sync.RWMutex
+	appID string // read using getKeyConversionAppID
+}
+
+// EnableKeyConversion enables encoded key compatibility with the Cloud
+// Datastore client library (cloud.google.com/go/datastore). Encoded keys
+// generated by the Cloud Datastore client library will be decoded into App
+// Engine datastore keys.
+//
+// The context provided must be an App Engine context if running in App Engine
+// first generation runtime. This can be called in the /_ah/start handler. It is
+// safe and cheap to call multiple times, so it can also be inserted as
+// middleware.
+//
+// Enabling key compatibility does not affect the encoding format used by
+// Key.Encode; it only expands the set of keys that can be decoded with
+// DecodeKey.
+func EnableKeyConversion(ctx context.Context) {
+	// Only attempt to set appID if it's unset.
+	// If already set, ignore.
+	if getKeyConversionAppID() != "" {
+		return
+	}
+
+	keyConversion.mu.Lock()
+	// Check again to avoid race where another goroutine set appID between the call
+	// to getKeyConversionAppID above and taking the write lock.
+	if keyConversion.appID == "" {
+		keyConversion.appID = internal.FullyQualifiedAppID(ctx)
+	}
+	keyConversion.mu.Unlock()
+}
+
+func getKeyConversionAppID() string {
+	keyConversion.mu.RLock()
+	appID := keyConversion.appID
+	keyConversion.mu.RUnlock()
+	return appID
+}
+
+// decodeCloudKey attempts to decode the given encoded key generated by the
+// Cloud Datastore client library (cloud.google.com/go/datastore), returning nil
+// if the key couldn't be decoded.
+func decodeCloudKey(encoded string) *Key {
+	appID := getKeyConversionAppID()
+	if appID == "" {
+		return nil
+	}
+
+	k, err := cloudkey.DecodeKey(encoded)
+	if err != nil {
+		return nil
+	}
+	return convertCloudKey(k, appID)
+}
+
+// convertCloudKey converts a Cloud Datastore key to an App Engine Datastore
+// key. Cloud Datastore keys don't include the project/app ID, so we must add
+// it back in.
+func convertCloudKey(key *cloudkey.Key, appID string) *Key {
+	if key == nil {
+		return nil
+	}
+	k := &Key{
+		intID:     key.ID,
+		kind:      key.Kind,
+		namespace: key.Namespace,
+		parent:    convertCloudKey(key.Parent, appID),
+		stringID:  key.Name,
+		appID:     appID,
+	}
+	return k
+}
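
A hedged sketch of wiring EnableKeyConversion up as middleware, as the doc comment suggests; the middleware shape and handler wiring below are assumptions for illustration, not part of the upstream change:

```go
package example

import (
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/datastore"
)

// withKeyConversion enables decoding of keys produced by
// cloud.google.com/go/datastore before the wrapped handler runs. The call is
// idempotent and cheap, so running it on every request is fine.
func withKeyConversion(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		datastore.EnableKeyConversion(appengine.NewContext(r))
		next.ServeHTTP(w, r)
	})
}
```
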
diff --git a/v2/datastore/keycompat_test.go b/v2/datastore/keycompat_test.go
new file mode 100644
index 0000000..923fdac
--- /dev/null
+++ b/v2/datastore/keycompat_test.go
@@ -0,0 +1,89 @@
+// Copyright 2019 Google Inc. All Rights Reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"reflect"
+	"testing"
+)
+
+func TestKeyConversion(t *testing.T) {
+	var tests = []struct {
+		desc       string
+		key        *Key
+		encodedKey string
+	}{
+		{
+			desc: "A control test for legacy to legacy key conversion int as the key",
+			key: &Key{
+				kind:  "Person",
+				intID: 1,
+				appID: "glibrary",
+			},
+			encodedKey: "aghnbGlicmFyeXIMCxIGUGVyc29uGAEM",
+		},
+		{
+			desc: "A control test for legacy to legacy key conversion string as the key",
+			key: &Key{
+				kind:     "Graph",
+				stringID: "graph:7-day-active",
+				appID:    "glibrary",
+			},
+			encodedKey: "aghnbGlicmFyeXIdCxIFR3JhcGgiEmdyYXBoOjctZGF5LWFjdGl2ZQw",
+		},
+
+		// These are keys encoded with cloud.google.com/go/datastore
+		// Standard int as the key
+		{
+			desc: "Convert new key format to old key with int id",
+			key: &Key{
+				kind:  "WordIndex",
+				intID: 1033,
+				appID: "glibrary",
+			},
+			encodedKey: "Eg4KCVdvcmRJbmRleBCJCA",
+		},
+		// These are keys encoded with cloud.google.com/go/datastore
+		// Standard string
+		{
+			desc: "Convert new key format to old key with string id",
+			key: &Key{
+				kind:     "WordIndex",
+				stringID: "IAmAnID",
+				appID:    "glibrary",
+			},
+			encodedKey: "EhQKCVdvcmRJbmRleBoHSUFtQW5JRA",
+		},
+
+		// These are keys encoded with cloud.google.com/go/datastore
+		// ID String with parent as string
+		{
+			desc: "Convert new key format to old key with string id with a parent",
+			key: &Key{
+				kind:     "WordIndex",
+				stringID: "IAmAnID",
+				appID:    "glibrary",
+				parent: &Key{
+					kind:     "LetterIndex",
+					stringID: "IAmAnotherID",
+					appID:    "glibrary",
+				},
+			},
+			encodedKey: "EhsKC0xldHRlckluZGV4GgxJQW1Bbm90aGVySUQSFAoJV29yZEluZGV4GgdJQW1BbklE",
+		},
+	}
+
+	// Simulate the key converter enablement
+	keyConversion.appID = "glibrary"
+	for _, tc := range tests {
+		dk, err := DecodeKey(tc.encodedKey)
+		if err != nil {
+			t.Fatalf("DecodeKey: %v", err)
+		}
+		if !reflect.DeepEqual(dk, tc.key) {
+			t.Errorf("%s: got %+v, want %+v", tc.desc, dk, tc.key)
+		}
+	}
+}
diff --git a/v2/datastore/load.go b/v2/datastore/load.go
new file mode 100644
index 0000000..8a90cfa
--- /dev/null
+++ b/v2/datastore/load.go
@@ -0,0 +1,429 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"fmt"
+	"reflect"
+	"strings"
+	"time"
+
+	"github.com/golang/protobuf/proto"
+	"google.golang.org/appengine/v2"
+	pb "google.golang.org/appengine/v2/internal/datastore"
+)
+
+var (
+	typeOfBlobKey    = reflect.TypeOf(appengine.BlobKey(""))
+	typeOfByteSlice  = reflect.TypeOf([]byte(nil))
+	typeOfByteString = reflect.TypeOf(ByteString(nil))
+	typeOfGeoPoint   = reflect.TypeOf(appengine.GeoPoint{})
+	typeOfTime       = reflect.TypeOf(time.Time{})
+	typeOfKeyPtr     = reflect.TypeOf(&Key{})
+	typeOfEntityPtr  = reflect.TypeOf(&Entity{})
+)
+
+// typeMismatchReason returns a string explaining why the property p could not
+// be stored in an entity field of type v.Type().
+func typeMismatchReason(pValue interface{}, v reflect.Value) string {
+	entityType := "empty"
+	switch pValue.(type) {
+	case int64:
+		entityType = "int"
+	case bool:
+		entityType = "bool"
+	case string:
+		entityType = "string"
+	case float64:
+		entityType = "float"
+	case *Key:
+		entityType = "*datastore.Key"
+	case time.Time:
+		entityType = "time.Time"
+	case appengine.BlobKey:
+		entityType = "appengine.BlobKey"
+	case appengine.GeoPoint:
+		entityType = "appengine.GeoPoint"
+	case ByteString:
+		entityType = "datastore.ByteString"
+	case []byte:
+		entityType = "[]byte"
+	}
+	return fmt.Sprintf("type mismatch: %s versus %v", entityType, v.Type())
+}
+
+type propertyLoader struct {
+	// m holds the number of times a substruct field like "Foo.Bar.Baz" has
+	// been seen so far. The map is constructed lazily.
+	m map[string]int
+}
+
+func (l *propertyLoader) load(codec *structCodec, structValue reflect.Value, p Property, requireSlice bool) string {
+	var v reflect.Value
+	var sliceIndex int
+
+	name := p.Name
+
+	// If name ends with a '.', the last field is anonymous.
+	// In this case, strings.Split will give us "" as the
+	// last element of our fields slice, which will match the ""
+	// field name in the substruct codec.
+	fields := strings.Split(name, ".")
+
+	for len(fields) > 0 {
+		var decoder fieldCodec
+		var ok bool
+
+		// Cut off the last field (delimited by ".") and find its parent
+		// in the codec.
+		// eg. for name "A.B.C.D", split off "A.B.C" and try to
+		// find a field in the codec with this name.
+		// Loop again with "A.B", etc.
+		for i := len(fields); i > 0; i-- {
+			parent := strings.Join(fields[:i], ".")
+			decoder, ok = codec.fields[parent]
+			if ok {
+				fields = fields[i:]
+				break
+			}
+		}
+
+		// If we never found a matching field in the codec, return an
+		// error message.
+		if !ok {
+			return "no such struct field"
+		}
+
+		v = initField(structValue, decoder.path)
+		if !v.IsValid() {
+			return "no such struct field"
+		}
+		if !v.CanSet() {
+			return "cannot set struct field"
+		}
+
+		if decoder.structCodec != nil {
+			codec = decoder.structCodec
+			structValue = v
+		}
+
+		if v.Kind() == reflect.Slice && v.Type() != typeOfByteSlice {
+			if l.m == nil {
+				l.m = make(map[string]int)
+			}
+			sliceIndex = l.m[p.Name]
+			l.m[p.Name] = sliceIndex + 1
+			for v.Len() <= sliceIndex {
+				v.Set(reflect.Append(v, reflect.New(v.Type().Elem()).Elem()))
+			}
+			structValue = v.Index(sliceIndex)
+			requireSlice = false
+		}
+	}
+
+	var slice reflect.Value
+	if v.Kind() == reflect.Slice && v.Type().Elem().Kind() != reflect.Uint8 {
+		slice = v
+		v = reflect.New(v.Type().Elem()).Elem()
+	} else if requireSlice {
+		return "multiple-valued property requires a slice field type"
+	}
+
+	// Convert indexValues to a Go value with a meaning derived from the
+	// destination type.
+	pValue := p.Value
+	if iv, ok := pValue.(indexValue); ok {
+		meaning := pb.Property_NO_MEANING
+		switch v.Type() {
+		case typeOfBlobKey:
+			meaning = pb.Property_BLOBKEY
+		case typeOfByteSlice:
+			meaning = pb.Property_BLOB
+		case typeOfByteString:
+			meaning = pb.Property_BYTESTRING
+		case typeOfGeoPoint:
+			meaning = pb.Property_GEORSS_POINT
+		case typeOfTime:
+			meaning = pb.Property_GD_WHEN
+		case typeOfEntityPtr:
+			meaning = pb.Property_ENTITY_PROTO
+		}
+		var err error
+		pValue, err = propValue(iv.value, meaning)
+		if err != nil {
+			return err.Error()
+		}
+	}
+
+	if errReason := setVal(v, pValue); errReason != "" {
+		// Set the slice back to its zero value.
+		if slice.IsValid() {
+			slice.Set(reflect.Zero(slice.Type()))
+		}
+		return errReason
+	}
+
+	if slice.IsValid() {
+		slice.Index(sliceIndex).Set(v)
+	}
+
+	return ""
+}
+
+// setVal sets v to the value pValue.
+func setVal(v reflect.Value, pValue interface{}) string {
+	switch v.Kind() {
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		x, ok := pValue.(int64)
+		if !ok && pValue != nil {
+			return typeMismatchReason(pValue, v)
+		}
+		if v.OverflowInt(x) {
+			return fmt.Sprintf("value %v overflows struct field of type %v", x, v.Type())
+		}
+		v.SetInt(x)
+	case reflect.Bool:
+		x, ok := pValue.(bool)
+		if !ok && pValue != nil {
+			return typeMismatchReason(pValue, v)
+		}
+		v.SetBool(x)
+	case reflect.String:
+		switch x := pValue.(type) {
+		case appengine.BlobKey:
+			v.SetString(string(x))
+		case ByteString:
+			v.SetString(string(x))
+		case string:
+			v.SetString(x)
+		default:
+			if pValue != nil {
+				return typeMismatchReason(pValue, v)
+			}
+		}
+	case reflect.Float32, reflect.Float64:
+		x, ok := pValue.(float64)
+		if !ok && pValue != nil {
+			return typeMismatchReason(pValue, v)
+		}
+		if v.OverflowFloat(x) {
+			return fmt.Sprintf("value %v overflows struct field of type %v", x, v.Type())
+		}
+		v.SetFloat(x)
+	case reflect.Ptr:
+		x, ok := pValue.(*Key)
+		if !ok && pValue != nil {
+			return typeMismatchReason(pValue, v)
+		}
+		if _, ok := v.Interface().(*Key); !ok {
+			return typeMismatchReason(pValue, v)
+		}
+		v.Set(reflect.ValueOf(x))
+	case reflect.Struct:
+		switch v.Type() {
+		case typeOfTime:
+			x, ok := pValue.(time.Time)
+			if !ok && pValue != nil {
+				return typeMismatchReason(pValue, v)
+			}
+			v.Set(reflect.ValueOf(x))
+		case typeOfGeoPoint:
+			x, ok := pValue.(appengine.GeoPoint)
+			if !ok && pValue != nil {
+				return typeMismatchReason(pValue, v)
+			}
+			v.Set(reflect.ValueOf(x))
+		default:
+			ent, ok := pValue.(*Entity)
+			if !ok {
+				return typeMismatchReason(pValue, v)
+			}
+
+			// Recursively load nested struct
+			pls, err := newStructPLS(v.Addr().Interface())
+			if err != nil {
+				return err.Error()
+			}
+
+			// if ent has a Key value and our struct has a Key field,
+			// load the Entity's Key value into the Key field on the struct.
+			if ent.Key != nil && pls.codec.keyField != -1 {
+
+				pls.v.Field(pls.codec.keyField).Set(reflect.ValueOf(ent.Key))
+			}
+
+			err = pls.Load(ent.Properties)
+			if err != nil {
+				return err.Error()
+			}
+		}
+	case reflect.Slice:
+		x, ok := pValue.([]byte)
+		if !ok {
+			if y, yok := pValue.(ByteString); yok {
+				x, ok = []byte(y), true
+			}
+		}
+		if !ok && pValue != nil {
+			return typeMismatchReason(pValue, v)
+		}
+		if v.Type().Elem().Kind() != reflect.Uint8 {
+			return typeMismatchReason(pValue, v)
+		}
+		v.SetBytes(x)
+	default:
+		return typeMismatchReason(pValue, v)
+	}
+	return ""
+}
+
+// initField is similar to reflect's Value.FieldByIndex, in that it
+// returns the nested struct field corresponding to index, but it
+// initialises any nil pointers encountered when traversing the structure.
+func initField(val reflect.Value, index []int) reflect.Value {
+	for _, i := range index[:len(index)-1] {
+		val = val.Field(i)
+		if val.Kind() == reflect.Ptr {
+			if val.IsNil() {
+				val.Set(reflect.New(val.Type().Elem()))
+			}
+			val = val.Elem()
+		}
+	}
+	return val.Field(index[len(index)-1])
+}
+
+// loadEntity loads an EntityProto into a PropertyLoadSaver or struct pointer.
+func loadEntity(dst interface{}, src *pb.EntityProto) (err error) {
+	ent, err := protoToEntity(src)
+	if err != nil {
+		return err
+	}
+	if e, ok := dst.(PropertyLoadSaver); ok {
+		return e.Load(ent.Properties)
+	}
+	return LoadStruct(dst, ent.Properties)
+}
+
+func (s structPLS) Load(props []Property) error {
+	var fieldName, reason string
+	var l propertyLoader
+	for _, p := range props {
+		if errStr := l.load(s.codec, s.v, p, p.Multiple); errStr != "" {
+			// We don't return early, as we try to load as many properties as possible.
+			// It is valid to load an entity into a struct that cannot fully represent it.
+			// That case returns an error, but the caller is free to ignore it.
+			fieldName, reason = p.Name, errStr
+		}
+	}
+	if reason != "" {
+		return &ErrFieldMismatch{
+			StructType: s.v.Type(),
+			FieldName:  fieldName,
+			Reason:     reason,
+		}
+	}
+	return nil
+}
+
+func protoToEntity(src *pb.EntityProto) (*Entity, error) {
+	props, rawProps := src.Property, src.RawProperty
+	outProps := make([]Property, 0, len(props)+len(rawProps))
+	for {
+		var (
+			x       *pb.Property
+			noIndex bool
+		)
+		if len(props) > 0 {
+			x, props = props[0], props[1:]
+		} else if len(rawProps) > 0 {
+			x, rawProps = rawProps[0], rawProps[1:]
+			noIndex = true
+		} else {
+			break
+		}
+
+		var value interface{}
+		if x.Meaning != nil && *x.Meaning == pb.Property_INDEX_VALUE {
+			value = indexValue{x.Value}
+		} else {
+			var err error
+			value, err = propValue(x.Value, x.GetMeaning())
+			if err != nil {
+				return nil, err
+			}
+		}
+		outProps = append(outProps, Property{
+			Name:     x.GetName(),
+			Value:    value,
+			NoIndex:  noIndex,
+			Multiple: x.GetMultiple(),
+		})
+	}
+
+	var key *Key
+	if src.Key != nil {
+		// Ignore any error, since nested entity values
+		// are allowed to have an invalid key.
+		key, _ = protoToKey(src.Key)
+	}
+	return &Entity{key, outProps}, nil
+}
+
+// propValue returns a Go value that combines the raw PropertyValue with a
+// meaning. For example, an Int64Value with GD_WHEN becomes a time.Time.
+func propValue(v *pb.PropertyValue, m pb.Property_Meaning) (interface{}, error) {
+	switch {
+	case v.Int64Value != nil:
+		if m == pb.Property_GD_WHEN {
+			return fromUnixMicro(*v.Int64Value), nil
+		} else {
+			return *v.Int64Value, nil
+		}
+	case v.BooleanValue != nil:
+		return *v.BooleanValue, nil
+	case v.StringValue != nil:
+		if m == pb.Property_BLOB {
+			return []byte(*v.StringValue), nil
+		} else if m == pb.Property_BLOBKEY {
+			return appengine.BlobKey(*v.StringValue), nil
+		} else if m == pb.Property_BYTESTRING {
+			return ByteString(*v.StringValue), nil
+		} else if m == pb.Property_ENTITY_PROTO {
+			var ent pb.EntityProto
+			err := proto.Unmarshal([]byte(*v.StringValue), &ent)
+			if err != nil {
+				return nil, err
+			}
+			return protoToEntity(&ent)
+		} else {
+			return *v.StringValue, nil
+		}
+	case v.DoubleValue != nil:
+		return *v.DoubleValue, nil
+	case v.Referencevalue != nil:
+		key, err := referenceValueToKey(v.Referencevalue)
+		if err != nil {
+			return nil, err
+		}
+		return key, nil
+	case v.Pointvalue != nil:
+		// NOTE: Strangely, latitude maps to X, longitude to Y.
+		return appengine.GeoPoint{Lat: v.Pointvalue.GetX(), Lng: v.Pointvalue.GetY()}, nil
+	}
+	return nil, nil
+}
+
+// indexValue is a Property value that is created when entities are loaded from
+// an index, such as from a projection query.
+//
+// Such Property values do not contain all of the metadata required to be
+// faithfully represented as a Go value, and are instead represented as an
+// opaque indexValue. Load the properties into a concrete struct type (e.g. by
+// passing a struct pointer to Iterator.Next) to reconstruct actual Go values
+// of type int, string, time.Time, etc.
+type indexValue struct {
+	value *pb.PropertyValue
+}
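
Since Load deliberately keeps going on per-field errors and leaves it to the caller whether an ErrFieldMismatch matters, here is a small sketch of tolerating stale properties; the Book type and the retired "Subtitle" property are hypothetical:

```go
package example

import (
	"context"
	"log"

	"google.golang.org/appengine/v2/datastore"
)

// Book no longer declares the old "Subtitle" property that some stored
// entities may still carry; loading such an entity yields an ErrFieldMismatch.
type Book struct {
	Title string
}

func getBook(ctx context.Context, key *datastore.Key) (*Book, error) {
	var b Book
	err := datastore.Get(ctx, key, &b)
	if _, ok := err.(*datastore.ErrFieldMismatch); ok {
		// The struct simply cannot represent every stored property;
		// treat that as non-fatal.
		log.Printf("ignoring stale property: %v", err)
		err = nil
	}
	if err != nil {
		return nil, err
	}
	return &b, nil
}
```
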
diff --git a/v2/datastore/load_test.go b/v2/datastore/load_test.go
new file mode 100644
index 0000000..c481a76
--- /dev/null
+++ b/v2/datastore/load_test.go
@@ -0,0 +1,656 @@
+// Copyright 2016 Google Inc. All Rights Reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"reflect"
+	"testing"
+
+	proto "github.com/golang/protobuf/proto"
+	pb "google.golang.org/appengine/v2/internal/datastore"
+)
+
+type Simple struct {
+	I int64
+}
+
+type SimpleWithTag struct {
+	I int64 `datastore:"II"`
+}
+
+type NestedSimpleWithTag struct {
+	A SimpleWithTag `datastore:"AA"`
+}
+
+type NestedSliceOfSimple struct {
+	A []Simple
+}
+
+type SimpleTwoFields struct {
+	S  string
+	SS string
+}
+
+type NestedSimpleAnonymous struct {
+	Simple
+	X string
+}
+
+type NestedSimple struct {
+	A Simple
+	I int64
+}
+
+type NestedSimple1 struct {
+	A Simple
+	X string
+}
+
+type NestedSimple2X struct {
+	AA NestedSimple
+	A  SimpleTwoFields
+	S  string
+}
+
+type BDotB struct {
+	B string `datastore:"B.B"`
+}
+
+type ABDotB struct {
+	A BDotB
+}
+
+type MultiAnonymous struct {
+	Simple
+	SimpleTwoFields
+	X string
+}
+
+var (
+	// these values need to be addressable
+	testString2 = "two"
+	testString3 = "three"
+	testInt64   = int64(2)
+
+	fieldNameI         = "I"
+	fieldNameX         = "X"
+	fieldNameS         = "S"
+	fieldNameSS        = "SS"
+	fieldNameADotI     = "A.I"
+	fieldNameAADotII   = "AA.II"
+	fieldNameADotBDotB = "A.B.B"
+)
+
+func TestLoadEntityNestedLegacy(t *testing.T) {
+	testCases := []struct {
+		desc string
+		src  *pb.EntityProto
+		want interface{}
+	}{
+		{
+			"nested",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name: &fieldNameX,
+						Value: &pb.PropertyValue{
+							StringValue: &testString2,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameADotI,
+						Value: &pb.PropertyValue{
+							Int64Value: &testInt64,
+						},
+					},
+				},
+			},
+			&NestedSimple1{
+				A: Simple{I: testInt64},
+				X: testString2,
+			},
+		},
+		{
+			"nested with tag",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name: &fieldNameAADotII,
+						Value: &pb.PropertyValue{
+							Int64Value: &testInt64,
+						},
+					},
+				},
+			},
+			&NestedSimpleWithTag{
+				A: SimpleWithTag{I: testInt64},
+			},
+		},
+		{
+			"nested with anonymous struct field",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name: &fieldNameX,
+						Value: &pb.PropertyValue{
+							StringValue: &testString2,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameI,
+						Value: &pb.PropertyValue{
+							Int64Value: &testInt64,
+						},
+					},
+				},
+			},
+			&NestedSimpleAnonymous{
+				Simple: Simple{I: testInt64},
+				X:      testString2,
+			},
+		},
+		{
+			"nested with dotted field tag",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name: &fieldNameADotBDotB,
+						Value: &pb.PropertyValue{
+							StringValue: &testString2,
+						},
+					},
+				},
+			},
+			&ABDotB{
+				A: BDotB{
+					B: testString2,
+				},
+			},
+		},
+		{
+			"nested with dotted field tag",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name: &fieldNameI,
+						Value: &pb.PropertyValue{
+							Int64Value: &testInt64,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameS,
+						Value: &pb.PropertyValue{
+							StringValue: &testString2,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameSS,
+						Value: &pb.PropertyValue{
+							StringValue: &testString3,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameX,
+						Value: &pb.PropertyValue{
+							StringValue: &testString3,
+						},
+					},
+				},
+			},
+			&MultiAnonymous{
+				Simple:          Simple{I: testInt64},
+				SimpleTwoFields: SimpleTwoFields{S: "two", SS: "three"},
+				X:               "three",
+			},
+		},
+	}
+
+	for _, tc := range testCases {
+		dst := reflect.New(reflect.TypeOf(tc.want).Elem()).Interface()
+		err := loadEntity(dst, tc.src)
+		if err != nil {
+			t.Errorf("loadEntity: %s: %v", tc.desc, err)
+			continue
+		}
+
+		if !reflect.DeepEqual(tc.want, dst) {
+			t.Errorf("%s: compare:\ngot:  %#v\nwant: %#v", tc.desc, dst, tc.want)
+		}
+	}
+}
+
+type WithKey struct {
+	X string
+	I int64
+	K *Key `datastore:"__key__"`
+}
+
+type NestedWithKey struct {
+	N WithKey
+	Y string
+}
+
+var (
+	incompleteKey = newKey("", nil)
+	invalidKey    = newKey("s", incompleteKey)
+
+	// these values need to be addressable
+	fieldNameA     = "A"
+	fieldNameK     = "K"
+	fieldNameN     = "N"
+	fieldNameY     = "Y"
+	fieldNameAA    = "AA"
+	fieldNameII    = "II"
+	fieldNameBDotB = "B.B"
+
+	entityProtoMeaning = pb.Property_ENTITY_PROTO
+
+	TRUE  = true
+	FALSE = false
+)
+
+var (
+	simpleEntityProto, nestedSimpleEntityProto,
+	simpleTwoFieldsEntityProto, simpleWithTagEntityProto,
+	bDotBEntityProto, withKeyEntityProto string
+)
+
+func init() {
+	// simpleEntityProto corresponds to:
+	// Simple{I: testInt64}
+	simpleEntityProtob, err := proto.Marshal(&pb.EntityProto{
+		Key: keyToProto("", incompleteKey),
+		Property: []*pb.Property{
+			&pb.Property{
+				Name: &fieldNameI,
+				Value: &pb.PropertyValue{
+					Int64Value: &testInt64,
+				},
+				Multiple: &FALSE,
+			},
+		},
+		EntityGroup: &pb.Path{},
+	})
+	if err != nil {
+		panic(err)
+	}
+	simpleEntityProto = string(simpleEntityProtob)
+
+	// nestedSimpleEntityProto corresponds to:
+	// NestedSimple{
+	// 	A: Simple{I: testInt64},
+	// 	I: testInt64,
+	// }
+	nestedSimpleEntityProtob, err := proto.Marshal(&pb.EntityProto{
+		Key: keyToProto("", incompleteKey),
+		Property: []*pb.Property{
+			&pb.Property{
+				Name:    &fieldNameA,
+				Meaning: &entityProtoMeaning,
+				Value: &pb.PropertyValue{
+					StringValue: &simpleEntityProto,
+				},
+				Multiple: &FALSE,
+			},
+			&pb.Property{
+				Name:    &fieldNameI,
+				Meaning: &entityProtoMeaning,
+				Value: &pb.PropertyValue{
+					Int64Value: &testInt64,
+				},
+				Multiple: &FALSE,
+			},
+		},
+		EntityGroup: &pb.Path{},
+	})
+	if err != nil {
+		panic(err)
+	}
+	nestedSimpleEntityProto = string(nestedSimpleEntityProtob)
+
+	// simpleTwoFieldsEntityProto corresponds to:
+	// SimpleTwoFields{S: testString2, SS: testString3}
+	simpleTwoFieldsEntityProtob, err := proto.Marshal(&pb.EntityProto{
+		Key: keyToProto("", incompleteKey),
+		Property: []*pb.Property{
+			&pb.Property{
+				Name: &fieldNameS,
+				Value: &pb.PropertyValue{
+					StringValue: &testString2,
+				},
+				Multiple: &FALSE,
+			},
+			&pb.Property{
+				Name: &fieldNameSS,
+				Value: &pb.PropertyValue{
+					StringValue: &testString3,
+				},
+				Multiple: &FALSE,
+			},
+		},
+		EntityGroup: &pb.Path{},
+	})
+	if err != nil {
+		panic(err)
+	}
+	simpleTwoFieldsEntityProto = string(simpleTwoFieldsEntityProtob)
+
+	// simpleWithTagEntityProto corresponds to:
+	// SimpleWithTag{I: testInt64}
+	simpleWithTagEntityProtob, err := proto.Marshal(&pb.EntityProto{
+		Key: keyToProto("", incompleteKey),
+		Property: []*pb.Property{
+			&pb.Property{
+				Name: &fieldNameII,
+				Value: &pb.PropertyValue{
+					Int64Value: &testInt64,
+				},
+				Multiple: &FALSE,
+			},
+		},
+		EntityGroup: &pb.Path{},
+	})
+	if err != nil {
+		panic(err)
+	}
+	simpleWithTagEntityProto = string(simpleWithTagEntityProtob)
+
+	// bDotBEntityProto corresponds to:
+	// BDotB{
+	// 	B: testString2,
+	// }
+	bDotBEntityProtob, err := proto.Marshal(&pb.EntityProto{
+		Key: keyToProto("", incompleteKey),
+		Property: []*pb.Property{
+			&pb.Property{
+				Name: &fieldNameBDotB,
+				Value: &pb.PropertyValue{
+					StringValue: &testString2,
+				},
+				Multiple: &FALSE,
+			},
+		},
+		EntityGroup: &pb.Path{},
+	})
+	if err != nil {
+		panic(err)
+	}
+	bDotBEntityProto = string(bDotBEntityProtob)
+
+	// withKeyEntityProto corresponds to:
+	// WithKey{
+	// 	X: testString3,
+	// 	I: testInt64,
+	// 	K: testKey1a,
+	// }
+	withKeyEntityProtob, err := proto.Marshal(&pb.EntityProto{
+		Key: keyToProto("", testKey1a),
+		Property: []*pb.Property{
+			&pb.Property{
+				Name: &fieldNameX,
+				Value: &pb.PropertyValue{
+					StringValue: &testString3,
+				},
+				Multiple: &FALSE,
+			},
+			&pb.Property{
+				Name: &fieldNameI,
+				Value: &pb.PropertyValue{
+					Int64Value: &testInt64,
+				},
+				Multiple: &FALSE,
+			},
+		},
+		EntityGroup: &pb.Path{},
+	})
+	if err != nil {
+		panic(err)
+	}
+	withKeyEntityProto = string(withKeyEntityProtob)
+
+}
+
+func TestLoadEntityNested(t *testing.T) {
+	testCases := []struct {
+		desc string
+		src  *pb.EntityProto
+		want interface{}
+	}{
+		{
+			"nested basic",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name:    &fieldNameA,
+						Meaning: &entityProtoMeaning,
+						Value: &pb.PropertyValue{
+							StringValue: &simpleEntityProto,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameI,
+						Value: &pb.PropertyValue{
+							Int64Value: &testInt64,
+						},
+					},
+				},
+			},
+			&NestedSimple{
+				A: Simple{I: 2},
+				I: 2,
+			},
+		},
+		{
+			"nested with struct tags",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name:    &fieldNameAA,
+						Meaning: &entityProtoMeaning,
+						Value: &pb.PropertyValue{
+							StringValue: &simpleWithTagEntityProto,
+						},
+					},
+				},
+			},
+			&NestedSimpleWithTag{
+				A: SimpleWithTag{I: testInt64},
+			},
+		},
+		{
+			"nested 2x",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name:    &fieldNameAA,
+						Meaning: &entityProtoMeaning,
+						Value: &pb.PropertyValue{
+							StringValue: &nestedSimpleEntityProto,
+						},
+					},
+					&pb.Property{
+						Name:    &fieldNameA,
+						Meaning: &entityProtoMeaning,
+						Value: &pb.PropertyValue{
+							StringValue: &simpleTwoFieldsEntityProto,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameS,
+						Value: &pb.PropertyValue{
+							StringValue: &testString3,
+						},
+					},
+				},
+			},
+			&NestedSimple2X{
+				AA: NestedSimple{
+					A: Simple{I: testInt64},
+					I: testInt64,
+				},
+				A: SimpleTwoFields{S: testString2, SS: testString3},
+				S: testString3,
+			},
+		},
+		{
+			"nested anonymous",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name: &fieldNameI,
+						Value: &pb.PropertyValue{
+							Int64Value: &testInt64,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameX,
+						Value: &pb.PropertyValue{
+							StringValue: &testString2,
+						},
+					},
+				},
+			},
+			&NestedSimpleAnonymous{
+				Simple: Simple{I: testInt64},
+				X:      testString2,
+			},
+		},
+		{
+			"nested simple with slice",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name:     &fieldNameA,
+						Meaning:  &entityProtoMeaning,
+						Multiple: &TRUE,
+						Value: &pb.PropertyValue{
+							StringValue: &simpleEntityProto,
+						},
+					},
+					&pb.Property{
+						Name:     &fieldNameA,
+						Meaning:  &entityProtoMeaning,
+						Multiple: &TRUE,
+						Value: &pb.PropertyValue{
+							StringValue: &simpleEntityProto,
+						},
+					},
+				},
+			},
+			&NestedSliceOfSimple{
+				A: []Simple{Simple{I: testInt64}, Simple{I: testInt64}},
+			},
+		},
+		{
+			"nested with multiple anonymous fields",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name: &fieldNameI,
+						Value: &pb.PropertyValue{
+							Int64Value: &testInt64,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameS,
+						Value: &pb.PropertyValue{
+							StringValue: &testString2,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameSS,
+						Value: &pb.PropertyValue{
+							StringValue: &testString3,
+						},
+					},
+					&pb.Property{
+						Name: &fieldNameX,
+						Value: &pb.PropertyValue{
+							StringValue: &testString2,
+						},
+					},
+				},
+			},
+			&MultiAnonymous{
+				Simple:          Simple{I: testInt64},
+				SimpleTwoFields: SimpleTwoFields{S: testString2, SS: testString3},
+				X:               testString2,
+			},
+		},
+		{
+			"nested with dotted field tag",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name:    &fieldNameA,
+						Meaning: &entityProtoMeaning,
+						Value: &pb.PropertyValue{
+							StringValue: &bDotBEntityProto,
+						},
+					},
+				},
+			},
+			&ABDotB{
+				A: BDotB{
+					B: testString2,
+				},
+			},
+		},
+		{
+			"nested entity with key",
+			&pb.EntityProto{
+				Key: keyToProto("some-app-id", testKey0),
+				Property: []*pb.Property{
+					&pb.Property{
+						Name: &fieldNameY,
+						Value: &pb.PropertyValue{
+							StringValue: &testString2,
+						},
+					},
+					&pb.Property{
+						Name:    &fieldNameN,
+						Meaning: &entityProtoMeaning,
+						Value: &pb.PropertyValue{
+							StringValue: &withKeyEntityProto,
+						},
+					},
+				},
+			},
+			&NestedWithKey{
+				Y: testString2,
+				N: WithKey{
+					X: testString3,
+					I: testInt64,
+					K: testKey1a,
+				},
+			},
+		},
+	}
+
+	for _, tc := range testCases {
+		dst := reflect.New(reflect.TypeOf(tc.want).Elem()).Interface()
+		err := loadEntity(dst, tc.src)
+		if err != nil {
+			t.Errorf("loadEntity: %s: %v", tc.desc, err)
+			continue
+		}
+
+		if !reflect.DeepEqual(tc.want, dst) {
+			t.Errorf("%s: compare:\ngot:  %#v\nwant: %#v", tc.desc, dst, tc.want)
+		}
+	}
+}
diff --git a/v2/datastore/metadata.go b/v2/datastore/metadata.go
new file mode 100644
index 0000000..e1b2d22
--- /dev/null
+++ b/v2/datastore/metadata.go
@@ -0,0 +1,79 @@
+// Copyright 2016 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import "context"
+
+// Datastore kinds for the metadata entities.
+const (
+	namespaceKind = "__namespace__"
+	kindKind      = "__kind__"
+	propertyKind  = "__property__"
+)
+
+// Namespaces returns all the datastore namespaces.
+func Namespaces(ctx context.Context) ([]string, error) {
+	// TODO(djd): Support range queries.
+	q := NewQuery(namespaceKind).KeysOnly()
+	keys, err := q.GetAll(ctx, nil)
+	if err != nil {
+		return nil, err
+	}
+	// The empty namespace key uses a numeric ID (==1), but luckily
+	// the string ID defaults to "" for numeric IDs anyway.
+	return keyNames(keys), nil
+}
+
+// Kinds returns the names of all the kinds in the current namespace.
+func Kinds(ctx context.Context) ([]string, error) {
+	// TODO(djd): Support range queries.
+	q := NewQuery(kindKind).KeysOnly()
+	keys, err := q.GetAll(ctx, nil)
+	if err != nil {
+		return nil, err
+	}
+	return keyNames(keys), nil
+}
+
+// keyNames returns a slice of the provided keys' names (string IDs).
+func keyNames(keys []*Key) []string {
+	n := make([]string, 0, len(keys))
+	for _, k := range keys {
+		n = append(n, k.StringID())
+	}
+	return n
+}
+
+// KindProperties returns all the indexed properties for the given kind.
+// The properties are returned as a map of property names to a slice of the
+// representation types. The representation types for the supported Go property
+// types are:
+//
+//	"INT64":     signed integers and time.Time
+//	"DOUBLE":    float32 and float64
+//	"BOOLEAN":   bool
+//	"STRING":    string, []byte and ByteString
+//	"POINT":     appengine.GeoPoint
+//	"REFERENCE": *Key
+//	"USER":      (not used in the Go runtime)
+func KindProperties(ctx context.Context, kind string) (map[string][]string, error) {
+	// TODO(djd): Support range queries.
+	kindKey := NewKey(ctx, kindKind, kind, 0, nil)
+	q := NewQuery(propertyKind).Ancestor(kindKey)
+
+	propMap := map[string][]string{}
+	props := []struct {
+		Repr []string `datastore:"property_representation"`
+	}{}
+
+	keys, err := q.GetAll(ctx, &props)
+	if err != nil {
+		return nil, err
+	}
+	for i, p := range props {
+		propMap[keys[i].StringID()] = p.Repr
+	}
+	return propMap, nil
+}
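
A short sketch of combining Kinds and KindProperties to dump the indexed schema of the current namespace; the output format is illustrative and the context is assumed to be an App Engine context:

```go
package example

import (
	"context"
	"fmt"

	"google.golang.org/appengine/v2/datastore"
)

// dumpSchema prints every kind in the current namespace together with the
// representation types ("INT64", "STRING", ...) of its indexed properties.
func dumpSchema(ctx context.Context) error {
	kinds, err := datastore.Kinds(ctx)
	if err != nil {
		return err
	}
	for _, kind := range kinds {
		props, err := datastore.KindProperties(ctx, kind)
		if err != nil {
			return err
		}
		for name, reprs := range props {
			fmt.Printf("%s.%s: %v\n", kind, name, reprs)
		}
	}
	return nil
}
```
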
diff --git a/v2/datastore/prop.go b/v2/datastore/prop.go
new file mode 100644
index 0000000..5cb2079
--- /dev/null
+++ b/v2/datastore/prop.go
@@ -0,0 +1,330 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"fmt"
+	"reflect"
+	"strings"
+	"sync"
+	"unicode"
+)
+
+// Entities with more than this many indexed properties will not be saved.
+const maxIndexedProperties = 20000
+
+// []byte fields more than 1 megabyte long will not be loaded or saved.
+const maxBlobLen = 1 << 20
+
+// Property is a name/value pair plus some metadata. A datastore entity's
+// contents are loaded and saved as a sequence of Properties. An entity can
+// have multiple Properties with the same name, provided that p.Multiple is
+// true on all of that entity's Properties with that name.
+type Property struct {
+	// Name is the property name.
+	Name string
+	// Value is the property value. The valid types are:
+	//	- int64
+	//	- bool
+	//	- string
+	//	- float64
+	//	- ByteString
+	//	- *Key
+	//	- time.Time
+	//	- appengine.BlobKey
+	//	- appengine.GeoPoint
+	//	- []byte (up to 1 megabyte in length)
+	//	- *Entity (representing a nested struct)
+	// This set is smaller than the set of valid struct field types that the
+	// datastore can load and save. A Property Value cannot be a slice (apart
+	// from []byte); use multiple Properties instead. Also, a Value's type
+	// must be explicitly on the list above; it is not sufficient for the
+	// underlying type to be on that list. For example, a Value of "type
+	// myInt64 int64" is invalid. Smaller-width integers and floats are also
+	// invalid. Again, this is more restrictive than the set of valid struct
+	// field types.
+	//
+	// A Value will have an opaque type when loading entities from an index,
+	// such as via a projection query. Load entities into a struct instead
+	// of a PropertyLoadSaver when using a projection query.
+	//
+	// A Value may also be the nil interface value; this is equivalent to
+	// Python's None but not directly representable by a Go struct. Loading
+	// a nil-valued property into a struct will set that field to the zero
+	// value.
+	Value interface{}
+	// NoIndex is whether the datastore cannot index this property.
+	NoIndex bool
+	// Multiple is whether the entity can have multiple properties with
+	// the same name. Even if a particular instance only has one property with
+	// a certain name, Multiple should be true if a struct would best represent
+	// it as a field of type []T instead of type T.
+	Multiple bool
+}
+
+// An Entity is the value type for a nested struct.
+// This type is only used for a Property's Value.
+type Entity struct {
+	Key        *Key
+	Properties []Property
+}
+
+// ByteString is a short byte slice (up to 1500 bytes) that can be indexed.
+type ByteString []byte
+
+// PropertyLoadSaver can be converted from and to a slice of Properties.
+type PropertyLoadSaver interface {
+	Load([]Property) error
+	Save() ([]Property, error)
+}
+
+// PropertyList converts a []Property to implement PropertyLoadSaver.
+type PropertyList []Property
+
+var (
+	typeOfPropertyLoadSaver = reflect.TypeOf((*PropertyLoadSaver)(nil)).Elem()
+	typeOfPropertyList      = reflect.TypeOf(PropertyList(nil))
+)
+
+// Load loads all of the provided properties into l.
+// It does not first reset *l to an empty slice.
+func (l *PropertyList) Load(p []Property) error {
+	*l = append(*l, p...)
+	return nil
+}
+
+// Save saves all of l's properties as a slice of Properties.
+func (l *PropertyList) Save() ([]Property, error) {
+	return *l, nil
+}
+
+// validPropertyName returns whether name consists of one or more valid Go
+// identifiers joined by ".".
+func validPropertyName(name string) bool {
+	if name == "" {
+		return false
+	}
+	for _, s := range strings.Split(name, ".") {
+		if s == "" {
+			return false
+		}
+		first := true
+		for _, c := range s {
+			if first {
+				first = false
+				if c != '_' && !unicode.IsLetter(c) {
+					return false
+				}
+			} else {
+				if c != '_' && !unicode.IsLetter(c) && !unicode.IsDigit(c) {
+					return false
+				}
+			}
+		}
+	}
+	return true
+}
+
+// structCodec describes how to convert a struct to and from a sequence of
+// properties.
+type structCodec struct {
+	// fields gives the field codec for the structTag with the given name.
+	fields map[string]fieldCodec
+	// hasSlice is whether a struct or any of its nested or embedded structs
+	// has a slice-typed field (other than []byte).
+	hasSlice bool
+	// keyField is the index of a *Key field with structTag __key__.
+	// This field is not relevant for the top level struct, only for
+	// nested structs.
+	keyField int
+	// complete is whether the structCodec is complete. An incomplete
+	// structCodec may be encountered when walking a recursive struct.
+	complete bool
+}
+
+// fieldCodec is a struct field's index and, if that struct field's type is
+// itself a struct, that substruct's structCodec.
+type fieldCodec struct {
+	// path is the index path to the field
+	path    []int
+	noIndex bool
+	// omitEmpty indicates that the field should be omitted on save
+	// if empty.
+	omitEmpty bool
+	// structCodec is the codec for the struct field at index 'path',
+	// or nil if the field is not a struct.
+	structCodec *structCodec
+}
+
+// structCodecs collects the structCodecs that have already been calculated.
+var (
+	structCodecsMutex sync.Mutex
+	structCodecs      = make(map[reflect.Type]*structCodec)
+)
+
+// getStructCodec returns the structCodec for the given struct type.
+func getStructCodec(t reflect.Type) (*structCodec, error) {
+	structCodecsMutex.Lock()
+	defer structCodecsMutex.Unlock()
+	return getStructCodecLocked(t)
+}
+
+// getStructCodecLocked implements getStructCodec. The structCodecsMutex must
+// be held when calling this function.
+func getStructCodecLocked(t reflect.Type) (ret *structCodec, retErr error) {
+	c, ok := structCodecs[t]
+	if ok {
+		return c, nil
+	}
+	c = &structCodec{
+		fields: make(map[string]fieldCodec),
+		// We initialize keyField to -1 so that the zero-value is not
+		// misinterpreted as index 0.
+		keyField: -1,
+	}
+
+	// Add c to the structCodecs map before we are sure it is good. If t is
+	// a recursive type, it needs to find the incomplete entry for itself in
+	// the map.
+	structCodecs[t] = c
+	defer func() {
+		if retErr != nil {
+			delete(structCodecs, t)
+		}
+	}()
+
+	for i := 0; i < t.NumField(); i++ {
+		f := t.Field(i)
+		// Skip unexported fields.
+		// Note that if f is an anonymous, unexported struct field,
+		// we will promote its fields.
+		if f.PkgPath != "" && !f.Anonymous {
+			continue
+		}
+
+		tags := strings.Split(f.Tag.Get("datastore"), ",")
+		name := tags[0]
+		opts := make(map[string]bool)
+		for _, t := range tags[1:] {
+			opts[t] = true
+		}
+		switch {
+		case name == "":
+			if !f.Anonymous {
+				name = f.Name
+			}
+		case name == "-":
+			continue
+		case name == "__key__":
+			if f.Type != typeOfKeyPtr {
+				return nil, fmt.Errorf("datastore: __key__ field on struct %v is not a *datastore.Key", t)
+			}
+			c.keyField = i
+		case !validPropertyName(name):
+			return nil, fmt.Errorf("datastore: struct tag has invalid property name: %q", name)
+		}
+
+		substructType, fIsSlice := reflect.Type(nil), false
+		switch f.Type.Kind() {
+		case reflect.Struct:
+			substructType = f.Type
+		case reflect.Slice:
+			if f.Type.Elem().Kind() == reflect.Struct {
+				substructType = f.Type.Elem()
+			}
+			fIsSlice = f.Type != typeOfByteSlice
+			c.hasSlice = c.hasSlice || fIsSlice
+		}
+
+		var sub *structCodec
+		if substructType != nil && substructType != typeOfTime && substructType != typeOfGeoPoint {
+			var err error
+			sub, err = getStructCodecLocked(substructType)
+			if err != nil {
+				return nil, err
+			}
+			if !sub.complete {
+				return nil, fmt.Errorf("datastore: recursive struct: field %q", f.Name)
+			}
+			if fIsSlice && sub.hasSlice {
+				return nil, fmt.Errorf(
+					"datastore: flattening nested structs leads to a slice of slices: field %q", f.Name)
+			}
+			c.hasSlice = c.hasSlice || sub.hasSlice
+			// If f is an anonymous struct field, we promote the substruct's fields up to this level
+			// in the linked list of struct codecs.
+			if f.Anonymous {
+				for subname, subfield := range sub.fields {
+					if name != "" {
+						subname = name + "." + subname
+					}
+					if _, ok := c.fields[subname]; ok {
+						return nil, fmt.Errorf("datastore: struct tag has repeated property name: %q", subname)
+					}
+					c.fields[subname] = fieldCodec{
+						path:        append([]int{i}, subfield.path...),
+						noIndex:     subfield.noIndex || opts["noindex"],
+						omitEmpty:   subfield.omitEmpty,
+						structCodec: subfield.structCodec,
+					}
+				}
+				continue
+			}
+		}
+
+		if _, ok := c.fields[name]; ok {
+			return nil, fmt.Errorf("datastore: struct tag has repeated property name: %q", name)
+		}
+		c.fields[name] = fieldCodec{
+			path:        []int{i},
+			noIndex:     opts["noindex"],
+			omitEmpty:   opts["omitempty"],
+			structCodec: sub,
+		}
+	}
+	c.complete = true
+	return c, nil
+}
+
+// structPLS adapts a struct to be a PropertyLoadSaver.
+type structPLS struct {
+	v     reflect.Value
+	codec *structCodec
+}
+
+// newStructPLS returns a structPLS, which implements the
+// PropertyLoadSaver interface, for the struct pointer p.
+func newStructPLS(p interface{}) (*structPLS, error) {
+	v := reflect.ValueOf(p)
+	if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct {
+		return nil, ErrInvalidEntityType
+	}
+	v = v.Elem()
+	codec, err := getStructCodec(v.Type())
+	if err != nil {
+		return nil, err
+	}
+	return &structPLS{v, codec}, nil
+}
+
+// LoadStruct loads the properties from p to dst.
+// dst must be a struct pointer.
+func LoadStruct(dst interface{}, p []Property) error {
+	x, err := newStructPLS(dst)
+	if err != nil {
+		return err
+	}
+	return x.Load(p)
+}
+
+// SaveStruct returns the properties from src as a slice of Properties.
+// src must be a struct pointer.
+func SaveStruct(src interface{}) ([]Property, error) {
+	x, err := newStructPLS(src)
+	if err != nil {
+		return nil, err
+	}
+	return x.Save()
+}
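
A hedged sketch of a custom PropertyLoadSaver built on SaveStruct and LoadStruct, adding one derived property on save and stripping it on load; the Article type and the "TitleLower" property are hypothetical:

```go
package example

import (
	"strings"

	"google.golang.org/appengine/v2/datastore"
)

// Article stores a derived, lowercased copy of Title so it can be filtered
// case-insensitively; the extra property is stripped again on load.
type Article struct {
	Title string
	Tags  []string `datastore:",noindex"`
}

func (a *Article) Save() ([]datastore.Property, error) {
	props, err := datastore.SaveStruct(a)
	if err != nil {
		return nil, err
	}
	return append(props, datastore.Property{
		Name:  "TitleLower",
		Value: strings.ToLower(a.Title),
	}), nil
}

func (a *Article) Load(props []datastore.Property) error {
	var own []datastore.Property
	for _, p := range props {
		if p.Name == "TitleLower" {
			continue
		}
		own = append(own, p)
	}
	return datastore.LoadStruct(a, own)
}
```
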
diff --git a/v2/datastore/prop_test.go b/v2/datastore/prop_test.go
new file mode 100644
index 0000000..1311629
--- /dev/null
+++ b/v2/datastore/prop_test.go
@@ -0,0 +1,672 @@
+// Copyright 2011 Google Inc. All Rights Reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"reflect"
+	"sort"
+	"testing"
+	"time"
+
+	"google.golang.org/appengine/v2"
+)
+
+func TestValidPropertyName(t *testing.T) {
+	testCases := []struct {
+		name string
+		want bool
+	}{
+		// Invalid names.
+		{"", false},
+		{"'", false},
+		{".", false},
+		{"..", false},
+		{".foo", false},
+		{"0", false},
+		{"00", false},
+		{"X.X.4.X.X", false},
+		{"\n", false},
+		{"\x00", false},
+		{"abc\xffz", false},
+		{"foo.", false},
+		{"foo..", false},
+		{"foo..bar", false},
+		{"☃", false},
+		{`"`, false},
+		// Valid names.
+		{"AB", true},
+		{"Abc", true},
+		{"X.X.X.X.X", true},
+		{"_", true},
+		{"_0", true},
+		{"a", true},
+		{"a_B", true},
+		{"f00", true},
+		{"f0o", true},
+		{"fo0", true},
+		{"foo", true},
+		{"foo.bar", true},
+		{"foo.bar.baz", true},
+		{"世界", true},
+	}
+	for _, tc := range testCases {
+		got := validPropertyName(tc.name)
+		if got != tc.want {
+			t.Errorf("%q: got %v, want %v", tc.name, got, tc.want)
+		}
+	}
+}
+
+func TestStructCodec(t *testing.T) {
+	type oStruct struct {
+		O int
+	}
+	type pStruct struct {
+		P int
+		Q int
+	}
+	type rStruct struct {
+		R int
+		S pStruct
+		T oStruct
+		oStruct
+	}
+	type uStruct struct {
+		U int
+		v int
+	}
+	type vStruct struct {
+		V string `datastore:",noindex"`
+	}
+	oStructCodec := &structCodec{
+		fields: map[string]fieldCodec{
+			"O": {path: []int{0}},
+		},
+		complete: true,
+	}
+	pStructCodec := &structCodec{
+		fields: map[string]fieldCodec{
+			"P": {path: []int{0}},
+			"Q": {path: []int{1}},
+		},
+		complete: true,
+	}
+	rStructCodec := &structCodec{
+		fields: map[string]fieldCodec{
+			"R": {path: []int{0}},
+			"S": {path: []int{1}, structCodec: pStructCodec},
+			"T": {path: []int{2}, structCodec: oStructCodec},
+			"O": {path: []int{3, 0}},
+		},
+		complete: true,
+	}
+	uStructCodec := &structCodec{
+		fields: map[string]fieldCodec{
+			"U": {path: []int{0}},
+		},
+		complete: true,
+	}
+	vStructCodec := &structCodec{
+		fields: map[string]fieldCodec{
+			"V": {path: []int{0}, noIndex: true},
+		},
+		complete: true,
+	}
+
+	testCases := []struct {
+		desc        string
+		structValue interface{}
+		want        *structCodec
+	}{
+		{
+			"oStruct",
+			oStruct{},
+			oStructCodec,
+		},
+		{
+			"pStruct",
+			pStruct{},
+			pStructCodec,
+		},
+		{
+			"rStruct",
+			rStruct{},
+			rStructCodec,
+		},
+		{
+			"uStruct",
+			uStruct{},
+			uStructCodec,
+		},
+		{
+			"non-basic fields",
+			struct {
+				B appengine.BlobKey
+				K *Key
+				T time.Time
+			}{},
+			&structCodec{
+				fields: map[string]fieldCodec{
+					"B": {path: []int{0}},
+					"K": {path: []int{1}},
+					"T": {path: []int{2}},
+				},
+				complete: true,
+			},
+		},
+		{
+			"struct tags with ignored embed",
+			struct {
+				A       int `datastore:"a,noindex"`
+				B       int `datastore:"b"`
+				C       int `datastore:",noindex"`
+				D       int `datastore:""`
+				E       int
+				I       int `datastore:"-"`
+				J       int `datastore:",noindex" json:"j"`
+				oStruct `datastore:"-"`
+			}{},
+			&structCodec{
+				fields: map[string]fieldCodec{
+					"a": {path: []int{0}, noIndex: true},
+					"b": {path: []int{1}},
+					"C": {path: []int{2}, noIndex: true},
+					"D": {path: []int{3}},
+					"E": {path: []int{4}},
+					"J": {path: []int{6}, noIndex: true},
+				},
+				complete: true,
+			},
+		},
+		{
+			"unexported fields",
+			struct {
+				A int
+				b int
+				C int `datastore:"x"`
+				d int `datastore:"Y"`
+			}{},
+			&structCodec{
+				fields: map[string]fieldCodec{
+					"A": {path: []int{0}},
+					"x": {path: []int{2}},
+				},
+				complete: true,
+			},
+		},
+		{
+			"nested and embedded structs",
+			struct {
+				A   int
+				B   int
+				CC  oStruct
+				DDD rStruct
+				oStruct
+			}{},
+			&structCodec{
+				fields: map[string]fieldCodec{
+					"A":   {path: []int{0}},
+					"B":   {path: []int{1}},
+					"CC":  {path: []int{2}, structCodec: oStructCodec},
+					"DDD": {path: []int{3}, structCodec: rStructCodec},
+					"O":   {path: []int{4, 0}},
+				},
+				complete: true,
+			},
+		},
+		{
+			"struct tags with nested and embedded structs",
+			struct {
+				A       int     `datastore:"-"`
+				B       int     `datastore:"w"`
+				C       oStruct `datastore:"xx"`
+				D       rStruct `datastore:"y"`
+				oStruct `datastore:"z"`
+			}{},
+			&structCodec{
+				fields: map[string]fieldCodec{
+					"w":   {path: []int{1}},
+					"xx":  {path: []int{2}, structCodec: oStructCodec},
+					"y":   {path: []int{3}, structCodec: rStructCodec},
+					"z.O": {path: []int{4, 0}},
+				},
+				complete: true,
+			},
+		},
+		{
+			"unexported nested and embedded structs",
+			struct {
+				a int
+				B int
+				c uStruct
+				D uStruct
+				uStruct
+			}{},
+			&structCodec{
+				fields: map[string]fieldCodec{
+					"B": {path: []int{1}},
+					"D": {path: []int{3}, structCodec: uStructCodec},
+					"U": {path: []int{4, 0}},
+				},
+				complete: true,
+			},
+		},
+		{
+			"noindex nested struct",
+			struct {
+				A oStruct `datastore:",noindex"`
+			}{},
+			&structCodec{
+				fields: map[string]fieldCodec{
+					"A": {path: []int{0}, structCodec: oStructCodec, noIndex: true},
+				},
+				complete: true,
+			},
+		},
+		{
+			"noindex slice",
+			struct {
+				A []string `datastore:",noindex"`
+			}{},
+			&structCodec{
+				fields: map[string]fieldCodec{
+					"A": {path: []int{0}, noIndex: true},
+				},
+				hasSlice: true,
+				complete: true,
+			},
+		},
+		{
+			"noindex embedded struct slice",
+			struct {
+				// vStruct has a single field, V, also with noindex.
+				A []vStruct `datastore:",noindex"`
+			}{},
+			&structCodec{
+				fields: map[string]fieldCodec{
+					"A": {path: []int{0}, structCodec: vStructCodec, noIndex: true},
+				},
+				hasSlice: true,
+				complete: true,
+			},
+		},
+	}
+
+	for _, tc := range testCases {
+		got, err := getStructCodec(reflect.TypeOf(tc.structValue))
+		if err != nil {
+			t.Errorf("%s: getStructCodec: %v", tc.desc, err)
+			continue
+		}
+		// can't reflect.DeepEqual b/c element order in fields map may differ
+		if !isEqualStructCodec(got, tc.want) {
+			t.Errorf("%s\ngot  %+v\nwant %+v\n", tc.desc, got, tc.want)
+		}
+	}
+}
+
+func isEqualStructCodec(got, want *structCodec) bool {
+	if got.complete != want.complete {
+		return false
+	}
+	if got.hasSlice != want.hasSlice {
+		return false
+	}
+	if len(got.fields) != len(want.fields) {
+		return false
+	}
+	for name, wantF := range want.fields {
+		gotF := got.fields[name]
+		if !reflect.DeepEqual(wantF.path, gotF.path) {
+			return false
+		}
+		if wantF.noIndex != gotF.noIndex {
+			return false
+		}
+		if wantF.structCodec != nil {
+			if gotF.structCodec == nil {
+				return false
+			}
+			if !isEqualStructCodec(gotF.structCodec, wantF.structCodec) {
+				return false
+			}
+		}
+	}
+
+	return true
+}
+
+func TestRepeatedPropertyName(t *testing.T) {
+	good := []interface{}{
+		struct {
+			A int `datastore:"-"`
+		}{},
+		struct {
+			A int `datastore:"b"`
+			B int
+		}{},
+		struct {
+			A int
+			B int `datastore:"B"`
+		}{},
+		struct {
+			A int `datastore:"B"`
+			B int `datastore:"-"`
+		}{},
+		struct {
+			A int `datastore:"-"`
+			B int `datastore:"A"`
+		}{},
+		struct {
+			A int `datastore:"B"`
+			B int `datastore:"A"`
+		}{},
+		struct {
+			A int `datastore:"B"`
+			B int `datastore:"C"`
+			C int `datastore:"A"`
+		}{},
+		struct {
+			A int `datastore:"B"`
+			B int `datastore:"C"`
+			C int `datastore:"D"`
+		}{},
+	}
+	bad := []interface{}{
+		struct {
+			A int `datastore:"B"`
+			B int
+		}{},
+		struct {
+			A int
+			B int `datastore:"A"`
+		}{},
+		struct {
+			A int `datastore:"C"`
+			B int `datastore:"C"`
+		}{},
+		struct {
+			A int `datastore:"B"`
+			B int `datastore:"C"`
+			C int `datastore:"B"`
+		}{},
+	}
+	testGetStructCodec(t, good, bad)
+}
+
+func TestFlatteningNestedStructs(t *testing.T) {
+	type DeepGood struct {
+		A struct {
+			B []struct {
+				C struct {
+					D int
+				}
+			}
+		}
+	}
+	type DeepBad struct {
+		A struct {
+			B []struct {
+				C struct {
+					D []int
+				}
+			}
+		}
+	}
+	type ISay struct {
+		Tomato int
+	}
+	type YouSay struct {
+		Tomato int
+	}
+	type Tweedledee struct {
+		Dee int `datastore:"D"`
+	}
+	type Tweedledum struct {
+		Dum int `datastore:"D"`
+	}
+
+	good := []interface{}{
+		struct {
+			X []struct {
+				Y string
+			}
+		}{},
+		struct {
+			X []struct {
+				Y []byte
+			}
+		}{},
+		struct {
+			P []int
+			X struct {
+				Y []int
+			}
+		}{},
+		struct {
+			X struct {
+				Y []int
+			}
+			Q []int
+		}{},
+		struct {
+			P []int
+			X struct {
+				Y []int
+			}
+			Q []int
+		}{},
+		struct {
+			DeepGood
+		}{},
+		struct {
+			DG DeepGood
+		}{},
+		struct {
+			Foo struct {
+				Z int
+			} `datastore:"A"`
+			Bar struct {
+				Z int
+			} `datastore:"B"`
+		}{},
+	}
+	bad := []interface{}{
+		struct {
+			X []struct {
+				Y []string
+			}
+		}{},
+		struct {
+			X []struct {
+				Y []int
+			}
+		}{},
+		struct {
+			DeepBad
+		}{},
+		struct {
+			DB DeepBad
+		}{},
+		struct {
+			ISay
+			YouSay
+		}{},
+		struct {
+			Tweedledee
+			Tweedledum
+		}{},
+		struct {
+			Foo struct {
+				Z int
+			} `datastore:"A"`
+			Bar struct {
+				Z int
+			} `datastore:"A"`
+		}{},
+	}
+	testGetStructCodec(t, good, bad)
+}
+
+func testGetStructCodec(t *testing.T, good []interface{}, bad []interface{}) {
+	for _, x := range good {
+		if _, err := getStructCodec(reflect.TypeOf(x)); err != nil {
+			t.Errorf("type %T: got non-nil error (%s), want nil", x, err)
+		}
+	}
+	for _, x := range bad {
+		if _, err := getStructCodec(reflect.TypeOf(x)); err == nil {
+			t.Errorf("type %T: got nil error, want non-nil", x)
+		}
+	}
+}
+
+func TestNilKeyIsStored(t *testing.T) {
+	x := struct {
+		K *Key
+		I int
+	}{}
+	p := PropertyList{}
+	// Save x as properties.
+	p1, _ := SaveStruct(&x)
+	p.Load(p1)
+	// Set x's fields to non-zero.
+	x.K = &Key{}
+	x.I = 2
+	// Load x from properties.
+	p2, _ := p.Save()
+	LoadStruct(&x, p2)
+	// Check that x's fields were set to zero.
+	if x.K != nil {
+		t.Errorf("K field was not zero")
+	}
+	if x.I != 0 {
+		t.Errorf("I field was not zero")
+	}
+}
+
+func TestSaveStructOmitEmpty(t *testing.T) {
+	// Expected property names are sorted alphabetically
+	expectedPropNamesForSingles := []string{"EmptyValue", "NonEmptyValue", "OmitEmptyWithValue"}
+	expectedPropNamesForSlices := []string{"NonEmptyValue", "NonEmptyValue", "OmitEmptyWithValue", "OmitEmptyWithValue"}
+
+	testOmitted := func(expectedPropNames []string, src interface{}) {
+		// t.Helper() - this is available from Go version 1.9, but we also support Go versions 1.6, 1.7, 1.8
+		if props, err := SaveStruct(src); err != nil {
+			t.Fatal(err)
+		} else {
+			// Collect names for easier sorting and for reporting diffs from the expected set
+			actualPropNames := make([]string, len(props))
+			for i := range props {
+				actualPropNames[i] = props[i].Name
+			}
+			// Sort actuals for comparing with already sorted expected names
+			sort.Sort(sort.StringSlice(actualPropNames))
+			if !reflect.DeepEqual(actualPropNames, expectedPropNames) {
+				t.Errorf("Expected properties: %v, got: %v", expectedPropNames, actualPropNames)
+			}
+		}
+	}
+
+	testOmitted(expectedPropNamesForSingles, &struct {
+		EmptyValue         int
+		NonEmptyValue      int
+		OmitEmptyNoValue   int `datastore:",omitempty"`
+		OmitEmptyWithValue int `datastore:",omitempty"`
+	}{
+		NonEmptyValue:      1,
+		OmitEmptyWithValue: 2,
+	})
+
+	testOmitted(expectedPropNamesForSlices, &struct {
+		EmptyValue         []int
+		NonEmptyValue      []int
+		OmitEmptyNoValue   []int `datastore:",omitempty"`
+		OmitEmptyWithValue []int `datastore:",omitempty"`
+	}{
+		NonEmptyValue:      []int{1, 2},
+		OmitEmptyWithValue: []int{3, 4},
+	})
+
+	testOmitted(expectedPropNamesForSingles, &struct {
+		EmptyValue         bool
+		NonEmptyValue      bool
+		OmitEmptyNoValue   bool `datastore:",omitempty"`
+		OmitEmptyWithValue bool `datastore:",omitempty"`
+	}{
+		NonEmptyValue:      true,
+		OmitEmptyWithValue: true,
+	})
+
+	testOmitted(expectedPropNamesForSlices, &struct {
+		EmptyValue         []bool
+		NonEmptyValue      []bool
+		OmitEmptyNoValue   []bool `datastore:",omitempty"`
+		OmitEmptyWithValue []bool `datastore:",omitempty"`
+	}{
+		NonEmptyValue:      []bool{true, true},
+		OmitEmptyWithValue: []bool{true, true},
+	})
+
+	testOmitted(expectedPropNamesForSingles, &struct {
+		EmptyValue         string
+		NonEmptyValue      string
+		OmitEmptyNoValue   string `datastore:",omitempty"`
+		OmitEmptyWithValue string `datastore:",omitempty"`
+	}{
+		NonEmptyValue:      "s",
+		OmitEmptyWithValue: "s",
+	})
+
+	testOmitted(expectedPropNamesForSlices, &struct {
+		EmptyValue         []string
+		NonEmptyValue      []string
+		OmitEmptyNoValue   []string `datastore:",omitempty"`
+		OmitEmptyWithValue []string `datastore:",omitempty"`
+	}{
+		NonEmptyValue:      []string{"s1", "s2"},
+		OmitEmptyWithValue: []string{"s3", "s4"},
+	})
+
+	testOmitted(expectedPropNamesForSingles, &struct {
+		EmptyValue         float32
+		NonEmptyValue      float32
+		OmitEmptyNoValue   float32 `datastore:",omitempty"`
+		OmitEmptyWithValue float32 `datastore:",omitempty"`
+	}{
+		NonEmptyValue:      1.1,
+		OmitEmptyWithValue: 1.2,
+	})
+
+	testOmitted(expectedPropNamesForSlices, &struct {
+		EmptyValue         []float32
+		NonEmptyValue      []float32
+		OmitEmptyNoValue   []float32 `datastore:",omitempty"`
+		OmitEmptyWithValue []float32 `datastore:",omitempty"`
+	}{
+		NonEmptyValue:      []float32{1.1, 2.2},
+		OmitEmptyWithValue: []float32{3.3, 4.4},
+	})
+
+	testOmitted(expectedPropNamesForSingles, &struct {
+		EmptyValue         time.Time
+		NonEmptyValue      time.Time
+		OmitEmptyNoValue   time.Time `datastore:",omitempty"`
+		OmitEmptyWithValue time.Time `datastore:",omitempty"`
+	}{
+		NonEmptyValue:      now,
+		OmitEmptyWithValue: now,
+	})
+
+	testOmitted(expectedPropNamesForSlices, &struct {
+		EmptyValue         []time.Time
+		NonEmptyValue      []time.Time
+		OmitEmptyNoValue   []time.Time `datastore:",omitempty"`
+		OmitEmptyWithValue []time.Time `datastore:",omitempty"`
+	}{
+		NonEmptyValue:      []time.Time{now, now},
+		OmitEmptyWithValue: []time.Time{now, now},
+	})
+}
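
The omitempty cases above are easier to read with a concrete sketch. The `Article` type below is hypothetical; only the `google.golang.org/appengine/v2/datastore` import is assumed, since `SaveStruct` works on plain structs without a request context.

```
// Article is a hypothetical type; zero-valued fields tagged ",omitempty"
// are skipped when the struct is saved.
type Article struct {
	Title string
	Draft bool     `datastore:",omitempty"`
	Tags  []string `datastore:",omitempty"`
}

func exampleOmitEmpty() ([]datastore.Property, error) {
	// Draft and Tags are empty here, so only a "Title" property is returned.
	return datastore.SaveStruct(&Article{Title: "hello"})
}
```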
diff --git a/v2/datastore/query.go b/v2/datastore/query.go
new file mode 100644
index 0000000..4490fab
--- /dev/null
+++ b/v2/datastore/query.go
@@ -0,0 +1,774 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"context"
+	"encoding/base64"
+	"errors"
+	"fmt"
+	"math"
+	"reflect"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/datastore"
+)
+
+type operator int
+
+const (
+	lessThan operator = iota
+	lessEq
+	equal
+	greaterEq
+	greaterThan
+)
+
+var operatorToProto = map[operator]*pb.Query_Filter_Operator{
+	lessThan:    pb.Query_Filter_LESS_THAN.Enum(),
+	lessEq:      pb.Query_Filter_LESS_THAN_OR_EQUAL.Enum(),
+	equal:       pb.Query_Filter_EQUAL.Enum(),
+	greaterEq:   pb.Query_Filter_GREATER_THAN_OR_EQUAL.Enum(),
+	greaterThan: pb.Query_Filter_GREATER_THAN.Enum(),
+}
+
+// filter is a conditional filter on query results.
+type filter struct {
+	FieldName string
+	Op        operator
+	Value     interface{}
+}
+
+type sortDirection int
+
+const (
+	ascending sortDirection = iota
+	descending
+)
+
+var sortDirectionToProto = map[sortDirection]*pb.Query_Order_Direction{
+	ascending:  pb.Query_Order_ASCENDING.Enum(),
+	descending: pb.Query_Order_DESCENDING.Enum(),
+}
+
+// order is a sort order on query results.
+type order struct {
+	FieldName string
+	Direction sortDirection
+}
+
+// NewQuery creates a new Query for a specific entity kind.
+//
+// An empty kind means to return all entities, including entities created and
+// managed by other App Engine features, and is called a kindless query.
+// Kindless queries cannot include filters or sort orders on property values.
+func NewQuery(kind string) *Query {
+	return &Query{
+		kind:  kind,
+		limit: -1,
+	}
+}
+
+// Query represents a datastore query.
+type Query struct {
+	kind       string
+	ancestor   *Key
+	filter     []filter
+	order      []order
+	projection []string
+
+	distinct   bool
+	distinctOn []string
+	keysOnly   bool
+	eventual   bool
+	limit      int32
+	offset     int32
+	count      int32
+	start      *pb.CompiledCursor
+	end        *pb.CompiledCursor
+
+	err error
+}
+
+func (q *Query) clone() *Query {
+	x := *q
+	// Copy the contents of the slice-typed fields to a new backing store.
+	if len(q.filter) > 0 {
+		x.filter = make([]filter, len(q.filter))
+		copy(x.filter, q.filter)
+	}
+	if len(q.order) > 0 {
+		x.order = make([]order, len(q.order))
+		copy(x.order, q.order)
+	}
+	return &x
+}
+
+// Ancestor returns a derivative query with an ancestor filter.
+// The ancestor should not be nil.
+func (q *Query) Ancestor(ancestor *Key) *Query {
+	q = q.clone()
+	if ancestor == nil {
+		q.err = errors.New("datastore: nil query ancestor")
+		return q
+	}
+	q.ancestor = ancestor
+	return q
+}
+
+// EventualConsistency returns a derivative query that returns eventually
+// consistent results.
+// It only has an effect on ancestor queries.
+func (q *Query) EventualConsistency() *Query {
+	q = q.clone()
+	q.eventual = true
+	return q
+}
+
+// Filter returns a derivative query with a field-based filter.
+// The filterStr argument must be a field name followed by optional space,
+// followed by an operator, one of ">", "<", ">=", "<=", or "=".
+// Fields are compared against the provided value using the operator.
+// Multiple filters are AND'ed together.
+func (q *Query) Filter(filterStr string, value interface{}) *Query {
+	q = q.clone()
+	filterStr = strings.TrimSpace(filterStr)
+	if len(filterStr) < 1 {
+		q.err = errors.New("datastore: invalid filter: " + filterStr)
+		return q
+	}
+	f := filter{
+		FieldName: strings.TrimRight(filterStr, " ><=!"),
+		Value:     value,
+	}
+	switch op := strings.TrimSpace(filterStr[len(f.FieldName):]); op {
+	case "<=":
+		f.Op = lessEq
+	case ">=":
+		f.Op = greaterEq
+	case "<":
+		f.Op = lessThan
+	case ">":
+		f.Op = greaterThan
+	case "=":
+		f.Op = equal
+	default:
+		q.err = fmt.Errorf("datastore: invalid operator %q in filter %q", op, filterStr)
+		return q
+	}
+	q.filter = append(q.filter, f)
+	return q
+}
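
As a quick orientation to the filter grammar the method above parses, here is a minimal usage sketch; it assumes the `google.golang.org/appengine/v2/datastore` import and the Gopher kind used by the tests later in this diff.

```
func exampleFilter() *datastore.Query {
	// Field name, optional space, then one of <, <=, =, >=, >.
	// Multiple Filter calls are ANDed together.
	return datastore.NewQuery("Gopher").
		Filter("Height >", 10).
		Filter("Name =", "George")
}
```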
+
+// Order returns a derivative query with a field-based sort order. Orders are
+// applied in the order they are added. The default order is ascending; to sort
+// in descending order prefix the fieldName with a minus sign (-).
+func (q *Query) Order(fieldName string) *Query {
+	q = q.clone()
+	fieldName = strings.TrimSpace(fieldName)
+	o := order{
+		Direction: ascending,
+		FieldName: fieldName,
+	}
+	if strings.HasPrefix(fieldName, "-") {
+		o.Direction = descending
+		o.FieldName = strings.TrimSpace(fieldName[1:])
+	} else if strings.HasPrefix(fieldName, "+") {
+		q.err = fmt.Errorf("datastore: invalid order: %q", fieldName)
+		return q
+	}
+	if len(o.FieldName) == 0 {
+		q.err = errors.New("datastore: empty order")
+		return q
+	}
+	q.order = append(q.order, o)
+	return q
+}
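
A matching sketch for Order: the default direction is ascending, and a leading minus sign flips it, as the parser above shows.

```
func exampleOrder() *datastore.Query {
	// Sort by Height descending, then by Name ascending.
	return datastore.NewQuery("Gopher").Order("-Height").Order("Name")
}
```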
+
+// Project returns a derivative query that yields only the given fields. It
+// cannot be used with KeysOnly.
+func (q *Query) Project(fieldNames ...string) *Query {
+	q = q.clone()
+	q.projection = append([]string(nil), fieldNames...)
+	return q
+}
+
+// Distinct returns a derivative query that yields de-duplicated entities with
+// respect to the set of projected fields. It is only used for projection
+// queries. Distinct cannot be used with DistinctOn.
+func (q *Query) Distinct() *Query {
+	q = q.clone()
+	q.distinct = true
+	return q
+}
+
+// DistinctOn returns a derivative query that yields de-duplicated entities with
+// respect to the set of the specified fields. It is only used for projection
+// queries. The field list should be a subset of the projected field list.
+// DistinctOn cannot be used with Distinct.
+func (q *Query) DistinctOn(fieldNames ...string) *Query {
+	q = q.clone()
+	q.distinctOn = fieldNames
+	return q
+}
+
+// KeysOnly returns a derivative query that yields only keys, not keys and
+// entities. It cannot be used with projection queries.
+func (q *Query) KeysOnly() *Query {
+	q = q.clone()
+	q.keysOnly = true
+	return q
+}
+
+// Limit returns a derivative query that has a limit on the number of results
+// returned. A negative value means unlimited.
+func (q *Query) Limit(limit int) *Query {
+	q = q.clone()
+	if limit < math.MinInt32 || limit > math.MaxInt32 {
+		q.err = errors.New("datastore: query limit overflow")
+		return q
+	}
+	q.limit = int32(limit)
+	return q
+}
+
+// Offset returns a derivative query that has an offset of how many keys to
+// skip over before returning results. A negative value is invalid.
+func (q *Query) Offset(offset int) *Query {
+	q = q.clone()
+	if offset < 0 {
+		q.err = errors.New("datastore: negative query offset")
+		return q
+	}
+	if offset > math.MaxInt32 {
+		q.err = errors.New("datastore: query offset overflow")
+		return q
+	}
+	q.offset = int32(offset)
+	return q
+}
+
+// BatchSize returns a derivative query to fetch the supplied number of results
+// at once. This value should be greater than zero, and equal to or less than
+// the Limit.
+func (q *Query) BatchSize(size int) *Query {
+	q = q.clone()
+	if size <= 0 || size > math.MaxInt32 {
+		q.err = errors.New("datastore: query batch size overflow")
+		return q
+	}
+	q.count = int32(size)
+	return q
+}
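
Limit, Offset and BatchSize compose the same way. A hedged sketch follows; the numbers are arbitrary, and BatchSize stays at or below the Limit as the comment above requires.

```
func examplePaging() *datastore.Query {
	// Skip the first 20 matches, return at most 10, fetching 5 results per RPC.
	return datastore.NewQuery("Gopher").Offset(20).Limit(10).BatchSize(5)
}
```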
+
+// Start returns a derivative query with the given start point.
+func (q *Query) Start(c Cursor) *Query {
+	q = q.clone()
+	if c.cc == nil {
+		q.err = errors.New("datastore: invalid cursor")
+		return q
+	}
+	q.start = c.cc
+	return q
+}
+
+// End returns a derivative query with the given end point.
+func (q *Query) End(c Cursor) *Query {
+	q = q.clone()
+	if c.cc == nil {
+		q.err = errors.New("datastore: invalid cursor")
+		return q
+	}
+	q.end = c.cc
+	return q
+}
+
+// toProto converts the query to a protocol buffer.
+func (q *Query) toProto(dst *pb.Query, appID string) error {
+	if len(q.projection) != 0 && q.keysOnly {
+		return errors.New("datastore: query cannot both project and be keys-only")
+	}
+	if len(q.distinctOn) != 0 && q.distinct {
+		return errors.New("datastore: query cannot be both distinct and distinct-on")
+	}
+	dst.Reset()
+	dst.App = proto.String(appID)
+	if q.kind != "" {
+		dst.Kind = proto.String(q.kind)
+	}
+	if q.ancestor != nil {
+		dst.Ancestor = keyToProto(appID, q.ancestor)
+		if q.eventual {
+			dst.Strong = proto.Bool(false)
+		}
+	}
+	if q.projection != nil {
+		dst.PropertyName = q.projection
+		if len(q.distinctOn) != 0 {
+			dst.GroupByPropertyName = q.distinctOn
+		}
+		if q.distinct {
+			dst.GroupByPropertyName = q.projection
+		}
+	}
+	if q.keysOnly {
+		dst.KeysOnly = proto.Bool(true)
+		dst.RequirePerfectPlan = proto.Bool(true)
+	}
+	for _, qf := range q.filter {
+		if qf.FieldName == "" {
+			return errors.New("datastore: empty query filter field name")
+		}
+		p, errStr := valueToProto(appID, qf.FieldName, reflect.ValueOf(qf.Value), false)
+		if errStr != "" {
+			return errors.New("datastore: bad query filter value type: " + errStr)
+		}
+		xf := &pb.Query_Filter{
+			Op:       operatorToProto[qf.Op],
+			Property: []*pb.Property{p},
+		}
+		if xf.Op == nil {
+			return errors.New("datastore: unknown query filter operator")
+		}
+		dst.Filter = append(dst.Filter, xf)
+	}
+	for _, qo := range q.order {
+		if qo.FieldName == "" {
+			return errors.New("datastore: empty query order field name")
+		}
+		xo := &pb.Query_Order{
+			Property:  proto.String(qo.FieldName),
+			Direction: sortDirectionToProto[qo.Direction],
+		}
+		if xo.Direction == nil {
+			return errors.New("datastore: unknown query order direction")
+		}
+		dst.Order = append(dst.Order, xo)
+	}
+	if q.limit >= 0 {
+		dst.Limit = proto.Int32(q.limit)
+	}
+	if q.offset != 0 {
+		dst.Offset = proto.Int32(q.offset)
+	}
+	if q.count != 0 {
+		dst.Count = proto.Int32(q.count)
+	}
+	dst.CompiledCursor = q.start
+	dst.EndCompiledCursor = q.end
+	dst.Compile = proto.Bool(true)
+	return nil
+}
+
+// Count returns the number of results for the query.
+//
+// The running time and number of API calls made by Count scale linearly with
+// the sum of the query's offset and limit. Unless the result count is
+// expected to be small, it is best to specify a limit; otherwise Count will
+// continue until it finishes counting or the provided context expires.
+func (q *Query) Count(c context.Context) (int, error) {
+	// Check that the query is well-formed.
+	if q.err != nil {
+		return 0, q.err
+	}
+
+	// Run a copy of the query, with keysOnly true (if we're not a projection,
+	// since the two are incompatible), and an adjusted offset. We also set the
+	// limit to zero, as we don't want any actual entity data, just the number
+	// of skipped results.
+	newQ := q.clone()
+	newQ.keysOnly = len(newQ.projection) == 0
+	newQ.limit = 0
+	if q.limit < 0 {
+		// If the original query was unlimited, set the new query's offset to maximum.
+		newQ.offset = math.MaxInt32
+	} else {
+		newQ.offset = q.offset + q.limit
+		if newQ.offset < 0 {
+			// Do the best we can, in the presence of overflow.
+			newQ.offset = math.MaxInt32
+		}
+	}
+	req := &pb.Query{}
+	if err := newQ.toProto(req, internal.FullyQualifiedAppID(c)); err != nil {
+		return 0, err
+	}
+	res := &pb.QueryResult{}
+	if err := internal.Call(c, "datastore_v3", "RunQuery", req, res); err != nil {
+		return 0, err
+	}
+
+	// n is the count we will return. For example, suppose that our original
+	// query had an offset of 4 and a limit of 2008: the count will be 2008,
+	// provided that there are at least 2012 matching entities. However, the
+	// RPCs will only skip 1000 results at a time. The RPC sequence is:
+	//   call RunQuery with (offset, limit) = (2012, 0)  // 2012 == newQ.offset
+	//   response has (skippedResults, moreResults) = (1000, true)
+	//   n += 1000  // n == 1000
+	//   call Next     with (offset, limit) = (1012, 0)  // 1012 == newQ.offset - n
+	//   response has (skippedResults, moreResults) = (1000, true)
+	//   n += 1000  // n == 2000
+	//   call Next     with (offset, limit) = (12, 0)    // 12 == newQ.offset - n
+	//   response has (skippedResults, moreResults) = (12, false)
+	//   n += 12    // n == 2012
+	//   // exit the loop
+	//   n -= 4     // n == 2008
+	var n int32
+	for {
+		// The QueryResult should have no actual entity data, just skipped results.
+		if len(res.Result) != 0 {
+			return 0, errors.New("datastore: internal error: Count request returned too much data")
+		}
+		n += res.GetSkippedResults()
+		if !res.GetMoreResults() {
+			break
+		}
+		if err := callNext(c, res, newQ.offset-n, q.count); err != nil {
+			return 0, err
+		}
+	}
+	n -= q.offset
+	if n < 0 {
+		// If the offset was greater than the number of matching entities,
+		// return 0 instead of negative.
+		n = 0
+	}
+	return int(n), nil
+}
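
A small usage sketch for Count; it assumes an App Engine request context, and the Limit keeps the running time bounded as the doc comment recommends.

```
func exampleCount(ctx context.Context) (int, error) {
	return datastore.NewQuery("Gopher").Limit(1000).Count(ctx)
}
```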
+
+// callNext issues a datastore_v3/Next RPC to advance a cursor, such as that
+// returned by a query with more results.
+func callNext(c context.Context, res *pb.QueryResult, offset, count int32) error {
+	if res.Cursor == nil {
+		return errors.New("datastore: internal error: server did not return a cursor")
+	}
+	req := &pb.NextRequest{
+		Cursor: res.Cursor,
+	}
+	if count >= 0 {
+		req.Count = proto.Int32(count)
+	}
+	if offset != 0 {
+		req.Offset = proto.Int32(offset)
+	}
+	if res.CompiledCursor != nil {
+		req.Compile = proto.Bool(true)
+	}
+	res.Reset()
+	return internal.Call(c, "datastore_v3", "Next", req, res)
+}
+
+// GetAll runs the query in the given context and returns all keys that match
+// that query, as well as appending the values to dst.
+//
+// dst must have type *[]S or *[]*S or *[]P, for some struct type S or some non-
+// interface, non-pointer type P such that P or *P implements PropertyLoadSaver.
+//
+// As a special case, *PropertyList is an invalid type for dst, even though a
+// PropertyList is a slice of structs. It is treated as invalid to avoid being
+// mistakenly passed when *[]PropertyList was intended.
+//
+// The keys returned by GetAll will be in a 1-1 correspondence with the entities
+// added to dst.
+//
+// If q is a “keys-only” query, GetAll ignores dst and only returns the keys.
+//
+// The running time and number of API calls made by GetAll scale linearly with
+// the sum of the query's offset and limit. Unless the result count is
+// expected to be small, it is best to specify a limit; otherwise GetAll will
+// continue until it finishes collecting results or the provided context
+// expires.
+func (q *Query) GetAll(c context.Context, dst interface{}) ([]*Key, error) {
+	var (
+		dv               reflect.Value
+		mat              multiArgType
+		elemType         reflect.Type
+		errFieldMismatch error
+	)
+	if !q.keysOnly {
+		dv = reflect.ValueOf(dst)
+		if dv.Kind() != reflect.Ptr || dv.IsNil() {
+			return nil, ErrInvalidEntityType
+		}
+		dv = dv.Elem()
+		mat, elemType = checkMultiArg(dv)
+		if mat == multiArgTypeInvalid || mat == multiArgTypeInterface {
+			return nil, ErrInvalidEntityType
+		}
+	}
+
+	var keys []*Key
+	for t := q.Run(c); ; {
+		k, e, err := t.next()
+		if err == Done {
+			break
+		}
+		if err != nil {
+			return keys, err
+		}
+		if !q.keysOnly {
+			ev := reflect.New(elemType)
+			if elemType.Kind() == reflect.Map {
+				// This is a special case. The zero values of a map type are
+				// not immediately useful; they have to be make'd.
+				//
+				// Funcs and channels are similar, in that a zero value is not useful,
+				// but even a freshly make'd channel isn't useful: there's no fixed
+				// channel buffer size that is always going to be large enough, and
+				// there's no goroutine to drain the other end. Theoretically, these
+				// types could be supported, for example by sniffing for a constructor
+				// method or requiring prior registration, but for now it's not a
+				// frequent enough concern to be worth it. Programmers can work around
+				// it by explicitly using Iterator.Next instead of the Query.GetAll
+				// convenience method.
+				x := reflect.MakeMap(elemType)
+				ev.Elem().Set(x)
+			}
+			if err = loadEntity(ev.Interface(), e); err != nil {
+				if _, ok := err.(*ErrFieldMismatch); ok {
+					// We continue loading entities even in the face of field mismatch errors.
+					// If we encounter any other error, that other error is returned. Otherwise,
+					// an ErrFieldMismatch is returned.
+					errFieldMismatch = err
+				} else {
+					return keys, err
+				}
+			}
+			if mat != multiArgTypeStructPtr {
+				ev = ev.Elem()
+			}
+			dv.Set(reflect.Append(dv, ev))
+		}
+		keys = append(keys, k)
+	}
+	return keys, errFieldMismatch
+}
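
A minimal GetAll sketch, assuming an App Engine request context; the Gopher struct mirrors the kind used by the tests later in this diff, and the destination must be *[]S, *[]*S or *[]P as described above.

```
// Gopher mirrors the struct kind used in query_test.go below.
type Gopher struct {
	Name   string
	Height int
}

func exampleGetAll(ctx context.Context) ([]*datastore.Key, []Gopher, error) {
	var gophers []Gopher
	keys, err := datastore.NewQuery("Gopher").Limit(100).GetAll(ctx, &gophers)
	return keys, gophers, err
}
```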
+
+// Run runs the query in the given context.
+func (q *Query) Run(c context.Context) *Iterator {
+	if q.err != nil {
+		return &Iterator{err: q.err}
+	}
+	t := &Iterator{
+		c:      c,
+		limit:  q.limit,
+		count:  q.count,
+		q:      q,
+		prevCC: q.start,
+	}
+	var req pb.Query
+	if err := q.toProto(&req, internal.FullyQualifiedAppID(c)); err != nil {
+		t.err = err
+		return t
+	}
+	if err := internal.Call(c, "datastore_v3", "RunQuery", &req, &t.res); err != nil {
+		t.err = err
+		return t
+	}
+	offset := q.offset - t.res.GetSkippedResults()
+	var count int32
+	if t.count > 0 && (t.limit < 0 || t.count < t.limit) {
+		count = t.count
+	} else {
+		count = t.limit
+	}
+	for offset > 0 && t.res.GetMoreResults() {
+		t.prevCC = t.res.CompiledCursor
+		if err := callNext(t.c, &t.res, offset, count); err != nil {
+			t.err = err
+			break
+		}
+		skip := t.res.GetSkippedResults()
+		if skip < 0 {
+			t.err = errors.New("datastore: internal error: negative number of skipped_results")
+			break
+		}
+		offset -= skip
+	}
+	if offset < 0 {
+		t.err = errors.New("datastore: internal error: query offset was overshot")
+	}
+	return t
+}
+
+// Iterator is the result of running a query.
+type Iterator struct {
+	c   context.Context
+	err error
+	// res is the result of the most recent RunQuery or Next API call.
+	res pb.QueryResult
+	// i is how many elements of res.Result we have iterated over.
+	i int
+	// limit is the limit on the number of results this iterator should return.
+	// A negative value means unlimited.
+	limit int32
+	// count is the number of results this iterator should fetch at once. This
+	// should be equal to or greater than zero.
+	count int32
+	// q is the original query which yielded this iterator.
+	q *Query
+	// prevCC is the compiled cursor that marks the end of the previous batch
+	// of results.
+	prevCC *pb.CompiledCursor
+}
+
+// Done is returned when a query iteration has completed.
+var Done = errors.New("datastore: query has no more results")
+
+// Next returns the key of the next result. When there are no more results,
+// Done is returned as the error.
+//
+// If the query is not keys only and dst is non-nil, it also loads the entity
+// stored for that key into the struct pointer or PropertyLoadSaver dst, with
+// the same semantics and possible errors as for the Get function.
+func (t *Iterator) Next(dst interface{}) (*Key, error) {
+	k, e, err := t.next()
+	if err != nil {
+		return nil, err
+	}
+	if dst != nil && !t.q.keysOnly {
+		err = loadEntity(dst, e)
+	}
+	return k, err
+}
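
The usual iteration loop over Run and Next, sketched with the Gopher type from the GetAll example above; Done is the sentinel that ends the loop.

```
func exampleIterate(ctx context.Context) error {
	t := datastore.NewQuery("Gopher").Run(ctx)
	for {
		var g Gopher
		key, err := t.Next(&g)
		if err == datastore.Done {
			return nil // no more results
		}
		if err != nil {
			return err
		}
		_ = key // use key and g here
	}
}
```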
+
+func (t *Iterator) next() (*Key, *pb.EntityProto, error) {
+	if t.err != nil {
+		return nil, nil, t.err
+	}
+
+	// Issue datastore_v3/Next RPCs as necessary.
+	for t.i == len(t.res.Result) {
+		if !t.res.GetMoreResults() {
+			t.err = Done
+			return nil, nil, t.err
+		}
+		t.prevCC = t.res.CompiledCursor
+		var count int32
+		if t.count > 0 && (t.limit < 0 || t.count < t.limit) {
+			count = t.count
+		} else {
+			count = t.limit
+		}
+		if err := callNext(t.c, &t.res, 0, count); err != nil {
+			t.err = err
+			return nil, nil, t.err
+		}
+		if t.res.GetSkippedResults() != 0 {
+			t.err = errors.New("datastore: internal error: iterator has skipped results")
+			return nil, nil, t.err
+		}
+		t.i = 0
+		if t.limit >= 0 {
+			t.limit -= int32(len(t.res.Result))
+			if t.limit < 0 {
+				t.err = errors.New("datastore: internal error: query returned more results than the limit")
+				return nil, nil, t.err
+			}
+		}
+	}
+
+	// Extract the key from the t.i'th element of t.res.Result.
+	e := t.res.Result[t.i]
+	t.i++
+	if e.Key == nil {
+		return nil, nil, errors.New("datastore: internal error: server did not return a key")
+	}
+	k, err := protoToKey(e.Key)
+	if err != nil || k.Incomplete() {
+		return nil, nil, errors.New("datastore: internal error: server returned an invalid key")
+	}
+	return k, e, nil
+}
+
+// Cursor returns a cursor for the iterator's current location.
+func (t *Iterator) Cursor() (Cursor, error) {
+	if t.err != nil && t.err != Done {
+		return Cursor{}, t.err
+	}
+	// If we are at either end of the current batch of results,
+	// return the compiled cursor at that end.
+	skipped := t.res.GetSkippedResults()
+	if t.i == 0 && skipped == 0 {
+		if t.prevCC == nil {
+			// A nil pointer (of type *pb.CompiledCursor) means no constraint:
+			// passing it as the end cursor of a new query means unlimited results
+			// (glossing over the integer limit parameter for now).
+			// A non-nil pointer to an empty pb.CompiledCursor means the start:
+			// passing it as the end cursor of a new query means 0 results.
+			// If prevCC was nil, then the original query had no start cursor, but
+			// Iterator.Cursor should return "the start" instead of unlimited.
+			return Cursor{&zeroCC}, nil
+		}
+		return Cursor{t.prevCC}, nil
+	}
+	if t.i == len(t.res.Result) {
+		return Cursor{t.res.CompiledCursor}, nil
+	}
+	// Otherwise, re-run the query offset to this iterator's position, starting from
+	// the most recent compiled cursor. This is done on a best-effort basis, as it
+	// is racy; if a concurrent process has added or removed entities, then the
+	// cursor returned may be inconsistent.
+	q := t.q.clone()
+	q.start = t.prevCC
+	q.offset = skipped + int32(t.i)
+	q.limit = 0
+	q.keysOnly = len(q.projection) == 0
+	t1 := q.Run(t.c)
+	_, _, err := t1.next()
+	if err != Done {
+		if err == nil {
+			err = fmt.Errorf("datastore: internal error: zero-limit query did not have zero results")
+		}
+		return Cursor{}, err
+	}
+	return Cursor{t1.res.CompiledCursor}, nil
+}
+
+var zeroCC pb.CompiledCursor
+
+// Cursor is an iterator's position. It can be converted to and from an opaque
+// string. A cursor can be used from different HTTP requests, but only with a
+// query with the same kind, ancestor, filter and order constraints.
+type Cursor struct {
+	cc *pb.CompiledCursor
+}
+
+// String returns a base-64 string representation of a cursor.
+func (c Cursor) String() string {
+	if c.cc == nil {
+		return ""
+	}
+	b, err := proto.Marshal(c.cc)
+	if err != nil {
+		// The only way to construct a Cursor with a non-nil cc field is to
+		// unmarshal from the byte representation. We panic if the unmarshal
+		// succeeds but the marshaling of the unchanged protobuf value fails.
+		panic(fmt.Sprintf("datastore: internal error: malformed cursor: %v", err))
+	}
+	return strings.TrimRight(base64.URLEncoding.EncodeToString(b), "=")
+}
+
+// DecodeCursor decodes a cursor from its base-64 string representation.
+func DecodeCursor(s string) (Cursor, error) {
+	if s == "" {
+		return Cursor{&zeroCC}, nil
+	}
+	if n := len(s) % 4; n != 0 {
+		s += strings.Repeat("=", 4-n)
+	}
+	b, err := base64.URLEncoding.DecodeString(s)
+	if err != nil {
+		return Cursor{}, err
+	}
+	cc := &pb.CompiledCursor{}
+	if err := proto.Unmarshal(b, cc); err != nil {
+		return Cursor{}, err
+	}
+	return Cursor{cc}, nil
+}
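
Putting Cursor, String and DecodeCursor together, a hedged pagination sketch: the encoded cursor string would typically round-trip through an HTTP request, and the Gopher type is the one sketched earlier.

```
func examplePage(ctx context.Context, encoded string) (string, error) {
	q := datastore.NewQuery("Gopher").Limit(10)
	if encoded != "" {
		cursor, err := datastore.DecodeCursor(encoded)
		if err != nil {
			return "", err
		}
		q = q.Start(cursor)
	}
	t := q.Run(ctx)
	for {
		var g Gopher
		_, err := t.Next(&g)
		if err == datastore.Done {
			break
		}
		if err != nil {
			return "", err
		}
	}
	next, err := t.Cursor()
	if err != nil {
		return "", err
	}
	return next.String(), nil // hand back to the client for the next page
}
```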
diff --git a/v2/datastore/query_test.go b/v2/datastore/query_test.go
new file mode 100644
index 0000000..be0e0e7
--- /dev/null
+++ b/v2/datastore/query_test.go
@@ -0,0 +1,592 @@
+// Copyright 2011 Google Inc. All Rights Reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"errors"
+	"fmt"
+	"reflect"
+	"strings"
+	"testing"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal"
+	"google.golang.org/appengine/v2/internal/aetesting"
+	pb "google.golang.org/appengine/v2/internal/datastore"
+)
+
+var (
+	path1 = &pb.Path{
+		Element: []*pb.Path_Element{
+			{
+				Type: proto.String("Gopher"),
+				Id:   proto.Int64(6),
+			},
+		},
+	}
+	path2 = &pb.Path{
+		Element: []*pb.Path_Element{
+			{
+				Type: proto.String("Gopher"),
+				Id:   proto.Int64(6),
+			},
+			{
+				Type: proto.String("Gopher"),
+				Id:   proto.Int64(8),
+			},
+		},
+	}
+)
+
+func fakeRunQuery(in *pb.Query, out *pb.QueryResult) error {
+	expectedIn := &pb.Query{
+		App:     proto.String("dev~fake-app"),
+		Kind:    proto.String("Gopher"),
+		Compile: proto.Bool(true),
+	}
+	if !proto.Equal(in, expectedIn) {
+		return fmt.Errorf("unsupported argument: got %v want %v", in, expectedIn)
+	}
+	*out = pb.QueryResult{
+		Result: []*pb.EntityProto{
+			{
+				Key: &pb.Reference{
+					App:  proto.String("s~test-app"),
+					Path: path1,
+				},
+				EntityGroup: path1,
+				Property: []*pb.Property{
+					{
+						Meaning: pb.Property_TEXT.Enum(),
+						Name:    proto.String("Name"),
+						Value: &pb.PropertyValue{
+							StringValue: proto.String("George"),
+						},
+					},
+					{
+						Name: proto.String("Height"),
+						Value: &pb.PropertyValue{
+							Int64Value: proto.Int64(32),
+						},
+					},
+				},
+			},
+			{
+				Key: &pb.Reference{
+					App:  proto.String("s~test-app"),
+					Path: path2,
+				},
+				EntityGroup: path1, // ancestor is George
+				Property: []*pb.Property{
+					{
+						Meaning: pb.Property_TEXT.Enum(),
+						Name:    proto.String("Name"),
+						Value: &pb.PropertyValue{
+							StringValue: proto.String("Rufus"),
+						},
+					},
+					// No height for Rufus.
+				},
+			},
+		},
+		MoreResults: proto.Bool(false),
+	}
+	return nil
+}
+
+type StructThatImplementsPLS struct{}
+
+func (StructThatImplementsPLS) Load(p []Property) error   { return nil }
+func (StructThatImplementsPLS) Save() ([]Property, error) { return nil, nil }
+
+var _ PropertyLoadSaver = StructThatImplementsPLS{}
+
+type StructPtrThatImplementsPLS struct{}
+
+func (*StructPtrThatImplementsPLS) Load(p []Property) error   { return nil }
+func (*StructPtrThatImplementsPLS) Save() ([]Property, error) { return nil, nil }
+
+var _ PropertyLoadSaver = &StructPtrThatImplementsPLS{}
+
+type PropertyMap map[string]Property
+
+func (m PropertyMap) Load(props []Property) error {
+	for _, p := range props {
+		if p.Multiple {
+			return errors.New("PropertyMap does not support multiple properties")
+		}
+		m[p.Name] = p
+	}
+	return nil
+}
+
+func (m PropertyMap) Save() ([]Property, error) {
+	props := make([]Property, 0, len(m))
+	for _, p := range m {
+		if p.Multiple {
+			return nil, errors.New("PropertyMap does not support multiple properties")
+		}
+		props = append(props, p)
+	}
+	return props, nil
+}
+
+var _ PropertyLoadSaver = PropertyMap{}
+
+type Gopher struct {
+	Name   string
+	Height int
+}
+
+// typeOfEmptyInterface is the type of interface{}, but we can't use
+// reflect.TypeOf((interface{})(nil)) directly because TypeOf takes an
+// interface{}.
+var typeOfEmptyInterface = reflect.TypeOf((*interface{})(nil)).Elem()
+
+func TestCheckMultiArg(t *testing.T) {
+	testCases := []struct {
+		v        interface{}
+		mat      multiArgType
+		elemType reflect.Type
+	}{
+		// Invalid cases.
+		{nil, multiArgTypeInvalid, nil},
+		{Gopher{}, multiArgTypeInvalid, nil},
+		{&Gopher{}, multiArgTypeInvalid, nil},
+		{PropertyList{}, multiArgTypeInvalid, nil}, // This is a special case.
+		{PropertyMap{}, multiArgTypeInvalid, nil},
+		{[]*PropertyList(nil), multiArgTypeInvalid, nil},
+		{[]*PropertyMap(nil), multiArgTypeInvalid, nil},
+		{[]**Gopher(nil), multiArgTypeInvalid, nil},
+		{[]*interface{}(nil), multiArgTypeInvalid, nil},
+		// Valid cases.
+		{
+			[]PropertyList(nil),
+			multiArgTypePropertyLoadSaver,
+			reflect.TypeOf(PropertyList{}),
+		},
+		{
+			[]PropertyMap(nil),
+			multiArgTypePropertyLoadSaver,
+			reflect.TypeOf(PropertyMap{}),
+		},
+		{
+			[]StructThatImplementsPLS(nil),
+			multiArgTypePropertyLoadSaver,
+			reflect.TypeOf(StructThatImplementsPLS{}),
+		},
+		{
+			[]StructPtrThatImplementsPLS(nil),
+			multiArgTypePropertyLoadSaver,
+			reflect.TypeOf(StructPtrThatImplementsPLS{}),
+		},
+		{
+			[]Gopher(nil),
+			multiArgTypeStruct,
+			reflect.TypeOf(Gopher{}),
+		},
+		{
+			[]*Gopher(nil),
+			multiArgTypeStructPtr,
+			reflect.TypeOf(Gopher{}),
+		},
+		{
+			[]interface{}(nil),
+			multiArgTypeInterface,
+			typeOfEmptyInterface,
+		},
+	}
+	for _, tc := range testCases {
+		mat, elemType := checkMultiArg(reflect.ValueOf(tc.v))
+		if mat != tc.mat || elemType != tc.elemType {
+			t.Errorf("checkMultiArg(%T): got %v, %v want %v, %v",
+				tc.v, mat, elemType, tc.mat, tc.elemType)
+		}
+	}
+}
+
+func TestSimpleQuery(t *testing.T) {
+	struct1 := Gopher{Name: "George", Height: 32}
+	struct2 := Gopher{Name: "Rufus"}
+	pList1 := PropertyList{
+		{
+			Name:  "Name",
+			Value: "George",
+		},
+		{
+			Name:  "Height",
+			Value: int64(32),
+		},
+	}
+	pList2 := PropertyList{
+		{
+			Name:  "Name",
+			Value: "Rufus",
+		},
+	}
+	pMap1 := PropertyMap{
+		"Name": Property{
+			Name:  "Name",
+			Value: "George",
+		},
+		"Height": Property{
+			Name:  "Height",
+			Value: int64(32),
+		},
+	}
+	pMap2 := PropertyMap{
+		"Name": Property{
+			Name:  "Name",
+			Value: "Rufus",
+		},
+	}
+
+	testCases := []struct {
+		dst  interface{}
+		want interface{}
+	}{
+		// The destination must have type *[]P, *[]S or *[]*S, for some non-interface
+		// type P such that *P implements PropertyLoadSaver, or for some struct type S.
+		{new([]Gopher), &[]Gopher{struct1, struct2}},
+		{new([]*Gopher), &[]*Gopher{&struct1, &struct2}},
+		{new([]PropertyList), &[]PropertyList{pList1, pList2}},
+		{new([]PropertyMap), &[]PropertyMap{pMap1, pMap2}},
+
+		// Any other destination type is invalid.
+		{0, nil},
+		{Gopher{}, nil},
+		{PropertyList{}, nil},
+		{PropertyMap{}, nil},
+		{[]int{}, nil},
+		{[]Gopher{}, nil},
+		{[]PropertyList{}, nil},
+		{new(int), nil},
+		{new(Gopher), nil},
+		{new(PropertyList), nil}, // This is a special case.
+		{new(PropertyMap), nil},
+		{new([]int), nil},
+		{new([]map[int]int), nil},
+		{new([]map[string]Property), nil},
+		{new([]map[string]interface{}), nil},
+		{new([]*int), nil},
+		{new([]*map[int]int), nil},
+		{new([]*map[string]Property), nil},
+		{new([]*map[string]interface{}), nil},
+		{new([]**Gopher), nil},
+		{new([]*PropertyList), nil},
+		{new([]*PropertyMap), nil},
+	}
+	for _, tc := range testCases {
+		nCall := 0
+		c := aetesting.FakeSingleContext(t, "datastore_v3", "RunQuery", func(in *pb.Query, out *pb.QueryResult) error {
+			nCall++
+			return fakeRunQuery(in, out)
+		})
+		c = internal.WithAppIDOverride(c, "dev~fake-app")
+
+		var (
+			expectedErr   error
+			expectedNCall int
+		)
+		if tc.want == nil {
+			expectedErr = ErrInvalidEntityType
+		} else {
+			expectedNCall = 1
+		}
+		keys, err := NewQuery("Gopher").GetAll(c, tc.dst)
+		if err != expectedErr {
+			t.Errorf("dst type %T: got error [%v], want [%v]", tc.dst, err, expectedErr)
+			continue
+		}
+		if nCall != expectedNCall {
+			t.Errorf("dst type %T: Context.Call was called an incorrect number of times: got %d want %d", tc.dst, nCall, expectedNCall)
+			continue
+		}
+		if err != nil {
+			continue
+		}
+
+		key1 := NewKey(c, "Gopher", "", 6, nil)
+		expectedKeys := []*Key{
+			key1,
+			NewKey(c, "Gopher", "", 8, key1),
+		}
+		if l1, l2 := len(keys), len(expectedKeys); l1 != l2 {
+			t.Errorf("dst type %T: got %d keys, want %d keys", tc.dst, l1, l2)
+			continue
+		}
+		for i, key := range keys {
+			if key.AppID() != "s~test-app" {
+				t.Errorf(`dst type %T: Key #%d's AppID = %q, want "s~test-app"`, tc.dst, i, key.AppID())
+				continue
+			}
+			if !keysEqual(key, expectedKeys[i]) {
+				t.Errorf("dst type %T: got key #%d %v, want %v", tc.dst, i, key, expectedKeys[i])
+				continue
+			}
+		}
+
+		if !reflect.DeepEqual(tc.dst, tc.want) {
+			t.Errorf("dst type %T: Entities got %+v, want %+v", tc.dst, tc.dst, tc.want)
+			continue
+		}
+	}
+}
+
+// keysEqual is like (*Key).Equal, but ignores the App ID.
+func keysEqual(a, b *Key) bool {
+	for a != nil && b != nil {
+		if a.Kind() != b.Kind() || a.StringID() != b.StringID() || a.IntID() != b.IntID() {
+			return false
+		}
+		a, b = a.Parent(), b.Parent()
+	}
+	return a == b
+}
+
+func TestQueriesAreImmutable(t *testing.T) {
+	// Test that deriving q2 from q1 does not modify q1.
+	q0 := NewQuery("foo")
+	q1 := NewQuery("foo")
+	q2 := q1.Offset(2)
+	if !reflect.DeepEqual(q0, q1) {
+		t.Errorf("q0 and q1 were not equal")
+	}
+	if reflect.DeepEqual(q1, q2) {
+		t.Errorf("q1 and q2 were equal")
+	}
+
+	// Test that deriving from q4 twice does not conflict, even though
+	// q4 has a long list of order clauses. This tests that the arrays
+	// backed by a query's slice of orders are not shared.
+	f := func() *Query {
+		q := NewQuery("bar")
+		// 47 is an ugly number that is unlikely to be near a re-allocation
+		// point in repeated append calls. For example, it's not near a power
+		// of 2 or a multiple of 10.
+		for i := 0; i < 47; i++ {
+			q = q.Order(fmt.Sprintf("x%d", i))
+		}
+		return q
+	}
+	q3 := f().Order("y")
+	q4 := f()
+	q5 := q4.Order("y")
+	q6 := q4.Order("z")
+	if !reflect.DeepEqual(q3, q5) {
+		t.Errorf("q3 and q5 were not equal")
+	}
+	if reflect.DeepEqual(q5, q6) {
+		t.Errorf("q5 and q6 were equal")
+	}
+}
+
+func TestFilterParser(t *testing.T) {
+	testCases := []struct {
+		filterStr     string
+		wantOK        bool
+		wantFieldName string
+		wantOp        operator
+	}{
+		// Supported ops.
+		{"x<", true, "x", lessThan},
+		{"x <", true, "x", lessThan},
+		{"x  <", true, "x", lessThan},
+		{"   x   <  ", true, "x", lessThan},
+		{"x <=", true, "x", lessEq},
+		{"x =", true, "x", equal},
+		{"x >=", true, "x", greaterEq},
+		{"x >", true, "x", greaterThan},
+		{"in >", true, "in", greaterThan},
+		{"in>", true, "in", greaterThan},
+		// Valid but (currently) unsupported ops.
+		{"x!=", false, "", 0},
+		{"x !=", false, "", 0},
+		{" x  !=  ", false, "", 0},
+		{"x IN", false, "", 0},
+		{"x in", false, "", 0},
+		// Invalid ops.
+		{"x EQ", false, "", 0},
+		{"x lt", false, "", 0},
+		{"x <>", false, "", 0},
+		{"x >>", false, "", 0},
+		{"x ==", false, "", 0},
+		{"x =<", false, "", 0},
+		{"x =>", false, "", 0},
+		{"x !", false, "", 0},
+		{"x ", false, "", 0},
+		{"x", false, "", 0},
+	}
+	for _, tc := range testCases {
+		q := NewQuery("foo").Filter(tc.filterStr, 42)
+		if ok := q.err == nil; ok != tc.wantOK {
+			t.Errorf("%q: ok=%t, want %t", tc.filterStr, ok, tc.wantOK)
+			continue
+		}
+		if !tc.wantOK {
+			continue
+		}
+		if len(q.filter) != 1 {
+			t.Errorf("%q: len=%d, want %d", tc.filterStr, len(q.filter), 1)
+			continue
+		}
+		got, want := q.filter[0], filter{tc.wantFieldName, tc.wantOp, 42}
+		if got != want {
+			t.Errorf("%q: got %v, want %v", tc.filterStr, got, want)
+			continue
+		}
+	}
+}
+
+func TestQueryToProto(t *testing.T) {
+	// The context is required to make Keys for the test cases.
+	var got *pb.Query
+	NoErr := errors.New("No error")
+	c := aetesting.FakeSingleContext(t, "datastore_v3", "RunQuery", func(in *pb.Query, out *pb.QueryResult) error {
+		got = in
+		return NoErr // return a non-nil error so Run doesn't keep going.
+	})
+	c = internal.WithAppIDOverride(c, "dev~fake-app")
+
+	testCases := []struct {
+		desc  string
+		query *Query
+		want  *pb.Query
+		err   string
+	}{
+		{
+			desc:  "empty",
+			query: NewQuery(""),
+			want:  &pb.Query{},
+		},
+		{
+			desc:  "standard query",
+			query: NewQuery("kind").Order("-I").Filter("I >", 17).Filter("U =", "Dave").Limit(7).Offset(42).BatchSize(5),
+			want: &pb.Query{
+				Kind: proto.String("kind"),
+				Filter: []*pb.Query_Filter{
+					{
+						Op: pb.Query_Filter_GREATER_THAN.Enum(),
+						Property: []*pb.Property{
+							{
+								Name:     proto.String("I"),
+								Value:    &pb.PropertyValue{Int64Value: proto.Int64(17)},
+								Multiple: proto.Bool(false),
+							},
+						},
+					},
+					{
+						Op: pb.Query_Filter_EQUAL.Enum(),
+						Property: []*pb.Property{
+							{
+								Name:     proto.String("U"),
+								Value:    &pb.PropertyValue{StringValue: proto.String("Dave")},
+								Multiple: proto.Bool(false),
+							},
+						},
+					},
+				},
+				Order: []*pb.Query_Order{
+					{
+						Property:  proto.String("I"),
+						Direction: pb.Query_Order_DESCENDING.Enum(),
+					},
+				},
+				Limit:  proto.Int32(7),
+				Offset: proto.Int32(42),
+				Count:  proto.Int32(5),
+			},
+		},
+		{
+			desc:  "ancestor",
+			query: NewQuery("").Ancestor(NewKey(c, "kind", "Mummy", 0, nil)),
+			want: &pb.Query{
+				Ancestor: &pb.Reference{
+					App: proto.String("dev~fake-app"),
+					Path: &pb.Path{
+						Element: []*pb.Path_Element{{Type: proto.String("kind"), Name: proto.String("Mummy")}},
+					},
+				},
+			},
+		},
+		{
+			desc:  "projection",
+			query: NewQuery("").Project("A", "B"),
+			want: &pb.Query{
+				PropertyName: []string{"A", "B"},
+			},
+		},
+		{
+			desc:  "projection with distinct",
+			query: NewQuery("").Project("A", "B").Distinct(),
+			want: &pb.Query{
+				PropertyName:        []string{"A", "B"},
+				GroupByPropertyName: []string{"A", "B"},
+			},
+		},
+		{
+			desc:  "distinct on",
+			query: NewQuery("").Project("A", "B").DistinctOn("A"),
+			want: &pb.Query{
+				PropertyName:        []string{"A", "B"},
+				GroupByPropertyName: []string{"A"},
+			},
+		},
+		{
+			desc:  "keys only",
+			query: NewQuery("").KeysOnly(),
+			want: &pb.Query{
+				KeysOnly:           proto.Bool(true),
+				RequirePerfectPlan: proto.Bool(true),
+			},
+		},
+		{
+			desc:  "empty filter",
+			query: NewQuery("kind").Filter("=", 17),
+			err:   "empty query filter field nam",
+		},
+		{
+			desc:  "bad filter type",
+			query: NewQuery("kind").Filter("M =", map[string]bool{}),
+			err:   "bad query filter value type",
+		},
+		{
+			desc:  "bad filter operator",
+			query: NewQuery("kind").Filter("I <<=", 17),
+			err:   `invalid operator "<<=" in filter "I <<="`,
+		},
+		{
+			desc:  "empty order",
+			query: NewQuery("kind").Order(""),
+			err:   "empty order",
+		},
+		{
+			desc:  "bad order direction",
+			query: NewQuery("kind").Order("+I"),
+			err:   `invalid order: "+I`,
+		},
+	}
+
+	for _, tt := range testCases {
+		got = nil
+		if _, err := tt.query.Run(c).Next(nil); err != NoErr {
+			if tt.err == "" || !strings.Contains(err.Error(), tt.err) {
+				t.Errorf("%s: error %v, want %q", tt.desc, err, tt.err)
+			}
+			continue
+		}
+		if tt.err != "" {
+			t.Errorf("%s: no error, want %q", tt.desc, tt.err)
+			continue
+		}
+		// Fields that are common to all protos.
+		tt.want.App = proto.String("dev~fake-app")
+		tt.want.Compile = proto.Bool(true)
+		if !proto.Equal(got, tt.want) {
+			t.Errorf("%s:\ngot  %v\nwant %v", tt.desc, got, tt.want)
+		}
+	}
+}
diff --git a/v2/datastore/save.go b/v2/datastore/save.go
new file mode 100644
index 0000000..ce3ca3d
--- /dev/null
+++ b/v2/datastore/save.go
@@ -0,0 +1,333 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"errors"
+	"fmt"
+	"math"
+	"reflect"
+	"time"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2"
+	pb "google.golang.org/appengine/v2/internal/datastore"
+)
+
+func toUnixMicro(t time.Time) int64 {
+	// We cannot use t.UnixNano() / 1e3 because we want to handle times more than
+	// 2^63 nanoseconds (which is about 292 years) away from 1970, and those cannot
+	// be represented in the numerator of a single int64 divide.
+	return t.Unix()*1e6 + int64(t.Nanosecond()/1e3)
+}
+
+func fromUnixMicro(t int64) time.Time {
+	return time.Unix(t/1e6, (t%1e6)*1e3).UTC()
+}
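
Since toUnixMicro and fromUnixMicro are unexported, this in-package sketch only illustrates the quantization they impose (the same property time_test.go checks later in this diff): sub-microsecond precision is dropped.

```
func exampleMicroRoundTrip() time.Time {
	// 123456789ns comes back as 123456000ns after the round trip.
	t0 := time.Date(2009, 11, 10, 23, 0, 0, 123456789, time.UTC)
	return fromUnixMicro(toUnixMicro(t0))
}
```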
+
+var (
+	minTime = time.Unix(int64(math.MinInt64)/1e6, (int64(math.MinInt64)%1e6)*1e3)
+	maxTime = time.Unix(int64(math.MaxInt64)/1e6, (int64(math.MaxInt64)%1e6)*1e3)
+)
+
+// valueToProto converts a named value to a newly allocated Property.
+// The returned error string is empty on success.
+func valueToProto(defaultAppID, name string, v reflect.Value, multiple bool) (p *pb.Property, errStr string) {
+	var (
+		pv          pb.PropertyValue
+		unsupported bool
+	)
+	switch v.Kind() {
+	case reflect.Invalid:
+		// No-op.
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		pv.Int64Value = proto.Int64(v.Int())
+	case reflect.Bool:
+		pv.BooleanValue = proto.Bool(v.Bool())
+	case reflect.String:
+		pv.StringValue = proto.String(v.String())
+	case reflect.Float32, reflect.Float64:
+		pv.DoubleValue = proto.Float64(v.Float())
+	case reflect.Ptr:
+		if k, ok := v.Interface().(*Key); ok {
+			if k != nil {
+				pv.Referencevalue = keyToReferenceValue(defaultAppID, k)
+			}
+		} else {
+			unsupported = true
+		}
+	case reflect.Struct:
+		switch t := v.Interface().(type) {
+		case time.Time:
+			if t.Before(minTime) || t.After(maxTime) {
+				return nil, "time value out of range"
+			}
+			pv.Int64Value = proto.Int64(toUnixMicro(t))
+		case appengine.GeoPoint:
+			if !t.Valid() {
+				return nil, "invalid GeoPoint value"
+			}
+			// NOTE: Strangely, latitude maps to X, longitude to Y.
+			pv.Pointvalue = &pb.PropertyValue_PointValue{X: &t.Lat, Y: &t.Lng}
+		default:
+			unsupported = true
+		}
+	case reflect.Slice:
+		if b, ok := v.Interface().([]byte); ok {
+			pv.StringValue = proto.String(string(b))
+		} else {
+			// nvToProto should already catch slice values.
+			// If we get here, we have a slice of slice values.
+			unsupported = true
+		}
+	default:
+		unsupported = true
+	}
+	if unsupported {
+		return nil, "unsupported datastore value type: " + v.Type().String()
+	}
+	p = &pb.Property{
+		Name:     proto.String(name),
+		Value:    &pv,
+		Multiple: proto.Bool(multiple),
+	}
+	if v.IsValid() {
+		switch v.Interface().(type) {
+		case []byte:
+			p.Meaning = pb.Property_BLOB.Enum()
+		case ByteString:
+			p.Meaning = pb.Property_BYTESTRING.Enum()
+		case appengine.BlobKey:
+			p.Meaning = pb.Property_BLOBKEY.Enum()
+		case time.Time:
+			p.Meaning = pb.Property_GD_WHEN.Enum()
+		case appengine.GeoPoint:
+			p.Meaning = pb.Property_GEORSS_POINT.Enum()
+		}
+	}
+	return p, ""
+}
+
+type saveOpts struct {
+	noIndex   bool
+	multiple  bool
+	omitEmpty bool
+}
+
+// saveEntity converts a PropertyLoadSaver or struct pointer into an EntityProto.
+func saveEntity(defaultAppID string, key *Key, src interface{}) (*pb.EntityProto, error) {
+	var err error
+	var props []Property
+	if e, ok := src.(PropertyLoadSaver); ok {
+		props, err = e.Save()
+	} else {
+		props, err = SaveStruct(src)
+	}
+	if err != nil {
+		return nil, err
+	}
+	return propertiesToProto(defaultAppID, key, props)
+}
+
+func saveStructProperty(props *[]Property, name string, opts saveOpts, v reflect.Value) error {
+	if opts.omitEmpty && isEmptyValue(v) {
+		return nil
+	}
+	p := Property{
+		Name:     name,
+		NoIndex:  opts.noIndex,
+		Multiple: opts.multiple,
+	}
+	switch x := v.Interface().(type) {
+	case *Key:
+		p.Value = x
+	case time.Time:
+		p.Value = x
+	case appengine.BlobKey:
+		p.Value = x
+	case appengine.GeoPoint:
+		p.Value = x
+	case ByteString:
+		p.Value = x
+	default:
+		switch v.Kind() {
+		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+			p.Value = v.Int()
+		case reflect.Bool:
+			p.Value = v.Bool()
+		case reflect.String:
+			p.Value = v.String()
+		case reflect.Float32, reflect.Float64:
+			p.Value = v.Float()
+		case reflect.Slice:
+			if v.Type().Elem().Kind() == reflect.Uint8 {
+				p.NoIndex = true
+				p.Value = v.Bytes()
+			}
+		case reflect.Struct:
+			if !v.CanAddr() {
+				return fmt.Errorf("datastore: unsupported struct field: value is unaddressable")
+			}
+			sub, err := newStructPLS(v.Addr().Interface())
+			if err != nil {
+				return fmt.Errorf("datastore: unsupported struct field: %v", err)
+			}
+			return sub.save(props, name+".", opts)
+		}
+	}
+	if p.Value == nil {
+		return fmt.Errorf("datastore: unsupported struct field type: %v", v.Type())
+	}
+	*props = append(*props, p)
+	return nil
+}
+
+func (s structPLS) Save() ([]Property, error) {
+	var props []Property
+	if err := s.save(&props, "", saveOpts{}); err != nil {
+		return nil, err
+	}
+	return props, nil
+}
+
+func (s structPLS) save(props *[]Property, prefix string, opts saveOpts) error {
+	for name, f := range s.codec.fields {
+		name = prefix + name
+		v := s.v.FieldByIndex(f.path)
+		if !v.IsValid() || !v.CanSet() {
+			continue
+		}
+		var opts1 saveOpts
+		opts1.noIndex = opts.noIndex || f.noIndex
+		opts1.multiple = opts.multiple
+		opts1.omitEmpty = f.omitEmpty // don't propagate
+		// For slice fields that aren't []byte, save each element.
+		if v.Kind() == reflect.Slice && v.Type().Elem().Kind() != reflect.Uint8 {
+			opts1.multiple = true
+			for j := 0; j < v.Len(); j++ {
+				if err := saveStructProperty(props, name, opts1, v.Index(j)); err != nil {
+					return err
+				}
+			}
+			continue
+		}
+		// Otherwise, save the field itself.
+		if err := saveStructProperty(props, name, opts1, v); err != nil {
+			return err
+		}
+	}
+	return nil
+}
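
The name+"." prefixing above means nested structs are flattened into dot-separated property names. A hedged sketch with hypothetical Address and Person types:

```
// Address and Person are hypothetical types used only for illustration.
type Address struct {
	City string
	Zip  string
}

type Person struct {
	Name string
	Home Address
}

func exampleNestedSave() ([]datastore.Property, error) {
	// Yields properties named "Name", "Home.City" and "Home.Zip".
	return datastore.SaveStruct(&Person{Name: "George", Home: Address{City: "Gophertown", Zip: "12345"}})
}
```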
+
+func propertiesToProto(defaultAppID string, key *Key, props []Property) (*pb.EntityProto, error) {
+	e := &pb.EntityProto{
+		Key: keyToProto(defaultAppID, key),
+	}
+	if key.parent == nil {
+		e.EntityGroup = &pb.Path{}
+	} else {
+		e.EntityGroup = keyToProto(defaultAppID, key.root()).Path
+	}
+	prevMultiple := make(map[string]bool)
+
+	for _, p := range props {
+		if pm, ok := prevMultiple[p.Name]; ok {
+			if !pm || !p.Multiple {
+				return nil, fmt.Errorf("datastore: multiple Properties with Name %q, but Multiple is false", p.Name)
+			}
+		} else {
+			prevMultiple[p.Name] = p.Multiple
+		}
+
+		x := &pb.Property{
+			Name:     proto.String(p.Name),
+			Value:    new(pb.PropertyValue),
+			Multiple: proto.Bool(p.Multiple),
+		}
+		switch v := p.Value.(type) {
+		case int64:
+			x.Value.Int64Value = proto.Int64(v)
+		case bool:
+			x.Value.BooleanValue = proto.Bool(v)
+		case string:
+			x.Value.StringValue = proto.String(v)
+			if p.NoIndex {
+				x.Meaning = pb.Property_TEXT.Enum()
+			}
+		case float64:
+			x.Value.DoubleValue = proto.Float64(v)
+		case *Key:
+			if v != nil {
+				x.Value.Referencevalue = keyToReferenceValue(defaultAppID, v)
+			}
+		case time.Time:
+			if v.Before(minTime) || v.After(maxTime) {
+				return nil, fmt.Errorf("datastore: time value out of range")
+			}
+			x.Value.Int64Value = proto.Int64(toUnixMicro(v))
+			x.Meaning = pb.Property_GD_WHEN.Enum()
+		case appengine.BlobKey:
+			x.Value.StringValue = proto.String(string(v))
+			x.Meaning = pb.Property_BLOBKEY.Enum()
+		case appengine.GeoPoint:
+			if !v.Valid() {
+				return nil, fmt.Errorf("datastore: invalid GeoPoint value")
+			}
+			// NOTE: Strangely, latitude maps to X, longitude to Y.
+			x.Value.Pointvalue = &pb.PropertyValue_PointValue{X: &v.Lat, Y: &v.Lng}
+			x.Meaning = pb.Property_GEORSS_POINT.Enum()
+		case []byte:
+			x.Value.StringValue = proto.String(string(v))
+			x.Meaning = pb.Property_BLOB.Enum()
+			if !p.NoIndex {
+				return nil, fmt.Errorf("datastore: cannot index a []byte valued Property with Name %q", p.Name)
+			}
+		case ByteString:
+			x.Value.StringValue = proto.String(string(v))
+			x.Meaning = pb.Property_BYTESTRING.Enum()
+		default:
+			if p.Value != nil {
+				return nil, fmt.Errorf("datastore: invalid Value type for a Property with Name %q", p.Name)
+			}
+		}
+
+		if p.NoIndex {
+			e.RawProperty = append(e.RawProperty, x)
+		} else {
+			e.Property = append(e.Property, x)
+			if len(e.Property) > maxIndexedProperties {
+				return nil, errors.New("datastore: too many indexed properties")
+			}
+		}
+	}
+	return e, nil
+}
+
+// isEmptyValue is taken from the encoding/json package in the standard library.
+func isEmptyValue(v reflect.Value) bool {
+	switch v.Kind() {
+	case reflect.Array, reflect.Map, reflect.Slice, reflect.String:
+		// TODO(performance): Only reflect.String needed, other property types are not supported (copy/paste from json package)
+		return v.Len() == 0
+	case reflect.Bool:
+		return !v.Bool()
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		return v.Int() == 0
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+		// TODO(performance): Uint* are unsupported property types - should be removed (copy/paste from json package)
+		return v.Uint() == 0
+	case reflect.Float32, reflect.Float64:
+		return v.Float() == 0
+	case reflect.Interface, reflect.Ptr:
+		return v.IsNil()
+	case reflect.Struct:
+		switch x := v.Interface().(type) {
+		case time.Time:
+			return x.IsZero()
+		}
+	}
+	return false
+}
diff --git a/v2/datastore/time_test.go b/v2/datastore/time_test.go
new file mode 100644
index 0000000..ba74b44
--- /dev/null
+++ b/v2/datastore/time_test.go
@@ -0,0 +1,65 @@
+// Copyright 2012 Google Inc. All Rights Reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"testing"
+	"time"
+)
+
+func TestUnixMicro(t *testing.T) {
+	// Test that all these time.Time values survive a round trip to unix micros.
+	testCases := []time.Time{
+		{},
+		time.Date(2, 1, 1, 0, 0, 0, 0, time.UTC),
+		time.Date(23, 1, 1, 0, 0, 0, 0, time.UTC),
+		time.Date(234, 1, 1, 0, 0, 0, 0, time.UTC),
+		time.Date(1000, 1, 1, 0, 0, 0, 0, time.UTC),
+		time.Date(1600, 1, 1, 0, 0, 0, 0, time.UTC),
+		time.Date(1700, 1, 1, 0, 0, 0, 0, time.UTC),
+		time.Date(1800, 1, 1, 0, 0, 0, 0, time.UTC),
+		time.Date(1900, 1, 1, 0, 0, 0, 0, time.UTC),
+		time.Unix(-1e6, -1000),
+		time.Unix(-1e6, 0),
+		time.Unix(-1e6, +1000),
+		time.Unix(-60, -1000),
+		time.Unix(-60, 0),
+		time.Unix(-60, +1000),
+		time.Unix(-1, -1000),
+		time.Unix(-1, 0),
+		time.Unix(-1, +1000),
+		time.Unix(0, -3000),
+		time.Unix(0, -2000),
+		time.Unix(0, -1000),
+		time.Unix(0, 0),
+		time.Unix(0, +1000),
+		time.Unix(0, +2000),
+		time.Unix(+60, -1000),
+		time.Unix(+60, 0),
+		time.Unix(+60, +1000),
+		time.Unix(+1e6, -1000),
+		time.Unix(+1e6, 0),
+		time.Unix(+1e6, +1000),
+		time.Date(1999, 12, 31, 23, 59, 59, 999000, time.UTC),
+		time.Date(2000, 1, 1, 0, 0, 0, 0, time.UTC),
+		time.Date(2006, 1, 2, 15, 4, 5, 678000, time.UTC),
+		time.Date(2009, 11, 10, 23, 0, 0, 0, time.UTC),
+		time.Date(3456, 1, 1, 0, 0, 0, 0, time.UTC),
+	}
+	for _, tc := range testCases {
+		got := fromUnixMicro(toUnixMicro(tc))
+		if !got.Equal(tc) {
+			t.Errorf("got %q, want %q", got, tc)
+		}
+	}
+
+	// Test that a time.Time that isn't an integral number of microseconds
+	// is not perfectly reconstructed after a round trip.
+	t0 := time.Unix(0, 123)
+	t1 := fromUnixMicro(toUnixMicro(t0))
+	if t1.Nanosecond()%1000 != 0 || t0.Nanosecond()%1000 == 0 {
+		t.Errorf("quantization to µs: got %q with %d ns, started with %d ns", t1, t1.Nanosecond(), t0.Nanosecond())
+	}
+}
diff --git a/v2/datastore/transaction.go b/v2/datastore/transaction.go
new file mode 100644
index 0000000..4a6629c
--- /dev/null
+++ b/v2/datastore/transaction.go
@@ -0,0 +1,95 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package datastore
+
+import (
+	"context"
+	"errors"
+
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/datastore"
+)
+
+func init() {
+	internal.RegisterTransactionSetter(func(x *pb.Query, t *pb.Transaction) {
+		x.Transaction = t
+	})
+	internal.RegisterTransactionSetter(func(x *pb.GetRequest, t *pb.Transaction) {
+		x.Transaction = t
+	})
+	internal.RegisterTransactionSetter(func(x *pb.PutRequest, t *pb.Transaction) {
+		x.Transaction = t
+	})
+	internal.RegisterTransactionSetter(func(x *pb.DeleteRequest, t *pb.Transaction) {
+		x.Transaction = t
+	})
+}
+
+// ErrConcurrentTransaction is returned when a transaction is rolled back due
+// to a conflict with a concurrent transaction.
+var ErrConcurrentTransaction = errors.New("datastore: concurrent transaction")
+
+// RunInTransaction runs f in a transaction. It calls f with a transaction
+// context tc that f should use for all App Engine operations.
+//
+// If f returns nil, RunInTransaction attempts to commit the transaction,
+// returning nil if it succeeds. If the commit fails due to a conflicting
+// transaction, RunInTransaction retries f, each time with a new transaction
+// context. It gives up and returns ErrConcurrentTransaction after three
+// failed attempts. The number of attempts can be configured by specifying
+// TransactionOptions.Attempts.
+//
+// If f returns non-nil, then any datastore changes will not be applied and
+// RunInTransaction returns that same error. The function f is not retried.
+//
+// Note that when f returns, the transaction is not yet committed. Calling code
+// must be careful not to assume that any of f's changes have been committed
+// until RunInTransaction returns nil.
+//
+// Since f may be called multiple times, f should usually be idempotent.
+// datastore.Get is not idempotent when unmarshaling slice fields.
+//
+// Nested transactions are not supported; c may not be a transaction context.
+func RunInTransaction(c context.Context, f func(tc context.Context) error, opts *TransactionOptions) error {
+	xg := false
+	if opts != nil {
+		xg = opts.XG
+	}
+	readOnly := false
+	if opts != nil {
+		readOnly = opts.ReadOnly
+	}
+	attempts := 3
+	if opts != nil && opts.Attempts > 0 {
+		attempts = opts.Attempts
+	}
+	var t *pb.Transaction
+	var err error
+	for i := 0; i < attempts; i++ {
+		if t, err = internal.RunTransactionOnce(c, f, xg, readOnly, t); err != internal.ErrConcurrentTransaction {
+			return err
+		}
+	}
+	return ErrConcurrentTransaction
+}
+
+// TransactionOptions are the options for running a transaction.
+type TransactionOptions struct {
+	// XG is whether the transaction can cross multiple entity groups. In
+	// comparison, a single group transaction is one where all datastore keys
+	// used have the same root key. Note that cross group transactions do not
+	// have the same behavior as single group transactions. In particular, it
+	// is much more likely to see partially applied transactions in different
+	// entity groups, in global queries.
+	// It is valid to set XG to true even if the transaction is within a
+	// single entity group.
+	XG bool
+	// Attempts controls the number of retries to perform when commits fail
+	// due to a conflicting transaction. If omitted, it defaults to 3.
+	Attempts int
+	// ReadOnly controls whether the transaction is a read only transaction.
+	// Read only transactions are potentially more efficient.
+	ReadOnly bool
+}
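
The retry contract of RunInTransaction (commit conflicts retried, any non-nil error from f aborting without retry) is easiest to see from a caller. A minimal sketch assuming an illustrative Counter entity; not part of this diff:

```go
package example

import (
	"context"

	"google.golang.org/appengine/v2/datastore"
)

// Counter is an illustrative entity; it is not defined in this diff.
type Counter struct {
	Count int
}

// increment atomically bumps the counter stored at key, allowing up to
// five attempts before RunInTransaction gives up with ErrConcurrentTransaction.
func increment(ctx context.Context, key *datastore.Key) error {
	return datastore.RunInTransaction(ctx, func(tc context.Context) error {
		var c Counter
		if err := datastore.Get(tc, key, &c); err != nil && err != datastore.ErrNoSuchEntity {
			return err // a non-nil return aborts; f is not retried
		}
		c.Count++
		_, err := datastore.Put(tc, key, &c)
		return err // nil lets RunInTransaction attempt the commit
	}, &datastore.TransactionOptions{Attempts: 5})
}
```
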
diff --git a/v2/delay/delay.go b/v2/delay/delay.go
new file mode 100644
index 0000000..10104ca
--- /dev/null
+++ b/v2/delay/delay.go
@@ -0,0 +1,375 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+/*
+Package delay provides a way to execute code outside of the scope of
+a user request by using the Task Queue API.
+To use a deferred function, you must register the function to be
+deferred as a top-level var. For example,
+    ```
+    var laterFunc = delay.MustRegister("key", myFunc)
+    func myFunc(ctx context.Context, a, b string) {...}
+    ```
+You can also inline with a function literal:
+    ```
+    var laterFunc = delay.MustRegister("key", func(ctx context.Context, a, b string) {...})
+    ```
+In the above example, "key" is a logical name for the function.
+The key needs to be globally unique across your entire application.
+To invoke the function in a deferred fashion, call the top-level item:
+    ```
+    laterFunc(ctx, "aaa", "bbb")
+    ```
+
+This will queue a task and return quickly; the function will actually be
+run in a new request. The delay package uses the Task Queue API to create
+tasks that call the reserved application path "/_ah/queue/go/delay".
+This path may only be marked as "login: admin" or have no access
+restriction; it will fail if marked as "login: required".
+*/
+
+package delay // import "google.golang.org/appengine/v2/delay"
+
+import (
+	"bytes"
+	stdctx "context"
+	"encoding/gob"
+	"errors"
+	"fmt"
+	"go/build"
+	stdlog "log"
+	"net/http"
+	"path/filepath"
+	"reflect"
+	"regexp"
+	"runtime"
+	"strings"
+
+	"golang.org/x/net/context"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal"
+	"google.golang.org/appengine/v2/log"
+	"google.golang.org/appengine/v2/taskqueue"
+)
+
+// Function represents a function that may have a delayed invocation.
+type Function struct {
+	fv  reflect.Value // Kind() == reflect.Func
+	key string
+	err error // any error during initialization
+}
+
+const (
+	// The HTTP path for invocations.
+	path = "/_ah/queue/go/delay"
+	// Use the default queue.
+	queue = ""
+)
+
+type contextKey int
+
+var (
+	// registry of all delayed functions
+	funcs = make(map[string]*Function)
+
+	// precomputed types
+	errorType = reflect.TypeOf((*error)(nil)).Elem()
+
+	// errors
+	errFirstArg         = errors.New("first argument must be context.Context")
+	errOutsideDelayFunc = errors.New("request headers are only available inside a delay.Func")
+
+	// context keys
+	headersContextKey contextKey = 0
+	stdContextType               = reflect.TypeOf((*stdctx.Context)(nil)).Elem()
+	netContextType               = reflect.TypeOf((*context.Context)(nil)).Elem()
+)
+
+func isContext(t reflect.Type) bool {
+	return t == stdContextType || t == netContextType
+}
+
+var modVersionPat = regexp.MustCompile("@v[^/]+")
+
+// fileKey finds a stable representation of the caller's file path.
+// For calls from package main: strip all leading path entries, leaving just the filename.
+// For calls from anywhere else, strip $GOPATH/src, leaving just the package path and file path.
+func fileKey(file string) (string, error) {
+	if !internal.IsSecondGen() {
+		return file, nil
+	}
+	// If the caller is in the same Dir as mainPath, then strip everything but the file name.
+	if filepath.Dir(file) == internal.MainPath {
+		return filepath.Base(file), nil
+	}
+	// If the path contains "gopath/src/", which is what the builder uses for
+	// apps which don't use go modules, strip everything up to and including src.
+	// Or, if the path starts with /tmp/staging, then we're importing a package
+	// from the app's module (and we must be using go modules), and we have a
+	// path like /tmp/staging1234/srv/... so strip everything up to and
+	// including the first /srv/.
+	// And be sure to look at the GOPATH, for local development.
+	s := string(filepath.Separator)
+	for _, s := range []string{filepath.Join("gopath", "src") + s, s + "srv" + s, filepath.Join(build.Default.GOPATH, "src") + s} {
+		if idx := strings.Index(file, s); idx > 0 {
+			return file[idx+len(s):], nil
+		}
+	}
+
+	// Finally, if that all fails then we must be using go modules, and the file is a module,
+	// so the path looks like /go/pkg/mod/github.com/foo/bar@v0.0.0-20181026220418-f595d03440dc/baz.go
+	// So... remove everything up to and including mod, plus the @.... version string.
+	m := "/mod/"
+	if idx := strings.Index(file, m); idx > 0 {
+		file = file[idx+len(m):]
+	} else {
+		return file, fmt.Errorf("fileKey: unknown file path format for %q", file)
+	}
+	return modVersionPat.ReplaceAllString(file, ""), nil
+}
+
+// Func declares a new function that can be called in a deferred fashion.
+// The second argument i must be a function with the first argument of
+// type context.Context.
+// To make the key globally unique, the SDK code will combine "key" with
+// the filename of the file in which myFunc is defined
+// (e.g., /some/path/myfile.go). This is convenient, but can lead to
+// failed deferred tasks if you refactor your code, or change from
+// GOPATH to go.mod, and then re-deploy with in-flight deferred tasks.
+//
+// Func must be called in a global scope to properly
+// register the function with the framework.
+//
+// Deprecated: Use MustRegister instead.
+func Func(key string, i interface{}) *Function {
+	// Derive unique, somewhat stable key for this func.
+	_, file, _, _ := runtime.Caller(1)
+	fk, err := fileKey(file)
+	if err != nil {
+		// Not fatal, but log the error
+		stdlog.Printf("delay: %v", err)
+	}
+	key = fk + ":" + key
+	f, err := registerFunction(key, i)
+	if err != nil {
+		return f
+	}
+	if old := funcs[f.key]; old != nil {
+		old.err = fmt.Errorf("multiple functions registered for %s", key)
+	}
+	funcs[f.key] = f
+	return f
+}
+
+// MustRegister declares a new function that can be called in a deferred fashion.
+// The second argument i must be a function with the first argument of
+// type context.Context.
+// MustRegister requires the key to be globally unique.
+//
+// MustRegister must be called in a global scope to properly
+// register the function with the framework.
+// See the package notes above for more details.
+func MustRegister(key string, i interface{}) *Function {
+	f, err := registerFunction(key, i)
+	if err != nil {
+		panic(err)
+	}
+
+	if old := funcs[f.key]; old != nil {
+		panic(fmt.Errorf("multiple functions registered for %q", key))
+	}
+	funcs[f.key] = f
+	return f
+}
+
+func registerFunction(key string, i interface{}) (*Function, error) {
+	f := &Function{fv: reflect.ValueOf(i)}
+	f.key = key
+
+	t := f.fv.Type()
+	if t.Kind() != reflect.Func {
+		f.err = errors.New("not a function")
+		return f, f.err
+	}
+	if t.NumIn() == 0 || !isContext(t.In(0)) {
+		f.err = errFirstArg
+		return f, errFirstArg
+	}
+
+	// Register the function's arguments with the gob package.
+	// This is required because they are marshaled inside a []interface{}.
+	// gob.Register only expects to be called during initialization;
+	// that's fine because this function expects the same.
+	for i := 0; i < t.NumIn(); i++ {
+		// Only concrete types may be registered. If the argument has
+		// interface type, the client is resposible for registering the
+		// interface type, the client is responsible for registering the
+		if t.In(i).Kind() == reflect.Interface {
+			continue
+		}
+		gob.Register(reflect.Zero(t.In(i)).Interface())
+	}
+	return f, nil
+}
+
+type invocation struct {
+	Key  string
+	Args []interface{}
+}
+
+// Call invokes a delayed function.
+//
+//	err := f.Call(c, ...)
+//
+// is equivalent to
+//
+//	t, _ := f.Task(...)
+//	_, err := taskqueue.Add(c, t, "")
+func (f *Function) Call(c context.Context, args ...interface{}) error {
+	t, err := f.Task(args...)
+	if err != nil {
+		return err
+	}
+	_, err = taskqueueAdder(c, t, queue)
+	return err
+}
+
+// Task creates a Task that will invoke the function.
+// Its parameters may be tweaked before adding it to a queue.
+// Users should not modify the Path or Payload fields of the returned Task.
+func (f *Function) Task(args ...interface{}) (*taskqueue.Task, error) {
+	if f.err != nil {
+		return nil, fmt.Errorf("delay: func is invalid: %v", f.err)
+	}
+
+	nArgs := len(args) + 1 // +1 for the context.Context
+	ft := f.fv.Type()
+	minArgs := ft.NumIn()
+	if ft.IsVariadic() {
+		minArgs--
+	}
+	if nArgs < minArgs {
+		return nil, fmt.Errorf("delay: too few arguments to func: %d < %d", nArgs, minArgs)
+	}
+	if !ft.IsVariadic() && nArgs > minArgs {
+		return nil, fmt.Errorf("delay: too many arguments to func: %d > %d", nArgs, minArgs)
+	}
+
+	// Check arg types.
+	for i := 1; i < nArgs; i++ {
+		at := reflect.TypeOf(args[i-1])
+		var dt reflect.Type
+		if i < minArgs {
+			// not a variadic arg
+			dt = ft.In(i)
+		} else {
+			// a variadic arg
+			dt = ft.In(minArgs).Elem()
+		}
+		// nil arguments won't have a type, so they need special handling.
+		if at == nil {
+			// nil interface
+			switch dt.Kind() {
+			case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
+				continue // may be nil
+			}
+			return nil, fmt.Errorf("delay: argument %d has wrong type: %v is not nilable", i, dt)
+		}
+		switch at.Kind() {
+		case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
+			av := reflect.ValueOf(args[i-1])
+			if av.IsNil() {
+				// nil value in interface; not supported by gob, so we replace it
+				// with a nil interface value
+				args[i-1] = nil
+			}
+		}
+		if !at.AssignableTo(dt) {
+			return nil, fmt.Errorf("delay: argument %d has wrong type: %v is not assignable to %v", i, at, dt)
+		}
+	}
+
+	inv := invocation{
+		Key:  f.key,
+		Args: args,
+	}
+
+	buf := new(bytes.Buffer)
+	if err := gob.NewEncoder(buf).Encode(inv); err != nil {
+		return nil, fmt.Errorf("delay: gob encoding failed: %v", err)
+	}
+
+	return &taskqueue.Task{
+		Path:    path,
+		Payload: buf.Bytes(),
+	}, nil
+}
+
+// RequestHeaders returns the special task-queue HTTP request headers for the current
+// task queue handler. Returns an error if called from outside a delay.Func.
+func RequestHeaders(c context.Context) (*taskqueue.RequestHeaders, error) {
+	if ret, ok := c.Value(headersContextKey).(*taskqueue.RequestHeaders); ok {
+		return ret, nil
+	}
+	return nil, errOutsideDelayFunc
+}
+
+var taskqueueAdder = taskqueue.Add // for testing
+
+func init() {
+	http.HandleFunc(path, func(w http.ResponseWriter, req *http.Request) {
+		runFunc(appengine.NewContext(req), w, req)
+	})
+}
+
+func runFunc(c context.Context, w http.ResponseWriter, req *http.Request) {
+	defer req.Body.Close()
+
+	c = context.WithValue(c, headersContextKey, taskqueue.ParseRequestHeaders(req.Header))
+
+	var inv invocation
+	if err := gob.NewDecoder(req.Body).Decode(&inv); err != nil {
+		log.Errorf(c, "delay: failed decoding task payload: %v", err)
+		log.Warningf(c, "delay: dropping task")
+		return
+	}
+
+	f := funcs[inv.Key]
+	if f == nil {
+		log.Errorf(c, "delay: no func with key %q found", inv.Key)
+		log.Warningf(c, "delay: dropping task")
+		return
+	}
+
+	ft := f.fv.Type()
+	in := []reflect.Value{reflect.ValueOf(c)}
+	for _, arg := range inv.Args {
+		var v reflect.Value
+		if arg != nil {
+			v = reflect.ValueOf(arg)
+		} else {
+			// Task was passed a nil argument, so we must construct
+			// the zero value for the argument here.
+			n := len(in) // we're constructing the nth argument
+			var at reflect.Type
+			if !ft.IsVariadic() || n < ft.NumIn()-1 {
+				at = ft.In(n)
+			} else {
+				at = ft.In(ft.NumIn() - 1).Elem()
+			}
+			v = reflect.Zero(at)
+		}
+		in = append(in, v)
+	}
+	out := f.fv.Call(in)
+
+	if n := ft.NumOut(); n > 0 && ft.Out(n-1) == errorType {
+		if errv := out[n-1]; !errv.IsNil() {
+			log.Errorf(c, "delay: func failed (will retry): %v", errv.Interface())
+			w.WriteHeader(http.StatusInternalServerError)
+			return
+		}
+	}
+}
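
Because Function.Task returns a plain *taskqueue.Task, a deferred call can be tweaked before it is enqueued, for example to delay execution or to target a named queue rather than the default one used by Call. A minimal sketch; the queue name and delay are illustrative, not part of this diff:

```go
package example

import (
	"context"
	"time"

	"google.golang.org/appengine/v2/delay"
	"google.golang.org/appengine/v2/taskqueue"
)

// sendReport is registered at init time under a globally unique key.
var sendReport = delay.MustRegister("sendReport", func(ctx context.Context, userID string) error {
	// ... build and send the report for userID ...
	return nil // a non-nil error makes the task queue retry the task
})

// scheduleReport enqueues the deferred call an hour from now on a named
// queue instead of using Function.Call's default queue.
func scheduleReport(ctx context.Context, userID string) error {
	t, err := sendReport.Task(userID)
	if err != nil {
		return err
	}
	t.Delay = time.Hour // Path and Payload are left untouched, as documented
	_, err = taskqueue.Add(ctx, t, "reports")
	return err
}
```
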
diff --git a/v2/delay/delay_test.go b/v2/delay/delay_test.go
new file mode 100644
index 0000000..573a33f
--- /dev/null
+++ b/v2/delay/delay_test.go
@@ -0,0 +1,620 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package delay
+
+import (
+	"bytes"
+	stdctx "context"
+	"encoding/gob"
+	"errors"
+	"fmt"
+	"net/http"
+	"net/http/httptest"
+	"os"
+	"path/filepath"
+	"reflect"
+	"testing"
+
+	"github.com/golang/protobuf/proto"
+	"golang.org/x/net/context"
+
+	"google.golang.org/appengine/v2/internal"
+	"google.golang.org/appengine/v2/taskqueue"
+)
+
+type CustomType struct {
+	N int
+}
+
+type CustomInterface interface {
+	N() int
+}
+
+type CustomImpl int
+
+func (c CustomImpl) N() int { return int(c) }
+
+// CustomImpl needs to be registered with gob.
+func init() {
+	gob.Register(CustomImpl(0))
+}
+
+var (
+	regFRuns = 0
+	regFMsg  = ""
+	regF     = func(c context.Context, arg string) {
+		regFRuns++
+		regFMsg = arg
+	}
+	regFunc     = Func("regFunc", regF)
+	regRegister = MustRegister("regRegister", regF)
+
+	custFTally = 0
+	custF      = func(c context.Context, ct *CustomType, ci CustomInterface) {
+		a, b := 2, 3
+		if ct != nil {
+			a = ct.N
+		}
+		if ci != nil {
+			b = ci.N()
+		}
+		custFTally += a + b
+	}
+	custFunc     = Func("custFunc", custF)
+	custRegister = MustRegister("custRegister", custF)
+
+	anotherCustFunc = Func("custFunc2", func(c context.Context, n int, ct *CustomType, ci CustomInterface) {
+	})
+
+	varFMsg = ""
+	varF    = func(c context.Context, format string, args ...int) {
+		// convert []int to []interface{} for fmt.Sprintf.
+		as := make([]interface{}, len(args))
+		for i, a := range args {
+			as[i] = a
+		}
+		varFMsg = fmt.Sprintf(format, as...)
+	}
+	varFunc     = Func("variadicFunc", varF)
+	varRegister = MustRegister("variadicRegister", varF)
+
+	errFRuns = 0
+	errFErr  = errors.New("error!")
+	errF     = func(c context.Context) error {
+		errFRuns++
+		if errFRuns == 1 {
+			return nil
+		}
+		return errFErr
+	}
+	errFunc     = Func("errFunc", errF)
+	errRegister = MustRegister("errRegister", errF)
+
+	dupeWhich = 0
+	dupe1F    = func(c context.Context) {
+		if dupeWhich == 0 {
+			dupeWhich = 1
+		}
+	}
+	dupe1Func = Func("dupe", dupe1F)
+	dupe2F    = func(c context.Context) {
+		if dupeWhich == 0 {
+			dupeWhich = 2
+		}
+	}
+	dupe2Func = Func("dupe", dupe2F)
+
+	requestFuncRuns    = 0
+	requestFuncHeaders *taskqueue.RequestHeaders
+	requestFuncErr     error
+	requestF           = func(c context.Context) {
+		requestFuncRuns++
+		requestFuncHeaders, requestFuncErr = RequestHeaders(c)
+	}
+	requestFunc     = Func("requestFunc", requestF)
+	requestRegister = MustRegister("requestRegister", requestF)
+
+	stdCtxRuns = 0
+	stdCtxF    = func(c stdctx.Context) {
+		stdCtxRuns++
+	}
+	stdCtxFunc     = Func("stdctxFunc", stdCtxF)
+	stdCtxRegister = MustRegister("stdctxRegister", stdCtxF)
+)
+
+type fakeContext struct {
+	ctx     context.Context
+	logging [][]interface{}
+}
+
+func newFakeContext() *fakeContext {
+	f := new(fakeContext)
+	f.ctx = internal.WithCallOverride(context.Background(), f.call)
+	f.ctx = internal.WithLogOverride(f.ctx, f.logf)
+	return f
+}
+
+func (f *fakeContext) call(ctx context.Context, service, method string, in, out proto.Message) error {
+	panic("should never be called")
+}
+
+var logLevels = map[int64]string{1: "INFO", 3: "ERROR"}
+
+func (f *fakeContext) logf(level int64, format string, args ...interface{}) {
+	f.logging = append(f.logging, append([]interface{}{logLevels[level], format}, args...))
+}
+
+func TestInvalidFunction(t *testing.T) {
+	c := newFakeContext()
+	invalidFunc := Func("invalid", func() {})
+
+	if got, want := invalidFunc.Call(c.ctx), fmt.Errorf("delay: func is invalid: %s", errFirstArg); got.Error() != want.Error() {
+		t.Errorf("Incorrect error: got %q, want %q", got, want)
+	}
+}
+
+func TestVariadicFunctionArguments(t *testing.T) {
+	// Check the argument type validation for variadic functions.
+	c := newFakeContext()
+
+	calls := 0
+	taskqueueAdder = func(c context.Context, t *taskqueue.Task, _ string) (*taskqueue.Task, error) {
+		calls++
+		return t, nil
+	}
+
+	for _, testTarget := range []*Function{varFunc, varRegister} {
+		// reset state
+		calls = 0
+		testTarget.Call(c.ctx, "hi")
+		testTarget.Call(c.ctx, "%d", 12)
+		testTarget.Call(c.ctx, "%d %d %d", 3, 1, 4)
+		if calls != 3 {
+			t.Errorf("Got %d calls to taskqueueAdder, want 3", calls)
+		}
+
+		if got, want := testTarget.Call(c.ctx, "%d %s", 12, "a string is bad"), errors.New("delay: argument 3 has wrong type: string is not assignable to int"); got.Error() != want.Error() {
+			t.Errorf("Incorrect error: got %q, want %q", got, want)
+		}
+	}
+}
+
+func TestBadArguments(t *testing.T) {
+	// Try running regFunc with different sets of inappropriate arguments.
+
+	c := newFakeContext()
+
+	tests := []struct {
+		args    []interface{} // all except context
+		wantErr string
+	}{
+		{
+			args:    nil,
+			wantErr: "delay: too few arguments to func: 1 < 2",
+		},
+		{
+			args:    []interface{}{"lala", 53},
+			wantErr: "delay: too many arguments to func: 3 > 2",
+		},
+		{
+			args:    []interface{}{53},
+			wantErr: "delay: argument 1 has wrong type: int is not assignable to string",
+		},
+	}
+	for _, testTarget := range []*Function{regFunc, regRegister} {
+		for i, tc := range tests {
+			got := testTarget.Call(c.ctx, tc.args...)
+			if got.Error() != tc.wantErr {
+				t.Errorf("Call %v: got %q, want %q", i, got, tc.wantErr)
+			}
+		}
+	}
+}
+
+func TestRunningFunction(t *testing.T) {
+	c := newFakeContext()
+	// Fake out the adding of a task.
+	var task *taskqueue.Task
+	taskqueueAdder = func(_ context.Context, tk *taskqueue.Task, queue string) (*taskqueue.Task, error) {
+		if queue != "" {
+			t.Errorf(`Got queue %q, expected ""`, queue)
+		}
+		task = tk
+		return tk, nil
+	}
+
+	for _, testTarget := range []*Function{regFunc, regRegister} {
+		regFRuns, regFMsg = 0, "" // reset state
+		const msg = "Why, hello!"
+		testTarget.Call(c.ctx, msg)
+
+		// Simulate the Task Queue service.
+		req, err := http.NewRequest("POST", path, bytes.NewBuffer(task.Payload))
+		if err != nil {
+			t.Fatalf("Failed making http.Request: %v", err)
+		}
+		rw := httptest.NewRecorder()
+		runFunc(c.ctx, rw, req)
+
+		if regFRuns != 1 {
+			t.Errorf("regFuncRuns: got %d, want 1", regFRuns)
+		}
+		if regFMsg != msg {
+			t.Errorf("regFuncMsg: got %q, want %q", regFMsg, msg)
+		}
+	}
+}
+
+func TestCustomType(t *testing.T) {
+	c := newFakeContext()
+
+	// Fake out the adding of a task.
+	var task *taskqueue.Task
+	taskqueueAdder = func(_ context.Context, tk *taskqueue.Task, queue string) (*taskqueue.Task, error) {
+		if queue != "" {
+			t.Errorf(`Got queue %q, expected ""`, queue)
+		}
+		task = tk
+		return tk, nil
+	}
+
+	for _, testTarget := range []*Function{custFunc, custRegister} {
+		custFTally = 0 // reset state
+		testTarget.Call(c.ctx, &CustomType{N: 11}, CustomImpl(13))
+
+		// Simulate the Task Queue service.
+		req, err := http.NewRequest("POST", path, bytes.NewBuffer(task.Payload))
+		if err != nil {
+			t.Fatalf("Failed making http.Request: %v", err)
+		}
+		rw := httptest.NewRecorder()
+		runFunc(c.ctx, rw, req)
+
+		if custFTally != 24 {
+			t.Errorf("custFTally = %d, want 24", custFTally)
+		}
+
+		// Try the same, but with nil values; one is a nil pointer (and thus a non-nil interface value),
+		// and the other is a nil interface value.
+		custFTally = 0 // reset state
+		testTarget.Call(c.ctx, (*CustomType)(nil), nil)
+
+		// Simulate the Task Queue service.
+		req, err = http.NewRequest("POST", path, bytes.NewBuffer(task.Payload))
+		if err != nil {
+			t.Fatalf("Failed making http.Request: %v", err)
+		}
+		rw = httptest.NewRecorder()
+		runFunc(c.ctx, rw, req)
+
+		if custFTally != 5 {
+			t.Errorf("custFTally = %d, want 5", custFTally)
+		}
+	}
+}
+
+func TestRunningVariadic(t *testing.T) {
+	c := newFakeContext()
+
+	// Fake out the adding of a task.
+	var task *taskqueue.Task
+	taskqueueAdder = func(_ context.Context, tk *taskqueue.Task, queue string) (*taskqueue.Task, error) {
+		if queue != "" {
+			t.Errorf(`Got queue %q, expected ""`, queue)
+		}
+		task = tk
+		return tk, nil
+	}
+
+	for _, testTarget := range []*Function{varFunc, varRegister} {
+		varFMsg = "" // reset state
+		testTarget.Call(c.ctx, "Amiga %d has %d KB RAM", 500, 512)
+
+		// Simulate the Task Queue service.
+		req, err := http.NewRequest("POST", path, bytes.NewBuffer(task.Payload))
+		if err != nil {
+			t.Fatalf("Failed making http.Request: %v", err)
+		}
+		rw := httptest.NewRecorder()
+		runFunc(c.ctx, rw, req)
+
+		const expected = "Amiga 500 has 512 KB RAM"
+		if varFMsg != expected {
+			t.Errorf("varFMsg = %q, want %q", varFMsg, expected)
+		}
+	}
+}
+
+func TestErrorFunction(t *testing.T) {
+	c := newFakeContext()
+
+	// Fake out the adding of a task.
+	var task *taskqueue.Task
+	taskqueueAdder = func(_ context.Context, tk *taskqueue.Task, queue string) (*taskqueue.Task, error) {
+		if queue != "" {
+			t.Errorf(`Got queue %q, expected ""`, queue)
+		}
+		task = tk
+		return tk, nil
+	}
+
+	for _, testTarget := range []*Function{errFunc, errRegister} {
+		// reset state
+		c.logging = [][]interface{}{}
+		errFRuns = 0
+		testTarget.Call(c.ctx)
+
+		// Simulate the Task Queue service.
+		// The first call should succeed; the second call should fail.
+		{
+			req, err := http.NewRequest("POST", path, bytes.NewBuffer(task.Payload))
+			if err != nil {
+				t.Fatalf("Failed making http.Request: %v", err)
+			}
+			rw := httptest.NewRecorder()
+			runFunc(c.ctx, rw, req)
+		}
+		{
+			req, err := http.NewRequest("POST", path, bytes.NewBuffer(task.Payload))
+			if err != nil {
+				t.Fatalf("Failed making http.Request: %v", err)
+			}
+			rw := httptest.NewRecorder()
+			runFunc(c.ctx, rw, req)
+			if rw.Code != http.StatusInternalServerError {
+				t.Errorf("Got status code %d, want %d", rw.Code, http.StatusInternalServerError)
+			}
+
+			wantLogging := [][]interface{}{
+				{"ERROR", "delay: func failed (will retry): %v", errFErr},
+			}
+			if !reflect.DeepEqual(c.logging, wantLogging) {
+				t.Errorf("Incorrect logging: got %+v, want %+v", c.logging, wantLogging)
+			}
+		}
+	}
+}
+
+func TestFuncDuplicateFunction(t *testing.T) {
+	c := newFakeContext()
+
+	// Fake out the adding of a task.
+	var task *taskqueue.Task
+	taskqueueAdder = func(_ context.Context, tk *taskqueue.Task, queue string) (*taskqueue.Task, error) {
+		if queue != "" {
+			t.Errorf(`Got queue %q, expected ""`, queue)
+		}
+		task = tk
+		return tk, nil
+	}
+
+	if err := dupe1Func.Call(c.ctx); err == nil {
+		t.Error("dupe1Func.Call did not return error")
+	}
+	if task != nil {
+		t.Error("dupe1Func.Call posted a task")
+	}
+	if err := dupe2Func.Call(c.ctx); err != nil {
+		t.Errorf("dupe2Func.Call error: %v", err)
+	}
+	if task == nil {
+		t.Fatalf("dupe2Func.Call did not post a task")
+	}
+
+	// Simulate the Task Queue service.
+	req, err := http.NewRequest("POST", path, bytes.NewBuffer(task.Payload))
+	if err != nil {
+		t.Fatalf("Failed making http.Request: %v", err)
+	}
+	rw := httptest.NewRecorder()
+	runFunc(c.ctx, rw, req)
+
+	if dupeWhich == 1 {
+		t.Error("dupe2Func.Call used old registered function")
+	} else if dupeWhich != 2 {
+		t.Errorf("dupeWhich = %d; want 2", dupeWhich)
+	}
+}
+
+func TestMustRegisterDuplicateFunction(t *testing.T) {
+	MustRegister("dupe", dupe1F)
+	defer func() {
+		err := recover()
+		if err == nil {
+			t.Error("MustRegister did not panic")
+		}
+		got := fmt.Sprintf("%s", err)
+		want := fmt.Sprintf("multiple functions registered for %q", "dupe")
+		if got != want {
+			t.Errorf("Incorrect error: got %q, want %q", got, want)
+		}
+	}()
+	MustRegister("dupe", dupe2F)
+}
+
+func TestInvalidFunction_MustRegister(t *testing.T) {
+	defer func() {
+		err := recover()
+		if err == nil {
+			t.Error("MustRegister did not panic")
+		}
+		if err != errFirstArg {
+			t.Errorf("Incorrect error: got %q, want %q", err, errFirstArg)
+		}
+	}()
+	MustRegister("invalid", func() {})
+}
+
+func TestGetRequestHeadersFromContext(t *testing.T) {
+	for _, testTarget := range []*Function{requestFunc, requestRegister} {
+		c := newFakeContext()
+
+		// Calling RequestHeaders outside a delay.Func should return an error.
+		headers, err := RequestHeaders(c.ctx)
+		if headers != nil {
+			t.Errorf("RequestHeaders outside Func, got %v, want nil", headers)
+		}
+		if err != errOutsideDelayFunc {
+			t.Errorf("RequestHeaders outside Func err, got %v, want %v", err, errOutsideDelayFunc)
+		}
+
+		// Fake out the adding of a task.
+		var task *taskqueue.Task
+		taskqueueAdder = func(_ context.Context, tk *taskqueue.Task, queue string) (*taskqueue.Task, error) {
+			if queue != "" {
+				t.Errorf(`Got queue %q, expected ""`, queue)
+			}
+			task = tk
+			return tk, nil
+		}
+
+		testTarget.Call(c.ctx)
+
+		requestFuncRuns, requestFuncHeaders = 0, nil // reset state
+		// Simulate the Task Queue service.
+		req, err := http.NewRequest("POST", path, bytes.NewBuffer(task.Payload))
+		req.Header.Set("x-appengine-taskname", "foobar")
+		if err != nil {
+			t.Fatalf("Failed making http.Request: %v", err)
+		}
+		rw := httptest.NewRecorder()
+		runFunc(c.ctx, rw, req)
+
+		if requestFuncRuns != 1 {
+			t.Errorf("requestFuncRuns: got %d, want 1", requestFuncRuns)
+		}
+		if requestFuncHeaders.TaskName != "foobar" {
+			t.Errorf("requestFuncHeaders.TaskName: got %v, want 'foobar'", requestFuncHeaders.TaskName)
+		}
+		if requestFuncErr != nil {
+			t.Errorf("requestFuncErr: got %v, want nil", requestFuncErr)
+		}
+	}
+}
+
+func TestStandardContext(t *testing.T) {
+	// Fake out the adding of a task.
+	var task *taskqueue.Task
+	taskqueueAdder = func(_ context.Context, tk *taskqueue.Task, queue string) (*taskqueue.Task, error) {
+		if queue != "" {
+			t.Errorf(`Got queue %q, expected ""`, queue)
+		}
+		task = tk
+		return tk, nil
+	}
+
+	for _, testTarget := range []*Function{stdCtxFunc, stdCtxRegister} {
+		c := newFakeContext()
+		stdCtxRuns = 0 // reset state
+		if err := testTarget.Call(c.ctx); err != nil {
+			t.Fatal("Function.Call:", err)
+		}
+
+		// Simulate the Task Queue service.
+		req, err := http.NewRequest("POST", path, bytes.NewBuffer(task.Payload))
+		if err != nil {
+			t.Fatalf("Failed making http.Request: %v", err)
+		}
+		rw := httptest.NewRecorder()
+		runFunc(c.ctx, rw, req)
+
+		if stdCtxRuns != 1 {
+			t.Errorf("stdCtxRuns: got %d, want 1", stdCtxRuns)
+		}
+	}
+}
+
+func TestFileKey(t *testing.T) {
+	const firstGenTest = 0
+	tests := []struct {
+		mainPath string
+		file     string
+		want     string
+	}{
+		// first-gen
+		{
+			"",
+			filepath.FromSlash("srv/foo.go"),
+			filepath.FromSlash("srv/foo.go"),
+		},
+		// gopath
+		{
+			filepath.FromSlash("/tmp/staging1234/srv/"),
+			filepath.FromSlash("/tmp/staging1234/srv/foo.go"),
+			"foo.go",
+		},
+		{
+			filepath.FromSlash("/tmp/staging1234/srv/_gopath/src/example.com/foo"),
+			filepath.FromSlash("/tmp/staging1234/srv/_gopath/src/example.com/foo/foo.go"),
+			"foo.go",
+		},
+		{
+			filepath.FromSlash("/tmp/staging2234/srv/_gopath/src/example.com/foo"),
+			filepath.FromSlash("/tmp/staging2234/srv/_gopath/src/example.com/foo/bar/bar.go"),
+			filepath.FromSlash("example.com/foo/bar/bar.go"),
+		},
+		{
+			filepath.FromSlash("/tmp/staging3234/srv/_gopath/src/example.com/foo"),
+			filepath.FromSlash("/tmp/staging3234/srv/_gopath/src/example.com/bar/main.go"),
+			filepath.FromSlash("example.com/bar/main.go"),
+		},
+		{
+			filepath.FromSlash("/tmp/staging3234/srv/gopath/src/example.com/foo"),
+			filepath.FromSlash("/tmp/staging3234/srv/gopath/src/example.com/bar/main.go"),
+			filepath.FromSlash("example.com/bar/main.go"),
+		},
+		{
+			filepath.FromSlash(""),
+			filepath.FromSlash("/tmp/staging3234/srv/gopath/src/example.com/bar/main.go"),
+			filepath.FromSlash("example.com/bar/main.go"),
+		},
+		// go mod, same package
+		{
+			filepath.FromSlash("/tmp/staging3234/srv"),
+			filepath.FromSlash("/tmp/staging3234/srv/main.go"),
+			"main.go",
+		},
+		{
+			filepath.FromSlash("/tmp/staging3234/srv"),
+			filepath.FromSlash("/tmp/staging3234/srv/bar/main.go"),
+			filepath.FromSlash("bar/main.go"),
+		},
+		{
+			filepath.FromSlash("/tmp/staging3234/srv/cmd"),
+			filepath.FromSlash("/tmp/staging3234/srv/cmd/main.go"),
+			"main.go",
+		},
+		{
+			filepath.FromSlash("/tmp/staging3234/srv/cmd"),
+			filepath.FromSlash("/tmp/staging3234/srv/bar/main.go"),
+			filepath.FromSlash("bar/main.go"),
+		},
+		{
+			filepath.FromSlash(""),
+			filepath.FromSlash("/tmp/staging3234/srv/bar/main.go"),
+			filepath.FromSlash("bar/main.go"),
+		},
+		// go mod, other package
+		{
+			filepath.FromSlash("/tmp/staging3234/srv"),
+			filepath.FromSlash("/go/pkg/mod/github.com/foo/bar@v0.0.0-20181026220418-f595d03440dc/baz.go"),
+			filepath.FromSlash("github.com/foo/bar/baz.go"),
+		},
+	}
+	for i, tc := range tests {
+		if i > firstGenTest {
+			os.Setenv("GAE_ENV", "standard")
+		}
+		internal.MainPath = tc.mainPath
+		got, err := fileKey(tc.file)
+		if err != nil {
+			t.Errorf("Unexpected error, call %v, file %q: %v", i, tc.file, err)
+			continue
+		}
+		if got != tc.want {
+			t.Errorf("Call %v, file %q: got %q, want %q", i, tc.file, got, tc.want)
+		}
+	}
+}
diff --git a/v2/errors.go b/v2/errors.go
new file mode 100644
index 0000000..a241852
--- /dev/null
+++ b/v2/errors.go
@@ -0,0 +1,46 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// This file provides error functions for common API failure modes.
+
+package appengine
+
+import (
+	"fmt"
+
+	"google.golang.org/appengine/v2/internal"
+)
+
+// IsOverQuota reports whether err represents an API call failure
+// due to insufficient available quota.
+func IsOverQuota(err error) bool {
+	callErr, ok := err.(*internal.CallError)
+	return ok && callErr.Code == 4
+}
+
+// MultiError is returned by batch operations when there are errors with
+// particular elements. Errors will be in a one-to-one correspondence with
+// the input elements; successful elements will have a nil entry.
+type MultiError []error
+
+func (m MultiError) Error() string {
+	s, n := "", 0
+	for _, e := range m {
+		if e != nil {
+			if n == 0 {
+				s = e.Error()
+			}
+			n++
+		}
+	}
+	switch n {
+	case 0:
+		return "(0 errors)"
+	case 1:
+		return s
+	case 2:
+		return s + " (and 1 other error)"
+	}
+	return fmt.Sprintf("%s (and %d other errors)", s, n-1)
+}
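
MultiError is how batch operations such as datastore.GetMulti report per-element failures; callers type-assert the returned error and index it alongside their inputs. A minimal sketch; the Item type is illustrative, not part of this diff:

```go
package example

import (
	"context"
	"log"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/datastore"
)

// Item is an illustrative entity type; it is not defined in this diff.
type Item struct {
	Name string
}

// getAll fetches a batch of entities and reports per-element failures,
// relying on MultiError's one-to-one correspondence with the inputs.
func getAll(ctx context.Context, keys []*datastore.Key) []Item {
	dst := make([]Item, len(keys))
	if err := datastore.GetMulti(ctx, keys, dst); err != nil {
		if me, ok := err.(appengine.MultiError); ok {
			for i, e := range me {
				if e != nil {
					log.Printf("key %v failed: %v", keys[i], e)
				}
			}
		} else {
			log.Printf("batch failed entirely: %v", err)
		}
	}
	return dst
}
```
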
diff --git a/v2/go.mod b/v2/go.mod
new file mode 100644
index 0000000..94d1b6d
--- /dev/null
+++ b/v2/go.mod
@@ -0,0 +1,9 @@
+module google.golang.org/appengine/v2
+
+go 1.11
+
+require (
+	github.com/golang/protobuf v1.3.1
+	golang.org/x/net v0.0.0-20220708220712-1185a9018129
+	golang.org/x/text v0.3.7
+)
diff --git a/v2/go.sum b/v2/go.sum
new file mode 100644
index 0000000..4d7f992
--- /dev/null
+++ b/v2/go.sum
@@ -0,0 +1,10 @@
+github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg=
+github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+golang.org/x/net v0.0.0-20220708220712-1185a9018129 h1:vucSRfWwTsoXro7P+3Cjlr6flUMtzCwzlvkxEQtHHB0=
+golang.org/x/net v0.0.0-20220708220712-1185a9018129/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
diff --git a/v2/identity.go b/v2/identity.go
new file mode 100644
index 0000000..78408e0
--- /dev/null
+++ b/v2/identity.go
@@ -0,0 +1,141 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package appengine
+
+import (
+	"context"
+	"time"
+
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/app_identity"
+	modpb "google.golang.org/appengine/v2/internal/modules"
+)
+
+// AppID returns the application ID for the current application.
+// The string will be a plain application ID (e.g. "appid"), with a
+// domain prefix for custom domain deployments (e.g. "example.com:appid").
+func AppID(c context.Context) string { return internal.AppID(c) }
+
+// DefaultVersionHostname returns the standard hostname of the default version
+// of the current application (e.g. "my-app.appspot.com"). This is suitable for
+// use in constructing URLs.
+func DefaultVersionHostname(c context.Context) string {
+	return internal.DefaultVersionHostname(c)
+}
+
+// ModuleName returns the module name of the current instance.
+func ModuleName(c context.Context) string {
+	return internal.ModuleName(c)
+}
+
+// ModuleHostname returns a hostname of a module instance.
+// If module is the empty string, it refers to the module of the current instance.
+// If version is empty, it refers to the version of the current instance if valid,
+// or the default version of the module of the current instance.
+// If instance is empty, ModuleHostname returns the load-balancing hostname.
+func ModuleHostname(c context.Context, module, version, instance string) (string, error) {
+	req := &modpb.GetHostnameRequest{}
+	if module != "" {
+		req.Module = &module
+	}
+	if version != "" {
+		req.Version = &version
+	}
+	if instance != "" {
+		req.Instance = &instance
+	}
+	res := &modpb.GetHostnameResponse{}
+	if err := internal.Call(c, "modules", "GetHostname", req, res); err != nil {
+		return "", err
+	}
+	return *res.Hostname, nil
+}
+
+// VersionID returns the version ID for the current application.
+// It will be of the form "X.Y", where X is specified in app.yaml,
+// and Y is a number generated when each version of the app is uploaded.
+// It does not include a module name.
+func VersionID(c context.Context) string { return internal.VersionID(c) }
+
+// InstanceID returns a mostly-unique identifier for this instance.
+func InstanceID() string { return internal.InstanceID() }
+
+// Datacenter returns an identifier for the datacenter that the instance is running in.
+func Datacenter(c context.Context) string { return internal.Datacenter(c) }
+
+// ServerSoftware returns the App Engine release version.
+// In production, it looks like "Google App Engine/X.Y.Z".
+// In the development appserver, it looks like "Development/X.Y".
+func ServerSoftware() string { return internal.ServerSoftware() }
+
+// RequestID returns a string that uniquely identifies the request.
+func RequestID(c context.Context) string { return internal.RequestID(c) }
+
+// AccessToken generates an OAuth2 access token for the specified scopes on
+// behalf of service account of this application. This token will expire after
+// the returned time.
+func AccessToken(c context.Context, scopes ...string) (token string, expiry time.Time, err error) {
+	req := &pb.GetAccessTokenRequest{Scope: scopes}
+	res := &pb.GetAccessTokenResponse{}
+
+	err = internal.Call(c, "app_identity_service", "GetAccessToken", req, res)
+	if err != nil {
+		return "", time.Time{}, err
+	}
+	return res.GetAccessToken(), time.Unix(res.GetExpirationTime(), 0), nil
+}
+
+// Certificate represents a public certificate for the app.
+type Certificate struct {
+	KeyName string
+	Data    []byte // PEM-encoded X.509 certificate
+}
+
+// PublicCertificates retrieves the public certificates for the app.
+// They can be used to verify a signature returned by SignBytes.
+func PublicCertificates(c context.Context) ([]Certificate, error) {
+	req := &pb.GetPublicCertificateForAppRequest{}
+	res := &pb.GetPublicCertificateForAppResponse{}
+	if err := internal.Call(c, "app_identity_service", "GetPublicCertificatesForApp", req, res); err != nil {
+		return nil, err
+	}
+	var cs []Certificate
+	for _, pc := range res.PublicCertificateList {
+		cs = append(cs, Certificate{
+			KeyName: pc.GetKeyName(),
+			Data:    []byte(pc.GetX509CertificatePem()),
+		})
+	}
+	return cs, nil
+}
+
+// ServiceAccount returns a string representing the service account name, in
+// the form of an email address (typically app_id@appspot.gserviceaccount.com).
+func ServiceAccount(c context.Context) (string, error) {
+	req := &pb.GetServiceAccountNameRequest{}
+	res := &pb.GetServiceAccountNameResponse{}
+
+	err := internal.Call(c, "app_identity_service", "GetServiceAccountName", req, res)
+	if err != nil {
+		return "", err
+	}
+	return res.GetServiceAccountName(), err
+}
+
+// SignBytes signs bytes using a private key unique to your application.
+func SignBytes(c context.Context, bytes []byte) (keyName string, signature []byte, err error) {
+	req := &pb.SignForAppRequest{BytesToSign: bytes}
+	res := &pb.SignForAppResponse{}
+
+	if err := internal.Call(c, "app_identity_service", "SignForApp", req, res); err != nil {
+		return "", nil, err
+	}
+	return res.GetKeyName(), res.GetSignatureBytes(), nil
+}
+
+func init() {
+	internal.RegisterErrorCodeMap("app_identity_service", pb.AppIdentityServiceError_ErrorCode_name)
+	internal.RegisterErrorCodeMap("modules", modpb.ModulesServiceError_ErrorCode_name)
+}
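
The identity helpers above are thin wrappers over the app_identity and modules services and are typically combined inside a request handler. A minimal sketch; the handler and scope string are illustrative, not part of this diff:

```go
package example

import (
	"fmt"
	"net/http"

	"google.golang.org/appengine/v2"
)

// whoAmI reports identity facts for the current instance and mints a
// short-lived OAuth2 token for the app's default service account.
func whoAmI(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	fmt.Fprintf(w, "app %s, module %s, version %s\n",
		appengine.AppID(ctx), appengine.ModuleName(ctx), appengine.VersionID(ctx))

	tok, expiry, err := appengine.AccessToken(ctx, "https://www.googleapis.com/auth/cloud-platform")
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	fmt.Fprintf(w, "token of length %d expires at %v\n", len(tok), expiry)
}
```
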
diff --git a/v2/image/image.go b/v2/image/image.go
new file mode 100644
index 0000000..90f8121
--- /dev/null
+++ b/v2/image/image.go
@@ -0,0 +1,66 @@
+// Copyright 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package image provides image services.
+package image // import "google.golang.org/appengine/v2/image"
+
+import (
+	"context"
+	"fmt"
+	"net/url"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/image"
+)
+
+type ServingURLOptions struct {
+	Secure bool // whether the URL should use HTTPS
+
+	// Size must be between zero and 1600.
+	// If Size is non-zero, a resized version of the image is served,
+	// and Size is the served image's longest dimension. The aspect ratio is preserved.
+	// If Crop is true the image is cropped from the center instead of being resized.
+	Size int
+	Crop bool
+}
+
+// ServingURL returns a URL that will serve an image from Blobstore.
+func ServingURL(c context.Context, key appengine.BlobKey, opts *ServingURLOptions) (*url.URL, error) {
+	req := &pb.ImagesGetUrlBaseRequest{
+		BlobKey: (*string)(&key),
+	}
+	if opts != nil && opts.Secure {
+		req.CreateSecureUrl = &opts.Secure
+	}
+	res := &pb.ImagesGetUrlBaseResponse{}
+	if err := internal.Call(c, "images", "GetUrlBase", req, res); err != nil {
+		return nil, err
+	}
+
+	// The URL may have suffixes added to dynamically resize or crop:
+	// - adding "=s32" will serve the image resized to 32 pixels, preserving the aspect ratio.
+	// - adding "=s32-c" is the same as "=s32" except it will be cropped.
+	u := *res.Url
+	if opts != nil && opts.Size > 0 {
+		u += fmt.Sprintf("=s%d", opts.Size)
+		if opts.Crop {
+			u += "-c"
+		}
+	}
+	return url.Parse(u)
+}
+
+// DeleteServingURL deletes the serving URL for an image.
+func DeleteServingURL(c context.Context, key appengine.BlobKey) error {
+	req := &pb.ImagesDeleteUrlBaseRequest{
+		BlobKey: (*string)(&key),
+	}
+	res := &pb.ImagesDeleteUrlBaseResponse{}
+	return internal.Call(c, "images", "DeleteUrlBase", req, res)
+}
+
+func init() {
+	internal.RegisterErrorCodeMap("images", pb.ImagesServiceError_ErrorCode_name)
+}
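
ServingURL's Size and Crop options correspond to the "=sNN" and "-c" URL suffixes noted in the code above, so a handler holding a BlobKey can redirect straight to a resized rendition. A minimal sketch assuming the blob key arrives as a query parameter; not part of this diff:

```go
package example

import (
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/image"
)

// thumbnail redirects to a 256px, center-cropped rendition of the blob
// named by the "key" query parameter.
func thumbnail(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	key := appengine.BlobKey(r.FormValue("key"))
	u, err := image.ServingURL(ctx, key, &image.ServingURLOptions{
		Secure: true,
		Size:   256,
		Crop:   true,
	})
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	http.Redirect(w, r, u.String(), http.StatusFound)
}
```
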
diff --git a/v2/internal/aetesting/fake.go b/v2/internal/aetesting/fake.go
new file mode 100644
index 0000000..20ecb89
--- /dev/null
+++ b/v2/internal/aetesting/fake.go
@@ -0,0 +1,81 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package aetesting provides utilities for testing App Engine packages.
+// This is not for testing user applications.
+package aetesting
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+	"reflect"
+	"testing"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal"
+)
+
+// FakeSingleContext returns a context whose Call invocations will be serviced
+// by f, which should be a function that has two arguments of the input and output
+// protocol buffer type, and one error return.
+func FakeSingleContext(t *testing.T, service, method string, f interface{}) context.Context {
+	fv := reflect.ValueOf(f)
+	if fv.Kind() != reflect.Func {
+		t.Fatal("not a function")
+	}
+	ft := fv.Type()
+	if ft.NumIn() != 2 || ft.NumOut() != 1 {
+		t.Fatalf("f has %d in and %d out, want 2 in and 1 out", ft.NumIn(), ft.NumOut())
+	}
+	for i := 0; i < 2; i++ {
+		at := ft.In(i)
+		if !at.Implements(protoMessageType) {
+			t.Fatalf("arg %d does not implement proto.Message", i)
+		}
+	}
+	if ft.Out(0) != errorType {
+		t.Fatalf("f's return is %v, want error", ft.Out(0))
+	}
+	s := &single{
+		t:       t,
+		service: service,
+		method:  method,
+		f:       fv,
+	}
+	return internal.WithCallOverride(internal.ContextForTesting(&http.Request{}), s.call)
+}
+
+var (
+	protoMessageType = reflect.TypeOf((*proto.Message)(nil)).Elem()
+	errorType        = reflect.TypeOf((*error)(nil)).Elem()
+)
+
+type single struct {
+	t               *testing.T
+	service, method string
+	f               reflect.Value
+}
+
+func (s *single) call(ctx context.Context, service, method string, in, out proto.Message) error {
+	if service == "__go__" {
+		if method == "GetNamespace" {
+			return nil // always yield an empty namespace
+		}
+		return fmt.Errorf("Unknown API call /%s.%s", service, method)
+	}
+	if service != s.service || method != s.method {
+		s.t.Fatalf("Unexpected call to /%s.%s", service, method)
+	}
+	ins := []reflect.Value{
+		reflect.ValueOf(in),
+		reflect.ValueOf(out),
+	}
+	outs := s.f.Call(ins)
+	if outs[0].IsNil() {
+		return nil
+	}
+	return outs[0].Interface().(error)
+}
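
FakeSingleContext is the harness the module's own service packages use in unit tests to stub out exactly one RPC: the supplied callback receives the request and response protos for the named service and method. A rough sketch of the pattern as it would appear in a test file inside the image package (internal packages cannot be imported from user code); the expected URL is illustrative, not part of this diff:

```go
package image

import (
	"testing"

	"google.golang.org/appengine/v2/internal/aetesting"
	pb "google.golang.org/appengine/v2/internal/image"
)

// TestServingURLFake stubs the single images.GetUrlBase RPC made by
// ServingURL and checks the resize/crop suffix handling.
func TestServingURLFake(t *testing.T) {
	ctx := aetesting.FakeSingleContext(t, "images", "GetUrlBase",
		func(in *pb.ImagesGetUrlBaseRequest, out *pb.ImagesGetUrlBaseResponse) error {
			base := "http://lh3.example.com/base"
			out.Url = &base
			return nil
		})

	got, err := ServingURL(ctx, "some-blob-key", &ServingURLOptions{Size: 32, Crop: true})
	if err != nil {
		t.Fatalf("ServingURL: %v", err)
	}
	if want := "http://lh3.example.com/base=s32-c"; got.String() != want {
		t.Errorf("ServingURL = %q, want %q", got, want)
	}
}
```
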
diff --git a/v2/internal/api.go b/v2/internal/api.go
new file mode 100644
index 0000000..41b8e25
--- /dev/null
+++ b/v2/internal/api.go
@@ -0,0 +1,482 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	"bytes"
+	netcontext "context"
+	"errors"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"net"
+	"net/http"
+	"net/url"
+	"os"
+	"runtime"
+	"strconv"
+	"sync/atomic"
+	"time"
+
+	"github.com/golang/protobuf/proto"
+
+	remotepb "google.golang.org/appengine/v2/internal/remote_api"
+)
+
+const (
+	apiPath = "/rpc_http"
+)
+
+var (
+	// Incoming headers.
+	ticketHeader       = http.CanonicalHeaderKey("X-AppEngine-API-Ticket")
+	dapperHeader       = http.CanonicalHeaderKey("X-Google-DapperTraceInfo")
+	traceHeader        = http.CanonicalHeaderKey("X-Cloud-Trace-Context")
+	curNamespaceHeader = http.CanonicalHeaderKey("X-AppEngine-Current-Namespace")
+	userIPHeader       = http.CanonicalHeaderKey("X-AppEngine-User-IP")
+	remoteAddrHeader   = http.CanonicalHeaderKey("X-AppEngine-Remote-Addr")
+	devRequestIdHeader = http.CanonicalHeaderKey("X-Appengine-Dev-Request-Id")
+
+	// Outgoing headers.
+	apiEndpointHeader      = http.CanonicalHeaderKey("X-Google-RPC-Service-Endpoint")
+	apiEndpointHeaderValue = []string{"app-engine-apis"}
+	apiMethodHeader        = http.CanonicalHeaderKey("X-Google-RPC-Service-Method")
+	apiMethodHeaderValue   = []string{"/VMRemoteAPI.CallRemoteAPI"}
+	apiDeadlineHeader      = http.CanonicalHeaderKey("X-Google-RPC-Service-Deadline")
+	apiContentType         = http.CanonicalHeaderKey("Content-Type")
+	apiContentTypeValue    = []string{"application/octet-stream"}
+	logFlushHeader         = http.CanonicalHeaderKey("X-AppEngine-Log-Flush-Count")
+
+	apiHTTPClient = &http.Client{
+		Transport: &http.Transport{
+			Proxy:               http.ProxyFromEnvironment,
+			Dial:                limitDial,
+			MaxIdleConns:        1000,
+			MaxIdleConnsPerHost: 10000,
+			IdleConnTimeout:     90 * time.Second,
+		},
+	}
+
+	logStream io.Writer        = os.Stderr // For test hooks.
+	timeNow   func() time.Time = time.Now  // For test hooks.
+)
+
+func apiURL(ctx netcontext.Context) *url.URL {
+	host, port := "appengine.googleapis.internal", "10001"
+	if h := os.Getenv("API_HOST"); h != "" {
+		host = h
+	}
+	if hostOverride := ctx.Value(apiHostOverrideKey); hostOverride != nil {
+		host = hostOverride.(string)
+	}
+	if p := os.Getenv("API_PORT"); p != "" {
+		port = p
+	}
+	if portOverride := ctx.Value(apiPortOverrideKey); portOverride != nil {
+		port = portOverride.(string)
+	}
+	return &url.URL{
+		Scheme: "http",
+		Host:   host + ":" + port,
+		Path:   apiPath,
+	}
+}
+
+// Middleware wraps an http handler so that it can make GAE API calls
+func Middleware(next http.Handler) http.Handler {
+	return handleHTTPMiddleware(executeRequestSafelyMiddleware(next))
+}
+
+func handleHTTPMiddleware(next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		c := &context{
+			req:       r,
+			outHeader: w.Header(),
+		}
+		r = r.WithContext(withContext(r.Context(), c))
+		c.req = r
+
+		// Patch up RemoteAddr so it looks reasonable.
+		if addr := r.Header.Get(userIPHeader); addr != "" {
+			r.RemoteAddr = addr
+		} else if addr = r.Header.Get(remoteAddrHeader); addr != "" {
+			r.RemoteAddr = addr
+		} else {
+			// Should not normally reach here, but pick a sensible default anyway.
+			r.RemoteAddr = "127.0.0.1"
+		}
+		// The address in the headers will most likely be of these forms:
+		//	123.123.123.123
+		//	2001:db8::1
+		// net/http.Request.RemoteAddr is specified to be in "IP:port" form.
+		if _, _, err := net.SplitHostPort(r.RemoteAddr); err != nil {
+			// Assume the remote address is only a host; add a default port.
+			r.RemoteAddr = net.JoinHostPort(r.RemoteAddr, "80")
+		}
+
+		next.ServeHTTP(c, r)
+		c.outHeader = nil // make sure header changes aren't respected any more
+
+		// Avoid nil Write call if c.Write is never called.
+		if c.outCode != 0 {
+			w.WriteHeader(c.outCode)
+		}
+		if c.outBody != nil {
+			w.Write(c.outBody)
+		}
+	})
+}
+
+func executeRequestSafelyMiddleware(next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		defer func() {
+			if x := recover(); x != nil {
+				c := w.(*context)
+				logf(c, 4, "%s", renderPanic(x)) // 4 == critical
+				c.outCode = 500
+			}
+		}()
+
+		next.ServeHTTP(w, r)
+	})
+}
+
+func renderPanic(x interface{}) string {
+	buf := make([]byte, 16<<10) // 16 KB should be plenty
+	buf = buf[:runtime.Stack(buf, false)]
+
+	// Remove the first few stack frames:
+	//   this func
+	//   the recover closure in the caller
+	// That will root the stack trace at the site of the panic.
+	const (
+		skipStart  = "internal.renderPanic"
+		skipFrames = 2
+	)
+	start := bytes.Index(buf, []byte(skipStart))
+	p := start
+	for i := 0; i < skipFrames*2 && p+1 < len(buf); i++ {
+		p = bytes.IndexByte(buf[p+1:], '\n') + p + 1
+		if p < 0 {
+			break
+		}
+	}
+	if p >= 0 {
+		// buf[start:p+1] is the block to remove.
+		// Copy buf[p+1:] over buf[start:] and shrink buf.
+		copy(buf[start:], buf[p+1:])
+		buf = buf[:len(buf)-(p+1-start)]
+	}
+
+	// Add panic heading.
+	head := fmt.Sprintf("panic: %v\n\n", x)
+	if len(head) > len(buf) {
+		// Extremely unlikely to happen.
+		return head
+	}
+	copy(buf[len(head):], buf)
+	copy(buf, head)
+
+	return string(buf)
+}
+
+// context represents the context of an in-flight HTTP request.
+// It implements the appengine.Context and http.ResponseWriter interfaces.
+type context struct {
+	req *http.Request
+
+	outCode   int
+	outHeader http.Header
+	outBody   []byte
+}
+
+var contextKey = "holds a *context"
+
+// jointContext joins two contexts in a superficial way.
+// It takes values and timeouts from a base context, and only values from another context.
+type jointContext struct {
+	base       netcontext.Context
+	valuesOnly netcontext.Context
+}
+
+func (c jointContext) Deadline() (time.Time, bool) {
+	return c.base.Deadline()
+}
+
+func (c jointContext) Done() <-chan struct{} {
+	return c.base.Done()
+}
+
+func (c jointContext) Err() error {
+	return c.base.Err()
+}
+
+func (c jointContext) Value(key interface{}) interface{} {
+	if val := c.base.Value(key); val != nil {
+		return val
+	}
+	return c.valuesOnly.Value(key)
+}
+
+// fromContext returns the App Engine context or nil if ctx is not
+// derived from an App Engine context.
+func fromContext(ctx netcontext.Context) *context {
+	c, _ := ctx.Value(&contextKey).(*context)
+	return c
+}
+
+func withContext(parent netcontext.Context, c *context) netcontext.Context {
+	ctx := netcontext.WithValue(parent, &contextKey, c)
+	if ns := c.req.Header.Get(curNamespaceHeader); ns != "" {
+		ctx = withNamespace(ctx, ns)
+	}
+	return ctx
+}
+
+func toContext(c *context) netcontext.Context {
+	return withContext(netcontext.Background(), c)
+}
+
+func IncomingHeaders(ctx netcontext.Context) http.Header {
+	if c := fromContext(ctx); c != nil {
+		return c.req.Header
+	}
+	return nil
+}
+
+func ReqContext(req *http.Request) netcontext.Context {
+	return req.Context()
+}
+
+func WithContext(parent netcontext.Context, req *http.Request) netcontext.Context {
+	return jointContext{
+		base:       parent,
+		valuesOnly: req.Context(),
+	}
+}
+
+// RegisterTestRequest registers the HTTP request req for testing, such that
+// any API calls are sent to the provided URL. It returns a closure to delete
+// the registration.
+// It should only be used by aetest package.
+func RegisterTestRequest(req *http.Request, apiURL *url.URL, appID string) *http.Request {
+	ctx := req.Context()
+	ctx = withAPIHostOverride(ctx, apiURL.Hostname())
+	ctx = withAPIPortOverride(ctx, apiURL.Port())
+	ctx = WithAppIDOverride(ctx, appID)
+
+	// use the unregistered request as a placeholder so that withContext can read the headers
+	c := &context{req: req}
+	c.req = req.WithContext(withContext(ctx, c))
+	return c.req
+}
+
+var errTimeout = &CallError{
+	Detail:  "Deadline exceeded",
+	Code:    int32(remotepb.RpcError_CANCELLED),
+	Timeout: true,
+}
+
+func (c *context) Header() http.Header { return c.outHeader }
+
+// Copied from $GOROOT/src/pkg/net/http/transfer.go. Some response status
+// codes do not permit a response body (nor response entity headers such as
+// Content-Length, Content-Type, etc).
+func bodyAllowedForStatus(status int) bool {
+	switch {
+	case status >= 100 && status <= 199:
+		return false
+	case status == 204:
+		return false
+	case status == 304:
+		return false
+	}
+	return true
+}
+
+func (c *context) Write(b []byte) (int, error) {
+	if c.outCode == 0 {
+		c.WriteHeader(http.StatusOK)
+	}
+	if len(b) > 0 && !bodyAllowedForStatus(c.outCode) {
+		return 0, http.ErrBodyNotAllowed
+	}
+	c.outBody = append(c.outBody, b...)
+	return len(b), nil
+}
+
+func (c *context) WriteHeader(code int) {
+	if c.outCode != 0 {
+		logf(c, 3, "WriteHeader called multiple times on request.") // error level
+		return
+	}
+	c.outCode = code
+}
+
+func post(ctx netcontext.Context, body []byte, timeout time.Duration) (b []byte, err error) {
+	apiURL := apiURL(ctx)
+	hreq := &http.Request{
+		Method: "POST",
+		URL:    apiURL,
+		Header: http.Header{
+			apiEndpointHeader: apiEndpointHeaderValue,
+			apiMethodHeader:   apiMethodHeaderValue,
+			apiContentType:    apiContentTypeValue,
+			apiDeadlineHeader: []string{strconv.FormatFloat(timeout.Seconds(), 'f', -1, 64)},
+		},
+		Body:          ioutil.NopCloser(bytes.NewReader(body)),
+		ContentLength: int64(len(body)),
+		Host:          apiURL.Host,
+	}
+	c := fromContext(ctx)
+	if c != nil {
+		if info := c.req.Header.Get(dapperHeader); info != "" {
+			hreq.Header.Set(dapperHeader, info)
+		}
+		if info := c.req.Header.Get(traceHeader); info != "" {
+			hreq.Header.Set(traceHeader, info)
+		}
+	}
+
+	tr := apiHTTPClient.Transport.(*http.Transport)
+
+	var timedOut int32 // atomic; set to 1 if timed out
+	t := time.AfterFunc(timeout, func() {
+		atomic.StoreInt32(&timedOut, 1)
+		tr.CancelRequest(hreq)
+	})
+	defer t.Stop()
+	defer func() {
+		// Check if timeout was exceeded.
+		if atomic.LoadInt32(&timedOut) != 0 {
+			err = errTimeout
+		}
+	}()
+
+	hresp, err := apiHTTPClient.Do(hreq)
+	if err != nil {
+		return nil, &CallError{
+			Detail: fmt.Sprintf("service bridge HTTP failed: %v", err),
+			Code:   int32(remotepb.RpcError_UNKNOWN),
+		}
+	}
+	defer hresp.Body.Close()
+	hrespBody, err := ioutil.ReadAll(hresp.Body)
+	if hresp.StatusCode != 200 {
+		return nil, &CallError{
+			Detail: fmt.Sprintf("service bridge returned HTTP %d (%q)", hresp.StatusCode, hrespBody),
+			Code:   int32(remotepb.RpcError_UNKNOWN),
+		}
+	}
+	if err != nil {
+		return nil, &CallError{
+			Detail: fmt.Sprintf("service bridge response bad: %v", err),
+			Code:   int32(remotepb.RpcError_UNKNOWN),
+		}
+	}
+	return hrespBody, nil
+}
+
+func Call(ctx netcontext.Context, service, method string, in, out proto.Message) error {
+	if ns := NamespaceFromContext(ctx); ns != "" {
+		if fn, ok := NamespaceMods[service]; ok {
+			fn(in, ns)
+		}
+	}
+
+	if f, ctx, ok := callOverrideFromContext(ctx); ok {
+		return f(ctx, service, method, in, out)
+	}
+
+	// Handle already-done contexts quickly.
+	select {
+	case <-ctx.Done():
+		return ctx.Err()
+	default:
+	}
+
+	c := fromContext(ctx)
+
+	// Apply transaction modifications if we're in a transaction.
+	if t := transactionFromContext(ctx); t != nil {
+		if t.finished {
+			return errors.New("transaction context has expired")
+		}
+		applyTransaction(in, &t.transaction)
+	}
+
+	// Default RPC timeout is 60s.
+	timeout := 60 * time.Second
+	if deadline, ok := ctx.Deadline(); ok {
+		timeout = time.Until(deadline)
+	}
+
+	data, err := proto.Marshal(in)
+	if err != nil {
+		return err
+	}
+
+	ticket := ""
+	if c != nil {
+		ticket = c.req.Header.Get(ticketHeader)
+		if dri := c.req.Header.Get(devRequestIdHeader); IsDevAppServer() && dri != "" {
+			ticket = dri
+		}
+	}
+	req := &remotepb.Request{
+		ServiceName: &service,
+		Method:      &method,
+		Request:     data,
+		RequestId:   &ticket,
+	}
+	hreqBody, err := proto.Marshal(req)
+	if err != nil {
+		return err
+	}
+
+	hrespBody, err := post(ctx, hreqBody, timeout)
+	if err != nil {
+		return err
+	}
+
+	res := &remotepb.Response{}
+	if err := proto.Unmarshal(hrespBody, res); err != nil {
+		return err
+	}
+	if res.RpcError != nil {
+		ce := &CallError{
+			Detail: res.RpcError.GetDetail(),
+			Code:   *res.RpcError.Code,
+		}
+		switch remotepb.RpcError_ErrorCode(ce.Code) {
+		case remotepb.RpcError_CANCELLED, remotepb.RpcError_DEADLINE_EXCEEDED:
+			ce.Timeout = true
+		}
+		return ce
+	}
+	if res.ApplicationError != nil {
+		return &APIError{
+			Service: *req.ServiceName,
+			Detail:  res.ApplicationError.GetDetail(),
+			Code:    *res.ApplicationError.Code,
+		}
+	}
+	if res.Exception != nil || res.JavaException != nil {
+		// This shouldn't happen, but let's be defensive.
+		return &CallError{
+			Detail: "service bridge returned exception",
+			Code:   int32(remotepb.RpcError_UNKNOWN),
+		}
+	}
+	return proto.Unmarshal(res.Response, out)
+}
+
+func (c *context) Request() *http.Request {
+	return c.req
+}
+
+func ContextForTesting(req *http.Request) netcontext.Context {
+	return toContext(&context{req: req})
+}
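// Usage sketch (illustrative, not part of the upstream change): Call derives its
// RPC timeout from the context deadline, falling back to 60s when none is set,
// and reports an elapsed deadline as a *CallError with Timeout set. The service
// name, method name and messages below are placeholders.
//
//	ctx, cancel := netcontext.WithTimeout(ContextForTesting(req), 5*time.Second)
//	defer cancel()
//	err := Call(ctx, "someservice", "SomeMethod", in, out)
//	if ce, ok := err.(*CallError); ok && ce.Timeout {
//		// deadline elapsed before the service bridge responded
//	}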
diff --git a/v2/internal/api_common.go b/v2/internal/api_common.go
new file mode 100644
index 0000000..f6101d3
--- /dev/null
+++ b/v2/internal/api_common.go
@@ -0,0 +1,141 @@
+// Copyright 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	netcontext "context"
+	"errors"
+	"os"
+
+	"github.com/golang/protobuf/proto"
+)
+
+type ctxKey string
+
+func (c ctxKey) String() string {
+	return "appengine context key: " + string(c)
+}
+
+var errNotAppEngineContext = errors.New("not an App Engine context")
+
+type CallOverrideFunc func(ctx netcontext.Context, service, method string, in, out proto.Message) error
+
+var callOverrideKey = "holds []CallOverrideFunc"
+
+func WithCallOverride(ctx netcontext.Context, f CallOverrideFunc) netcontext.Context {
+	// We avoid appending to any existing call override
+	// so we don't risk overwriting a popped stack below.
+	var cofs []CallOverrideFunc
+	if uf, ok := ctx.Value(&callOverrideKey).([]CallOverrideFunc); ok {
+		cofs = append(cofs, uf...)
+	}
+	cofs = append(cofs, f)
+	return netcontext.WithValue(ctx, &callOverrideKey, cofs)
+}
+
+func callOverrideFromContext(ctx netcontext.Context) (CallOverrideFunc, netcontext.Context, bool) {
+	cofs, _ := ctx.Value(&callOverrideKey).([]CallOverrideFunc)
+	if len(cofs) == 0 {
+		return nil, nil, false
+	}
+	// We found a list of overrides; grab the last, and reconstitute a
+	// context that will hide it.
+	f := cofs[len(cofs)-1]
+	ctx = netcontext.WithValue(ctx, &callOverrideKey, cofs[:len(cofs)-1])
+	return f, ctx, true
+}
+
+type logOverrideFunc func(level int64, format string, args ...interface{})
+
+var logOverrideKey = "holds a logOverrideFunc"
+
+func WithLogOverride(ctx netcontext.Context, f logOverrideFunc) netcontext.Context {
+	return netcontext.WithValue(ctx, &logOverrideKey, f)
+}
+
+var appIDOverrideKey = "holds a string, being the full app ID"
+
+func WithAppIDOverride(ctx netcontext.Context, appID string) netcontext.Context {
+	return netcontext.WithValue(ctx, &appIDOverrideKey, appID)
+}
+
+var apiHostOverrideKey = ctxKey("holds a string, being the alternate API_HOST")
+
+func withAPIHostOverride(ctx netcontext.Context, apiHost string) netcontext.Context {
+	return netcontext.WithValue(ctx, apiHostOverrideKey, apiHost)
+}
+
+var apiPortOverrideKey = ctxKey("holds a string, being the alternate API_PORT")
+
+func withAPIPortOverride(ctx netcontext.Context, apiPort string) netcontext.Context {
+	return netcontext.WithValue(ctx, apiPortOverrideKey, apiPort)
+}
+
+var namespaceKey = "holds the namespace string"
+
+func withNamespace(ctx netcontext.Context, ns string) netcontext.Context {
+	return netcontext.WithValue(ctx, &namespaceKey, ns)
+}
+
+func NamespaceFromContext(ctx netcontext.Context) string {
+	// If there's no namespace, return the empty string.
+	ns, _ := ctx.Value(&namespaceKey).(string)
+	return ns
+}
+
+// FullyQualifiedAppID returns the fully-qualified application ID.
+// This may contain a partition prefix (e.g. "s~" for High Replication apps),
+// or a domain prefix (e.g. "example.com:").
+func FullyQualifiedAppID(ctx netcontext.Context) string {
+	if id, ok := ctx.Value(&appIDOverrideKey).(string); ok {
+		return id
+	}
+	return fullyQualifiedAppID(ctx)
+}
+
+func Logf(ctx netcontext.Context, level int64, format string, args ...interface{}) {
+	if f, ok := ctx.Value(&logOverrideKey).(logOverrideFunc); ok {
+		f(level, format, args...)
+		return
+	}
+	c := fromContext(ctx)
+	if c == nil {
+		panic(errNotAppEngineContext)
+	}
+	logf(c, level, format, args...)
+}
+
+// NamespacedContext wraps a Context to support namespaces.
+func NamespacedContext(ctx netcontext.Context, namespace string) netcontext.Context {
+	return withNamespace(ctx, namespace)
+}
+
+// SetTestEnv sets the env variables for testing background ticket in Flex.
+func SetTestEnv() func() {
+	var environ = []struct {
+		key, value string
+	}{
+		{"GAE_LONG_APP_ID", "my-app-id"},
+		{"GAE_MINOR_VERSION", "067924799508853122"},
+		{"GAE_MODULE_INSTANCE", "0"},
+		{"GAE_MODULE_NAME", "default"},
+		{"GAE_MODULE_VERSION", "20150612t184001"},
+	}
+
+	for i, v := range environ {
+		old := os.Getenv(v.key)
+		os.Setenv(v.key, v.value)
+		environ[i].value = old // store the old value in the slice so the closure below can restore it
+	}
+	return func() { // Restore old environment after the test completes.
+		for _, v := range environ {
+			if v.value == "" {
+				os.Unsetenv(v.key)
+				continue
+			}
+			os.Setenv(v.key, v.value)
+		}
+	}
+}
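// Usage sketch (illustrative, with placeholder logic): WithCallOverride pushes an
// interceptor onto a per-context stack, and Call consults the most recent one
// with a context that hides it, so an override can fall through to the next
// layer by calling Call again.
//
//	ctx = WithCallOverride(ctx, func(ctx netcontext.Context, service, method string, in, out proto.Message) error {
//		if service == "memcache" {
//			return nil // pretend the call succeeded without reaching the service bridge
//		}
//		return Call(ctx, service, method, in, out) // fall through to the real implementation
//	})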
diff --git a/v2/internal/api_race_test.go b/v2/internal/api_race_test.go
new file mode 100644
index 0000000..845d0da
--- /dev/null
+++ b/v2/internal/api_race_test.go
@@ -0,0 +1,10 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+//go:build race
+// +build race
+
+package internal
+
+func init() { raceDetector = true }
diff --git a/v2/internal/api_test.go b/v2/internal/api_test.go
new file mode 100644
index 0000000..e073c9b
--- /dev/null
+++ b/v2/internal/api_test.go
@@ -0,0 +1,424 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	"bufio"
+	"bytes"
+	netcontext "context"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
+	"os"
+	"os/exec"
+	"strings"
+	"sync/atomic"
+	"testing"
+	"time"
+
+	"github.com/golang/protobuf/proto"
+
+	basepb "google.golang.org/appengine/v2/internal/base"
+	remotepb "google.golang.org/appengine/v2/internal/remote_api"
+)
+
+const testTicketHeader = "X-Magic-Ticket-Header"
+
+func init() {
+	ticketHeader = testTicketHeader
+}
+
+type fakeAPIHandler struct {
+	hang chan int // used for RunSlowly RPC
+
+	LogFlushes int32 // atomic
+
+	allowMissingTicket bool
+}
+
+func (f *fakeAPIHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
+	writeResponse := func(res *remotepb.Response) {
+		hresBody, err := proto.Marshal(res)
+		if err != nil {
+			http.Error(w, fmt.Sprintf("Failed encoding API response: %v", err), 500)
+			return
+		}
+		w.Write(hresBody)
+	}
+
+	if r.URL.Path != "/rpc_http" {
+		http.NotFound(w, r)
+		return
+	}
+	hreqBody, err := ioutil.ReadAll(r.Body)
+	if err != nil {
+		http.Error(w, fmt.Sprintf("Bad body: %v", err), 500)
+		return
+	}
+	apiReq := &remotepb.Request{}
+	if err := proto.Unmarshal(hreqBody, apiReq); err != nil {
+		http.Error(w, fmt.Sprintf("Bad encoded API request: %v", err), 500)
+		return
+	}
+	if *apiReq.RequestId != "s3cr3t" && !f.allowMissingTicket {
+		writeResponse(&remotepb.Response{
+			RpcError: &remotepb.RpcError{
+				Code:   proto.Int32(int32(remotepb.RpcError_SECURITY_VIOLATION)),
+				Detail: proto.String("bad security ticket"),
+			},
+		})
+		return
+	}
+	if got, want := r.Header.Get(dapperHeader), "trace-001"; got != want {
+		writeResponse(&remotepb.Response{
+			RpcError: &remotepb.RpcError{
+				Code:   proto.Int32(int32(remotepb.RpcError_BAD_REQUEST)),
+				Detail: proto.String(fmt.Sprintf("trace info = %q, want %q", got, want)),
+			},
+		})
+		return
+	}
+
+	service, method := *apiReq.ServiceName, *apiReq.Method
+	var resOut proto.Message
+	if service == "actordb" && method == "LookupActor" {
+		req := &basepb.StringProto{}
+		res := &basepb.StringProto{}
+		if err := proto.Unmarshal(apiReq.Request, req); err != nil {
+			http.Error(w, fmt.Sprintf("Bad encoded request: %v", err), 500)
+			return
+		}
+		if *req.Value == "Doctor Who" {
+			res.Value = proto.String("David Tennant")
+		}
+		resOut = res
+	}
+	if service == "errors" {
+		switch method {
+		case "Non200":
+			http.Error(w, "I'm a little teapot.", 418)
+			return
+		case "ShortResponse":
+			w.Header().Set("Content-Length", "100")
+			w.Write([]byte("way too short"))
+			return
+		case "OverQuota":
+			writeResponse(&remotepb.Response{
+				RpcError: &remotepb.RpcError{
+					Code:   proto.Int32(int32(remotepb.RpcError_OVER_QUOTA)),
+					Detail: proto.String("you are hogging the resources!"),
+				},
+			})
+			return
+		case "RunSlowly":
+			// TestAPICallRPCFailure creates f.hang, but does not strobe it
+			// until Call returns with remotepb.RpcError_CANCELLED.
+			// This is here to force a happens-before relationship between
+			// the httptest server handler and shutdown.
+			<-f.hang
+			resOut = &basepb.VoidProto{}
+		}
+	}
+	if service == "logservice" && method == "Flush" {
+		// Pretend log flushing is slow.
+		time.Sleep(50 * time.Millisecond)
+		atomic.AddInt32(&f.LogFlushes, 1)
+		resOut = &basepb.VoidProto{}
+	}
+
+	encOut, err := proto.Marshal(resOut)
+	if err != nil {
+		http.Error(w, fmt.Sprintf("Failed encoding response: %v", err), 500)
+		return
+	}
+	writeResponse(&remotepb.Response{
+		Response: encOut,
+	})
+}
+
+func setup() (f *fakeAPIHandler, c *context, cleanup func()) {
+	f = &fakeAPIHandler{}
+	srv := httptest.NewServer(f)
+	u, err := url.Parse(srv.URL + apiPath)
+	if err != nil {
+		panic(fmt.Sprintf("url.Parse(%q): %v", srv.URL+apiPath, err))
+	}
+	restoreAPIHost := restoreEnvVar("API_HOST")
+	restoreAPIPort := restoreEnvVar("API_PORT")
+	os.Setenv("API_HOST", u.Hostname())
+	os.Setenv("API_PORT", u.Port())
+	return f, &context{
+			req: &http.Request{
+				Header: http.Header{
+					ticketHeader: []string{"s3cr3t"},
+					dapperHeader: []string{"trace-001"},
+				},
+			},
+		}, func() {
+			restoreAPIHost()
+			restoreAPIPort()
+			srv.Close()
+		}
+}
+
+func restoreEnvVar(key string) (cleanup func()) {
+	oldval, ok := os.LookupEnv(key)
+	return func() {
+		if ok {
+			os.Setenv(key, oldval)
+		} else {
+			os.Unsetenv(key)
+		}
+	}
+}
+
+func TestAPICall(t *testing.T) {
+	_, c, cleanup := setup()
+	defer cleanup()
+
+	req := &basepb.StringProto{
+		Value: proto.String("Doctor Who"),
+	}
+	res := &basepb.StringProto{}
+	err := Call(toContext(c), "actordb", "LookupActor", req, res)
+	if err != nil {
+		t.Fatalf("API call failed: %v", err)
+	}
+	if got, want := *res.Value, "David Tennant"; got != want {
+		t.Errorf("Response is %q, want %q", got, want)
+	}
+}
+
+func TestAPICallTicketUnavailable(t *testing.T) {
+	resetEnv := SetTestEnv()
+	defer resetEnv()
+	f, c, cleanup := setup()
+	defer cleanup()
+	f.allowMissingTicket = true
+
+	c.req.Header.Set(ticketHeader, "")
+	req := &basepb.StringProto{
+		Value: proto.String("Doctor Who"),
+	}
+	res := &basepb.StringProto{}
+	err := Call(toContext(c), "actordb", "LookupActor", req, res)
+	if err != nil {
+		t.Fatalf("API call failed: %v", err)
+	}
+	if got, want := *res.Value, "David Tennant"; got != want {
+		t.Errorf("Response is %q, want %q", got, want)
+	}
+}
+
+func TestAPICallRPCFailure(t *testing.T) {
+	f, c, cleanup := setup()
+	defer cleanup()
+
+	testCases := []struct {
+		method string
+		code   remotepb.RpcError_ErrorCode
+	}{
+		{"Non200", remotepb.RpcError_UNKNOWN},
+		{"ShortResponse", remotepb.RpcError_UNKNOWN},
+		{"OverQuota", remotepb.RpcError_OVER_QUOTA},
+		{"RunSlowly", remotepb.RpcError_CANCELLED},
+	}
+	f.hang = make(chan int) // only for RunSlowly
+	for _, tc := range testCases {
+		ctx, _ := netcontext.WithTimeout(toContext(c), 100*time.Millisecond)
+		err := Call(ctx, "errors", tc.method, &basepb.VoidProto{}, &basepb.VoidProto{})
+		ce, ok := err.(*CallError)
+		if !ok {
+			t.Errorf("%s: API call error is %T (%v), want *CallError", tc.method, err, err)
+			continue
+		}
+		if ce.Code != int32(tc.code) {
+			t.Errorf("%s: ce.Code = %d, want %d", tc.method, ce.Code, tc.code)
+		}
+		if tc.method == "RunSlowly" {
+			f.hang <- 1 // release the HTTP handler
+		}
+	}
+}
+
+func TestAPICallDialFailure(t *testing.T) {
+	// See what happens if the API host is unresponsive.
+	// This should time out quickly, not hang forever.
+	// We intentionally don't set up the fakeAPIHandler for this test, to cause the dial failure.
+	start := time.Now()
+	err := Call(netcontext.Background(), "foo", "bar", &basepb.VoidProto{}, &basepb.VoidProto{})
+	const max = 1 * time.Second
+	if taken := time.Since(start); taken > max {
+		t.Errorf("Dial hang took too long: %v > %v", taken, max)
+	}
+	if err == nil {
+		t.Error("Call did not fail")
+	}
+}
+
+func TestRemoteAddr(t *testing.T) {
+	var addr string
+	http.HandleFunc("/remote_addr", func(w http.ResponseWriter, r *http.Request) {
+		addr = r.RemoteAddr
+	})
+
+	testCases := []struct {
+		headers http.Header
+		addr    string
+	}{
+		{http.Header{"X-Appengine-User-Ip": []string{"10.5.2.1"}}, "10.5.2.1:80"},
+		{http.Header{"X-Appengine-Remote-Addr": []string{"1.2.3.4"}}, "1.2.3.4:80"},
+		{http.Header{"X-Appengine-Remote-Addr": []string{"1.2.3.4:8080"}}, "1.2.3.4:8080"},
+		{
+			http.Header{"X-Appengine-Remote-Addr": []string{"2401:fa00:9:1:7646:a0ff:fe90:ca66"}},
+			"[2401:fa00:9:1:7646:a0ff:fe90:ca66]:80",
+		},
+		{
+			http.Header{"X-Appengine-Remote-Addr": []string{"[::1]:http"}},
+			"[::1]:http",
+		},
+		{http.Header{}, "127.0.0.1:80"},
+	}
+
+	for _, tc := range testCases {
+		r := &http.Request{
+			Method: "GET",
+			URL:    &url.URL{Scheme: "http", Path: "/remote_addr"},
+			Header: tc.headers,
+			Body:   ioutil.NopCloser(bytes.NewReader(nil)),
+		}
+		Middleware(http.DefaultServeMux).ServeHTTP(httptest.NewRecorder(), r)
+		if addr != tc.addr {
+			t.Errorf("Header %v, got %q, want %q", tc.headers, addr, tc.addr)
+		}
+	}
+}
+
+func TestPanickingHandler(t *testing.T) {
+	http.HandleFunc("/panic", func(http.ResponseWriter, *http.Request) {
+		panic("whoops!")
+	})
+	r := &http.Request{
+		Method: "GET",
+		URL:    &url.URL{Scheme: "http", Path: "/panic"},
+		Body:   ioutil.NopCloser(bytes.NewReader(nil)),
+	}
+	rec := httptest.NewRecorder()
+	Middleware(http.DefaultServeMux).ServeHTTP(rec, r)
+	if rec.Code != 500 {
+		t.Errorf("Panicking handler returned HTTP %d, want HTTP %d", rec.Code, 500)
+	}
+}
+
+var raceDetector = false
+
+func TestAPICallAllocations(t *testing.T) {
+	if raceDetector {
+		t.Skip("not running under race detector")
+	}
+
+	// Run the test API server in a subprocess so we aren't counting its allocations.
+	cleanup := launchHelperProcess(t)
+	defer cleanup()
+	c := &context{
+		req: &http.Request{
+			Header: http.Header{
+				ticketHeader: []string{"s3cr3t"},
+				dapperHeader: []string{"trace-001"},
+			},
+		},
+	}
+
+	req := &basepb.StringProto{
+		Value: proto.String("Doctor Who"),
+	}
+	res := &basepb.StringProto{}
+	var apiErr error
+	avg := testing.AllocsPerRun(100, func() {
+		ctx, _ := netcontext.WithTimeout(toContext(c), 100*time.Millisecond)
+		if err := Call(ctx, "actordb", "LookupActor", req, res); err != nil && apiErr == nil {
+			apiErr = err // get the first error only
+		}
+	})
+	if apiErr != nil {
+		t.Errorf("API call failed: %v", apiErr)
+	}
+
+	// Lots of room for improvement...
+	const min, max float64 = 60, 86
+	if avg < min || max < avg {
+		t.Errorf("Allocations per API call = %g, want in [%g,%g]", avg, min, max)
+	}
+}
+
+func launchHelperProcess(t *testing.T) (cleanup func()) {
+	cmd := exec.Command(os.Args[0], "-test.run=TestHelperProcess")
+	cmd.Env = []string{"GO_WANT_HELPER_PROCESS=1"}
+	stdin, err := cmd.StdinPipe()
+	if err != nil {
+		t.Fatalf("StdinPipe: %v", err)
+	}
+	stdout, err := cmd.StdoutPipe()
+	if err != nil {
+		t.Fatalf("StdoutPipe: %v", err)
+	}
+	if err := cmd.Start(); err != nil {
+		t.Fatalf("Starting helper process: %v", err)
+	}
+
+	scan := bufio.NewScanner(stdout)
+	var u *url.URL
+	for scan.Scan() {
+		line := scan.Text()
+		if hp := strings.TrimPrefix(line, helperProcessMagic); hp != line {
+			var err error
+			u, err = url.Parse(hp)
+			if err != nil {
+				t.Fatalf("Failed to parse %q: %v", hp, err)
+			}
+			break
+		}
+	}
+	if err := scan.Err(); err != nil {
+		t.Fatalf("Scanning helper process stdout: %v", err)
+	}
+	if u == nil {
+		t.Fatal("Helper process never reported")
+	}
+
+	restoreAPIHost := restoreEnvVar("API_HOST")
+	restoreAPIPort := restoreEnvVar("API_HOST")
+	os.Setenv("API_HOST", u.Hostname())
+	os.Setenv("API_PORT", u.Port())
+	return func() {
+		restoreAPIHost()
+		restoreAPIPort()
+		stdin.Close()
+		if err := cmd.Wait(); err != nil {
+			t.Errorf("Helper process did not exit cleanly: %v", err)
+		}
+	}
+}
+
+const helperProcessMagic = "A lovely helper process is listening at "
+
+// This isn't a real test. It's used as a helper process.
+func TestHelperProcess(*testing.T) {
+	if os.Getenv("GO_WANT_HELPER_PROCESS") != "1" {
+		return
+	}
+	defer os.Exit(0)
+
+	f := &fakeAPIHandler{}
+	srv := httptest.NewServer(f)
+	defer srv.Close()
+	fmt.Println(helperProcessMagic + srv.URL + apiPath)
+
+	// Wait for stdin to be closed.
+	io.Copy(ioutil.Discard, os.Stdin)
+}
diff --git a/v2/internal/app_id.go b/v2/internal/app_id.go
new file mode 100644
index 0000000..11df8c0
--- /dev/null
+++ b/v2/internal/app_id.go
@@ -0,0 +1,28 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	"strings"
+)
+
+func parseFullAppID(appid string) (partition, domain, displayID string) {
+	if i := strings.Index(appid, "~"); i != -1 {
+		partition, appid = appid[:i], appid[i+1:]
+	}
+	if i := strings.Index(appid, ":"); i != -1 {
+		domain, appid = appid[:i], appid[i+1:]
+	}
+	return partition, domain, appid
+}
+
+// appID returns "appid" or "domain.com:appid".
+func appID(fullAppID string) string {
+	_, dom, dis := parseFullAppID(fullAppID)
+	if dom != "" {
+		return dom + ":" + dis
+	}
+	return dis
+}
diff --git a/v2/internal/app_id_test.go b/v2/internal/app_id_test.go
new file mode 100644
index 0000000..e69195c
--- /dev/null
+++ b/v2/internal/app_id_test.go
@@ -0,0 +1,34 @@
+// Copyright 2011 Google Inc. All Rights Reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	"testing"
+)
+
+func TestAppIDParsing(t *testing.T) {
+	testCases := []struct {
+		in                           string
+		partition, domain, displayID string
+	}{
+		{"simple-app-id", "", "", "simple-app-id"},
+		{"domain.com:domain-app-id", "", "domain.com", "domain-app-id"},
+		{"part~partition-app-id", "part", "", "partition-app-id"},
+		{"part~domain.com:display", "part", "domain.com", "display"},
+	}
+
+	for _, tc := range testCases {
+		part, dom, dis := parseFullAppID(tc.in)
+		if part != tc.partition {
+			t.Errorf("partition of %q: got %q, want %q", tc.in, part, tc.partition)
+		}
+		if dom != tc.domain {
+			t.Errorf("domain of %q: got %q, want %q", tc.in, dom, tc.domain)
+		}
+		if dis != tc.displayID {
+			t.Errorf("displayID of %q: got %q, want %q", tc.in, dis, tc.displayID)
+		}
+	}
+}
diff --git a/v2/internal/app_identity/app_identity_service.pb.go b/v2/internal/app_identity/app_identity_service.pb.go
new file mode 100644
index 0000000..8dc87ce
--- /dev/null
+++ b/v2/internal/app_identity/app_identity_service.pb.go
@@ -0,0 +1,611 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/app_identity/app_identity_service.proto
+
+package app_identity
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type AppIdentityServiceError_ErrorCode int32
+
+const (
+	AppIdentityServiceError_SUCCESS           AppIdentityServiceError_ErrorCode = 0
+	AppIdentityServiceError_UNKNOWN_SCOPE     AppIdentityServiceError_ErrorCode = 9
+	AppIdentityServiceError_BLOB_TOO_LARGE    AppIdentityServiceError_ErrorCode = 1000
+	AppIdentityServiceError_DEADLINE_EXCEEDED AppIdentityServiceError_ErrorCode = 1001
+	AppIdentityServiceError_NOT_A_VALID_APP   AppIdentityServiceError_ErrorCode = 1002
+	AppIdentityServiceError_UNKNOWN_ERROR     AppIdentityServiceError_ErrorCode = 1003
+	AppIdentityServiceError_NOT_ALLOWED       AppIdentityServiceError_ErrorCode = 1005
+	AppIdentityServiceError_NOT_IMPLEMENTED   AppIdentityServiceError_ErrorCode = 1006
+)
+
+var AppIdentityServiceError_ErrorCode_name = map[int32]string{
+	0:    "SUCCESS",
+	9:    "UNKNOWN_SCOPE",
+	1000: "BLOB_TOO_LARGE",
+	1001: "DEADLINE_EXCEEDED",
+	1002: "NOT_A_VALID_APP",
+	1003: "UNKNOWN_ERROR",
+	1005: "NOT_ALLOWED",
+	1006: "NOT_IMPLEMENTED",
+}
+var AppIdentityServiceError_ErrorCode_value = map[string]int32{
+	"SUCCESS":           0,
+	"UNKNOWN_SCOPE":     9,
+	"BLOB_TOO_LARGE":    1000,
+	"DEADLINE_EXCEEDED": 1001,
+	"NOT_A_VALID_APP":   1002,
+	"UNKNOWN_ERROR":     1003,
+	"NOT_ALLOWED":       1005,
+	"NOT_IMPLEMENTED":   1006,
+}
+
+func (x AppIdentityServiceError_ErrorCode) Enum() *AppIdentityServiceError_ErrorCode {
+	p := new(AppIdentityServiceError_ErrorCode)
+	*p = x
+	return p
+}
+func (x AppIdentityServiceError_ErrorCode) String() string {
+	return proto.EnumName(AppIdentityServiceError_ErrorCode_name, int32(x))
+}
+func (x *AppIdentityServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(AppIdentityServiceError_ErrorCode_value, data, "AppIdentityServiceError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = AppIdentityServiceError_ErrorCode(value)
+	return nil
+}
+func (AppIdentityServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{0, 0}
+}
+
+type AppIdentityServiceError struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *AppIdentityServiceError) Reset()         { *m = AppIdentityServiceError{} }
+func (m *AppIdentityServiceError) String() string { return proto.CompactTextString(m) }
+func (*AppIdentityServiceError) ProtoMessage()    {}
+func (*AppIdentityServiceError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{0}
+}
+func (m *AppIdentityServiceError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_AppIdentityServiceError.Unmarshal(m, b)
+}
+func (m *AppIdentityServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_AppIdentityServiceError.Marshal(b, m, deterministic)
+}
+func (dst *AppIdentityServiceError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_AppIdentityServiceError.Merge(dst, src)
+}
+func (m *AppIdentityServiceError) XXX_Size() int {
+	return xxx_messageInfo_AppIdentityServiceError.Size(m)
+}
+func (m *AppIdentityServiceError) XXX_DiscardUnknown() {
+	xxx_messageInfo_AppIdentityServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AppIdentityServiceError proto.InternalMessageInfo
+
+type SignForAppRequest struct {
+	BytesToSign          []byte   `protobuf:"bytes,1,opt,name=bytes_to_sign,json=bytesToSign" json:"bytes_to_sign,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *SignForAppRequest) Reset()         { *m = SignForAppRequest{} }
+func (m *SignForAppRequest) String() string { return proto.CompactTextString(m) }
+func (*SignForAppRequest) ProtoMessage()    {}
+func (*SignForAppRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{1}
+}
+func (m *SignForAppRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_SignForAppRequest.Unmarshal(m, b)
+}
+func (m *SignForAppRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_SignForAppRequest.Marshal(b, m, deterministic)
+}
+func (dst *SignForAppRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_SignForAppRequest.Merge(dst, src)
+}
+func (m *SignForAppRequest) XXX_Size() int {
+	return xxx_messageInfo_SignForAppRequest.Size(m)
+}
+func (m *SignForAppRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_SignForAppRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SignForAppRequest proto.InternalMessageInfo
+
+func (m *SignForAppRequest) GetBytesToSign() []byte {
+	if m != nil {
+		return m.BytesToSign
+	}
+	return nil
+}
+
+type SignForAppResponse struct {
+	KeyName              *string  `protobuf:"bytes,1,opt,name=key_name,json=keyName" json:"key_name,omitempty"`
+	SignatureBytes       []byte   `protobuf:"bytes,2,opt,name=signature_bytes,json=signatureBytes" json:"signature_bytes,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *SignForAppResponse) Reset()         { *m = SignForAppResponse{} }
+func (m *SignForAppResponse) String() string { return proto.CompactTextString(m) }
+func (*SignForAppResponse) ProtoMessage()    {}
+func (*SignForAppResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{2}
+}
+func (m *SignForAppResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_SignForAppResponse.Unmarshal(m, b)
+}
+func (m *SignForAppResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_SignForAppResponse.Marshal(b, m, deterministic)
+}
+func (dst *SignForAppResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_SignForAppResponse.Merge(dst, src)
+}
+func (m *SignForAppResponse) XXX_Size() int {
+	return xxx_messageInfo_SignForAppResponse.Size(m)
+}
+func (m *SignForAppResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_SignForAppResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SignForAppResponse proto.InternalMessageInfo
+
+func (m *SignForAppResponse) GetKeyName() string {
+	if m != nil && m.KeyName != nil {
+		return *m.KeyName
+	}
+	return ""
+}
+
+func (m *SignForAppResponse) GetSignatureBytes() []byte {
+	if m != nil {
+		return m.SignatureBytes
+	}
+	return nil
+}
+
+type GetPublicCertificateForAppRequest struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetPublicCertificateForAppRequest) Reset()         { *m = GetPublicCertificateForAppRequest{} }
+func (m *GetPublicCertificateForAppRequest) String() string { return proto.CompactTextString(m) }
+func (*GetPublicCertificateForAppRequest) ProtoMessage()    {}
+func (*GetPublicCertificateForAppRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{3}
+}
+func (m *GetPublicCertificateForAppRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetPublicCertificateForAppRequest.Unmarshal(m, b)
+}
+func (m *GetPublicCertificateForAppRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetPublicCertificateForAppRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetPublicCertificateForAppRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetPublicCertificateForAppRequest.Merge(dst, src)
+}
+func (m *GetPublicCertificateForAppRequest) XXX_Size() int {
+	return xxx_messageInfo_GetPublicCertificateForAppRequest.Size(m)
+}
+func (m *GetPublicCertificateForAppRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetPublicCertificateForAppRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetPublicCertificateForAppRequest proto.InternalMessageInfo
+
+type PublicCertificate struct {
+	KeyName              *string  `protobuf:"bytes,1,opt,name=key_name,json=keyName" json:"key_name,omitempty"`
+	X509CertificatePem   *string  `protobuf:"bytes,2,opt,name=x509_certificate_pem,json=x509CertificatePem" json:"x509_certificate_pem,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *PublicCertificate) Reset()         { *m = PublicCertificate{} }
+func (m *PublicCertificate) String() string { return proto.CompactTextString(m) }
+func (*PublicCertificate) ProtoMessage()    {}
+func (*PublicCertificate) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{4}
+}
+func (m *PublicCertificate) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_PublicCertificate.Unmarshal(m, b)
+}
+func (m *PublicCertificate) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_PublicCertificate.Marshal(b, m, deterministic)
+}
+func (dst *PublicCertificate) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_PublicCertificate.Merge(dst, src)
+}
+func (m *PublicCertificate) XXX_Size() int {
+	return xxx_messageInfo_PublicCertificate.Size(m)
+}
+func (m *PublicCertificate) XXX_DiscardUnknown() {
+	xxx_messageInfo_PublicCertificate.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PublicCertificate proto.InternalMessageInfo
+
+func (m *PublicCertificate) GetKeyName() string {
+	if m != nil && m.KeyName != nil {
+		return *m.KeyName
+	}
+	return ""
+}
+
+func (m *PublicCertificate) GetX509CertificatePem() string {
+	if m != nil && m.X509CertificatePem != nil {
+		return *m.X509CertificatePem
+	}
+	return ""
+}
+
+type GetPublicCertificateForAppResponse struct {
+	PublicCertificateList      []*PublicCertificate `protobuf:"bytes,1,rep,name=public_certificate_list,json=publicCertificateList" json:"public_certificate_list,omitempty"`
+	MaxClientCacheTimeInSecond *int64               `protobuf:"varint,2,opt,name=max_client_cache_time_in_second,json=maxClientCacheTimeInSecond" json:"max_client_cache_time_in_second,omitempty"`
+	XXX_NoUnkeyedLiteral       struct{}             `json:"-"`
+	XXX_unrecognized           []byte               `json:"-"`
+	XXX_sizecache              int32                `json:"-"`
+}
+
+func (m *GetPublicCertificateForAppResponse) Reset()         { *m = GetPublicCertificateForAppResponse{} }
+func (m *GetPublicCertificateForAppResponse) String() string { return proto.CompactTextString(m) }
+func (*GetPublicCertificateForAppResponse) ProtoMessage()    {}
+func (*GetPublicCertificateForAppResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{5}
+}
+func (m *GetPublicCertificateForAppResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetPublicCertificateForAppResponse.Unmarshal(m, b)
+}
+func (m *GetPublicCertificateForAppResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetPublicCertificateForAppResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetPublicCertificateForAppResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetPublicCertificateForAppResponse.Merge(dst, src)
+}
+func (m *GetPublicCertificateForAppResponse) XXX_Size() int {
+	return xxx_messageInfo_GetPublicCertificateForAppResponse.Size(m)
+}
+func (m *GetPublicCertificateForAppResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetPublicCertificateForAppResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetPublicCertificateForAppResponse proto.InternalMessageInfo
+
+func (m *GetPublicCertificateForAppResponse) GetPublicCertificateList() []*PublicCertificate {
+	if m != nil {
+		return m.PublicCertificateList
+	}
+	return nil
+}
+
+func (m *GetPublicCertificateForAppResponse) GetMaxClientCacheTimeInSecond() int64 {
+	if m != nil && m.MaxClientCacheTimeInSecond != nil {
+		return *m.MaxClientCacheTimeInSecond
+	}
+	return 0
+}
+
+type GetServiceAccountNameRequest struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetServiceAccountNameRequest) Reset()         { *m = GetServiceAccountNameRequest{} }
+func (m *GetServiceAccountNameRequest) String() string { return proto.CompactTextString(m) }
+func (*GetServiceAccountNameRequest) ProtoMessage()    {}
+func (*GetServiceAccountNameRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{6}
+}
+func (m *GetServiceAccountNameRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetServiceAccountNameRequest.Unmarshal(m, b)
+}
+func (m *GetServiceAccountNameRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetServiceAccountNameRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetServiceAccountNameRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetServiceAccountNameRequest.Merge(dst, src)
+}
+func (m *GetServiceAccountNameRequest) XXX_Size() int {
+	return xxx_messageInfo_GetServiceAccountNameRequest.Size(m)
+}
+func (m *GetServiceAccountNameRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetServiceAccountNameRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetServiceAccountNameRequest proto.InternalMessageInfo
+
+type GetServiceAccountNameResponse struct {
+	ServiceAccountName   *string  `protobuf:"bytes,1,opt,name=service_account_name,json=serviceAccountName" json:"service_account_name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetServiceAccountNameResponse) Reset()         { *m = GetServiceAccountNameResponse{} }
+func (m *GetServiceAccountNameResponse) String() string { return proto.CompactTextString(m) }
+func (*GetServiceAccountNameResponse) ProtoMessage()    {}
+func (*GetServiceAccountNameResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{7}
+}
+func (m *GetServiceAccountNameResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetServiceAccountNameResponse.Unmarshal(m, b)
+}
+func (m *GetServiceAccountNameResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetServiceAccountNameResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetServiceAccountNameResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetServiceAccountNameResponse.Merge(dst, src)
+}
+func (m *GetServiceAccountNameResponse) XXX_Size() int {
+	return xxx_messageInfo_GetServiceAccountNameResponse.Size(m)
+}
+func (m *GetServiceAccountNameResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetServiceAccountNameResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetServiceAccountNameResponse proto.InternalMessageInfo
+
+func (m *GetServiceAccountNameResponse) GetServiceAccountName() string {
+	if m != nil && m.ServiceAccountName != nil {
+		return *m.ServiceAccountName
+	}
+	return ""
+}
+
+type GetAccessTokenRequest struct {
+	Scope                []string `protobuf:"bytes,1,rep,name=scope" json:"scope,omitempty"`
+	ServiceAccountId     *int64   `protobuf:"varint,2,opt,name=service_account_id,json=serviceAccountId" json:"service_account_id,omitempty"`
+	ServiceAccountName   *string  `protobuf:"bytes,3,opt,name=service_account_name,json=serviceAccountName" json:"service_account_name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetAccessTokenRequest) Reset()         { *m = GetAccessTokenRequest{} }
+func (m *GetAccessTokenRequest) String() string { return proto.CompactTextString(m) }
+func (*GetAccessTokenRequest) ProtoMessage()    {}
+func (*GetAccessTokenRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{8}
+}
+func (m *GetAccessTokenRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetAccessTokenRequest.Unmarshal(m, b)
+}
+func (m *GetAccessTokenRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetAccessTokenRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetAccessTokenRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetAccessTokenRequest.Merge(dst, src)
+}
+func (m *GetAccessTokenRequest) XXX_Size() int {
+	return xxx_messageInfo_GetAccessTokenRequest.Size(m)
+}
+func (m *GetAccessTokenRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetAccessTokenRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetAccessTokenRequest proto.InternalMessageInfo
+
+func (m *GetAccessTokenRequest) GetScope() []string {
+	if m != nil {
+		return m.Scope
+	}
+	return nil
+}
+
+func (m *GetAccessTokenRequest) GetServiceAccountId() int64 {
+	if m != nil && m.ServiceAccountId != nil {
+		return *m.ServiceAccountId
+	}
+	return 0
+}
+
+func (m *GetAccessTokenRequest) GetServiceAccountName() string {
+	if m != nil && m.ServiceAccountName != nil {
+		return *m.ServiceAccountName
+	}
+	return ""
+}
+
+type GetAccessTokenResponse struct {
+	AccessToken          *string  `protobuf:"bytes,1,opt,name=access_token,json=accessToken" json:"access_token,omitempty"`
+	ExpirationTime       *int64   `protobuf:"varint,2,opt,name=expiration_time,json=expirationTime" json:"expiration_time,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetAccessTokenResponse) Reset()         { *m = GetAccessTokenResponse{} }
+func (m *GetAccessTokenResponse) String() string { return proto.CompactTextString(m) }
+func (*GetAccessTokenResponse) ProtoMessage()    {}
+func (*GetAccessTokenResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{9}
+}
+func (m *GetAccessTokenResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetAccessTokenResponse.Unmarshal(m, b)
+}
+func (m *GetAccessTokenResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetAccessTokenResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetAccessTokenResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetAccessTokenResponse.Merge(dst, src)
+}
+func (m *GetAccessTokenResponse) XXX_Size() int {
+	return xxx_messageInfo_GetAccessTokenResponse.Size(m)
+}
+func (m *GetAccessTokenResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetAccessTokenResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetAccessTokenResponse proto.InternalMessageInfo
+
+func (m *GetAccessTokenResponse) GetAccessToken() string {
+	if m != nil && m.AccessToken != nil {
+		return *m.AccessToken
+	}
+	return ""
+}
+
+func (m *GetAccessTokenResponse) GetExpirationTime() int64 {
+	if m != nil && m.ExpirationTime != nil {
+		return *m.ExpirationTime
+	}
+	return 0
+}
+
+type GetDefaultGcsBucketNameRequest struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetDefaultGcsBucketNameRequest) Reset()         { *m = GetDefaultGcsBucketNameRequest{} }
+func (m *GetDefaultGcsBucketNameRequest) String() string { return proto.CompactTextString(m) }
+func (*GetDefaultGcsBucketNameRequest) ProtoMessage()    {}
+func (*GetDefaultGcsBucketNameRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{10}
+}
+func (m *GetDefaultGcsBucketNameRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetDefaultGcsBucketNameRequest.Unmarshal(m, b)
+}
+func (m *GetDefaultGcsBucketNameRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetDefaultGcsBucketNameRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetDefaultGcsBucketNameRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetDefaultGcsBucketNameRequest.Merge(dst, src)
+}
+func (m *GetDefaultGcsBucketNameRequest) XXX_Size() int {
+	return xxx_messageInfo_GetDefaultGcsBucketNameRequest.Size(m)
+}
+func (m *GetDefaultGcsBucketNameRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetDefaultGcsBucketNameRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetDefaultGcsBucketNameRequest proto.InternalMessageInfo
+
+type GetDefaultGcsBucketNameResponse struct {
+	DefaultGcsBucketName *string  `protobuf:"bytes,1,opt,name=default_gcs_bucket_name,json=defaultGcsBucketName" json:"default_gcs_bucket_name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetDefaultGcsBucketNameResponse) Reset()         { *m = GetDefaultGcsBucketNameResponse{} }
+func (m *GetDefaultGcsBucketNameResponse) String() string { return proto.CompactTextString(m) }
+func (*GetDefaultGcsBucketNameResponse) ProtoMessage()    {}
+func (*GetDefaultGcsBucketNameResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_app_identity_service_08a6e3f74b04cfa4, []int{11}
+}
+func (m *GetDefaultGcsBucketNameResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetDefaultGcsBucketNameResponse.Unmarshal(m, b)
+}
+func (m *GetDefaultGcsBucketNameResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetDefaultGcsBucketNameResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetDefaultGcsBucketNameResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetDefaultGcsBucketNameResponse.Merge(dst, src)
+}
+func (m *GetDefaultGcsBucketNameResponse) XXX_Size() int {
+	return xxx_messageInfo_GetDefaultGcsBucketNameResponse.Size(m)
+}
+func (m *GetDefaultGcsBucketNameResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetDefaultGcsBucketNameResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetDefaultGcsBucketNameResponse proto.InternalMessageInfo
+
+func (m *GetDefaultGcsBucketNameResponse) GetDefaultGcsBucketName() string {
+	if m != nil && m.DefaultGcsBucketName != nil {
+		return *m.DefaultGcsBucketName
+	}
+	return ""
+}
+
+func init() {
+	proto.RegisterType((*AppIdentityServiceError)(nil), "appengine.AppIdentityServiceError")
+	proto.RegisterType((*SignForAppRequest)(nil), "appengine.SignForAppRequest")
+	proto.RegisterType((*SignForAppResponse)(nil), "appengine.SignForAppResponse")
+	proto.RegisterType((*GetPublicCertificateForAppRequest)(nil), "appengine.GetPublicCertificateForAppRequest")
+	proto.RegisterType((*PublicCertificate)(nil), "appengine.PublicCertificate")
+	proto.RegisterType((*GetPublicCertificateForAppResponse)(nil), "appengine.GetPublicCertificateForAppResponse")
+	proto.RegisterType((*GetServiceAccountNameRequest)(nil), "appengine.GetServiceAccountNameRequest")
+	proto.RegisterType((*GetServiceAccountNameResponse)(nil), "appengine.GetServiceAccountNameResponse")
+	proto.RegisterType((*GetAccessTokenRequest)(nil), "appengine.GetAccessTokenRequest")
+	proto.RegisterType((*GetAccessTokenResponse)(nil), "appengine.GetAccessTokenResponse")
+	proto.RegisterType((*GetDefaultGcsBucketNameRequest)(nil), "appengine.GetDefaultGcsBucketNameRequest")
+	proto.RegisterType((*GetDefaultGcsBucketNameResponse)(nil), "appengine.GetDefaultGcsBucketNameResponse")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/app_identity/app_identity_service.proto", fileDescriptor_app_identity_service_08a6e3f74b04cfa4)
+}
+
+var fileDescriptor_app_identity_service_08a6e3f74b04cfa4 = []byte{
+	// 676 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x54, 0xdb, 0x6e, 0xda, 0x58,
+	0x14, 0x1d, 0x26, 0x1a, 0x31, 0x6c, 0x12, 0x62, 0xce, 0x90, 0xcb, 0x8c, 0x32, 0xb9, 0x78, 0x1e,
+	0x26, 0x0f, 0x15, 0x89, 0x2a, 0x45, 0x55, 0x1f, 0x8d, 0xed, 0x22, 0x54, 0x07, 0x53, 0x43, 0x9a,
+	0xa8, 0x2f, 0xa7, 0xce, 0x61, 0xc7, 0x3d, 0x02, 0x9f, 0xe3, 0xda, 0x87, 0x0a, 0x3e, 0xa2, 0x3f,
+	0xd2, 0x9f, 0xe8, 0x5b, 0xbf, 0xa5, 0x17, 0xb5, 0xdf, 0x50, 0xd9, 0x38, 0x5c, 0x92, 0x92, 0x37,
+	0xbc, 0xf6, 0x5a, 0xcb, 0x6b, 0x2f, 0x6d, 0x0c, 0x4e, 0x20, 0x65, 0x30, 0xc4, 0x7a, 0x20, 0x87,
+	0xbe, 0x08, 0xea, 0x32, 0x0e, 0x4e, 0xfc, 0x28, 0x42, 0x11, 0x70, 0x81, 0x27, 0x5c, 0x28, 0x8c,
+	0x85, 0x3f, 0x4c, 0x21, 0xca, 0xfb, 0x28, 0x14, 0x57, 0x93, 0xa5, 0x07, 0x9a, 0x60, 0xfc, 0x8e,
+	0x33, 0xac, 0x47, 0xb1, 0x54, 0x92, 0x94, 0x66, 0x5a, 0xfd, 0x53, 0x01, 0x76, 0x8c, 0x28, 0x6a,
+	0xe5, 0xc4, 0xee, 0x94, 0x67, 0xc7, 0xb1, 0x8c, 0xf5, 0x0f, 0x05, 0x28, 0x65, 0xbf, 0x4c, 0xd9,
+	0x47, 0x52, 0x86, 0x62, 0xf7, 0xc2, 0x34, 0xed, 0x6e, 0x57, 0xfb, 0x8d, 0x54, 0x61, 0xe3, 0xa2,
+	0xfd, 0xbc, 0xed, 0x5e, 0xb6, 0x69, 0xd7, 0x74, 0x3b, 0xb6, 0x56, 0x22, 0x7f, 0x41, 0xa5, 0xe1,
+	0xb8, 0x0d, 0xda, 0x73, 0x5d, 0xea, 0x18, 0x5e, 0xd3, 0xd6, 0x3e, 0x17, 0xc9, 0x36, 0x54, 0x2d,
+	0xdb, 0xb0, 0x9c, 0x56, 0xdb, 0xa6, 0xf6, 0x95, 0x69, 0xdb, 0x96, 0x6d, 0x69, 0x5f, 0x8a, 0xa4,
+	0x06, 0x9b, 0x6d, 0xb7, 0x47, 0x0d, 0xfa, 0xd2, 0x70, 0x5a, 0x16, 0x35, 0x3a, 0x1d, 0xed, 0x6b,
+	0x91, 0x90, 0xb9, 0xab, 0xed, 0x79, 0xae, 0xa7, 0x7d, 0x2b, 0x12, 0x0d, 0xca, 0x19, 0xd3, 0x71,
+	0xdc, 0x4b, 0xdb, 0xd2, 0xbe, 0xcf, 0xb4, 0xad, 0xf3, 0x8e, 0x63, 0x9f, 0xdb, 0xed, 0x9e, 0x6d,
+	0x69, 0x3f, 0x8a, 0xfa, 0x13, 0xa8, 0x76, 0x79, 0x20, 0x9e, 0xc9, 0xd8, 0x88, 0x22, 0x0f, 0xdf,
+	0x8e, 0x30, 0x51, 0x44, 0x87, 0x8d, 0xeb, 0x89, 0xc2, 0x84, 0x2a, 0x49, 0x13, 0x1e, 0x88, 0xdd,
+	0xc2, 0x61, 0xe1, 0x78, 0xdd, 0x2b, 0x67, 0x60, 0x4f, 0xa6, 0x02, 0xfd, 0x0a, 0xc8, 0xa2, 0x30,
+	0x89, 0xa4, 0x48, 0x90, 0xfc, 0x0d, 0x7f, 0x0e, 0x70, 0x42, 0x85, 0x1f, 0x62, 0x26, 0x2a, 0x79,
+	0xc5, 0x01, 0x4e, 0xda, 0x7e, 0x88, 0xe4, 0x7f, 0xd8, 0x4c, 0xbd, 0x7c, 0x35, 0x8a, 0x91, 0x66,
+	0x4e, 0xbb, 0xbf, 0x67, 0xb6, 0x95, 0x19, 0xdc, 0x48, 0x51, 0xfd, 0x3f, 0x38, 0x6a, 0xa2, 0xea,
+	0x8c, 0xae, 0x87, 0x9c, 0x99, 0x18, 0x2b, 0x7e, 0xc3, 0x99, 0xaf, 0x70, 0x29, 0xa2, 0xfe, 0x1a,
+	0xaa, 0xf7, 0x18, 0x0f, 0xbd, 0xfd, 0x14, 0x6a, 0xe3, 0xb3, 0xd3, 0xa7, 0x94, 0xcd, 0xe9, 0x34,
+	0xc2, 0x30, 0x8b, 0x50, 0xf2, 0x48, 0x3a, 0x5b, 0x70, 0xea, 0x60, 0xa8, 0x7f, 0x2c, 0x80, 0xfe,
+	0x50, 0x8e, 0x7c, 0xe3, 0x1e, 0xec, 0x44, 0x19, 0x65, 0xc9, 0x7a, 0xc8, 0x13, 0xb5, 0x5b, 0x38,
+	0x5c, 0x3b, 0x2e, 0x3f, 0xde, 0xab, 0xcf, 0xce, 0xa6, 0x7e, 0xcf, 0xcc, 0xdb, 0x8a, 0xee, 0x42,
+	0x0e, 0x4f, 0x14, 0x31, 0xe1, 0x20, 0xf4, 0xc7, 0x94, 0x0d, 0x39, 0x0a, 0x45, 0x99, 0xcf, 0xde,
+	0x20, 0x55, 0x3c, 0x44, 0xca, 0x05, 0x4d, 0x90, 0x49, 0xd1, 0xcf, 0x92, 0xaf, 0x79, 0xff, 0x84,
+	0xfe, 0xd8, 0xcc, 0x58, 0x66, 0x4a, 0xea, 0xf1, 0x10, 0x5b, 0xa2, 0x9b, 0x31, 0xf4, 0x7d, 0xd8,
+	0x6b, 0xa2, 0xca, 0x6f, 0xd3, 0x60, 0x4c, 0x8e, 0x84, 0x4a, 0xcb, 0xb8, 0xed, 0xf0, 0x05, 0xfc,
+	0xbb, 0x62, 0x9e, 0xef, 0x76, 0x0a, 0xb5, 0xfc, 0x1f, 0x40, 0xfd, 0xe9, 0x78, 0xb1, 0x5b, 0x92,
+	0xdc, 0x53, 0xea, 0xef, 0x0b, 0xb0, 0xd5, 0x44, 0x65, 0x30, 0x86, 0x49, 0xd2, 0x93, 0x03, 0x14,
+	0xb7, 0x37, 0x55, 0x83, 0x3f, 0x12, 0x26, 0x23, 0xcc, 0x5a, 0x29, 0x79, 0xd3, 0x07, 0xf2, 0x08,
+	0xc8, 0xdd, 0x37, 0xf0, 0xdb, 0xd5, 0xb4, 0x65, 0xff, 0x56, 0x7f, 0x65, 0x9e, 0xb5, 0x95, 0x79,
+	0xfa, 0xb0, 0x7d, 0x37, 0x4e, 0xbe, 0xdb, 0x11, 0xac, 0xfb, 0x19, 0x4c, 0x55, 0x8a, 0xe7, 0x3b,
+	0x95, 0xfd, 0x39, 0x35, 0xbd, 0x58, 0x1c, 0x47, 0x3c, 0xf6, 0x15, 0x97, 0x22, 0xab, 0x3f, 0x4f,
+	0x56, 0x99, 0xc3, 0x69, 0xe1, 0xfa, 0x21, 0xec, 0x37, 0x51, 0x59, 0x78, 0xe3, 0x8f, 0x86, 0xaa,
+	0xc9, 0x92, 0xc6, 0x88, 0x0d, 0x70, 0xa9, 0xea, 0x2b, 0x38, 0x58, 0xc9, 0xc8, 0x03, 0x9d, 0xc1,
+	0x4e, 0x7f, 0x3a, 0xa7, 0x01, 0x4b, 0xe8, 0x75, 0xc6, 0x58, 0xec, 0xbb, 0xd6, 0xff, 0x85, 0xbc,
+	0x51, 0x79, 0xb5, 0xbe, 0xf8, 0xc9, 0xfa, 0x19, 0x00, 0x00, 0xff, 0xff, 0x37, 0x4c, 0x56, 0x38,
+	0xf3, 0x04, 0x00, 0x00,
+}
diff --git a/v2/internal/app_identity/app_identity_service.proto b/v2/internal/app_identity/app_identity_service.proto
new file mode 100644
index 0000000..19610ca
--- /dev/null
+++ b/v2/internal/app_identity/app_identity_service.proto
@@ -0,0 +1,64 @@
+syntax = "proto2";
+option go_package = "app_identity";
+
+package appengine;
+
+message AppIdentityServiceError {
+  enum ErrorCode {
+    SUCCESS = 0;
+    UNKNOWN_SCOPE = 9;
+    BLOB_TOO_LARGE = 1000;
+    DEADLINE_EXCEEDED = 1001;
+    NOT_A_VALID_APP = 1002;
+    UNKNOWN_ERROR = 1003;
+    NOT_ALLOWED = 1005;
+    NOT_IMPLEMENTED = 1006;
+  }
+}
+
+message SignForAppRequest {
+  optional bytes bytes_to_sign = 1;
+}
+
+message SignForAppResponse {
+  optional string key_name = 1;
+  optional bytes signature_bytes = 2;
+}
+
+message GetPublicCertificateForAppRequest {
+}
+
+message PublicCertificate {
+  optional string key_name = 1;
+  optional string x509_certificate_pem = 2;
+}
+
+message GetPublicCertificateForAppResponse {
+  repeated PublicCertificate public_certificate_list = 1;
+  optional int64 max_client_cache_time_in_second = 2;
+}
+
+message GetServiceAccountNameRequest {
+}
+
+message GetServiceAccountNameResponse {
+  optional string service_account_name = 1;
+}
+
+message GetAccessTokenRequest {
+  repeated string scope = 1;
+  optional int64 service_account_id = 2;
+  optional string service_account_name = 3;
+}
+
+message GetAccessTokenResponse {
+  optional string access_token = 1;
+  optional int64 expiration_time = 2;
+}
+
+message GetDefaultGcsBucketNameRequest {
+}
+
+message GetDefaultGcsBucketNameResponse {
+  optional string default_gcs_bucket_name = 1;
+}
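// Usage sketch (illustrative; the import alias and the exact service/method names
// are assumptions, not taken from this diff): the generated request and response
// types above are exchanged over the service bridge via internal.Call.
//
//	req := &pb.GetAccessTokenRequest{Scope: []string{"https://www.googleapis.com/auth/cloud-platform"}}
//	res := &pb.GetAccessTokenResponse{}
//	if err := internal.Call(ctx, "app_identity", "GetAccessToken", req, res); err != nil {
//		// handle the App Engine API error
//	}
//	token, expiry := res.GetAccessToken(), res.GetExpirationTime()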
diff --git a/v2/internal/base/api_base.pb.go b/v2/internal/base/api_base.pb.go
new file mode 100644
index 0000000..c678b5f
--- /dev/null
+++ b/v2/internal/base/api_base.pb.go
@@ -0,0 +1,308 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/base/api_base.proto
+
+package base
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type StringProto struct {
+	Value                *string  `protobuf:"bytes,1,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *StringProto) Reset()         { *m = StringProto{} }
+func (m *StringProto) String() string { return proto.CompactTextString(m) }
+func (*StringProto) ProtoMessage()    {}
+func (*StringProto) Descriptor() ([]byte, []int) {
+	return fileDescriptor_api_base_9d49f8792e0c1140, []int{0}
+}
+func (m *StringProto) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_StringProto.Unmarshal(m, b)
+}
+func (m *StringProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_StringProto.Marshal(b, m, deterministic)
+}
+func (dst *StringProto) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_StringProto.Merge(dst, src)
+}
+func (m *StringProto) XXX_Size() int {
+	return xxx_messageInfo_StringProto.Size(m)
+}
+func (m *StringProto) XXX_DiscardUnknown() {
+	xxx_messageInfo_StringProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_StringProto proto.InternalMessageInfo
+
+func (m *StringProto) GetValue() string {
+	if m != nil && m.Value != nil {
+		return *m.Value
+	}
+	return ""
+}
+
+type Integer32Proto struct {
+	Value                *int32   `protobuf:"varint,1,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Integer32Proto) Reset()         { *m = Integer32Proto{} }
+func (m *Integer32Proto) String() string { return proto.CompactTextString(m) }
+func (*Integer32Proto) ProtoMessage()    {}
+func (*Integer32Proto) Descriptor() ([]byte, []int) {
+	return fileDescriptor_api_base_9d49f8792e0c1140, []int{1}
+}
+func (m *Integer32Proto) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Integer32Proto.Unmarshal(m, b)
+}
+func (m *Integer32Proto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Integer32Proto.Marshal(b, m, deterministic)
+}
+func (dst *Integer32Proto) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Integer32Proto.Merge(dst, src)
+}
+func (m *Integer32Proto) XXX_Size() int {
+	return xxx_messageInfo_Integer32Proto.Size(m)
+}
+func (m *Integer32Proto) XXX_DiscardUnknown() {
+	xxx_messageInfo_Integer32Proto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Integer32Proto proto.InternalMessageInfo
+
+func (m *Integer32Proto) GetValue() int32 {
+	if m != nil && m.Value != nil {
+		return *m.Value
+	}
+	return 0
+}
+
+type Integer64Proto struct {
+	Value                *int64   `protobuf:"varint,1,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Integer64Proto) Reset()         { *m = Integer64Proto{} }
+func (m *Integer64Proto) String() string { return proto.CompactTextString(m) }
+func (*Integer64Proto) ProtoMessage()    {}
+func (*Integer64Proto) Descriptor() ([]byte, []int) {
+	return fileDescriptor_api_base_9d49f8792e0c1140, []int{2}
+}
+func (m *Integer64Proto) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Integer64Proto.Unmarshal(m, b)
+}
+func (m *Integer64Proto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Integer64Proto.Marshal(b, m, deterministic)
+}
+func (dst *Integer64Proto) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Integer64Proto.Merge(dst, src)
+}
+func (m *Integer64Proto) XXX_Size() int {
+	return xxx_messageInfo_Integer64Proto.Size(m)
+}
+func (m *Integer64Proto) XXX_DiscardUnknown() {
+	xxx_messageInfo_Integer64Proto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Integer64Proto proto.InternalMessageInfo
+
+func (m *Integer64Proto) GetValue() int64 {
+	if m != nil && m.Value != nil {
+		return *m.Value
+	}
+	return 0
+}
+
+type BoolProto struct {
+	Value                *bool    `protobuf:"varint,1,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *BoolProto) Reset()         { *m = BoolProto{} }
+func (m *BoolProto) String() string { return proto.CompactTextString(m) }
+func (*BoolProto) ProtoMessage()    {}
+func (*BoolProto) Descriptor() ([]byte, []int) {
+	return fileDescriptor_api_base_9d49f8792e0c1140, []int{3}
+}
+func (m *BoolProto) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_BoolProto.Unmarshal(m, b)
+}
+func (m *BoolProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_BoolProto.Marshal(b, m, deterministic)
+}
+func (dst *BoolProto) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_BoolProto.Merge(dst, src)
+}
+func (m *BoolProto) XXX_Size() int {
+	return xxx_messageInfo_BoolProto.Size(m)
+}
+func (m *BoolProto) XXX_DiscardUnknown() {
+	xxx_messageInfo_BoolProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_BoolProto proto.InternalMessageInfo
+
+func (m *BoolProto) GetValue() bool {
+	if m != nil && m.Value != nil {
+		return *m.Value
+	}
+	return false
+}
+
+type DoubleProto struct {
+	Value                *float64 `protobuf:"fixed64,1,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *DoubleProto) Reset()         { *m = DoubleProto{} }
+func (m *DoubleProto) String() string { return proto.CompactTextString(m) }
+func (*DoubleProto) ProtoMessage()    {}
+func (*DoubleProto) Descriptor() ([]byte, []int) {
+	return fileDescriptor_api_base_9d49f8792e0c1140, []int{4}
+}
+func (m *DoubleProto) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_DoubleProto.Unmarshal(m, b)
+}
+func (m *DoubleProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_DoubleProto.Marshal(b, m, deterministic)
+}
+func (dst *DoubleProto) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_DoubleProto.Merge(dst, src)
+}
+func (m *DoubleProto) XXX_Size() int {
+	return xxx_messageInfo_DoubleProto.Size(m)
+}
+func (m *DoubleProto) XXX_DiscardUnknown() {
+	xxx_messageInfo_DoubleProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_DoubleProto proto.InternalMessageInfo
+
+func (m *DoubleProto) GetValue() float64 {
+	if m != nil && m.Value != nil {
+		return *m.Value
+	}
+	return 0
+}
+
+type BytesProto struct {
+	Value                []byte   `protobuf:"bytes,1,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *BytesProto) Reset()         { *m = BytesProto{} }
+func (m *BytesProto) String() string { return proto.CompactTextString(m) }
+func (*BytesProto) ProtoMessage()    {}
+func (*BytesProto) Descriptor() ([]byte, []int) {
+	return fileDescriptor_api_base_9d49f8792e0c1140, []int{5}
+}
+func (m *BytesProto) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_BytesProto.Unmarshal(m, b)
+}
+func (m *BytesProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_BytesProto.Marshal(b, m, deterministic)
+}
+func (dst *BytesProto) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_BytesProto.Merge(dst, src)
+}
+func (m *BytesProto) XXX_Size() int {
+	return xxx_messageInfo_BytesProto.Size(m)
+}
+func (m *BytesProto) XXX_DiscardUnknown() {
+	xxx_messageInfo_BytesProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_BytesProto proto.InternalMessageInfo
+
+func (m *BytesProto) GetValue() []byte {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+type VoidProto struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *VoidProto) Reset()         { *m = VoidProto{} }
+func (m *VoidProto) String() string { return proto.CompactTextString(m) }
+func (*VoidProto) ProtoMessage()    {}
+func (*VoidProto) Descriptor() ([]byte, []int) {
+	return fileDescriptor_api_base_9d49f8792e0c1140, []int{6}
+}
+func (m *VoidProto) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_VoidProto.Unmarshal(m, b)
+}
+func (m *VoidProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_VoidProto.Marshal(b, m, deterministic)
+}
+func (dst *VoidProto) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_VoidProto.Merge(dst, src)
+}
+func (m *VoidProto) XXX_Size() int {
+	return xxx_messageInfo_VoidProto.Size(m)
+}
+func (m *VoidProto) XXX_DiscardUnknown() {
+	xxx_messageInfo_VoidProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_VoidProto proto.InternalMessageInfo
+
+func init() {
+	proto.RegisterType((*StringProto)(nil), "appengine.base.StringProto")
+	proto.RegisterType((*Integer32Proto)(nil), "appengine.base.Integer32Proto")
+	proto.RegisterType((*Integer64Proto)(nil), "appengine.base.Integer64Proto")
+	proto.RegisterType((*BoolProto)(nil), "appengine.base.BoolProto")
+	proto.RegisterType((*DoubleProto)(nil), "appengine.base.DoubleProto")
+	proto.RegisterType((*BytesProto)(nil), "appengine.base.BytesProto")
+	proto.RegisterType((*VoidProto)(nil), "appengine.base.VoidProto")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/base/api_base.proto", fileDescriptor_api_base_9d49f8792e0c1140)
+}
+
+var fileDescriptor_api_base_9d49f8792e0c1140 = []byte{
+	// 199 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0xcf, 0x3f, 0x4b, 0xc6, 0x30,
+	0x10, 0x06, 0x70, 0x5a, 0xad, 0xb4, 0x57, 0xe9, 0x20, 0x0e, 0x1d, 0xb5, 0x05, 0x71, 0x4a, 0x40,
+	0x45, 0x9c, 0x83, 0x8b, 0x9b, 0x28, 0x38, 0xb8, 0x48, 0x8a, 0xc7, 0x11, 0x08, 0xb9, 0x90, 0xa6,
+	0x82, 0xdf, 0x5e, 0xda, 0xd2, 0xfa, 0xc2, 0x9b, 0xed, 0xfe, 0xfc, 0xe0, 0xe1, 0x81, 0x27, 0x62,
+	0x26, 0x8b, 0x82, 0xd8, 0x6a, 0x47, 0x82, 0x03, 0x49, 0xed, 0x3d, 0x3a, 0x32, 0x0e, 0xa5, 0x71,
+	0x11, 0x83, 0xd3, 0x56, 0x0e, 0x7a, 0x44, 0xa9, 0xbd, 0xf9, 0x9a, 0x07, 0xe1, 0x03, 0x47, 0xbe,
+	0x68, 0x76, 0x27, 0xe6, 0x6b, 0xd7, 0x43, 0xfd, 0x1e, 0x83, 0x71, 0xf4, 0xba, 0xbc, 0x2f, 0xa1,
+	0xf8, 0xd1, 0x76, 0xc2, 0x36, 0xbb, 0xca, 0x6f, 0xab, 0xb7, 0x75, 0xe9, 0x6e, 0xa0, 0x79, 0x71,
+	0x11, 0x09, 0xc3, 0xfd, 0x5d, 0xc2, 0x15, 0xc7, 0xee, 0xf1, 0x21, 0xe1, 0x4e, 0x36, 0x77, 0x0d,
+	0x95, 0x62, 0xb6, 0x09, 0x52, 0x6e, 0xa4, 0x87, 0xfa, 0x99, 0xa7, 0xc1, 0x62, 0x02, 0x65, 0xff,
+	0x79, 0xa0, 0x7e, 0x23, 0x8e, 0xab, 0x69, 0x0f, 0xcd, 0xb9, 0xca, 0xcb, 0xdd, 0xd5, 0x50, 0x7d,
+	0xb0, 0xf9, 0x5e, 0x98, 0x3a, 0xfb, 0x3c, 0x9d, 0x9b, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0xba,
+	0x37, 0x25, 0xea, 0x44, 0x01, 0x00, 0x00,
+}
diff --git a/v2/internal/base/api_base.proto b/v2/internal/base/api_base.proto
new file mode 100644
index 0000000..56cd7a3
--- /dev/null
+++ b/v2/internal/base/api_base.proto
@@ -0,0 +1,33 @@
+// Built-in base types for API calls. Primarily useful as return types.
+
+syntax = "proto2";
+option go_package = "base";
+
+package appengine.base;
+
+message StringProto {
+  required string value = 1;
+}
+
+message Integer32Proto {
+  required int32 value = 1;
+}
+
+message Integer64Proto {
+  required int64 value = 1;
+}
+
+message BoolProto {
+  required bool value = 1;
+}
+
+message DoubleProto {
+  required double value = 1;
+}
+
+message BytesProto {
+  required bytes value = 1 [ctype=CORD];
+}
+
+message VoidProto {
+}
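The api_base messages above are thin wrappers around single scalar values, and the generated Go code pairs each of them with a nil-safe getter. As a minimal sketch (not part of the diff) of how such a wrapper round-trips through the legacy github.com/golang/protobuf API: the `main` package and import alias are illustrative assumptions, and `v2/internal/base` is an internal package, so it is only importable from inside the appengine/v2 module itself.

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"

	// Internal package: importable only from within google.golang.org/appengine/v2.
	base "google.golang.org/appengine/v2/internal/base"
)

func main() {
	// Wrap a scalar in the generated StringProto wrapper type.
	msg := &base.StringProto{Value: proto.String("hello")}

	// Round-trip through the proto2 wire format, as an RPC stub would.
	buf, err := proto.Marshal(msg)
	if err != nil {
		panic(err)
	}
	out := &base.StringProto{}
	if err := proto.Unmarshal(buf, out); err != nil {
		panic(err)
	}
	fmt.Println(out.GetValue()) // "hello"

	// Generated getters are nil-safe: a nil message yields the zero value.
	var empty *base.StringProto
	fmt.Println(empty.GetValue() == "") // true
}
```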
diff --git a/v2/internal/blobstore/blobstore_service.pb.go b/v2/internal/blobstore/blobstore_service.pb.go
new file mode 100644
index 0000000..5ff598e
--- /dev/null
+++ b/v2/internal/blobstore/blobstore_service.pb.go
@@ -0,0 +1,666 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/blobstore/blobstore_service.proto
+
+package blobstore
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type BlobstoreServiceError_ErrorCode int32
+
+const (
+	BlobstoreServiceError_OK                        BlobstoreServiceError_ErrorCode = 0
+	BlobstoreServiceError_INTERNAL_ERROR            BlobstoreServiceError_ErrorCode = 1
+	BlobstoreServiceError_URL_TOO_LONG              BlobstoreServiceError_ErrorCode = 2
+	BlobstoreServiceError_PERMISSION_DENIED         BlobstoreServiceError_ErrorCode = 3
+	BlobstoreServiceError_BLOB_NOT_FOUND            BlobstoreServiceError_ErrorCode = 4
+	BlobstoreServiceError_DATA_INDEX_OUT_OF_RANGE   BlobstoreServiceError_ErrorCode = 5
+	BlobstoreServiceError_BLOB_FETCH_SIZE_TOO_LARGE BlobstoreServiceError_ErrorCode = 6
+	BlobstoreServiceError_ARGUMENT_OUT_OF_RANGE     BlobstoreServiceError_ErrorCode = 8
+	BlobstoreServiceError_INVALID_BLOB_KEY          BlobstoreServiceError_ErrorCode = 9
+)
+
+var BlobstoreServiceError_ErrorCode_name = map[int32]string{
+	0: "OK",
+	1: "INTERNAL_ERROR",
+	2: "URL_TOO_LONG",
+	3: "PERMISSION_DENIED",
+	4: "BLOB_NOT_FOUND",
+	5: "DATA_INDEX_OUT_OF_RANGE",
+	6: "BLOB_FETCH_SIZE_TOO_LARGE",
+	8: "ARGUMENT_OUT_OF_RANGE",
+	9: "INVALID_BLOB_KEY",
+}
+var BlobstoreServiceError_ErrorCode_value = map[string]int32{
+	"OK":                        0,
+	"INTERNAL_ERROR":            1,
+	"URL_TOO_LONG":              2,
+	"PERMISSION_DENIED":         3,
+	"BLOB_NOT_FOUND":            4,
+	"DATA_INDEX_OUT_OF_RANGE":   5,
+	"BLOB_FETCH_SIZE_TOO_LARGE": 6,
+	"ARGUMENT_OUT_OF_RANGE":     8,
+	"INVALID_BLOB_KEY":          9,
+}
+
+func (x BlobstoreServiceError_ErrorCode) Enum() *BlobstoreServiceError_ErrorCode {
+	p := new(BlobstoreServiceError_ErrorCode)
+	*p = x
+	return p
+}
+func (x BlobstoreServiceError_ErrorCode) String() string {
+	return proto.EnumName(BlobstoreServiceError_ErrorCode_name, int32(x))
+}
+func (x *BlobstoreServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(BlobstoreServiceError_ErrorCode_value, data, "BlobstoreServiceError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = BlobstoreServiceError_ErrorCode(value)
+	return nil
+}
+func (BlobstoreServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{0, 0}
+}
+
+type BlobstoreServiceError struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *BlobstoreServiceError) Reset()         { *m = BlobstoreServiceError{} }
+func (m *BlobstoreServiceError) String() string { return proto.CompactTextString(m) }
+func (*BlobstoreServiceError) ProtoMessage()    {}
+func (*BlobstoreServiceError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{0}
+}
+func (m *BlobstoreServiceError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_BlobstoreServiceError.Unmarshal(m, b)
+}
+func (m *BlobstoreServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_BlobstoreServiceError.Marshal(b, m, deterministic)
+}
+func (dst *BlobstoreServiceError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_BlobstoreServiceError.Merge(dst, src)
+}
+func (m *BlobstoreServiceError) XXX_Size() int {
+	return xxx_messageInfo_BlobstoreServiceError.Size(m)
+}
+func (m *BlobstoreServiceError) XXX_DiscardUnknown() {
+	xxx_messageInfo_BlobstoreServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_BlobstoreServiceError proto.InternalMessageInfo
+
+type CreateUploadURLRequest struct {
+	SuccessPath               *string  `protobuf:"bytes,1,req,name=success_path,json=successPath" json:"success_path,omitempty"`
+	MaxUploadSizeBytes        *int64   `protobuf:"varint,2,opt,name=max_upload_size_bytes,json=maxUploadSizeBytes" json:"max_upload_size_bytes,omitempty"`
+	MaxUploadSizePerBlobBytes *int64   `protobuf:"varint,3,opt,name=max_upload_size_per_blob_bytes,json=maxUploadSizePerBlobBytes" json:"max_upload_size_per_blob_bytes,omitempty"`
+	GsBucketName              *string  `protobuf:"bytes,4,opt,name=gs_bucket_name,json=gsBucketName" json:"gs_bucket_name,omitempty"`
+	UrlExpiryTimeSeconds      *int32   `protobuf:"varint,5,opt,name=url_expiry_time_seconds,json=urlExpiryTimeSeconds" json:"url_expiry_time_seconds,omitempty"`
+	XXX_NoUnkeyedLiteral      struct{} `json:"-"`
+	XXX_unrecognized          []byte   `json:"-"`
+	XXX_sizecache             int32    `json:"-"`
+}
+
+func (m *CreateUploadURLRequest) Reset()         { *m = CreateUploadURLRequest{} }
+func (m *CreateUploadURLRequest) String() string { return proto.CompactTextString(m) }
+func (*CreateUploadURLRequest) ProtoMessage()    {}
+func (*CreateUploadURLRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{1}
+}
+func (m *CreateUploadURLRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CreateUploadURLRequest.Unmarshal(m, b)
+}
+func (m *CreateUploadURLRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CreateUploadURLRequest.Marshal(b, m, deterministic)
+}
+func (dst *CreateUploadURLRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CreateUploadURLRequest.Merge(dst, src)
+}
+func (m *CreateUploadURLRequest) XXX_Size() int {
+	return xxx_messageInfo_CreateUploadURLRequest.Size(m)
+}
+func (m *CreateUploadURLRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_CreateUploadURLRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CreateUploadURLRequest proto.InternalMessageInfo
+
+func (m *CreateUploadURLRequest) GetSuccessPath() string {
+	if m != nil && m.SuccessPath != nil {
+		return *m.SuccessPath
+	}
+	return ""
+}
+
+func (m *CreateUploadURLRequest) GetMaxUploadSizeBytes() int64 {
+	if m != nil && m.MaxUploadSizeBytes != nil {
+		return *m.MaxUploadSizeBytes
+	}
+	return 0
+}
+
+func (m *CreateUploadURLRequest) GetMaxUploadSizePerBlobBytes() int64 {
+	if m != nil && m.MaxUploadSizePerBlobBytes != nil {
+		return *m.MaxUploadSizePerBlobBytes
+	}
+	return 0
+}
+
+func (m *CreateUploadURLRequest) GetGsBucketName() string {
+	if m != nil && m.GsBucketName != nil {
+		return *m.GsBucketName
+	}
+	return ""
+}
+
+func (m *CreateUploadURLRequest) GetUrlExpiryTimeSeconds() int32 {
+	if m != nil && m.UrlExpiryTimeSeconds != nil {
+		return *m.UrlExpiryTimeSeconds
+	}
+	return 0
+}
+
+type CreateUploadURLResponse struct {
+	Url                  *string  `protobuf:"bytes,1,req,name=url" json:"url,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CreateUploadURLResponse) Reset()         { *m = CreateUploadURLResponse{} }
+func (m *CreateUploadURLResponse) String() string { return proto.CompactTextString(m) }
+func (*CreateUploadURLResponse) ProtoMessage()    {}
+func (*CreateUploadURLResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{2}
+}
+func (m *CreateUploadURLResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CreateUploadURLResponse.Unmarshal(m, b)
+}
+func (m *CreateUploadURLResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CreateUploadURLResponse.Marshal(b, m, deterministic)
+}
+func (dst *CreateUploadURLResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CreateUploadURLResponse.Merge(dst, src)
+}
+func (m *CreateUploadURLResponse) XXX_Size() int {
+	return xxx_messageInfo_CreateUploadURLResponse.Size(m)
+}
+func (m *CreateUploadURLResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_CreateUploadURLResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CreateUploadURLResponse proto.InternalMessageInfo
+
+func (m *CreateUploadURLResponse) GetUrl() string {
+	if m != nil && m.Url != nil {
+		return *m.Url
+	}
+	return ""
+}
+
+type DeleteBlobRequest struct {
+	BlobKey              []string `protobuf:"bytes,1,rep,name=blob_key,json=blobKey" json:"blob_key,omitempty"`
+	Token                *string  `protobuf:"bytes,2,opt,name=token" json:"token,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *DeleteBlobRequest) Reset()         { *m = DeleteBlobRequest{} }
+func (m *DeleteBlobRequest) String() string { return proto.CompactTextString(m) }
+func (*DeleteBlobRequest) ProtoMessage()    {}
+func (*DeleteBlobRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{3}
+}
+func (m *DeleteBlobRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_DeleteBlobRequest.Unmarshal(m, b)
+}
+func (m *DeleteBlobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_DeleteBlobRequest.Marshal(b, m, deterministic)
+}
+func (dst *DeleteBlobRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_DeleteBlobRequest.Merge(dst, src)
+}
+func (m *DeleteBlobRequest) XXX_Size() int {
+	return xxx_messageInfo_DeleteBlobRequest.Size(m)
+}
+func (m *DeleteBlobRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_DeleteBlobRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_DeleteBlobRequest proto.InternalMessageInfo
+
+func (m *DeleteBlobRequest) GetBlobKey() []string {
+	if m != nil {
+		return m.BlobKey
+	}
+	return nil
+}
+
+func (m *DeleteBlobRequest) GetToken() string {
+	if m != nil && m.Token != nil {
+		return *m.Token
+	}
+	return ""
+}
+
+type FetchDataRequest struct {
+	BlobKey              *string  `protobuf:"bytes,1,req,name=blob_key,json=blobKey" json:"blob_key,omitempty"`
+	StartIndex           *int64   `protobuf:"varint,2,req,name=start_index,json=startIndex" json:"start_index,omitempty"`
+	EndIndex             *int64   `protobuf:"varint,3,req,name=end_index,json=endIndex" json:"end_index,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *FetchDataRequest) Reset()         { *m = FetchDataRequest{} }
+func (m *FetchDataRequest) String() string { return proto.CompactTextString(m) }
+func (*FetchDataRequest) ProtoMessage()    {}
+func (*FetchDataRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{4}
+}
+func (m *FetchDataRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_FetchDataRequest.Unmarshal(m, b)
+}
+func (m *FetchDataRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_FetchDataRequest.Marshal(b, m, deterministic)
+}
+func (dst *FetchDataRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_FetchDataRequest.Merge(dst, src)
+}
+func (m *FetchDataRequest) XXX_Size() int {
+	return xxx_messageInfo_FetchDataRequest.Size(m)
+}
+func (m *FetchDataRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_FetchDataRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_FetchDataRequest proto.InternalMessageInfo
+
+func (m *FetchDataRequest) GetBlobKey() string {
+	if m != nil && m.BlobKey != nil {
+		return *m.BlobKey
+	}
+	return ""
+}
+
+func (m *FetchDataRequest) GetStartIndex() int64 {
+	if m != nil && m.StartIndex != nil {
+		return *m.StartIndex
+	}
+	return 0
+}
+
+func (m *FetchDataRequest) GetEndIndex() int64 {
+	if m != nil && m.EndIndex != nil {
+		return *m.EndIndex
+	}
+	return 0
+}
+
+type FetchDataResponse struct {
+	Data                 []byte   `protobuf:"bytes,1000,req,name=data" json:"data,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *FetchDataResponse) Reset()         { *m = FetchDataResponse{} }
+func (m *FetchDataResponse) String() string { return proto.CompactTextString(m) }
+func (*FetchDataResponse) ProtoMessage()    {}
+func (*FetchDataResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{5}
+}
+func (m *FetchDataResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_FetchDataResponse.Unmarshal(m, b)
+}
+func (m *FetchDataResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_FetchDataResponse.Marshal(b, m, deterministic)
+}
+func (dst *FetchDataResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_FetchDataResponse.Merge(dst, src)
+}
+func (m *FetchDataResponse) XXX_Size() int {
+	return xxx_messageInfo_FetchDataResponse.Size(m)
+}
+func (m *FetchDataResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_FetchDataResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_FetchDataResponse proto.InternalMessageInfo
+
+func (m *FetchDataResponse) GetData() []byte {
+	if m != nil {
+		return m.Data
+	}
+	return nil
+}
+
+type CloneBlobRequest struct {
+	BlobKey              []byte   `protobuf:"bytes,1,req,name=blob_key,json=blobKey" json:"blob_key,omitempty"`
+	MimeType             []byte   `protobuf:"bytes,2,req,name=mime_type,json=mimeType" json:"mime_type,omitempty"`
+	TargetAppId          []byte   `protobuf:"bytes,3,req,name=target_app_id,json=targetAppId" json:"target_app_id,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CloneBlobRequest) Reset()         { *m = CloneBlobRequest{} }
+func (m *CloneBlobRequest) String() string { return proto.CompactTextString(m) }
+func (*CloneBlobRequest) ProtoMessage()    {}
+func (*CloneBlobRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{6}
+}
+func (m *CloneBlobRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CloneBlobRequest.Unmarshal(m, b)
+}
+func (m *CloneBlobRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CloneBlobRequest.Marshal(b, m, deterministic)
+}
+func (dst *CloneBlobRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CloneBlobRequest.Merge(dst, src)
+}
+func (m *CloneBlobRequest) XXX_Size() int {
+	return xxx_messageInfo_CloneBlobRequest.Size(m)
+}
+func (m *CloneBlobRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_CloneBlobRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CloneBlobRequest proto.InternalMessageInfo
+
+func (m *CloneBlobRequest) GetBlobKey() []byte {
+	if m != nil {
+		return m.BlobKey
+	}
+	return nil
+}
+
+func (m *CloneBlobRequest) GetMimeType() []byte {
+	if m != nil {
+		return m.MimeType
+	}
+	return nil
+}
+
+func (m *CloneBlobRequest) GetTargetAppId() []byte {
+	if m != nil {
+		return m.TargetAppId
+	}
+	return nil
+}
+
+type CloneBlobResponse struct {
+	BlobKey              []byte   `protobuf:"bytes,1,req,name=blob_key,json=blobKey" json:"blob_key,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CloneBlobResponse) Reset()         { *m = CloneBlobResponse{} }
+func (m *CloneBlobResponse) String() string { return proto.CompactTextString(m) }
+func (*CloneBlobResponse) ProtoMessage()    {}
+func (*CloneBlobResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{7}
+}
+func (m *CloneBlobResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CloneBlobResponse.Unmarshal(m, b)
+}
+func (m *CloneBlobResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CloneBlobResponse.Marshal(b, m, deterministic)
+}
+func (dst *CloneBlobResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CloneBlobResponse.Merge(dst, src)
+}
+func (m *CloneBlobResponse) XXX_Size() int {
+	return xxx_messageInfo_CloneBlobResponse.Size(m)
+}
+func (m *CloneBlobResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_CloneBlobResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CloneBlobResponse proto.InternalMessageInfo
+
+func (m *CloneBlobResponse) GetBlobKey() []byte {
+	if m != nil {
+		return m.BlobKey
+	}
+	return nil
+}
+
+type DecodeBlobKeyRequest struct {
+	BlobKey              []string `protobuf:"bytes,1,rep,name=blob_key,json=blobKey" json:"blob_key,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *DecodeBlobKeyRequest) Reset()         { *m = DecodeBlobKeyRequest{} }
+func (m *DecodeBlobKeyRequest) String() string { return proto.CompactTextString(m) }
+func (*DecodeBlobKeyRequest) ProtoMessage()    {}
+func (*DecodeBlobKeyRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{8}
+}
+func (m *DecodeBlobKeyRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_DecodeBlobKeyRequest.Unmarshal(m, b)
+}
+func (m *DecodeBlobKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_DecodeBlobKeyRequest.Marshal(b, m, deterministic)
+}
+func (dst *DecodeBlobKeyRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_DecodeBlobKeyRequest.Merge(dst, src)
+}
+func (m *DecodeBlobKeyRequest) XXX_Size() int {
+	return xxx_messageInfo_DecodeBlobKeyRequest.Size(m)
+}
+func (m *DecodeBlobKeyRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_DecodeBlobKeyRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_DecodeBlobKeyRequest proto.InternalMessageInfo
+
+func (m *DecodeBlobKeyRequest) GetBlobKey() []string {
+	if m != nil {
+		return m.BlobKey
+	}
+	return nil
+}
+
+type DecodeBlobKeyResponse struct {
+	Decoded              []string `protobuf:"bytes,1,rep,name=decoded" json:"decoded,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *DecodeBlobKeyResponse) Reset()         { *m = DecodeBlobKeyResponse{} }
+func (m *DecodeBlobKeyResponse) String() string { return proto.CompactTextString(m) }
+func (*DecodeBlobKeyResponse) ProtoMessage()    {}
+func (*DecodeBlobKeyResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{9}
+}
+func (m *DecodeBlobKeyResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_DecodeBlobKeyResponse.Unmarshal(m, b)
+}
+func (m *DecodeBlobKeyResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_DecodeBlobKeyResponse.Marshal(b, m, deterministic)
+}
+func (dst *DecodeBlobKeyResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_DecodeBlobKeyResponse.Merge(dst, src)
+}
+func (m *DecodeBlobKeyResponse) XXX_Size() int {
+	return xxx_messageInfo_DecodeBlobKeyResponse.Size(m)
+}
+func (m *DecodeBlobKeyResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_DecodeBlobKeyResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_DecodeBlobKeyResponse proto.InternalMessageInfo
+
+func (m *DecodeBlobKeyResponse) GetDecoded() []string {
+	if m != nil {
+		return m.Decoded
+	}
+	return nil
+}
+
+type CreateEncodedGoogleStorageKeyRequest struct {
+	Filename             *string  `protobuf:"bytes,1,req,name=filename" json:"filename,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CreateEncodedGoogleStorageKeyRequest) Reset()         { *m = CreateEncodedGoogleStorageKeyRequest{} }
+func (m *CreateEncodedGoogleStorageKeyRequest) String() string { return proto.CompactTextString(m) }
+func (*CreateEncodedGoogleStorageKeyRequest) ProtoMessage()    {}
+func (*CreateEncodedGoogleStorageKeyRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{10}
+}
+func (m *CreateEncodedGoogleStorageKeyRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CreateEncodedGoogleStorageKeyRequest.Unmarshal(m, b)
+}
+func (m *CreateEncodedGoogleStorageKeyRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CreateEncodedGoogleStorageKeyRequest.Marshal(b, m, deterministic)
+}
+func (dst *CreateEncodedGoogleStorageKeyRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CreateEncodedGoogleStorageKeyRequest.Merge(dst, src)
+}
+func (m *CreateEncodedGoogleStorageKeyRequest) XXX_Size() int {
+	return xxx_messageInfo_CreateEncodedGoogleStorageKeyRequest.Size(m)
+}
+func (m *CreateEncodedGoogleStorageKeyRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_CreateEncodedGoogleStorageKeyRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CreateEncodedGoogleStorageKeyRequest proto.InternalMessageInfo
+
+func (m *CreateEncodedGoogleStorageKeyRequest) GetFilename() string {
+	if m != nil && m.Filename != nil {
+		return *m.Filename
+	}
+	return ""
+}
+
+type CreateEncodedGoogleStorageKeyResponse struct {
+	BlobKey              *string  `protobuf:"bytes,1,req,name=blob_key,json=blobKey" json:"blob_key,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CreateEncodedGoogleStorageKeyResponse) Reset()         { *m = CreateEncodedGoogleStorageKeyResponse{} }
+func (m *CreateEncodedGoogleStorageKeyResponse) String() string { return proto.CompactTextString(m) }
+func (*CreateEncodedGoogleStorageKeyResponse) ProtoMessage()    {}
+func (*CreateEncodedGoogleStorageKeyResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_blobstore_service_3604fb6033ea2e2e, []int{11}
+}
+func (m *CreateEncodedGoogleStorageKeyResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CreateEncodedGoogleStorageKeyResponse.Unmarshal(m, b)
+}
+func (m *CreateEncodedGoogleStorageKeyResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CreateEncodedGoogleStorageKeyResponse.Marshal(b, m, deterministic)
+}
+func (dst *CreateEncodedGoogleStorageKeyResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CreateEncodedGoogleStorageKeyResponse.Merge(dst, src)
+}
+func (m *CreateEncodedGoogleStorageKeyResponse) XXX_Size() int {
+	return xxx_messageInfo_CreateEncodedGoogleStorageKeyResponse.Size(m)
+}
+func (m *CreateEncodedGoogleStorageKeyResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_CreateEncodedGoogleStorageKeyResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CreateEncodedGoogleStorageKeyResponse proto.InternalMessageInfo
+
+func (m *CreateEncodedGoogleStorageKeyResponse) GetBlobKey() string {
+	if m != nil && m.BlobKey != nil {
+		return *m.BlobKey
+	}
+	return ""
+}
+
+func init() {
+	proto.RegisterType((*BlobstoreServiceError)(nil), "appengine.BlobstoreServiceError")
+	proto.RegisterType((*CreateUploadURLRequest)(nil), "appengine.CreateUploadURLRequest")
+	proto.RegisterType((*CreateUploadURLResponse)(nil), "appengine.CreateUploadURLResponse")
+	proto.RegisterType((*DeleteBlobRequest)(nil), "appengine.DeleteBlobRequest")
+	proto.RegisterType((*FetchDataRequest)(nil), "appengine.FetchDataRequest")
+	proto.RegisterType((*FetchDataResponse)(nil), "appengine.FetchDataResponse")
+	proto.RegisterType((*CloneBlobRequest)(nil), "appengine.CloneBlobRequest")
+	proto.RegisterType((*CloneBlobResponse)(nil), "appengine.CloneBlobResponse")
+	proto.RegisterType((*DecodeBlobKeyRequest)(nil), "appengine.DecodeBlobKeyRequest")
+	proto.RegisterType((*DecodeBlobKeyResponse)(nil), "appengine.DecodeBlobKeyResponse")
+	proto.RegisterType((*CreateEncodedGoogleStorageKeyRequest)(nil), "appengine.CreateEncodedGoogleStorageKeyRequest")
+	proto.RegisterType((*CreateEncodedGoogleStorageKeyResponse)(nil), "appengine.CreateEncodedGoogleStorageKeyResponse")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/blobstore/blobstore_service.proto", fileDescriptor_blobstore_service_3604fb6033ea2e2e)
+}
+
+var fileDescriptor_blobstore_service_3604fb6033ea2e2e = []byte{
+	// 737 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x54, 0xe1, 0x6e, 0xe3, 0x44,
+	0x10, 0xc6, 0x4e, 0x7b, 0x8d, 0xa7, 0xe1, 0xe4, 0xae, 0x1a, 0x9a, 0x52, 0x01, 0xc1, 0x3a, 0xa4,
+	0x48, 0xa0, 0x56, 0xfd, 0xc1, 0x03, 0xd8, 0xb5, 0x13, 0xac, 0xe6, 0xec, 0x6a, 0xe3, 0x20, 0xb8,
+	0x3f, 0xab, 0x6d, 0x3c, 0xb8, 0x56, 0x1d, 0xaf, 0x59, 0x6f, 0x50, 0x73, 0x0f, 0xc1, 0xbb, 0xf1,
+	0x16, 0x48, 0xbc, 0x04, 0xf2, 0xda, 0x6d, 0x73, 0x07, 0x77, 0xf7, 0x6f, 0xe7, 0xfb, 0xf6, 0x9b,
+	0xf9, 0x66, 0x66, 0xb5, 0x30, 0xcd, 0x84, 0xc8, 0x0a, 0x3c, 0xcf, 0x44, 0xc1, 0xcb, 0xec, 0x5c,
+	0xc8, 0xec, 0x82, 0x57, 0x15, 0x96, 0x59, 0x5e, 0xe2, 0x45, 0x5e, 0x2a, 0x94, 0x25, 0x2f, 0x2e,
+	0x6e, 0x0b, 0x71, 0x5b, 0x2b, 0x21, 0xf1, 0xf9, 0xc4, 0x6a, 0x94, 0x7f, 0xe4, 0x2b, 0x3c, 0xaf,
+	0xa4, 0x50, 0x82, 0x58, 0x4f, 0x2a, 0xe7, 0x1f, 0x03, 0x86, 0xde, 0xe3, 0xb5, 0x45, 0x7b, 0x2b,
+	0x90, 0x52, 0x48, 0xe7, 0x2f, 0x03, 0x2c, 0x7d, 0xba, 0x12, 0x29, 0x92, 0x17, 0x60, 0xc6, 0xd7,
+	0xf6, 0x67, 0x84, 0xc0, 0xcb, 0x30, 0x4a, 0x02, 0x1a, 0xb9, 0x73, 0x16, 0x50, 0x1a, 0x53, 0xdb,
+	0x20, 0x36, 0x0c, 0x96, 0x74, 0xce, 0x92, 0x38, 0x66, 0xf3, 0x38, 0x9a, 0xd9, 0x26, 0x19, 0xc2,
+	0xd1, 0x4d, 0x40, 0x5f, 0x87, 0x8b, 0x45, 0x18, 0x47, 0xcc, 0x0f, 0xa2, 0x30, 0xf0, 0xed, 0x5e,
+	0x23, 0xf6, 0xe6, 0xb1, 0xc7, 0xa2, 0x38, 0x61, 0xd3, 0x78, 0x19, 0xf9, 0xf6, 0x1e, 0x39, 0x83,
+	0x13, 0xdf, 0x4d, 0x5c, 0x16, 0x46, 0x7e, 0xf0, 0x0b, 0x8b, 0x97, 0x09, 0x8b, 0xa7, 0x8c, 0xba,
+	0xd1, 0x2c, 0xb0, 0xf7, 0xc9, 0x57, 0x70, 0xaa, 0x05, 0xd3, 0x20, 0xb9, 0xfa, 0x89, 0x2d, 0xc2,
+	0x37, 0x41, 0x5b, 0xc5, 0xa5, 0xb3, 0xc0, 0x7e, 0x41, 0x4e, 0x61, 0xe8, 0xd2, 0xd9, 0xf2, 0x75,
+	0x10, 0x25, 0xef, 0x2a, 0xfb, 0xe4, 0x18, 0xec, 0x30, 0xfa, 0xd9, 0x9d, 0x87, 0x3e, 0xd3, 0x19,
+	0xae, 0x83, 0x5f, 0x6d, 0xcb, 0xf9, 0xd3, 0x84, 0x2f, 0xae, 0x24, 0x72, 0x85, 0xcb, 0xaa, 0x10,
+	0x3c, 0x5d, 0xd2, 0x39, 0xc5, 0xdf, 0x37, 0x58, 0x2b, 0xf2, 0x2d, 0x0c, 0xea, 0xcd, 0x6a, 0x85,
+	0x75, 0xcd, 0x2a, 0xae, 0xee, 0x46, 0xc6, 0xd8, 0x9c, 0x58, 0xf4, 0xb0, 0xc3, 0x6e, 0xb8, 0xba,
+	0x23, 0x97, 0x30, 0x5c, 0xf3, 0x07, 0xb6, 0xd1, 0x52, 0x56, 0xe7, 0x6f, 0x91, 0xdd, 0x6e, 0x15,
+	0xd6, 0x23, 0x73, 0x6c, 0x4c, 0x7a, 0x94, 0xac, 0xf9, 0x43, 0x9b, 0x76, 0x91, 0xbf, 0x45, 0xaf,
+	0x61, 0x88, 0x0b, 0x5f, 0xbf, 0x2f, 0xa9, 0x50, 0xb2, 0x66, 0x31, 0x9d, 0xb6, 0xa7, 0xb5, 0xa7,
+	0xef, 0x68, 0x6f, 0x50, 0x36, 0x3b, 0x69, 0x53, 0xbc, 0x82, 0x97, 0x59, 0xcd, 0x6e, 0x37, 0xab,
+	0x7b, 0x54, 0xac, 0xe4, 0x6b, 0x1c, 0xed, 0x8d, 0x8d, 0x89, 0x45, 0x07, 0x59, 0xed, 0x69, 0x30,
+	0xe2, 0x6b, 0x24, 0x3f, 0xc2, 0xc9, 0x46, 0x16, 0x0c, 0x1f, 0xaa, 0x5c, 0x6e, 0x99, 0xca, 0xd7,
+	0xcd, 0xce, 0x57, 0xa2, 0x4c, 0xeb, 0xd1, 0xfe, 0xd8, 0x98, 0xec, 0xd3, 0xe3, 0x8d, 0x2c, 0x02,
+	0xcd, 0x26, 0xf9, 0x1a, 0x17, 0x2d, 0xe7, 0x7c, 0x0f, 0x27, 0xff, 0x99, 0x47, 0x5d, 0x89, 0xb2,
+	0x46, 0x62, 0x43, 0x6f, 0x23, 0x8b, 0x6e, 0x0e, 0xcd, 0xd1, 0xf1, 0xe1, 0xc8, 0xc7, 0x02, 0x15,
+	0x36, 0xe6, 0x1e, 0xe7, 0x76, 0x0a, 0x7d, 0xdd, 0xcd, 0x3d, 0x6e, 0x47, 0xc6, 0xb8, 0x37, 0xb1,
+	0xe8, 0x41, 0x13, 0x5f, 0xe3, 0x96, 0x1c, 0xc3, 0xbe, 0x12, 0xf7, 0x58, 0xea, 0xf9, 0x58, 0xb4,
+	0x0d, 0x9c, 0x7b, 0xb0, 0xa7, 0xa8, 0x56, 0x77, 0x3e, 0x57, 0xfc, 0xff, 0x93, 0x98, 0xbb, 0x49,
+	0xbe, 0x81, 0xc3, 0x5a, 0x71, 0xa9, 0x58, 0x5e, 0xa6, 0xf8, 0x30, 0x32, 0xc7, 0xe6, 0xa4, 0x47,
+	0x41, 0x43, 0x61, 0x83, 0x90, 0x33, 0xb0, 0xb0, 0x4c, 0x3b, 0xba, 0xa7, 0xe9, 0x3e, 0x96, 0xa9,
+	0x26, 0x9d, 0x1f, 0xe0, 0x68, 0xa7, 0x58, 0xd7, 0xd9, 0x09, 0xec, 0xa5, 0x5c, 0xf1, 0xd1, 0xdf,
+	0x07, 0x63, 0x73, 0x32, 0xf0, 0xcc, 0xbe, 0x41, 0x35, 0xe0, 0x94, 0x60, 0x5f, 0x15, 0xa2, 0xfc,
+	0x48, 0x7f, 0xe6, 0x64, 0xf0, 0x6c, 0xed, 0x0c, 0xac, 0x75, 0x33, 0x68, 0xb5, 0xad, 0x50, 0x1b,
+	0x1b, 0xd0, 0x7e, 0x03, 0x24, 0xdb, 0x0a, 0x89, 0x03, 0x9f, 0x2b, 0x2e, 0x33, 0x54, 0x8c, 0x57,
+	0x15, 0xcb, 0x53, 0x6d, 0x6d, 0x40, 0x0f, 0x5b, 0xd0, 0xad, 0xaa, 0x30, 0x75, 0xce, 0xe1, 0x68,
+	0xa7, 0x5e, 0xe7, 0xee, 0xc3, 0x05, 0x9d, 0x4b, 0x38, 0xf6, 0x71, 0x25, 0x52, 0x2d, 0xb8, 0xc6,
+	0xed, 0xa7, 0x77, 0xe0, 0x5c, 0xc2, 0xf0, 0x3d, 0x49, 0x57, 0x66, 0x04, 0x07, 0xa9, 0x26, 0xd2,
+	0x47, 0x49, 0x17, 0x3a, 0x1e, 0xbc, 0x6a, 0xdf, 0x44, 0x50, 0x6a, 0x60, 0xa6, 0x3f, 0x9d, 0x85,
+	0x12, 0x92, 0x67, 0xb8, 0x53, 0xf5, 0x4b, 0xe8, 0xff, 0x96, 0x17, 0xa8, 0x9f, 0x64, 0xbb, 0xb4,
+	0xa7, 0xd8, 0xf1, 0xe0, 0xbb, 0x4f, 0xe4, 0xf8, 0x40, 0xb7, 0xcf, 0xd6, 0xbd, 0xc3, 0x37, 0xd6,
+	0xd3, 0x07, 0xf6, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc1, 0xfb, 0x81, 0x94, 0xfb, 0x04, 0x00,
+	0x00,
+}
diff --git a/v2/internal/blobstore/blobstore_service.proto b/v2/internal/blobstore/blobstore_service.proto
new file mode 100644
index 0000000..33b2650
--- /dev/null
+++ b/v2/internal/blobstore/blobstore_service.proto
@@ -0,0 +1,71 @@
+syntax = "proto2";
+option go_package = "blobstore";
+
+package appengine;
+
+message BlobstoreServiceError {
+  enum ErrorCode {
+    OK = 0;
+    INTERNAL_ERROR = 1;
+    URL_TOO_LONG = 2;
+    PERMISSION_DENIED = 3;
+    BLOB_NOT_FOUND = 4;
+    DATA_INDEX_OUT_OF_RANGE = 5;
+    BLOB_FETCH_SIZE_TOO_LARGE = 6;
+    ARGUMENT_OUT_OF_RANGE = 8;
+    INVALID_BLOB_KEY = 9;
+  }
+}
+
+message CreateUploadURLRequest {
+  required string success_path = 1;
+  optional int64 max_upload_size_bytes = 2;
+  optional int64 max_upload_size_per_blob_bytes = 3;
+  optional string gs_bucket_name = 4;
+  optional int32 url_expiry_time_seconds = 5;
+}
+
+message CreateUploadURLResponse {
+  required string url = 1;
+}
+
+message DeleteBlobRequest {
+  repeated string blob_key = 1;
+  optional string token = 2;
+}
+
+message FetchDataRequest {
+  required string blob_key = 1;
+  required int64 start_index = 2;
+  required int64 end_index = 3;
+}
+
+message FetchDataResponse {
+  required bytes data = 1000 [ctype = CORD];
+}
+
+message CloneBlobRequest {
+  required bytes blob_key = 1;
+  required bytes mime_type = 2;
+  required bytes target_app_id = 3;
+}
+
+message CloneBlobResponse {
+  required bytes blob_key = 1;
+}
+
+message DecodeBlobKeyRequest {
+  repeated string blob_key = 1;
+}
+
+message DecodeBlobKeyResponse {
+  repeated string decoded = 1;
+}
+
+message CreateEncodedGoogleStorageKeyRequest {
+  required string filename = 1;
+}
+
+message CreateEncodedGoogleStorageKeyResponse {
+  required string blob_key = 1;
+}
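Each request/response pair above maps to one method of the blobstore backend service. The following is a rough, hedged sketch (none of it appears in the diff) of how code inside this module might build a CreateUploadURLRequest and hand it to the repository's internal.Call RPC helper; the function name uploadURL, the 10 MiB cap, and the import aliases are assumptions made for illustration.

```go
package example

import (
	"context"

	"github.com/golang/protobuf/proto"

	"google.golang.org/appengine/v2/internal"
	blobpb "google.golang.org/appengine/v2/internal/blobstore"
)

// uploadURL asks the blobstore service for a one-shot upload URL that
// redirects to successPath once the upload completes.
func uploadURL(ctx context.Context, successPath string) (string, error) {
	req := &blobpb.CreateUploadURLRequest{
		SuccessPath:        proto.String(successPath),
		MaxUploadSizeBytes: proto.Int64(10 << 20), // optional per-request cap (10 MiB), assumed here
	}
	res := &blobpb.CreateUploadURLResponse{}
	if err := internal.Call(ctx, "blobstore", "CreateUploadURL", req, res); err != nil {
		return "", err
	}
	return res.GetUrl(), nil
}
```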
diff --git a/v2/internal/capability/capability_service.pb.go b/v2/internal/capability/capability_service.pb.go
new file mode 100644
index 0000000..9eb744f
--- /dev/null
+++ b/v2/internal/capability/capability_service.pb.go
@@ -0,0 +1,203 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/capability/capability_service.proto
+
+package capability
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type IsEnabledResponse_SummaryStatus int32
+
+const (
+	IsEnabledResponse_DEFAULT          IsEnabledResponse_SummaryStatus = 0
+	IsEnabledResponse_ENABLED          IsEnabledResponse_SummaryStatus = 1
+	IsEnabledResponse_SCHEDULED_FUTURE IsEnabledResponse_SummaryStatus = 2
+	IsEnabledResponse_SCHEDULED_NOW    IsEnabledResponse_SummaryStatus = 3
+	IsEnabledResponse_DISABLED         IsEnabledResponse_SummaryStatus = 4
+	IsEnabledResponse_UNKNOWN          IsEnabledResponse_SummaryStatus = 5
+)
+
+var IsEnabledResponse_SummaryStatus_name = map[int32]string{
+	0: "DEFAULT",
+	1: "ENABLED",
+	2: "SCHEDULED_FUTURE",
+	3: "SCHEDULED_NOW",
+	4: "DISABLED",
+	5: "UNKNOWN",
+}
+var IsEnabledResponse_SummaryStatus_value = map[string]int32{
+	"DEFAULT":          0,
+	"ENABLED":          1,
+	"SCHEDULED_FUTURE": 2,
+	"SCHEDULED_NOW":    3,
+	"DISABLED":         4,
+	"UNKNOWN":          5,
+}
+
+func (x IsEnabledResponse_SummaryStatus) Enum() *IsEnabledResponse_SummaryStatus {
+	p := new(IsEnabledResponse_SummaryStatus)
+	*p = x
+	return p
+}
+func (x IsEnabledResponse_SummaryStatus) String() string {
+	return proto.EnumName(IsEnabledResponse_SummaryStatus_name, int32(x))
+}
+func (x *IsEnabledResponse_SummaryStatus) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(IsEnabledResponse_SummaryStatus_value, data, "IsEnabledResponse_SummaryStatus")
+	if err != nil {
+		return err
+	}
+	*x = IsEnabledResponse_SummaryStatus(value)
+	return nil
+}
+func (IsEnabledResponse_SummaryStatus) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_capability_service_030277ff00db7e72, []int{1, 0}
+}
+
+type IsEnabledRequest struct {
+	Package              *string  `protobuf:"bytes,1,req,name=package" json:"package,omitempty"`
+	Capability           []string `protobuf:"bytes,2,rep,name=capability" json:"capability,omitempty"`
+	Call                 []string `protobuf:"bytes,3,rep,name=call" json:"call,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *IsEnabledRequest) Reset()         { *m = IsEnabledRequest{} }
+func (m *IsEnabledRequest) String() string { return proto.CompactTextString(m) }
+func (*IsEnabledRequest) ProtoMessage()    {}
+func (*IsEnabledRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_capability_service_030277ff00db7e72, []int{0}
+}
+func (m *IsEnabledRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_IsEnabledRequest.Unmarshal(m, b)
+}
+func (m *IsEnabledRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_IsEnabledRequest.Marshal(b, m, deterministic)
+}
+func (dst *IsEnabledRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_IsEnabledRequest.Merge(dst, src)
+}
+func (m *IsEnabledRequest) XXX_Size() int {
+	return xxx_messageInfo_IsEnabledRequest.Size(m)
+}
+func (m *IsEnabledRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_IsEnabledRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_IsEnabledRequest proto.InternalMessageInfo
+
+func (m *IsEnabledRequest) GetPackage() string {
+	if m != nil && m.Package != nil {
+		return *m.Package
+	}
+	return ""
+}
+
+func (m *IsEnabledRequest) GetCapability() []string {
+	if m != nil {
+		return m.Capability
+	}
+	return nil
+}
+
+func (m *IsEnabledRequest) GetCall() []string {
+	if m != nil {
+		return m.Call
+	}
+	return nil
+}
+
+type IsEnabledResponse struct {
+	SummaryStatus        *IsEnabledResponse_SummaryStatus `protobuf:"varint,1,opt,name=summary_status,json=summaryStatus,enum=appengine.IsEnabledResponse_SummaryStatus" json:"summary_status,omitempty"`
+	TimeUntilScheduled   *int64                           `protobuf:"varint,2,opt,name=time_until_scheduled,json=timeUntilScheduled" json:"time_until_scheduled,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                         `json:"-"`
+	XXX_unrecognized     []byte                           `json:"-"`
+	XXX_sizecache        int32                            `json:"-"`
+}
+
+func (m *IsEnabledResponse) Reset()         { *m = IsEnabledResponse{} }
+func (m *IsEnabledResponse) String() string { return proto.CompactTextString(m) }
+func (*IsEnabledResponse) ProtoMessage()    {}
+func (*IsEnabledResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_capability_service_030277ff00db7e72, []int{1}
+}
+func (m *IsEnabledResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_IsEnabledResponse.Unmarshal(m, b)
+}
+func (m *IsEnabledResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_IsEnabledResponse.Marshal(b, m, deterministic)
+}
+func (dst *IsEnabledResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_IsEnabledResponse.Merge(dst, src)
+}
+func (m *IsEnabledResponse) XXX_Size() int {
+	return xxx_messageInfo_IsEnabledResponse.Size(m)
+}
+func (m *IsEnabledResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_IsEnabledResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_IsEnabledResponse proto.InternalMessageInfo
+
+func (m *IsEnabledResponse) GetSummaryStatus() IsEnabledResponse_SummaryStatus {
+	if m != nil && m.SummaryStatus != nil {
+		return *m.SummaryStatus
+	}
+	return IsEnabledResponse_DEFAULT
+}
+
+func (m *IsEnabledResponse) GetTimeUntilScheduled() int64 {
+	if m != nil && m.TimeUntilScheduled != nil {
+		return *m.TimeUntilScheduled
+	}
+	return 0
+}
+
+func init() {
+	proto.RegisterType((*IsEnabledRequest)(nil), "appengine.IsEnabledRequest")
+	proto.RegisterType((*IsEnabledResponse)(nil), "appengine.IsEnabledResponse")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/capability/capability_service.proto", fileDescriptor_capability_service_030277ff00db7e72)
+}
+
+var fileDescriptor_capability_service_030277ff00db7e72 = []byte{
+	// 359 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0xd1, 0x8a, 0x9b, 0x40,
+	0x14, 0x86, 0xa3, 0xa6, 0xa4, 0x9e, 0x26, 0xc1, 0x0c, 0xb9, 0x90, 0xb6, 0x14, 0xf1, 0x4a, 0x7a,
+	0x61, 0x4a, 0xde, 0x20, 0x89, 0x86, 0x84, 0x06, 0x43, 0x35, 0x12, 0x28, 0x14, 0x3b, 0x31, 0x83,
+	0x95, 0x8e, 0xa3, 0xeb, 0x8c, 0x0b, 0x79, 0x82, 0x7d, 0xed, 0x45, 0x43, 0x8c, 0xcb, 0x2e, 0x7b,
+	0x77, 0xce, 0xf9, 0xf9, 0xfe, 0x99, 0x73, 0x7e, 0xd8, 0x24, 0x79, 0x9e, 0x50, 0x62, 0x27, 0x39,
+	0xc5, 0x2c, 0xb1, 0xf3, 0x32, 0x99, 0xe1, 0xa2, 0x20, 0x2c, 0x49, 0x19, 0x99, 0xa5, 0x4c, 0x90,
+	0x92, 0x61, 0x3a, 0x8b, 0x71, 0x81, 0x4f, 0x29, 0x4d, 0xc5, 0xa5, 0x53, 0x46, 0x9c, 0x94, 0x8f,
+	0x69, 0x4c, 0xec, 0xa2, 0xcc, 0x45, 0x8e, 0xd4, 0x96, 0x33, 0xff, 0x82, 0xb6, 0xe5, 0x2e, 0xc3,
+	0x27, 0x4a, 0xce, 0x3e, 0x79, 0xa8, 0x08, 0x17, 0x48, 0x87, 0x41, 0x81, 0xe3, 0xff, 0x38, 0x21,
+	0xba, 0x64, 0xc8, 0x96, 0xea, 0xdf, 0x5a, 0xf4, 0x0d, 0xe0, 0x6e, 0xaa, 0xcb, 0x86, 0x62, 0xa9,
+	0x7e, 0x67, 0x82, 0x10, 0xf4, 0x63, 0x4c, 0xa9, 0xae, 0x34, 0x4a, 0x53, 0x9b, 0x4f, 0x32, 0x4c,
+	0x3a, 0x4f, 0xf0, 0x22, 0x67, 0x9c, 0xa0, 0x5f, 0x30, 0xe6, 0x55, 0x96, 0xe1, 0xf2, 0x12, 0x71,
+	0x81, 0x45, 0xc5, 0x75, 0xc9, 0x90, 0xac, 0xf1, 0xfc, 0xbb, 0xdd, 0xfe, 0xcd, 0x7e, 0x45, 0xd9,
+	0xc1, 0x15, 0x09, 0x1a, 0xc2, 0x1f, 0xf1, 0x6e, 0x8b, 0x7e, 0xc0, 0x54, 0xa4, 0x19, 0x89, 0x2a,
+	0x26, 0x52, 0x1a, 0xf1, 0xf8, 0x1f, 0x39, 0x57, 0x94, 0x9c, 0x75, 0xd9, 0x90, 0x2c, 0xc5, 0x47,
+	0xb5, 0x16, 0xd6, 0x52, 0x70, 0x53, 0xcc, 0x0c, 0x46, 0x2f, 0x1c, 0xd1, 0x27, 0x18, 0x38, 0xee,
+	0x7a, 0x11, 0xee, 0x0e, 0x5a, 0xaf, 0x6e, 0x5c, 0x6f, 0xb1, 0xdc, 0xb9, 0x8e, 0x26, 0xa1, 0x29,
+	0x68, 0xc1, 0x6a, 0xe3, 0x3a, 0xe1, 0xce, 0x75, 0xa2, 0x75, 0x78, 0x08, 0x7d, 0x57, 0x93, 0xd1,
+	0x04, 0x46, 0xf7, 0xa9, 0xb7, 0x3f, 0x6a, 0x0a, 0x1a, 0xc2, 0x47, 0x67, 0x1b, 0x5c, 0xb1, 0x7e,
+	0xed, 0x11, 0x7a, 0x3f, 0xbd, 0xfd, 0xd1, 0xd3, 0x3e, 0xcc, 0xff, 0xc0, 0x64, 0xd5, 0xde, 0x2a,
+	0xb8, 0x26, 0x82, 0x36, 0xa0, 0xb6, 0x7b, 0xa2, 0x2f, 0x6f, 0x6f, 0xdf, 0xc4, 0xf2, 0xf9, 0xeb,
+	0x7b, 0xa7, 0x31, 0x7b, 0xcb, 0xe1, 0xef, 0x4e, 0x14, 0xcf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xc0,
+	0x03, 0x26, 0x25, 0x2e, 0x02, 0x00, 0x00,
+}
diff --git a/v2/internal/capability/capability_service.proto b/v2/internal/capability/capability_service.proto
new file mode 100644
index 0000000..5660ab6
--- /dev/null
+++ b/v2/internal/capability/capability_service.proto
@@ -0,0 +1,28 @@
+syntax = "proto2";
+option go_package = "capability";
+
+package appengine;
+
+message IsEnabledRequest {
+  required string package = 1;
+  repeated string capability = 2;
+  repeated string call = 3;
+}
+
+message IsEnabledResponse {
+  enum SummaryStatus {
+    DEFAULT = 0;
+    ENABLED = 1;
+    SCHEDULED_FUTURE = 2;
+    SCHEDULED_NOW = 3;
+    DISABLED = 4;
+    UNKNOWN = 5;
+  }
+  optional SummaryStatus summary_status = 1;
+
+  optional int64 time_until_scheduled = 2;
+}
+
+service CapabilityService {
+  rpc IsEnabled(IsEnabledRequest) returns (IsEnabledResponse) {};
+}
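The CapabilityService above exposes a single IsEnabled method whose response carries a SummaryStatus enum. A minimal caller sketch follows (again not part of the diff); the helper name, the decision to treat the SCHEDULED_* statuses as still enabled, and the import aliases are illustrative assumptions rather than the package's actual behavior.

```go
package example

import (
	"context"

	"github.com/golang/protobuf/proto"

	"google.golang.org/appengine/v2/internal"
	capb "google.golang.org/appengine/v2/internal/capability"
)

// enabled reports whether the named capability of an API package is
// currently usable, counting "scheduled" states as enabled.
func enabled(ctx context.Context, api, capability string) (bool, error) {
	req := &capb.IsEnabledRequest{
		Package:    proto.String(api),
		Capability: []string{capability},
	}
	res := &capb.IsEnabledResponse{}
	if err := internal.Call(ctx, "capability_service", "IsEnabled", req, res); err != nil {
		return false, err
	}
	switch res.GetSummaryStatus() {
	case capb.IsEnabledResponse_ENABLED,
		capb.IsEnabledResponse_SCHEDULED_FUTURE,
		capb.IsEnabledResponse_SCHEDULED_NOW:
		return true, nil
	default:
		return false, nil
	}
}
```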
diff --git a/v2/internal/datastore/datastore_v3.pb.go b/v2/internal/datastore/datastore_v3.pb.go
new file mode 100644
index 0000000..b2ca254
--- /dev/null
+++ b/v2/internal/datastore/datastore_v3.pb.go
@@ -0,0 +1,4367 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/datastore/datastore_v3.proto
+
+package datastore
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type Property_Meaning int32
+
+const (
+	Property_NO_MEANING       Property_Meaning = 0
+	Property_BLOB             Property_Meaning = 14
+	Property_TEXT             Property_Meaning = 15
+	Property_BYTESTRING       Property_Meaning = 16
+	Property_ATOM_CATEGORY    Property_Meaning = 1
+	Property_ATOM_LINK        Property_Meaning = 2
+	Property_ATOM_TITLE       Property_Meaning = 3
+	Property_ATOM_CONTENT     Property_Meaning = 4
+	Property_ATOM_SUMMARY     Property_Meaning = 5
+	Property_ATOM_AUTHOR      Property_Meaning = 6
+	Property_GD_WHEN          Property_Meaning = 7
+	Property_GD_EMAIL         Property_Meaning = 8
+	Property_GEORSS_POINT     Property_Meaning = 9
+	Property_GD_IM            Property_Meaning = 10
+	Property_GD_PHONENUMBER   Property_Meaning = 11
+	Property_GD_POSTALADDRESS Property_Meaning = 12
+	Property_GD_RATING        Property_Meaning = 13
+	Property_BLOBKEY          Property_Meaning = 17
+	Property_ENTITY_PROTO     Property_Meaning = 19
+	Property_INDEX_VALUE      Property_Meaning = 18
+)
+
+var Property_Meaning_name = map[int32]string{
+	0:  "NO_MEANING",
+	14: "BLOB",
+	15: "TEXT",
+	16: "BYTESTRING",
+	1:  "ATOM_CATEGORY",
+	2:  "ATOM_LINK",
+	3:  "ATOM_TITLE",
+	4:  "ATOM_CONTENT",
+	5:  "ATOM_SUMMARY",
+	6:  "ATOM_AUTHOR",
+	7:  "GD_WHEN",
+	8:  "GD_EMAIL",
+	9:  "GEORSS_POINT",
+	10: "GD_IM",
+	11: "GD_PHONENUMBER",
+	12: "GD_POSTALADDRESS",
+	13: "GD_RATING",
+	17: "BLOBKEY",
+	19: "ENTITY_PROTO",
+	18: "INDEX_VALUE",
+}
+var Property_Meaning_value = map[string]int32{
+	"NO_MEANING":       0,
+	"BLOB":             14,
+	"TEXT":             15,
+	"BYTESTRING":       16,
+	"ATOM_CATEGORY":    1,
+	"ATOM_LINK":        2,
+	"ATOM_TITLE":       3,
+	"ATOM_CONTENT":     4,
+	"ATOM_SUMMARY":     5,
+	"ATOM_AUTHOR":      6,
+	"GD_WHEN":          7,
+	"GD_EMAIL":         8,
+	"GEORSS_POINT":     9,
+	"GD_IM":            10,
+	"GD_PHONENUMBER":   11,
+	"GD_POSTALADDRESS": 12,
+	"GD_RATING":        13,
+	"BLOBKEY":          17,
+	"ENTITY_PROTO":     19,
+	"INDEX_VALUE":      18,
+}
+
+func (x Property_Meaning) Enum() *Property_Meaning {
+	p := new(Property_Meaning)
+	*p = x
+	return p
+}
+func (x Property_Meaning) String() string {
+	return proto.EnumName(Property_Meaning_name, int32(x))
+}
+func (x *Property_Meaning) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(Property_Meaning_value, data, "Property_Meaning")
+	if err != nil {
+		return err
+	}
+	*x = Property_Meaning(value)
+	return nil
+}
+func (Property_Meaning) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{2, 0}
+}
+
+type Property_FtsTokenizationOption int32
+
+const (
+	Property_HTML Property_FtsTokenizationOption = 1
+	Property_ATOM Property_FtsTokenizationOption = 2
+)
+
+var Property_FtsTokenizationOption_name = map[int32]string{
+	1: "HTML",
+	2: "ATOM",
+}
+var Property_FtsTokenizationOption_value = map[string]int32{
+	"HTML": 1,
+	"ATOM": 2,
+}
+
+func (x Property_FtsTokenizationOption) Enum() *Property_FtsTokenizationOption {
+	p := new(Property_FtsTokenizationOption)
+	*p = x
+	return p
+}
+func (x Property_FtsTokenizationOption) String() string {
+	return proto.EnumName(Property_FtsTokenizationOption_name, int32(x))
+}
+func (x *Property_FtsTokenizationOption) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(Property_FtsTokenizationOption_value, data, "Property_FtsTokenizationOption")
+	if err != nil {
+		return err
+	}
+	*x = Property_FtsTokenizationOption(value)
+	return nil
+}
+func (Property_FtsTokenizationOption) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{2, 1}
+}
+
+type EntityProto_Kind int32
+
+const (
+	EntityProto_GD_CONTACT EntityProto_Kind = 1
+	EntityProto_GD_EVENT   EntityProto_Kind = 2
+	EntityProto_GD_MESSAGE EntityProto_Kind = 3
+)
+
+var EntityProto_Kind_name = map[int32]string{
+	1: "GD_CONTACT",
+	2: "GD_EVENT",
+	3: "GD_MESSAGE",
+}
+var EntityProto_Kind_value = map[string]int32{
+	"GD_CONTACT": 1,
+	"GD_EVENT":   2,
+	"GD_MESSAGE": 3,
+}
+
+func (x EntityProto_Kind) Enum() *EntityProto_Kind {
+	p := new(EntityProto_Kind)
+	*p = x
+	return p
+}
+func (x EntityProto_Kind) String() string {
+	return proto.EnumName(EntityProto_Kind_name, int32(x))
+}
+func (x *EntityProto_Kind) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(EntityProto_Kind_value, data, "EntityProto_Kind")
+	if err != nil {
+		return err
+	}
+	*x = EntityProto_Kind(value)
+	return nil
+}
+func (EntityProto_Kind) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{6, 0}
+}
+
+type Index_Property_Direction int32
+
+const (
+	Index_Property_ASCENDING  Index_Property_Direction = 1
+	Index_Property_DESCENDING Index_Property_Direction = 2
+)
+
+var Index_Property_Direction_name = map[int32]string{
+	1: "ASCENDING",
+	2: "DESCENDING",
+}
+var Index_Property_Direction_value = map[string]int32{
+	"ASCENDING":  1,
+	"DESCENDING": 2,
+}
+
+func (x Index_Property_Direction) Enum() *Index_Property_Direction {
+	p := new(Index_Property_Direction)
+	*p = x
+	return p
+}
+func (x Index_Property_Direction) String() string {
+	return proto.EnumName(Index_Property_Direction_name, int32(x))
+}
+func (x *Index_Property_Direction) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(Index_Property_Direction_value, data, "Index_Property_Direction")
+	if err != nil {
+		return err
+	}
+	*x = Index_Property_Direction(value)
+	return nil
+}
+func (Index_Property_Direction) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{8, 0, 0}
+}
+
+type CompositeIndex_State int32
+
+const (
+	CompositeIndex_WRITE_ONLY CompositeIndex_State = 1
+	CompositeIndex_READ_WRITE CompositeIndex_State = 2
+	CompositeIndex_DELETED    CompositeIndex_State = 3
+	CompositeIndex_ERROR      CompositeIndex_State = 4
+)
+
+var CompositeIndex_State_name = map[int32]string{
+	1: "WRITE_ONLY",
+	2: "READ_WRITE",
+	3: "DELETED",
+	4: "ERROR",
+}
+var CompositeIndex_State_value = map[string]int32{
+	"WRITE_ONLY": 1,
+	"READ_WRITE": 2,
+	"DELETED":    3,
+	"ERROR":      4,
+}
+
+func (x CompositeIndex_State) Enum() *CompositeIndex_State {
+	p := new(CompositeIndex_State)
+	*p = x
+	return p
+}
+func (x CompositeIndex_State) String() string {
+	return proto.EnumName(CompositeIndex_State_name, int32(x))
+}
+func (x *CompositeIndex_State) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(CompositeIndex_State_value, data, "CompositeIndex_State")
+	if err != nil {
+		return err
+	}
+	*x = CompositeIndex_State(value)
+	return nil
+}
+func (CompositeIndex_State) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{9, 0}
+}
+
+type Snapshot_Status int32
+
+const (
+	Snapshot_INACTIVE Snapshot_Status = 0
+	Snapshot_ACTIVE   Snapshot_Status = 1
+)
+
+var Snapshot_Status_name = map[int32]string{
+	0: "INACTIVE",
+	1: "ACTIVE",
+}
+var Snapshot_Status_value = map[string]int32{
+	"INACTIVE": 0,
+	"ACTIVE":   1,
+}
+
+func (x Snapshot_Status) Enum() *Snapshot_Status {
+	p := new(Snapshot_Status)
+	*p = x
+	return p
+}
+func (x Snapshot_Status) String() string {
+	return proto.EnumName(Snapshot_Status_name, int32(x))
+}
+func (x *Snapshot_Status) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(Snapshot_Status_value, data, "Snapshot_Status")
+	if err != nil {
+		return err
+	}
+	*x = Snapshot_Status(value)
+	return nil
+}
+func (Snapshot_Status) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{12, 0}
+}
+
+type Query_Hint int32
+
+const (
+	Query_ORDER_FIRST    Query_Hint = 1
+	Query_ANCESTOR_FIRST Query_Hint = 2
+	Query_FILTER_FIRST   Query_Hint = 3
+)
+
+var Query_Hint_name = map[int32]string{
+	1: "ORDER_FIRST",
+	2: "ANCESTOR_FIRST",
+	3: "FILTER_FIRST",
+}
+var Query_Hint_value = map[string]int32{
+	"ORDER_FIRST":    1,
+	"ANCESTOR_FIRST": 2,
+	"FILTER_FIRST":   3,
+}
+
+func (x Query_Hint) Enum() *Query_Hint {
+	p := new(Query_Hint)
+	*p = x
+	return p
+}
+func (x Query_Hint) String() string {
+	return proto.EnumName(Query_Hint_name, int32(x))
+}
+func (x *Query_Hint) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(Query_Hint_value, data, "Query_Hint")
+	if err != nil {
+		return err
+	}
+	*x = Query_Hint(value)
+	return nil
+}
+func (Query_Hint) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15, 0}
+}
+
+type Query_Filter_Operator int32
+
+const (
+	Query_Filter_LESS_THAN             Query_Filter_Operator = 1
+	Query_Filter_LESS_THAN_OR_EQUAL    Query_Filter_Operator = 2
+	Query_Filter_GREATER_THAN          Query_Filter_Operator = 3
+	Query_Filter_GREATER_THAN_OR_EQUAL Query_Filter_Operator = 4
+	Query_Filter_EQUAL                 Query_Filter_Operator = 5
+	Query_Filter_IN                    Query_Filter_Operator = 6
+	Query_Filter_EXISTS                Query_Filter_Operator = 7
+)
+
+var Query_Filter_Operator_name = map[int32]string{
+	1: "LESS_THAN",
+	2: "LESS_THAN_OR_EQUAL",
+	3: "GREATER_THAN",
+	4: "GREATER_THAN_OR_EQUAL",
+	5: "EQUAL",
+	6: "IN",
+	7: "EXISTS",
+}
+var Query_Filter_Operator_value = map[string]int32{
+	"LESS_THAN":             1,
+	"LESS_THAN_OR_EQUAL":    2,
+	"GREATER_THAN":          3,
+	"GREATER_THAN_OR_EQUAL": 4,
+	"EQUAL":                 5,
+	"IN":                    6,
+	"EXISTS":                7,
+}
+
+func (x Query_Filter_Operator) Enum() *Query_Filter_Operator {
+	p := new(Query_Filter_Operator)
+	*p = x
+	return p
+}
+func (x Query_Filter_Operator) String() string {
+	return proto.EnumName(Query_Filter_Operator_name, int32(x))
+}
+func (x *Query_Filter_Operator) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(Query_Filter_Operator_value, data, "Query_Filter_Operator")
+	if err != nil {
+		return err
+	}
+	*x = Query_Filter_Operator(value)
+	return nil
+}
+func (Query_Filter_Operator) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15, 0, 0}
+}
+
+type Query_Order_Direction int32
+
+const (
+	Query_Order_ASCENDING  Query_Order_Direction = 1
+	Query_Order_DESCENDING Query_Order_Direction = 2
+)
+
+var Query_Order_Direction_name = map[int32]string{
+	1: "ASCENDING",
+	2: "DESCENDING",
+}
+var Query_Order_Direction_value = map[string]int32{
+	"ASCENDING":  1,
+	"DESCENDING": 2,
+}
+
+func (x Query_Order_Direction) Enum() *Query_Order_Direction {
+	p := new(Query_Order_Direction)
+	*p = x
+	return p
+}
+func (x Query_Order_Direction) String() string {
+	return proto.EnumName(Query_Order_Direction_name, int32(x))
+}
+func (x *Query_Order_Direction) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(Query_Order_Direction_value, data, "Query_Order_Direction")
+	if err != nil {
+		return err
+	}
+	*x = Query_Order_Direction(value)
+	return nil
+}
+func (Query_Order_Direction) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15, 1, 0}
+}
+
+type Error_ErrorCode int32
+
+const (
+	Error_BAD_REQUEST                  Error_ErrorCode = 1
+	Error_CONCURRENT_TRANSACTION       Error_ErrorCode = 2
+	Error_INTERNAL_ERROR               Error_ErrorCode = 3
+	Error_NEED_INDEX                   Error_ErrorCode = 4
+	Error_TIMEOUT                      Error_ErrorCode = 5
+	Error_PERMISSION_DENIED            Error_ErrorCode = 6
+	Error_BIGTABLE_ERROR               Error_ErrorCode = 7
+	Error_COMMITTED_BUT_STILL_APPLYING Error_ErrorCode = 8
+	Error_CAPABILITY_DISABLED          Error_ErrorCode = 9
+	Error_TRY_ALTERNATE_BACKEND        Error_ErrorCode = 10
+	Error_SAFE_TIME_TOO_OLD            Error_ErrorCode = 11
+)
+
+var Error_ErrorCode_name = map[int32]string{
+	1:  "BAD_REQUEST",
+	2:  "CONCURRENT_TRANSACTION",
+	3:  "INTERNAL_ERROR",
+	4:  "NEED_INDEX",
+	5:  "TIMEOUT",
+	6:  "PERMISSION_DENIED",
+	7:  "BIGTABLE_ERROR",
+	8:  "COMMITTED_BUT_STILL_APPLYING",
+	9:  "CAPABILITY_DISABLED",
+	10: "TRY_ALTERNATE_BACKEND",
+	11: "SAFE_TIME_TOO_OLD",
+}
+var Error_ErrorCode_value = map[string]int32{
+	"BAD_REQUEST":                  1,
+	"CONCURRENT_TRANSACTION":       2,
+	"INTERNAL_ERROR":               3,
+	"NEED_INDEX":                   4,
+	"TIMEOUT":                      5,
+	"PERMISSION_DENIED":            6,
+	"BIGTABLE_ERROR":               7,
+	"COMMITTED_BUT_STILL_APPLYING": 8,
+	"CAPABILITY_DISABLED":          9,
+	"TRY_ALTERNATE_BACKEND":        10,
+	"SAFE_TIME_TOO_OLD":            11,
+}
+
+func (x Error_ErrorCode) Enum() *Error_ErrorCode {
+	p := new(Error_ErrorCode)
+	*p = x
+	return p
+}
+func (x Error_ErrorCode) String() string {
+	return proto.EnumName(Error_ErrorCode_name, int32(x))
+}
+func (x *Error_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(Error_ErrorCode_value, data, "Error_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = Error_ErrorCode(value)
+	return nil
+}
+func (Error_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{19, 0}
+}
+
+type PutRequest_AutoIdPolicy int32
+
+const (
+	PutRequest_CURRENT    PutRequest_AutoIdPolicy = 0
+	PutRequest_SEQUENTIAL PutRequest_AutoIdPolicy = 1
+)
+
+var PutRequest_AutoIdPolicy_name = map[int32]string{
+	0: "CURRENT",
+	1: "SEQUENTIAL",
+}
+var PutRequest_AutoIdPolicy_value = map[string]int32{
+	"CURRENT":    0,
+	"SEQUENTIAL": 1,
+}
+
+func (x PutRequest_AutoIdPolicy) Enum() *PutRequest_AutoIdPolicy {
+	p := new(PutRequest_AutoIdPolicy)
+	*p = x
+	return p
+}
+func (x PutRequest_AutoIdPolicy) String() string {
+	return proto.EnumName(PutRequest_AutoIdPolicy_name, int32(x))
+}
+func (x *PutRequest_AutoIdPolicy) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(PutRequest_AutoIdPolicy_value, data, "PutRequest_AutoIdPolicy")
+	if err != nil {
+		return err
+	}
+	*x = PutRequest_AutoIdPolicy(value)
+	return nil
+}
+func (PutRequest_AutoIdPolicy) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{23, 0}
+}
+
+type BeginTransactionRequest_TransactionMode int32
+
+const (
+	BeginTransactionRequest_UNKNOWN    BeginTransactionRequest_TransactionMode = 0
+	BeginTransactionRequest_READ_ONLY  BeginTransactionRequest_TransactionMode = 1
+	BeginTransactionRequest_READ_WRITE BeginTransactionRequest_TransactionMode = 2
+)
+
+var BeginTransactionRequest_TransactionMode_name = map[int32]string{
+	0: "UNKNOWN",
+	1: "READ_ONLY",
+	2: "READ_WRITE",
+}
+var BeginTransactionRequest_TransactionMode_value = map[string]int32{
+	"UNKNOWN":    0,
+	"READ_ONLY":  1,
+	"READ_WRITE": 2,
+}
+
+func (x BeginTransactionRequest_TransactionMode) Enum() *BeginTransactionRequest_TransactionMode {
+	p := new(BeginTransactionRequest_TransactionMode)
+	*p = x
+	return p
+}
+func (x BeginTransactionRequest_TransactionMode) String() string {
+	return proto.EnumName(BeginTransactionRequest_TransactionMode_name, int32(x))
+}
+func (x *BeginTransactionRequest_TransactionMode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(BeginTransactionRequest_TransactionMode_value, data, "BeginTransactionRequest_TransactionMode")
+	if err != nil {
+		return err
+	}
+	*x = BeginTransactionRequest_TransactionMode(value)
+	return nil
+}
+func (BeginTransactionRequest_TransactionMode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{36, 0}
+}
+
+type Action struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Action) Reset()         { *m = Action{} }
+func (m *Action) String() string { return proto.CompactTextString(m) }
+func (*Action) ProtoMessage()    {}
+func (*Action) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{0}
+}
+func (m *Action) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Action.Unmarshal(m, b)
+}
+func (m *Action) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Action.Marshal(b, m, deterministic)
+}
+func (dst *Action) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Action.Merge(dst, src)
+}
+func (m *Action) XXX_Size() int {
+	return xxx_messageInfo_Action.Size(m)
+}
+func (m *Action) XXX_DiscardUnknown() {
+	xxx_messageInfo_Action.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Action proto.InternalMessageInfo
+
+type PropertyValue struct {
+	Int64Value           *int64                        `protobuf:"varint,1,opt,name=int64Value" json:"int64Value,omitempty"`
+	BooleanValue         *bool                         `protobuf:"varint,2,opt,name=booleanValue" json:"booleanValue,omitempty"`
+	StringValue          *string                       `protobuf:"bytes,3,opt,name=stringValue" json:"stringValue,omitempty"`
+	DoubleValue          *float64                      `protobuf:"fixed64,4,opt,name=doubleValue" json:"doubleValue,omitempty"`
+	Pointvalue           *PropertyValue_PointValue     `protobuf:"group,5,opt,name=PointValue,json=pointvalue" json:"pointvalue,omitempty"`
+	Uservalue            *PropertyValue_UserValue      `protobuf:"group,8,opt,name=UserValue,json=uservalue" json:"uservalue,omitempty"`
+	Referencevalue       *PropertyValue_ReferenceValue `protobuf:"group,12,opt,name=ReferenceValue,json=referencevalue" json:"referencevalue,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                      `json:"-"`
+	XXX_unrecognized     []byte                        `json:"-"`
+	XXX_sizecache        int32                         `json:"-"`
+}
+
+func (m *PropertyValue) Reset()         { *m = PropertyValue{} }
+func (m *PropertyValue) String() string { return proto.CompactTextString(m) }
+func (*PropertyValue) ProtoMessage()    {}
+func (*PropertyValue) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{1}
+}
+func (m *PropertyValue) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_PropertyValue.Unmarshal(m, b)
+}
+func (m *PropertyValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_PropertyValue.Marshal(b, m, deterministic)
+}
+func (dst *PropertyValue) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_PropertyValue.Merge(dst, src)
+}
+func (m *PropertyValue) XXX_Size() int {
+	return xxx_messageInfo_PropertyValue.Size(m)
+}
+func (m *PropertyValue) XXX_DiscardUnknown() {
+	xxx_messageInfo_PropertyValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PropertyValue proto.InternalMessageInfo
+
+func (m *PropertyValue) GetInt64Value() int64 {
+	if m != nil && m.Int64Value != nil {
+		return *m.Int64Value
+	}
+	return 0
+}
+
+func (m *PropertyValue) GetBooleanValue() bool {
+	if m != nil && m.BooleanValue != nil {
+		return *m.BooleanValue
+	}
+	return false
+}
+
+func (m *PropertyValue) GetStringValue() string {
+	if m != nil && m.StringValue != nil {
+		return *m.StringValue
+	}
+	return ""
+}
+
+func (m *PropertyValue) GetDoubleValue() float64 {
+	if m != nil && m.DoubleValue != nil {
+		return *m.DoubleValue
+	}
+	return 0
+}
+
+func (m *PropertyValue) GetPointvalue() *PropertyValue_PointValue {
+	if m != nil {
+		return m.Pointvalue
+	}
+	return nil
+}
+
+func (m *PropertyValue) GetUservalue() *PropertyValue_UserValue {
+	if m != nil {
+		return m.Uservalue
+	}
+	return nil
+}
+
+func (m *PropertyValue) GetReferencevalue() *PropertyValue_ReferenceValue {
+	if m != nil {
+		return m.Referencevalue
+	}
+	return nil
+}
+
+type PropertyValue_PointValue struct {
+	X                    *float64 `protobuf:"fixed64,6,req,name=x" json:"x,omitempty"`
+	Y                    *float64 `protobuf:"fixed64,7,req,name=y" json:"y,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *PropertyValue_PointValue) Reset()         { *m = PropertyValue_PointValue{} }
+func (m *PropertyValue_PointValue) String() string { return proto.CompactTextString(m) }
+func (*PropertyValue_PointValue) ProtoMessage()    {}
+func (*PropertyValue_PointValue) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{1, 0}
+}
+func (m *PropertyValue_PointValue) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_PropertyValue_PointValue.Unmarshal(m, b)
+}
+func (m *PropertyValue_PointValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_PropertyValue_PointValue.Marshal(b, m, deterministic)
+}
+func (dst *PropertyValue_PointValue) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_PropertyValue_PointValue.Merge(dst, src)
+}
+func (m *PropertyValue_PointValue) XXX_Size() int {
+	return xxx_messageInfo_PropertyValue_PointValue.Size(m)
+}
+func (m *PropertyValue_PointValue) XXX_DiscardUnknown() {
+	xxx_messageInfo_PropertyValue_PointValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PropertyValue_PointValue proto.InternalMessageInfo
+
+func (m *PropertyValue_PointValue) GetX() float64 {
+	if m != nil && m.X != nil {
+		return *m.X
+	}
+	return 0
+}
+
+func (m *PropertyValue_PointValue) GetY() float64 {
+	if m != nil && m.Y != nil {
+		return *m.Y
+	}
+	return 0
+}
+
+type PropertyValue_UserValue struct {
+	Email                *string  `protobuf:"bytes,9,req,name=email" json:"email,omitempty"`
+	AuthDomain           *string  `protobuf:"bytes,10,req,name=auth_domain,json=authDomain" json:"auth_domain,omitempty"`
+	Nickname             *string  `protobuf:"bytes,11,opt,name=nickname" json:"nickname,omitempty"`
+	FederatedIdentity    *string  `protobuf:"bytes,21,opt,name=federated_identity,json=federatedIdentity" json:"federated_identity,omitempty"`
+	FederatedProvider    *string  `protobuf:"bytes,22,opt,name=federated_provider,json=federatedProvider" json:"federated_provider,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *PropertyValue_UserValue) Reset()         { *m = PropertyValue_UserValue{} }
+func (m *PropertyValue_UserValue) String() string { return proto.CompactTextString(m) }
+func (*PropertyValue_UserValue) ProtoMessage()    {}
+func (*PropertyValue_UserValue) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{1, 1}
+}
+func (m *PropertyValue_UserValue) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_PropertyValue_UserValue.Unmarshal(m, b)
+}
+func (m *PropertyValue_UserValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_PropertyValue_UserValue.Marshal(b, m, deterministic)
+}
+func (dst *PropertyValue_UserValue) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_PropertyValue_UserValue.Merge(dst, src)
+}
+func (m *PropertyValue_UserValue) XXX_Size() int {
+	return xxx_messageInfo_PropertyValue_UserValue.Size(m)
+}
+func (m *PropertyValue_UserValue) XXX_DiscardUnknown() {
+	xxx_messageInfo_PropertyValue_UserValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PropertyValue_UserValue proto.InternalMessageInfo
+
+func (m *PropertyValue_UserValue) GetEmail() string {
+	if m != nil && m.Email != nil {
+		return *m.Email
+	}
+	return ""
+}
+
+func (m *PropertyValue_UserValue) GetAuthDomain() string {
+	if m != nil && m.AuthDomain != nil {
+		return *m.AuthDomain
+	}
+	return ""
+}
+
+func (m *PropertyValue_UserValue) GetNickname() string {
+	if m != nil && m.Nickname != nil {
+		return *m.Nickname
+	}
+	return ""
+}
+
+func (m *PropertyValue_UserValue) GetFederatedIdentity() string {
+	if m != nil && m.FederatedIdentity != nil {
+		return *m.FederatedIdentity
+	}
+	return ""
+}
+
+func (m *PropertyValue_UserValue) GetFederatedProvider() string {
+	if m != nil && m.FederatedProvider != nil {
+		return *m.FederatedProvider
+	}
+	return ""
+}
+
+type PropertyValue_ReferenceValue struct {
+	App                  *string                                     `protobuf:"bytes,13,req,name=app" json:"app,omitempty"`
+	NameSpace            *string                                     `protobuf:"bytes,20,opt,name=name_space,json=nameSpace" json:"name_space,omitempty"`
+	Pathelement          []*PropertyValue_ReferenceValue_PathElement `protobuf:"group,14,rep,name=PathElement,json=pathelement" json:"pathelement,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                                    `json:"-"`
+	XXX_unrecognized     []byte                                      `json:"-"`
+	XXX_sizecache        int32                                       `json:"-"`
+}
+
+func (m *PropertyValue_ReferenceValue) Reset()         { *m = PropertyValue_ReferenceValue{} }
+func (m *PropertyValue_ReferenceValue) String() string { return proto.CompactTextString(m) }
+func (*PropertyValue_ReferenceValue) ProtoMessage()    {}
+func (*PropertyValue_ReferenceValue) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{1, 2}
+}
+func (m *PropertyValue_ReferenceValue) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_PropertyValue_ReferenceValue.Unmarshal(m, b)
+}
+func (m *PropertyValue_ReferenceValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_PropertyValue_ReferenceValue.Marshal(b, m, deterministic)
+}
+func (dst *PropertyValue_ReferenceValue) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_PropertyValue_ReferenceValue.Merge(dst, src)
+}
+func (m *PropertyValue_ReferenceValue) XXX_Size() int {
+	return xxx_messageInfo_PropertyValue_ReferenceValue.Size(m)
+}
+func (m *PropertyValue_ReferenceValue) XXX_DiscardUnknown() {
+	xxx_messageInfo_PropertyValue_ReferenceValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PropertyValue_ReferenceValue proto.InternalMessageInfo
+
+func (m *PropertyValue_ReferenceValue) GetApp() string {
+	if m != nil && m.App != nil {
+		return *m.App
+	}
+	return ""
+}
+
+func (m *PropertyValue_ReferenceValue) GetNameSpace() string {
+	if m != nil && m.NameSpace != nil {
+		return *m.NameSpace
+	}
+	return ""
+}
+
+func (m *PropertyValue_ReferenceValue) GetPathelement() []*PropertyValue_ReferenceValue_PathElement {
+	if m != nil {
+		return m.Pathelement
+	}
+	return nil
+}
+
+type PropertyValue_ReferenceValue_PathElement struct {
+	Type                 *string  `protobuf:"bytes,15,req,name=type" json:"type,omitempty"`
+	Id                   *int64   `protobuf:"varint,16,opt,name=id" json:"id,omitempty"`
+	Name                 *string  `protobuf:"bytes,17,opt,name=name" json:"name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *PropertyValue_ReferenceValue_PathElement) Reset() {
+	*m = PropertyValue_ReferenceValue_PathElement{}
+}
+func (m *PropertyValue_ReferenceValue_PathElement) String() string { return proto.CompactTextString(m) }
+func (*PropertyValue_ReferenceValue_PathElement) ProtoMessage()    {}
+func (*PropertyValue_ReferenceValue_PathElement) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{1, 2, 0}
+}
+func (m *PropertyValue_ReferenceValue_PathElement) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_PropertyValue_ReferenceValue_PathElement.Unmarshal(m, b)
+}
+func (m *PropertyValue_ReferenceValue_PathElement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_PropertyValue_ReferenceValue_PathElement.Marshal(b, m, deterministic)
+}
+func (dst *PropertyValue_ReferenceValue_PathElement) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_PropertyValue_ReferenceValue_PathElement.Merge(dst, src)
+}
+func (m *PropertyValue_ReferenceValue_PathElement) XXX_Size() int {
+	return xxx_messageInfo_PropertyValue_ReferenceValue_PathElement.Size(m)
+}
+func (m *PropertyValue_ReferenceValue_PathElement) XXX_DiscardUnknown() {
+	xxx_messageInfo_PropertyValue_ReferenceValue_PathElement.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PropertyValue_ReferenceValue_PathElement proto.InternalMessageInfo
+
+func (m *PropertyValue_ReferenceValue_PathElement) GetType() string {
+	if m != nil && m.Type != nil {
+		return *m.Type
+	}
+	return ""
+}
+
+func (m *PropertyValue_ReferenceValue_PathElement) GetId() int64 {
+	if m != nil && m.Id != nil {
+		return *m.Id
+	}
+	return 0
+}
+
+func (m *PropertyValue_ReferenceValue_PathElement) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+type Property struct {
+	Meaning               *Property_Meaning               `protobuf:"varint,1,opt,name=meaning,enum=appengine.Property_Meaning,def=0" json:"meaning,omitempty"`
+	MeaningUri            *string                         `protobuf:"bytes,2,opt,name=meaning_uri,json=meaningUri" json:"meaning_uri,omitempty"`
+	Name                  *string                         `protobuf:"bytes,3,req,name=name" json:"name,omitempty"`
+	Value                 *PropertyValue                  `protobuf:"bytes,5,req,name=value" json:"value,omitempty"`
+	Multiple              *bool                           `protobuf:"varint,4,req,name=multiple" json:"multiple,omitempty"`
+	Searchable            *bool                           `protobuf:"varint,6,opt,name=searchable,def=0" json:"searchable,omitempty"`
+	FtsTokenizationOption *Property_FtsTokenizationOption `protobuf:"varint,8,opt,name=fts_tokenization_option,json=ftsTokenizationOption,enum=appengine.Property_FtsTokenizationOption" json:"fts_tokenization_option,omitempty"`
+	Locale                *string                         `protobuf:"bytes,9,opt,name=locale,def=en" json:"locale,omitempty"`
+	XXX_NoUnkeyedLiteral  struct{}                        `json:"-"`
+	XXX_unrecognized      []byte                          `json:"-"`
+	XXX_sizecache         int32                           `json:"-"`
+}
+
+func (m *Property) Reset()         { *m = Property{} }
+func (m *Property) String() string { return proto.CompactTextString(m) }
+func (*Property) ProtoMessage()    {}
+func (*Property) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{2}
+}
+func (m *Property) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Property.Unmarshal(m, b)
+}
+func (m *Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Property.Marshal(b, m, deterministic)
+}
+func (dst *Property) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Property.Merge(dst, src)
+}
+func (m *Property) XXX_Size() int {
+	return xxx_messageInfo_Property.Size(m)
+}
+func (m *Property) XXX_DiscardUnknown() {
+	xxx_messageInfo_Property.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Property proto.InternalMessageInfo
+
+const Default_Property_Meaning Property_Meaning = Property_NO_MEANING
+const Default_Property_Searchable bool = false
+const Default_Property_Locale string = "en"
+
+func (m *Property) GetMeaning() Property_Meaning {
+	if m != nil && m.Meaning != nil {
+		return *m.Meaning
+	}
+	return Default_Property_Meaning
+}
+
+func (m *Property) GetMeaningUri() string {
+	if m != nil && m.MeaningUri != nil {
+		return *m.MeaningUri
+	}
+	return ""
+}
+
+func (m *Property) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *Property) GetValue() *PropertyValue {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+func (m *Property) GetMultiple() bool {
+	if m != nil && m.Multiple != nil {
+		return *m.Multiple
+	}
+	return false
+}
+
+func (m *Property) GetSearchable() bool {
+	if m != nil && m.Searchable != nil {
+		return *m.Searchable
+	}
+	return Default_Property_Searchable
+}
+
+func (m *Property) GetFtsTokenizationOption() Property_FtsTokenizationOption {
+	if m != nil && m.FtsTokenizationOption != nil {
+		return *m.FtsTokenizationOption
+	}
+	return Property_HTML
+}
+
+func (m *Property) GetLocale() string {
+	if m != nil && m.Locale != nil {
+		return *m.Locale
+	}
+	return Default_Property_Locale
+}
+
+type Path struct {
+	Element              []*Path_Element `protobuf:"group,1,rep,name=Element,json=element" json:"element,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
+	XXX_unrecognized     []byte          `json:"-"`
+	XXX_sizecache        int32           `json:"-"`
+}
+
+func (m *Path) Reset()         { *m = Path{} }
+func (m *Path) String() string { return proto.CompactTextString(m) }
+func (*Path) ProtoMessage()    {}
+func (*Path) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{3}
+}
+func (m *Path) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Path.Unmarshal(m, b)
+}
+func (m *Path) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Path.Marshal(b, m, deterministic)
+}
+func (dst *Path) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Path.Merge(dst, src)
+}
+func (m *Path) XXX_Size() int {
+	return xxx_messageInfo_Path.Size(m)
+}
+func (m *Path) XXX_DiscardUnknown() {
+	xxx_messageInfo_Path.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Path proto.InternalMessageInfo
+
+func (m *Path) GetElement() []*Path_Element {
+	if m != nil {
+		return m.Element
+	}
+	return nil
+}
+
+type Path_Element struct {
+	Type                 *string  `protobuf:"bytes,2,req,name=type" json:"type,omitempty"`
+	Id                   *int64   `protobuf:"varint,3,opt,name=id" json:"id,omitempty"`
+	Name                 *string  `protobuf:"bytes,4,opt,name=name" json:"name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Path_Element) Reset()         { *m = Path_Element{} }
+func (m *Path_Element) String() string { return proto.CompactTextString(m) }
+func (*Path_Element) ProtoMessage()    {}
+func (*Path_Element) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{3, 0}
+}
+func (m *Path_Element) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Path_Element.Unmarshal(m, b)
+}
+func (m *Path_Element) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Path_Element.Marshal(b, m, deterministic)
+}
+func (dst *Path_Element) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Path_Element.Merge(dst, src)
+}
+func (m *Path_Element) XXX_Size() int {
+	return xxx_messageInfo_Path_Element.Size(m)
+}
+func (m *Path_Element) XXX_DiscardUnknown() {
+	xxx_messageInfo_Path_Element.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Path_Element proto.InternalMessageInfo
+
+func (m *Path_Element) GetType() string {
+	if m != nil && m.Type != nil {
+		return *m.Type
+	}
+	return ""
+}
+
+func (m *Path_Element) GetId() int64 {
+	if m != nil && m.Id != nil {
+		return *m.Id
+	}
+	return 0
+}
+
+func (m *Path_Element) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+type Reference struct {
+	App                  *string  `protobuf:"bytes,13,req,name=app" json:"app,omitempty"`
+	NameSpace            *string  `protobuf:"bytes,20,opt,name=name_space,json=nameSpace" json:"name_space,omitempty"`
+	Path                 *Path    `protobuf:"bytes,14,req,name=path" json:"path,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Reference) Reset()         { *m = Reference{} }
+func (m *Reference) String() string { return proto.CompactTextString(m) }
+func (*Reference) ProtoMessage()    {}
+func (*Reference) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{4}
+}
+func (m *Reference) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Reference.Unmarshal(m, b)
+}
+func (m *Reference) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Reference.Marshal(b, m, deterministic)
+}
+func (dst *Reference) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Reference.Merge(dst, src)
+}
+func (m *Reference) XXX_Size() int {
+	return xxx_messageInfo_Reference.Size(m)
+}
+func (m *Reference) XXX_DiscardUnknown() {
+	xxx_messageInfo_Reference.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Reference proto.InternalMessageInfo
+
+func (m *Reference) GetApp() string {
+	if m != nil && m.App != nil {
+		return *m.App
+	}
+	return ""
+}
+
+func (m *Reference) GetNameSpace() string {
+	if m != nil && m.NameSpace != nil {
+		return *m.NameSpace
+	}
+	return ""
+}
+
+func (m *Reference) GetPath() *Path {
+	if m != nil {
+		return m.Path
+	}
+	return nil
+}
+
+type User struct {
+	Email                *string  `protobuf:"bytes,1,req,name=email" json:"email,omitempty"`
+	AuthDomain           *string  `protobuf:"bytes,2,req,name=auth_domain,json=authDomain" json:"auth_domain,omitempty"`
+	Nickname             *string  `protobuf:"bytes,3,opt,name=nickname" json:"nickname,omitempty"`
+	FederatedIdentity    *string  `protobuf:"bytes,6,opt,name=federated_identity,json=federatedIdentity" json:"federated_identity,omitempty"`
+	FederatedProvider    *string  `protobuf:"bytes,7,opt,name=federated_provider,json=federatedProvider" json:"federated_provider,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *User) Reset()         { *m = User{} }
+func (m *User) String() string { return proto.CompactTextString(m) }
+func (*User) ProtoMessage()    {}
+func (*User) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{5}
+}
+func (m *User) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_User.Unmarshal(m, b)
+}
+func (m *User) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_User.Marshal(b, m, deterministic)
+}
+func (dst *User) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_User.Merge(dst, src)
+}
+func (m *User) XXX_Size() int {
+	return xxx_messageInfo_User.Size(m)
+}
+func (m *User) XXX_DiscardUnknown() {
+	xxx_messageInfo_User.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_User proto.InternalMessageInfo
+
+func (m *User) GetEmail() string {
+	if m != nil && m.Email != nil {
+		return *m.Email
+	}
+	return ""
+}
+
+func (m *User) GetAuthDomain() string {
+	if m != nil && m.AuthDomain != nil {
+		return *m.AuthDomain
+	}
+	return ""
+}
+
+func (m *User) GetNickname() string {
+	if m != nil && m.Nickname != nil {
+		return *m.Nickname
+	}
+	return ""
+}
+
+func (m *User) GetFederatedIdentity() string {
+	if m != nil && m.FederatedIdentity != nil {
+		return *m.FederatedIdentity
+	}
+	return ""
+}
+
+func (m *User) GetFederatedProvider() string {
+	if m != nil && m.FederatedProvider != nil {
+		return *m.FederatedProvider
+	}
+	return ""
+}
+
+type EntityProto struct {
+	Key                  *Reference        `protobuf:"bytes,13,req,name=key" json:"key,omitempty"`
+	EntityGroup          *Path             `protobuf:"bytes,16,req,name=entity_group,json=entityGroup" json:"entity_group,omitempty"`
+	Owner                *User             `protobuf:"bytes,17,opt,name=owner" json:"owner,omitempty"`
+	Kind                 *EntityProto_Kind `protobuf:"varint,4,opt,name=kind,enum=appengine.EntityProto_Kind" json:"kind,omitempty"`
+	KindUri              *string           `protobuf:"bytes,5,opt,name=kind_uri,json=kindUri" json:"kind_uri,omitempty"`
+	Property             []*Property       `protobuf:"bytes,14,rep,name=property" json:"property,omitempty"`
+	RawProperty          []*Property       `protobuf:"bytes,15,rep,name=raw_property,json=rawProperty" json:"raw_property,omitempty"`
+	Rank                 *int32            `protobuf:"varint,18,opt,name=rank" json:"rank,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}          `json:"-"`
+	XXX_unrecognized     []byte            `json:"-"`
+	XXX_sizecache        int32             `json:"-"`
+}
+
+func (m *EntityProto) Reset()         { *m = EntityProto{} }
+func (m *EntityProto) String() string { return proto.CompactTextString(m) }
+func (*EntityProto) ProtoMessage()    {}
+func (*EntityProto) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{6}
+}
+func (m *EntityProto) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_EntityProto.Unmarshal(m, b)
+}
+func (m *EntityProto) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_EntityProto.Marshal(b, m, deterministic)
+}
+func (dst *EntityProto) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_EntityProto.Merge(dst, src)
+}
+func (m *EntityProto) XXX_Size() int {
+	return xxx_messageInfo_EntityProto.Size(m)
+}
+func (m *EntityProto) XXX_DiscardUnknown() {
+	xxx_messageInfo_EntityProto.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_EntityProto proto.InternalMessageInfo
+
+func (m *EntityProto) GetKey() *Reference {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *EntityProto) GetEntityGroup() *Path {
+	if m != nil {
+		return m.EntityGroup
+	}
+	return nil
+}
+
+func (m *EntityProto) GetOwner() *User {
+	if m != nil {
+		return m.Owner
+	}
+	return nil
+}
+
+func (m *EntityProto) GetKind() EntityProto_Kind {
+	if m != nil && m.Kind != nil {
+		return *m.Kind
+	}
+	return EntityProto_GD_CONTACT
+}
+
+func (m *EntityProto) GetKindUri() string {
+	if m != nil && m.KindUri != nil {
+		return *m.KindUri
+	}
+	return ""
+}
+
+func (m *EntityProto) GetProperty() []*Property {
+	if m != nil {
+		return m.Property
+	}
+	return nil
+}
+
+func (m *EntityProto) GetRawProperty() []*Property {
+	if m != nil {
+		return m.RawProperty
+	}
+	return nil
+}
+
+func (m *EntityProto) GetRank() int32 {
+	if m != nil && m.Rank != nil {
+		return *m.Rank
+	}
+	return 0
+}
+
+type CompositeProperty struct {
+	IndexId              *int64   `protobuf:"varint,1,req,name=index_id,json=indexId" json:"index_id,omitempty"`
+	Value                []string `protobuf:"bytes,2,rep,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CompositeProperty) Reset()         { *m = CompositeProperty{} }
+func (m *CompositeProperty) String() string { return proto.CompactTextString(m) }
+func (*CompositeProperty) ProtoMessage()    {}
+func (*CompositeProperty) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{7}
+}
+func (m *CompositeProperty) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompositeProperty.Unmarshal(m, b)
+}
+func (m *CompositeProperty) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompositeProperty.Marshal(b, m, deterministic)
+}
+func (dst *CompositeProperty) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompositeProperty.Merge(dst, src)
+}
+func (m *CompositeProperty) XXX_Size() int {
+	return xxx_messageInfo_CompositeProperty.Size(m)
+}
+func (m *CompositeProperty) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompositeProperty.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompositeProperty proto.InternalMessageInfo
+
+func (m *CompositeProperty) GetIndexId() int64 {
+	if m != nil && m.IndexId != nil {
+		return *m.IndexId
+	}
+	return 0
+}
+
+func (m *CompositeProperty) GetValue() []string {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+type Index struct {
+	EntityType           *string           `protobuf:"bytes,1,req,name=entity_type,json=entityType" json:"entity_type,omitempty"`
+	Ancestor             *bool             `protobuf:"varint,5,req,name=ancestor" json:"ancestor,omitempty"`
+	Property             []*Index_Property `protobuf:"group,2,rep,name=Property,json=property" json:"property,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}          `json:"-"`
+	XXX_unrecognized     []byte            `json:"-"`
+	XXX_sizecache        int32             `json:"-"`
+}
+
+func (m *Index) Reset()         { *m = Index{} }
+func (m *Index) String() string { return proto.CompactTextString(m) }
+func (*Index) ProtoMessage()    {}
+func (*Index) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{8}
+}
+func (m *Index) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Index.Unmarshal(m, b)
+}
+func (m *Index) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Index.Marshal(b, m, deterministic)
+}
+func (dst *Index) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Index.Merge(dst, src)
+}
+func (m *Index) XXX_Size() int {
+	return xxx_messageInfo_Index.Size(m)
+}
+func (m *Index) XXX_DiscardUnknown() {
+	xxx_messageInfo_Index.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Index proto.InternalMessageInfo
+
+func (m *Index) GetEntityType() string {
+	if m != nil && m.EntityType != nil {
+		return *m.EntityType
+	}
+	return ""
+}
+
+func (m *Index) GetAncestor() bool {
+	if m != nil && m.Ancestor != nil {
+		return *m.Ancestor
+	}
+	return false
+}
+
+func (m *Index) GetProperty() []*Index_Property {
+	if m != nil {
+		return m.Property
+	}
+	return nil
+}
+
+type Index_Property struct {
+	Name                 *string                   `protobuf:"bytes,3,req,name=name" json:"name,omitempty"`
+	Direction            *Index_Property_Direction `protobuf:"varint,4,opt,name=direction,enum=appengine.Index_Property_Direction,def=1" json:"direction,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                  `json:"-"`
+	XXX_unrecognized     []byte                    `json:"-"`
+	XXX_sizecache        int32                     `json:"-"`
+}
+
+func (m *Index_Property) Reset()         { *m = Index_Property{} }
+func (m *Index_Property) String() string { return proto.CompactTextString(m) }
+func (*Index_Property) ProtoMessage()    {}
+func (*Index_Property) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{8, 0}
+}
+func (m *Index_Property) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Index_Property.Unmarshal(m, b)
+}
+func (m *Index_Property) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Index_Property.Marshal(b, m, deterministic)
+}
+func (dst *Index_Property) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Index_Property.Merge(dst, src)
+}
+func (m *Index_Property) XXX_Size() int {
+	return xxx_messageInfo_Index_Property.Size(m)
+}
+func (m *Index_Property) XXX_DiscardUnknown() {
+	xxx_messageInfo_Index_Property.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Index_Property proto.InternalMessageInfo
+
+const Default_Index_Property_Direction Index_Property_Direction = Index_Property_ASCENDING
+
+func (m *Index_Property) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *Index_Property) GetDirection() Index_Property_Direction {
+	if m != nil && m.Direction != nil {
+		return *m.Direction
+	}
+	return Default_Index_Property_Direction
+}
+
+type CompositeIndex struct {
+	AppId                *string               `protobuf:"bytes,1,req,name=app_id,json=appId" json:"app_id,omitempty"`
+	Id                   *int64                `protobuf:"varint,2,req,name=id" json:"id,omitempty"`
+	Definition           *Index                `protobuf:"bytes,3,req,name=definition" json:"definition,omitempty"`
+	State                *CompositeIndex_State `protobuf:"varint,4,req,name=state,enum=appengine.CompositeIndex_State" json:"state,omitempty"`
+	OnlyUseIfRequired    *bool                 `protobuf:"varint,6,opt,name=only_use_if_required,json=onlyUseIfRequired,def=0" json:"only_use_if_required,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
+	XXX_unrecognized     []byte                `json:"-"`
+	XXX_sizecache        int32                 `json:"-"`
+}
+
+func (m *CompositeIndex) Reset()         { *m = CompositeIndex{} }
+func (m *CompositeIndex) String() string { return proto.CompactTextString(m) }
+func (*CompositeIndex) ProtoMessage()    {}
+func (*CompositeIndex) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{9}
+}
+func (m *CompositeIndex) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompositeIndex.Unmarshal(m, b)
+}
+func (m *CompositeIndex) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompositeIndex.Marshal(b, m, deterministic)
+}
+func (dst *CompositeIndex) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompositeIndex.Merge(dst, src)
+}
+func (m *CompositeIndex) XXX_Size() int {
+	return xxx_messageInfo_CompositeIndex.Size(m)
+}
+func (m *CompositeIndex) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompositeIndex.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompositeIndex proto.InternalMessageInfo
+
+const Default_CompositeIndex_OnlyUseIfRequired bool = false
+
+func (m *CompositeIndex) GetAppId() string {
+	if m != nil && m.AppId != nil {
+		return *m.AppId
+	}
+	return ""
+}
+
+func (m *CompositeIndex) GetId() int64 {
+	if m != nil && m.Id != nil {
+		return *m.Id
+	}
+	return 0
+}
+
+func (m *CompositeIndex) GetDefinition() *Index {
+	if m != nil {
+		return m.Definition
+	}
+	return nil
+}
+
+func (m *CompositeIndex) GetState() CompositeIndex_State {
+	if m != nil && m.State != nil {
+		return *m.State
+	}
+	return CompositeIndex_WRITE_ONLY
+}
+
+func (m *CompositeIndex) GetOnlyUseIfRequired() bool {
+	if m != nil && m.OnlyUseIfRequired != nil {
+		return *m.OnlyUseIfRequired
+	}
+	return Default_CompositeIndex_OnlyUseIfRequired
+}
+
+type IndexPostfix struct {
+	IndexValue           []*IndexPostfix_IndexValue `protobuf:"bytes,1,rep,name=index_value,json=indexValue" json:"index_value,omitempty"`
+	Key                  *Reference                 `protobuf:"bytes,2,opt,name=key" json:"key,omitempty"`
+	Before               *bool                      `protobuf:"varint,3,opt,name=before,def=1" json:"before,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                   `json:"-"`
+	XXX_unrecognized     []byte                     `json:"-"`
+	XXX_sizecache        int32                      `json:"-"`
+}
+
+func (m *IndexPostfix) Reset()         { *m = IndexPostfix{} }
+func (m *IndexPostfix) String() string { return proto.CompactTextString(m) }
+func (*IndexPostfix) ProtoMessage()    {}
+func (*IndexPostfix) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{10}
+}
+func (m *IndexPostfix) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_IndexPostfix.Unmarshal(m, b)
+}
+func (m *IndexPostfix) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_IndexPostfix.Marshal(b, m, deterministic)
+}
+func (dst *IndexPostfix) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_IndexPostfix.Merge(dst, src)
+}
+func (m *IndexPostfix) XXX_Size() int {
+	return xxx_messageInfo_IndexPostfix.Size(m)
+}
+func (m *IndexPostfix) XXX_DiscardUnknown() {
+	xxx_messageInfo_IndexPostfix.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_IndexPostfix proto.InternalMessageInfo
+
+const Default_IndexPostfix_Before bool = true
+
+func (m *IndexPostfix) GetIndexValue() []*IndexPostfix_IndexValue {
+	if m != nil {
+		return m.IndexValue
+	}
+	return nil
+}
+
+func (m *IndexPostfix) GetKey() *Reference {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *IndexPostfix) GetBefore() bool {
+	if m != nil && m.Before != nil {
+		return *m.Before
+	}
+	return Default_IndexPostfix_Before
+}
+
+type IndexPostfix_IndexValue struct {
+	PropertyName         *string        `protobuf:"bytes,1,req,name=property_name,json=propertyName" json:"property_name,omitempty"`
+	Value                *PropertyValue `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}       `json:"-"`
+	XXX_unrecognized     []byte         `json:"-"`
+	XXX_sizecache        int32          `json:"-"`
+}
+
+func (m *IndexPostfix_IndexValue) Reset()         { *m = IndexPostfix_IndexValue{} }
+func (m *IndexPostfix_IndexValue) String() string { return proto.CompactTextString(m) }
+func (*IndexPostfix_IndexValue) ProtoMessage()    {}
+func (*IndexPostfix_IndexValue) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{10, 0}
+}
+func (m *IndexPostfix_IndexValue) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_IndexPostfix_IndexValue.Unmarshal(m, b)
+}
+func (m *IndexPostfix_IndexValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_IndexPostfix_IndexValue.Marshal(b, m, deterministic)
+}
+func (dst *IndexPostfix_IndexValue) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_IndexPostfix_IndexValue.Merge(dst, src)
+}
+func (m *IndexPostfix_IndexValue) XXX_Size() int {
+	return xxx_messageInfo_IndexPostfix_IndexValue.Size(m)
+}
+func (m *IndexPostfix_IndexValue) XXX_DiscardUnknown() {
+	xxx_messageInfo_IndexPostfix_IndexValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_IndexPostfix_IndexValue proto.InternalMessageInfo
+
+func (m *IndexPostfix_IndexValue) GetPropertyName() string {
+	if m != nil && m.PropertyName != nil {
+		return *m.PropertyName
+	}
+	return ""
+}
+
+func (m *IndexPostfix_IndexValue) GetValue() *PropertyValue {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+type IndexPosition struct {
+	Key                  *string  `protobuf:"bytes,1,opt,name=key" json:"key,omitempty"`
+	Before               *bool    `protobuf:"varint,2,opt,name=before,def=1" json:"before,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *IndexPosition) Reset()         { *m = IndexPosition{} }
+func (m *IndexPosition) String() string { return proto.CompactTextString(m) }
+func (*IndexPosition) ProtoMessage()    {}
+func (*IndexPosition) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{11}
+}
+func (m *IndexPosition) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_IndexPosition.Unmarshal(m, b)
+}
+func (m *IndexPosition) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_IndexPosition.Marshal(b, m, deterministic)
+}
+func (dst *IndexPosition) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_IndexPosition.Merge(dst, src)
+}
+func (m *IndexPosition) XXX_Size() int {
+	return xxx_messageInfo_IndexPosition.Size(m)
+}
+func (m *IndexPosition) XXX_DiscardUnknown() {
+	xxx_messageInfo_IndexPosition.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_IndexPosition proto.InternalMessageInfo
+
+const Default_IndexPosition_Before bool = true
+
+func (m *IndexPosition) GetKey() string {
+	if m != nil && m.Key != nil {
+		return *m.Key
+	}
+	return ""
+}
+
+func (m *IndexPosition) GetBefore() bool {
+	if m != nil && m.Before != nil {
+		return *m.Before
+	}
+	return Default_IndexPosition_Before
+}
+
+type Snapshot struct {
+	Ts                   *int64   `protobuf:"varint,1,req,name=ts" json:"ts,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Snapshot) Reset()         { *m = Snapshot{} }
+func (m *Snapshot) String() string { return proto.CompactTextString(m) }
+func (*Snapshot) ProtoMessage()    {}
+func (*Snapshot) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{12}
+}
+func (m *Snapshot) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Snapshot.Unmarshal(m, b)
+}
+func (m *Snapshot) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Snapshot.Marshal(b, m, deterministic)
+}
+func (dst *Snapshot) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Snapshot.Merge(dst, src)
+}
+func (m *Snapshot) XXX_Size() int {
+	return xxx_messageInfo_Snapshot.Size(m)
+}
+func (m *Snapshot) XXX_DiscardUnknown() {
+	xxx_messageInfo_Snapshot.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Snapshot proto.InternalMessageInfo
+
+func (m *Snapshot) GetTs() int64 {
+	if m != nil && m.Ts != nil {
+		return *m.Ts
+	}
+	return 0
+}
+
+type InternalHeader struct {
+	Qos                  *string  `protobuf:"bytes,1,opt,name=qos" json:"qos,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *InternalHeader) Reset()         { *m = InternalHeader{} }
+func (m *InternalHeader) String() string { return proto.CompactTextString(m) }
+func (*InternalHeader) ProtoMessage()    {}
+func (*InternalHeader) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{13}
+}
+func (m *InternalHeader) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_InternalHeader.Unmarshal(m, b)
+}
+func (m *InternalHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_InternalHeader.Marshal(b, m, deterministic)
+}
+func (dst *InternalHeader) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_InternalHeader.Merge(dst, src)
+}
+func (m *InternalHeader) XXX_Size() int {
+	return xxx_messageInfo_InternalHeader.Size(m)
+}
+func (m *InternalHeader) XXX_DiscardUnknown() {
+	xxx_messageInfo_InternalHeader.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_InternalHeader proto.InternalMessageInfo
+
+func (m *InternalHeader) GetQos() string {
+	if m != nil && m.Qos != nil {
+		return *m.Qos
+	}
+	return ""
+}
+
+type Transaction struct {
+	Header               *InternalHeader `protobuf:"bytes,4,opt,name=header" json:"header,omitempty"`
+	Handle               *uint64         `protobuf:"fixed64,1,req,name=handle" json:"handle,omitempty"`
+	App                  *string         `protobuf:"bytes,2,req,name=app" json:"app,omitempty"`
+	MarkChanges          *bool           `protobuf:"varint,3,opt,name=mark_changes,json=markChanges,def=0" json:"mark_changes,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
+	XXX_unrecognized     []byte          `json:"-"`
+	XXX_sizecache        int32           `json:"-"`
+}
+
+func (m *Transaction) Reset()         { *m = Transaction{} }
+func (m *Transaction) String() string { return proto.CompactTextString(m) }
+func (*Transaction) ProtoMessage()    {}
+func (*Transaction) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{14}
+}
+func (m *Transaction) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Transaction.Unmarshal(m, b)
+}
+func (m *Transaction) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Transaction.Marshal(b, m, deterministic)
+}
+func (dst *Transaction) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Transaction.Merge(dst, src)
+}
+func (m *Transaction) XXX_Size() int {
+	return xxx_messageInfo_Transaction.Size(m)
+}
+func (m *Transaction) XXX_DiscardUnknown() {
+	xxx_messageInfo_Transaction.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Transaction proto.InternalMessageInfo
+
+const Default_Transaction_MarkChanges bool = false
+
+func (m *Transaction) GetHeader() *InternalHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *Transaction) GetHandle() uint64 {
+	if m != nil && m.Handle != nil {
+		return *m.Handle
+	}
+	return 0
+}
+
+func (m *Transaction) GetApp() string {
+	if m != nil && m.App != nil {
+		return *m.App
+	}
+	return ""
+}
+
+func (m *Transaction) GetMarkChanges() bool {
+	if m != nil && m.MarkChanges != nil {
+		return *m.MarkChanges
+	}
+	return Default_Transaction_MarkChanges
+}
+
+type Query struct {
+	Header               *InternalHeader   `protobuf:"bytes,39,opt,name=header" json:"header,omitempty"`
+	App                  *string           `protobuf:"bytes,1,req,name=app" json:"app,omitempty"`
+	NameSpace            *string           `protobuf:"bytes,29,opt,name=name_space,json=nameSpace" json:"name_space,omitempty"`
+	Kind                 *string           `protobuf:"bytes,3,opt,name=kind" json:"kind,omitempty"`
+	Ancestor             *Reference        `protobuf:"bytes,17,opt,name=ancestor" json:"ancestor,omitempty"`
+	Filter               []*Query_Filter   `protobuf:"group,4,rep,name=Filter,json=filter" json:"filter,omitempty"`
+	SearchQuery          *string           `protobuf:"bytes,8,opt,name=search_query,json=searchQuery" json:"search_query,omitempty"`
+	Order                []*Query_Order    `protobuf:"group,9,rep,name=Order,json=order" json:"order,omitempty"`
+	Hint                 *Query_Hint       `protobuf:"varint,18,opt,name=hint,enum=appengine.Query_Hint" json:"hint,omitempty"`
+	Count                *int32            `protobuf:"varint,23,opt,name=count" json:"count,omitempty"`
+	Offset               *int32            `protobuf:"varint,12,opt,name=offset,def=0" json:"offset,omitempty"`
+	Limit                *int32            `protobuf:"varint,16,opt,name=limit" json:"limit,omitempty"`
+	CompiledCursor       *CompiledCursor   `protobuf:"bytes,30,opt,name=compiled_cursor,json=compiledCursor" json:"compiled_cursor,omitempty"`
+	EndCompiledCursor    *CompiledCursor   `protobuf:"bytes,31,opt,name=end_compiled_cursor,json=endCompiledCursor" json:"end_compiled_cursor,omitempty"`
+	CompositeIndex       []*CompositeIndex `protobuf:"bytes,19,rep,name=composite_index,json=compositeIndex" json:"composite_index,omitempty"`
+	RequirePerfectPlan   *bool             `protobuf:"varint,20,opt,name=require_perfect_plan,json=requirePerfectPlan,def=0" json:"require_perfect_plan,omitempty"`
+	KeysOnly             *bool             `protobuf:"varint,21,opt,name=keys_only,json=keysOnly,def=0" json:"keys_only,omitempty"`
+	Transaction          *Transaction      `protobuf:"bytes,22,opt,name=transaction" json:"transaction,omitempty"`
+	Compile              *bool             `protobuf:"varint,25,opt,name=compile,def=0" json:"compile,omitempty"`
+	FailoverMs           *int64            `protobuf:"varint,26,opt,name=failover_ms,json=failoverMs" json:"failover_ms,omitempty"`
+	Strong               *bool             `protobuf:"varint,32,opt,name=strong" json:"strong,omitempty"`
+	PropertyName         []string          `protobuf:"bytes,33,rep,name=property_name,json=propertyName" json:"property_name,omitempty"`
+	GroupByPropertyName  []string          `protobuf:"bytes,34,rep,name=group_by_property_name,json=groupByPropertyName" json:"group_by_property_name,omitempty"`
+	Distinct             *bool             `protobuf:"varint,24,opt,name=distinct" json:"distinct,omitempty"`
+	MinSafeTimeSeconds   *int64            `protobuf:"varint,35,opt,name=min_safe_time_seconds,json=minSafeTimeSeconds" json:"min_safe_time_seconds,omitempty"`
+	SafeReplicaName      []string          `protobuf:"bytes,36,rep,name=safe_replica_name,json=safeReplicaName" json:"safe_replica_name,omitempty"`
+	PersistOffset        *bool             `protobuf:"varint,37,opt,name=persist_offset,json=persistOffset,def=0" json:"persist_offset,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}          `json:"-"`
+	XXX_unrecognized     []byte            `json:"-"`
+	XXX_sizecache        int32             `json:"-"`
+}
+
+func (m *Query) Reset()         { *m = Query{} }
+func (m *Query) String() string { return proto.CompactTextString(m) }
+func (*Query) ProtoMessage()    {}
+func (*Query) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15}
+}
+func (m *Query) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Query.Unmarshal(m, b)
+}
+func (m *Query) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Query.Marshal(b, m, deterministic)
+}
+func (dst *Query) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Query.Merge(dst, src)
+}
+func (m *Query) XXX_Size() int {
+	return xxx_messageInfo_Query.Size(m)
+}
+func (m *Query) XXX_DiscardUnknown() {
+	xxx_messageInfo_Query.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Query proto.InternalMessageInfo
+
+const Default_Query_Offset int32 = 0
+const Default_Query_RequirePerfectPlan bool = false
+const Default_Query_KeysOnly bool = false
+const Default_Query_Compile bool = false
+const Default_Query_PersistOffset bool = false
+
+func (m *Query) GetHeader() *InternalHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *Query) GetApp() string {
+	if m != nil && m.App != nil {
+		return *m.App
+	}
+	return ""
+}
+
+func (m *Query) GetNameSpace() string {
+	if m != nil && m.NameSpace != nil {
+		return *m.NameSpace
+	}
+	return ""
+}
+
+func (m *Query) GetKind() string {
+	if m != nil && m.Kind != nil {
+		return *m.Kind
+	}
+	return ""
+}
+
+func (m *Query) GetAncestor() *Reference {
+	if m != nil {
+		return m.Ancestor
+	}
+	return nil
+}
+
+func (m *Query) GetFilter() []*Query_Filter {
+	if m != nil {
+		return m.Filter
+	}
+	return nil
+}
+
+func (m *Query) GetSearchQuery() string {
+	if m != nil && m.SearchQuery != nil {
+		return *m.SearchQuery
+	}
+	return ""
+}
+
+func (m *Query) GetOrder() []*Query_Order {
+	if m != nil {
+		return m.Order
+	}
+	return nil
+}
+
+func (m *Query) GetHint() Query_Hint {
+	if m != nil && m.Hint != nil {
+		return *m.Hint
+	}
+	return Query_ORDER_FIRST
+}
+
+func (m *Query) GetCount() int32 {
+	if m != nil && m.Count != nil {
+		return *m.Count
+	}
+	return 0
+}
+
+func (m *Query) GetOffset() int32 {
+	if m != nil && m.Offset != nil {
+		return *m.Offset
+	}
+	return Default_Query_Offset
+}
+
+func (m *Query) GetLimit() int32 {
+	if m != nil && m.Limit != nil {
+		return *m.Limit
+	}
+	return 0
+}
+
+func (m *Query) GetCompiledCursor() *CompiledCursor {
+	if m != nil {
+		return m.CompiledCursor
+	}
+	return nil
+}
+
+func (m *Query) GetEndCompiledCursor() *CompiledCursor {
+	if m != nil {
+		return m.EndCompiledCursor
+	}
+	return nil
+}
+
+func (m *Query) GetCompositeIndex() []*CompositeIndex {
+	if m != nil {
+		return m.CompositeIndex
+	}
+	return nil
+}
+
+func (m *Query) GetRequirePerfectPlan() bool {
+	if m != nil && m.RequirePerfectPlan != nil {
+		return *m.RequirePerfectPlan
+	}
+	return Default_Query_RequirePerfectPlan
+}
+
+func (m *Query) GetKeysOnly() bool {
+	if m != nil && m.KeysOnly != nil {
+		return *m.KeysOnly
+	}
+	return Default_Query_KeysOnly
+}
+
+func (m *Query) GetTransaction() *Transaction {
+	if m != nil {
+		return m.Transaction
+	}
+	return nil
+}
+
+func (m *Query) GetCompile() bool {
+	if m != nil && m.Compile != nil {
+		return *m.Compile
+	}
+	return Default_Query_Compile
+}
+
+func (m *Query) GetFailoverMs() int64 {
+	if m != nil && m.FailoverMs != nil {
+		return *m.FailoverMs
+	}
+	return 0
+}
+
+func (m *Query) GetStrong() bool {
+	if m != nil && m.Strong != nil {
+		return *m.Strong
+	}
+	return false
+}
+
+func (m *Query) GetPropertyName() []string {
+	if m != nil {
+		return m.PropertyName
+	}
+	return nil
+}
+
+func (m *Query) GetGroupByPropertyName() []string {
+	if m != nil {
+		return m.GroupByPropertyName
+	}
+	return nil
+}
+
+func (m *Query) GetDistinct() bool {
+	if m != nil && m.Distinct != nil {
+		return *m.Distinct
+	}
+	return false
+}
+
+func (m *Query) GetMinSafeTimeSeconds() int64 {
+	if m != nil && m.MinSafeTimeSeconds != nil {
+		return *m.MinSafeTimeSeconds
+	}
+	return 0
+}
+
+func (m *Query) GetSafeReplicaName() []string {
+	if m != nil {
+		return m.SafeReplicaName
+	}
+	return nil
+}
+
+func (m *Query) GetPersistOffset() bool {
+	if m != nil && m.PersistOffset != nil {
+		return *m.PersistOffset
+	}
+	return Default_Query_PersistOffset
+}
+
+type Query_Filter struct {
+	Op                   *Query_Filter_Operator `protobuf:"varint,6,req,name=op,enum=appengine.Query_Filter_Operator" json:"op,omitempty"`
+	Property             []*Property            `protobuf:"bytes,14,rep,name=property" json:"property,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}               `json:"-"`
+	XXX_unrecognized     []byte                 `json:"-"`
+	XXX_sizecache        int32                  `json:"-"`
+}
+
+func (m *Query_Filter) Reset()         { *m = Query_Filter{} }
+func (m *Query_Filter) String() string { return proto.CompactTextString(m) }
+func (*Query_Filter) ProtoMessage()    {}
+func (*Query_Filter) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15, 0}
+}
+func (m *Query_Filter) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Query_Filter.Unmarshal(m, b)
+}
+func (m *Query_Filter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Query_Filter.Marshal(b, m, deterministic)
+}
+func (dst *Query_Filter) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Query_Filter.Merge(dst, src)
+}
+func (m *Query_Filter) XXX_Size() int {
+	return xxx_messageInfo_Query_Filter.Size(m)
+}
+func (m *Query_Filter) XXX_DiscardUnknown() {
+	xxx_messageInfo_Query_Filter.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Query_Filter proto.InternalMessageInfo
+
+func (m *Query_Filter) GetOp() Query_Filter_Operator {
+	if m != nil && m.Op != nil {
+		return *m.Op
+	}
+	return Query_Filter_LESS_THAN
+}
+
+func (m *Query_Filter) GetProperty() []*Property {
+	if m != nil {
+		return m.Property
+	}
+	return nil
+}
+
+type Query_Order struct {
+	Property             *string                `protobuf:"bytes,10,req,name=property" json:"property,omitempty"`
+	Direction            *Query_Order_Direction `protobuf:"varint,11,opt,name=direction,enum=appengine.Query_Order_Direction,def=1" json:"direction,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}               `json:"-"`
+	XXX_unrecognized     []byte                 `json:"-"`
+	XXX_sizecache        int32                  `json:"-"`
+}
+
+func (m *Query_Order) Reset()         { *m = Query_Order{} }
+func (m *Query_Order) String() string { return proto.CompactTextString(m) }
+func (*Query_Order) ProtoMessage()    {}
+func (*Query_Order) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{15, 1}
+}
+func (m *Query_Order) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Query_Order.Unmarshal(m, b)
+}
+func (m *Query_Order) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Query_Order.Marshal(b, m, deterministic)
+}
+func (dst *Query_Order) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Query_Order.Merge(dst, src)
+}
+func (m *Query_Order) XXX_Size() int {
+	return xxx_messageInfo_Query_Order.Size(m)
+}
+func (m *Query_Order) XXX_DiscardUnknown() {
+	xxx_messageInfo_Query_Order.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Query_Order proto.InternalMessageInfo
+
+const Default_Query_Order_Direction Query_Order_Direction = Query_Order_ASCENDING
+
+func (m *Query_Order) GetProperty() string {
+	if m != nil && m.Property != nil {
+		return *m.Property
+	}
+	return ""
+}
+
+func (m *Query_Order) GetDirection() Query_Order_Direction {
+	if m != nil && m.Direction != nil {
+		return *m.Direction
+	}
+	return Default_Query_Order_Direction
+}
+
+type CompiledQuery struct {
+	Primaryscan          *CompiledQuery_PrimaryScan     `protobuf:"group,1,req,name=PrimaryScan,json=primaryscan" json:"primaryscan,omitempty"`
+	Mergejoinscan        []*CompiledQuery_MergeJoinScan `protobuf:"group,7,rep,name=MergeJoinScan,json=mergejoinscan" json:"mergejoinscan,omitempty"`
+	IndexDef             *Index                         `protobuf:"bytes,21,opt,name=index_def,json=indexDef" json:"index_def,omitempty"`
+	Offset               *int32                         `protobuf:"varint,10,opt,name=offset,def=0" json:"offset,omitempty"`
+	Limit                *int32                         `protobuf:"varint,11,opt,name=limit" json:"limit,omitempty"`
+	KeysOnly             *bool                          `protobuf:"varint,12,req,name=keys_only,json=keysOnly" json:"keys_only,omitempty"`
+	PropertyName         []string                       `protobuf:"bytes,24,rep,name=property_name,json=propertyName" json:"property_name,omitempty"`
+	DistinctInfixSize    *int32                         `protobuf:"varint,25,opt,name=distinct_infix_size,json=distinctInfixSize" json:"distinct_infix_size,omitempty"`
+	Entityfilter         *CompiledQuery_EntityFilter    `protobuf:"group,13,opt,name=EntityFilter,json=entityfilter" json:"entityfilter,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                       `json:"-"`
+	XXX_unrecognized     []byte                         `json:"-"`
+	XXX_sizecache        int32                          `json:"-"`
+}
+
+func (m *CompiledQuery) Reset()         { *m = CompiledQuery{} }
+func (m *CompiledQuery) String() string { return proto.CompactTextString(m) }
+func (*CompiledQuery) ProtoMessage()    {}
+func (*CompiledQuery) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{16}
+}
+func (m *CompiledQuery) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompiledQuery.Unmarshal(m, b)
+}
+func (m *CompiledQuery) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompiledQuery.Marshal(b, m, deterministic)
+}
+func (dst *CompiledQuery) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompiledQuery.Merge(dst, src)
+}
+func (m *CompiledQuery) XXX_Size() int {
+	return xxx_messageInfo_CompiledQuery.Size(m)
+}
+func (m *CompiledQuery) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompiledQuery.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledQuery proto.InternalMessageInfo
+
+const Default_CompiledQuery_Offset int32 = 0
+
+func (m *CompiledQuery) GetPrimaryscan() *CompiledQuery_PrimaryScan {
+	if m != nil {
+		return m.Primaryscan
+	}
+	return nil
+}
+
+func (m *CompiledQuery) GetMergejoinscan() []*CompiledQuery_MergeJoinScan {
+	if m != nil {
+		return m.Mergejoinscan
+	}
+	return nil
+}
+
+func (m *CompiledQuery) GetIndexDef() *Index {
+	if m != nil {
+		return m.IndexDef
+	}
+	return nil
+}
+
+func (m *CompiledQuery) GetOffset() int32 {
+	if m != nil && m.Offset != nil {
+		return *m.Offset
+	}
+	return Default_CompiledQuery_Offset
+}
+
+func (m *CompiledQuery) GetLimit() int32 {
+	if m != nil && m.Limit != nil {
+		return *m.Limit
+	}
+	return 0
+}
+
+func (m *CompiledQuery) GetKeysOnly() bool {
+	if m != nil && m.KeysOnly != nil {
+		return *m.KeysOnly
+	}
+	return false
+}
+
+func (m *CompiledQuery) GetPropertyName() []string {
+	if m != nil {
+		return m.PropertyName
+	}
+	return nil
+}
+
+func (m *CompiledQuery) GetDistinctInfixSize() int32 {
+	if m != nil && m.DistinctInfixSize != nil {
+		return *m.DistinctInfixSize
+	}
+	return 0
+}
+
+func (m *CompiledQuery) GetEntityfilter() *CompiledQuery_EntityFilter {
+	if m != nil {
+		return m.Entityfilter
+	}
+	return nil
+}
+
+type CompiledQuery_PrimaryScan struct {
+	IndexName                  *string  `protobuf:"bytes,2,opt,name=index_name,json=indexName" json:"index_name,omitempty"`
+	StartKey                   *string  `protobuf:"bytes,3,opt,name=start_key,json=startKey" json:"start_key,omitempty"`
+	StartInclusive             *bool    `protobuf:"varint,4,opt,name=start_inclusive,json=startInclusive" json:"start_inclusive,omitempty"`
+	EndKey                     *string  `protobuf:"bytes,5,opt,name=end_key,json=endKey" json:"end_key,omitempty"`
+	EndInclusive               *bool    `protobuf:"varint,6,opt,name=end_inclusive,json=endInclusive" json:"end_inclusive,omitempty"`
+	StartPostfixValue          []string `protobuf:"bytes,22,rep,name=start_postfix_value,json=startPostfixValue" json:"start_postfix_value,omitempty"`
+	EndPostfixValue            []string `protobuf:"bytes,23,rep,name=end_postfix_value,json=endPostfixValue" json:"end_postfix_value,omitempty"`
+	EndUnappliedLogTimestampUs *int64   `protobuf:"varint,19,opt,name=end_unapplied_log_timestamp_us,json=endUnappliedLogTimestampUs" json:"end_unapplied_log_timestamp_us,omitempty"`
+	XXX_NoUnkeyedLiteral       struct{} `json:"-"`
+	XXX_unrecognized           []byte   `json:"-"`
+	XXX_sizecache              int32    `json:"-"`
+}
+
+func (m *CompiledQuery_PrimaryScan) Reset()         { *m = CompiledQuery_PrimaryScan{} }
+func (m *CompiledQuery_PrimaryScan) String() string { return proto.CompactTextString(m) }
+func (*CompiledQuery_PrimaryScan) ProtoMessage()    {}
+func (*CompiledQuery_PrimaryScan) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{16, 0}
+}
+func (m *CompiledQuery_PrimaryScan) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompiledQuery_PrimaryScan.Unmarshal(m, b)
+}
+func (m *CompiledQuery_PrimaryScan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompiledQuery_PrimaryScan.Marshal(b, m, deterministic)
+}
+func (dst *CompiledQuery_PrimaryScan) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompiledQuery_PrimaryScan.Merge(dst, src)
+}
+func (m *CompiledQuery_PrimaryScan) XXX_Size() int {
+	return xxx_messageInfo_CompiledQuery_PrimaryScan.Size(m)
+}
+func (m *CompiledQuery_PrimaryScan) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompiledQuery_PrimaryScan.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledQuery_PrimaryScan proto.InternalMessageInfo
+
+func (m *CompiledQuery_PrimaryScan) GetIndexName() string {
+	if m != nil && m.IndexName != nil {
+		return *m.IndexName
+	}
+	return ""
+}
+
+func (m *CompiledQuery_PrimaryScan) GetStartKey() string {
+	if m != nil && m.StartKey != nil {
+		return *m.StartKey
+	}
+	return ""
+}
+
+func (m *CompiledQuery_PrimaryScan) GetStartInclusive() bool {
+	if m != nil && m.StartInclusive != nil {
+		return *m.StartInclusive
+	}
+	return false
+}
+
+func (m *CompiledQuery_PrimaryScan) GetEndKey() string {
+	if m != nil && m.EndKey != nil {
+		return *m.EndKey
+	}
+	return ""
+}
+
+func (m *CompiledQuery_PrimaryScan) GetEndInclusive() bool {
+	if m != nil && m.EndInclusive != nil {
+		return *m.EndInclusive
+	}
+	return false
+}
+
+func (m *CompiledQuery_PrimaryScan) GetStartPostfixValue() []string {
+	if m != nil {
+		return m.StartPostfixValue
+	}
+	return nil
+}
+
+func (m *CompiledQuery_PrimaryScan) GetEndPostfixValue() []string {
+	if m != nil {
+		return m.EndPostfixValue
+	}
+	return nil
+}
+
+func (m *CompiledQuery_PrimaryScan) GetEndUnappliedLogTimestampUs() int64 {
+	if m != nil && m.EndUnappliedLogTimestampUs != nil {
+		return *m.EndUnappliedLogTimestampUs
+	}
+	return 0
+}
+
+type CompiledQuery_MergeJoinScan struct {
+	IndexName            *string  `protobuf:"bytes,8,req,name=index_name,json=indexName" json:"index_name,omitempty"`
+	PrefixValue          []string `protobuf:"bytes,9,rep,name=prefix_value,json=prefixValue" json:"prefix_value,omitempty"`
+	ValuePrefix          *bool    `protobuf:"varint,20,opt,name=value_prefix,json=valuePrefix,def=0" json:"value_prefix,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CompiledQuery_MergeJoinScan) Reset()         { *m = CompiledQuery_MergeJoinScan{} }
+func (m *CompiledQuery_MergeJoinScan) String() string { return proto.CompactTextString(m) }
+func (*CompiledQuery_MergeJoinScan) ProtoMessage()    {}
+func (*CompiledQuery_MergeJoinScan) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{16, 1}
+}
+func (m *CompiledQuery_MergeJoinScan) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompiledQuery_MergeJoinScan.Unmarshal(m, b)
+}
+func (m *CompiledQuery_MergeJoinScan) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompiledQuery_MergeJoinScan.Marshal(b, m, deterministic)
+}
+func (dst *CompiledQuery_MergeJoinScan) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompiledQuery_MergeJoinScan.Merge(dst, src)
+}
+func (m *CompiledQuery_MergeJoinScan) XXX_Size() int {
+	return xxx_messageInfo_CompiledQuery_MergeJoinScan.Size(m)
+}
+func (m *CompiledQuery_MergeJoinScan) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompiledQuery_MergeJoinScan.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledQuery_MergeJoinScan proto.InternalMessageInfo
+
+const Default_CompiledQuery_MergeJoinScan_ValuePrefix bool = false
+
+func (m *CompiledQuery_MergeJoinScan) GetIndexName() string {
+	if m != nil && m.IndexName != nil {
+		return *m.IndexName
+	}
+	return ""
+}
+
+func (m *CompiledQuery_MergeJoinScan) GetPrefixValue() []string {
+	if m != nil {
+		return m.PrefixValue
+	}
+	return nil
+}
+
+func (m *CompiledQuery_MergeJoinScan) GetValuePrefix() bool {
+	if m != nil && m.ValuePrefix != nil {
+		return *m.ValuePrefix
+	}
+	return Default_CompiledQuery_MergeJoinScan_ValuePrefix
+}
+
+type CompiledQuery_EntityFilter struct {
+	Distinct             *bool      `protobuf:"varint,14,opt,name=distinct,def=0" json:"distinct,omitempty"`
+	Kind                 *string    `protobuf:"bytes,17,opt,name=kind" json:"kind,omitempty"`
+	Ancestor             *Reference `protobuf:"bytes,18,opt,name=ancestor" json:"ancestor,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}   `json:"-"`
+	XXX_unrecognized     []byte     `json:"-"`
+	XXX_sizecache        int32      `json:"-"`
+}
+
+func (m *CompiledQuery_EntityFilter) Reset()         { *m = CompiledQuery_EntityFilter{} }
+func (m *CompiledQuery_EntityFilter) String() string { return proto.CompactTextString(m) }
+func (*CompiledQuery_EntityFilter) ProtoMessage()    {}
+func (*CompiledQuery_EntityFilter) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{16, 2}
+}
+func (m *CompiledQuery_EntityFilter) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompiledQuery_EntityFilter.Unmarshal(m, b)
+}
+func (m *CompiledQuery_EntityFilter) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompiledQuery_EntityFilter.Marshal(b, m, deterministic)
+}
+func (dst *CompiledQuery_EntityFilter) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompiledQuery_EntityFilter.Merge(dst, src)
+}
+func (m *CompiledQuery_EntityFilter) XXX_Size() int {
+	return xxx_messageInfo_CompiledQuery_EntityFilter.Size(m)
+}
+func (m *CompiledQuery_EntityFilter) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompiledQuery_EntityFilter.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledQuery_EntityFilter proto.InternalMessageInfo
+
+const Default_CompiledQuery_EntityFilter_Distinct bool = false
+
+func (m *CompiledQuery_EntityFilter) GetDistinct() bool {
+	if m != nil && m.Distinct != nil {
+		return *m.Distinct
+	}
+	return Default_CompiledQuery_EntityFilter_Distinct
+}
+
+func (m *CompiledQuery_EntityFilter) GetKind() string {
+	if m != nil && m.Kind != nil {
+		return *m.Kind
+	}
+	return ""
+}
+
+func (m *CompiledQuery_EntityFilter) GetAncestor() *Reference {
+	if m != nil {
+		return m.Ancestor
+	}
+	return nil
+}
+
+type CompiledCursor struct {
+	Position             *CompiledCursor_Position `protobuf:"group,2,opt,name=Position,json=position" json:"position,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                 `json:"-"`
+	XXX_unrecognized     []byte                   `json:"-"`
+	XXX_sizecache        int32                    `json:"-"`
+}
+
+func (m *CompiledCursor) Reset()         { *m = CompiledCursor{} }
+func (m *CompiledCursor) String() string { return proto.CompactTextString(m) }
+func (*CompiledCursor) ProtoMessage()    {}
+func (*CompiledCursor) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{17}
+}
+func (m *CompiledCursor) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompiledCursor.Unmarshal(m, b)
+}
+func (m *CompiledCursor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompiledCursor.Marshal(b, m, deterministic)
+}
+func (dst *CompiledCursor) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompiledCursor.Merge(dst, src)
+}
+func (m *CompiledCursor) XXX_Size() int {
+	return xxx_messageInfo_CompiledCursor.Size(m)
+}
+func (m *CompiledCursor) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompiledCursor.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledCursor proto.InternalMessageInfo
+
+func (m *CompiledCursor) GetPosition() *CompiledCursor_Position {
+	if m != nil {
+		return m.Position
+	}
+	return nil
+}
+
+type CompiledCursor_Position struct {
+	StartKey             *string                               `protobuf:"bytes,27,opt,name=start_key,json=startKey" json:"start_key,omitempty"`
+	Indexvalue           []*CompiledCursor_Position_IndexValue `protobuf:"group,29,rep,name=IndexValue,json=indexvalue" json:"indexvalue,omitempty"`
+	Key                  *Reference                            `protobuf:"bytes,32,opt,name=key" json:"key,omitempty"`
+	StartInclusive       *bool                                 `protobuf:"varint,28,opt,name=start_inclusive,json=startInclusive,def=1" json:"start_inclusive,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                              `json:"-"`
+	XXX_unrecognized     []byte                                `json:"-"`
+	XXX_sizecache        int32                                 `json:"-"`
+}
+
+func (m *CompiledCursor_Position) Reset()         { *m = CompiledCursor_Position{} }
+func (m *CompiledCursor_Position) String() string { return proto.CompactTextString(m) }
+func (*CompiledCursor_Position) ProtoMessage()    {}
+func (*CompiledCursor_Position) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{17, 0}
+}
+func (m *CompiledCursor_Position) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompiledCursor_Position.Unmarshal(m, b)
+}
+func (m *CompiledCursor_Position) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompiledCursor_Position.Marshal(b, m, deterministic)
+}
+func (dst *CompiledCursor_Position) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompiledCursor_Position.Merge(dst, src)
+}
+func (m *CompiledCursor_Position) XXX_Size() int {
+	return xxx_messageInfo_CompiledCursor_Position.Size(m)
+}
+func (m *CompiledCursor_Position) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompiledCursor_Position.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledCursor_Position proto.InternalMessageInfo
+
+const Default_CompiledCursor_Position_StartInclusive bool = true
+
+func (m *CompiledCursor_Position) GetStartKey() string {
+	if m != nil && m.StartKey != nil {
+		return *m.StartKey
+	}
+	return ""
+}
+
+func (m *CompiledCursor_Position) GetIndexvalue() []*CompiledCursor_Position_IndexValue {
+	if m != nil {
+		return m.Indexvalue
+	}
+	return nil
+}
+
+func (m *CompiledCursor_Position) GetKey() *Reference {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *CompiledCursor_Position) GetStartInclusive() bool {
+	if m != nil && m.StartInclusive != nil {
+		return *m.StartInclusive
+	}
+	return Default_CompiledCursor_Position_StartInclusive
+}
+
+type CompiledCursor_Position_IndexValue struct {
+	Property             *string        `protobuf:"bytes,30,opt,name=property" json:"property,omitempty"`
+	Value                *PropertyValue `protobuf:"bytes,31,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}       `json:"-"`
+	XXX_unrecognized     []byte         `json:"-"`
+	XXX_sizecache        int32          `json:"-"`
+}
+
+func (m *CompiledCursor_Position_IndexValue) Reset()         { *m = CompiledCursor_Position_IndexValue{} }
+func (m *CompiledCursor_Position_IndexValue) String() string { return proto.CompactTextString(m) }
+func (*CompiledCursor_Position_IndexValue) ProtoMessage()    {}
+func (*CompiledCursor_Position_IndexValue) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{17, 0, 0}
+}
+func (m *CompiledCursor_Position_IndexValue) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompiledCursor_Position_IndexValue.Unmarshal(m, b)
+}
+func (m *CompiledCursor_Position_IndexValue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompiledCursor_Position_IndexValue.Marshal(b, m, deterministic)
+}
+func (dst *CompiledCursor_Position_IndexValue) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompiledCursor_Position_IndexValue.Merge(dst, src)
+}
+func (m *CompiledCursor_Position_IndexValue) XXX_Size() int {
+	return xxx_messageInfo_CompiledCursor_Position_IndexValue.Size(m)
+}
+func (m *CompiledCursor_Position_IndexValue) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompiledCursor_Position_IndexValue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompiledCursor_Position_IndexValue proto.InternalMessageInfo
+
+func (m *CompiledCursor_Position_IndexValue) GetProperty() string {
+	if m != nil && m.Property != nil {
+		return *m.Property
+	}
+	return ""
+}
+
+func (m *CompiledCursor_Position_IndexValue) GetValue() *PropertyValue {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+type Cursor struct {
+	Cursor               *uint64  `protobuf:"fixed64,1,req,name=cursor" json:"cursor,omitempty"`
+	App                  *string  `protobuf:"bytes,2,opt,name=app" json:"app,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Cursor) Reset()         { *m = Cursor{} }
+func (m *Cursor) String() string { return proto.CompactTextString(m) }
+func (*Cursor) ProtoMessage()    {}
+func (*Cursor) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{18}
+}
+func (m *Cursor) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Cursor.Unmarshal(m, b)
+}
+func (m *Cursor) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Cursor.Marshal(b, m, deterministic)
+}
+func (dst *Cursor) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Cursor.Merge(dst, src)
+}
+func (m *Cursor) XXX_Size() int {
+	return xxx_messageInfo_Cursor.Size(m)
+}
+func (m *Cursor) XXX_DiscardUnknown() {
+	xxx_messageInfo_Cursor.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Cursor proto.InternalMessageInfo
+
+func (m *Cursor) GetCursor() uint64 {
+	if m != nil && m.Cursor != nil {
+		return *m.Cursor
+	}
+	return 0
+}
+
+func (m *Cursor) GetApp() string {
+	if m != nil && m.App != nil {
+		return *m.App
+	}
+	return ""
+}
+
+type Error struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Error) Reset()         { *m = Error{} }
+func (m *Error) String() string { return proto.CompactTextString(m) }
+func (*Error) ProtoMessage()    {}
+func (*Error) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{19}
+}
+func (m *Error) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Error.Unmarshal(m, b)
+}
+func (m *Error) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Error.Marshal(b, m, deterministic)
+}
+func (dst *Error) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Error.Merge(dst, src)
+}
+func (m *Error) XXX_Size() int {
+	return xxx_messageInfo_Error.Size(m)
+}
+func (m *Error) XXX_DiscardUnknown() {
+	xxx_messageInfo_Error.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Error proto.InternalMessageInfo
+
+type Cost struct {
+	IndexWrites             *int32           `protobuf:"varint,1,opt,name=index_writes,json=indexWrites" json:"index_writes,omitempty"`
+	IndexWriteBytes         *int32           `protobuf:"varint,2,opt,name=index_write_bytes,json=indexWriteBytes" json:"index_write_bytes,omitempty"`
+	EntityWrites            *int32           `protobuf:"varint,3,opt,name=entity_writes,json=entityWrites" json:"entity_writes,omitempty"`
+	EntityWriteBytes        *int32           `protobuf:"varint,4,opt,name=entity_write_bytes,json=entityWriteBytes" json:"entity_write_bytes,omitempty"`
+	Commitcost              *Cost_CommitCost `protobuf:"group,5,opt,name=CommitCost,json=commitcost" json:"commitcost,omitempty"`
+	ApproximateStorageDelta *int32           `protobuf:"varint,8,opt,name=approximate_storage_delta,json=approximateStorageDelta" json:"approximate_storage_delta,omitempty"`
+	IdSequenceUpdates       *int32           `protobuf:"varint,9,opt,name=id_sequence_updates,json=idSequenceUpdates" json:"id_sequence_updates,omitempty"`
+	XXX_NoUnkeyedLiteral    struct{}         `json:"-"`
+	XXX_unrecognized        []byte           `json:"-"`
+	XXX_sizecache           int32            `json:"-"`
+}
+
+func (m *Cost) Reset()         { *m = Cost{} }
+func (m *Cost) String() string { return proto.CompactTextString(m) }
+func (*Cost) ProtoMessage()    {}
+func (*Cost) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{20}
+}
+func (m *Cost) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Cost.Unmarshal(m, b)
+}
+func (m *Cost) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Cost.Marshal(b, m, deterministic)
+}
+func (dst *Cost) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Cost.Merge(dst, src)
+}
+func (m *Cost) XXX_Size() int {
+	return xxx_messageInfo_Cost.Size(m)
+}
+func (m *Cost) XXX_DiscardUnknown() {
+	xxx_messageInfo_Cost.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Cost proto.InternalMessageInfo
+
+func (m *Cost) GetIndexWrites() int32 {
+	if m != nil && m.IndexWrites != nil {
+		return *m.IndexWrites
+	}
+	return 0
+}
+
+func (m *Cost) GetIndexWriteBytes() int32 {
+	if m != nil && m.IndexWriteBytes != nil {
+		return *m.IndexWriteBytes
+	}
+	return 0
+}
+
+func (m *Cost) GetEntityWrites() int32 {
+	if m != nil && m.EntityWrites != nil {
+		return *m.EntityWrites
+	}
+	return 0
+}
+
+func (m *Cost) GetEntityWriteBytes() int32 {
+	if m != nil && m.EntityWriteBytes != nil {
+		return *m.EntityWriteBytes
+	}
+	return 0
+}
+
+func (m *Cost) GetCommitcost() *Cost_CommitCost {
+	if m != nil {
+		return m.Commitcost
+	}
+	return nil
+}
+
+func (m *Cost) GetApproximateStorageDelta() int32 {
+	if m != nil && m.ApproximateStorageDelta != nil {
+		return *m.ApproximateStorageDelta
+	}
+	return 0
+}
+
+func (m *Cost) GetIdSequenceUpdates() int32 {
+	if m != nil && m.IdSequenceUpdates != nil {
+		return *m.IdSequenceUpdates
+	}
+	return 0
+}
+
+type Cost_CommitCost struct {
+	RequestedEntityPuts    *int32   `protobuf:"varint,6,opt,name=requested_entity_puts,json=requestedEntityPuts" json:"requested_entity_puts,omitempty"`
+	RequestedEntityDeletes *int32   `protobuf:"varint,7,opt,name=requested_entity_deletes,json=requestedEntityDeletes" json:"requested_entity_deletes,omitempty"`
+	XXX_NoUnkeyedLiteral   struct{} `json:"-"`
+	XXX_unrecognized       []byte   `json:"-"`
+	XXX_sizecache          int32    `json:"-"`
+}
+
+func (m *Cost_CommitCost) Reset()         { *m = Cost_CommitCost{} }
+func (m *Cost_CommitCost) String() string { return proto.CompactTextString(m) }
+func (*Cost_CommitCost) ProtoMessage()    {}
+func (*Cost_CommitCost) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{20, 0}
+}
+func (m *Cost_CommitCost) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Cost_CommitCost.Unmarshal(m, b)
+}
+func (m *Cost_CommitCost) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Cost_CommitCost.Marshal(b, m, deterministic)
+}
+func (dst *Cost_CommitCost) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Cost_CommitCost.Merge(dst, src)
+}
+func (m *Cost_CommitCost) XXX_Size() int {
+	return xxx_messageInfo_Cost_CommitCost.Size(m)
+}
+func (m *Cost_CommitCost) XXX_DiscardUnknown() {
+	xxx_messageInfo_Cost_CommitCost.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Cost_CommitCost proto.InternalMessageInfo
+
+func (m *Cost_CommitCost) GetRequestedEntityPuts() int32 {
+	if m != nil && m.RequestedEntityPuts != nil {
+		return *m.RequestedEntityPuts
+	}
+	return 0
+}
+
+func (m *Cost_CommitCost) GetRequestedEntityDeletes() int32 {
+	if m != nil && m.RequestedEntityDeletes != nil {
+		return *m.RequestedEntityDeletes
+	}
+	return 0
+}
+
+type GetRequest struct {
+	Header               *InternalHeader `protobuf:"bytes,6,opt,name=header" json:"header,omitempty"`
+	Key                  []*Reference    `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
+	Transaction          *Transaction    `protobuf:"bytes,2,opt,name=transaction" json:"transaction,omitempty"`
+	FailoverMs           *int64          `protobuf:"varint,3,opt,name=failover_ms,json=failoverMs" json:"failover_ms,omitempty"`
+	Strong               *bool           `protobuf:"varint,4,opt,name=strong" json:"strong,omitempty"`
+	AllowDeferred        *bool           `protobuf:"varint,5,opt,name=allow_deferred,json=allowDeferred,def=0" json:"allow_deferred,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
+	XXX_unrecognized     []byte          `json:"-"`
+	XXX_sizecache        int32           `json:"-"`
+}
+
+func (m *GetRequest) Reset()         { *m = GetRequest{} }
+func (m *GetRequest) String() string { return proto.CompactTextString(m) }
+func (*GetRequest) ProtoMessage()    {}
+func (*GetRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{21}
+}
+func (m *GetRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetRequest.Unmarshal(m, b)
+}
+func (m *GetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetRequest.Merge(dst, src)
+}
+func (m *GetRequest) XXX_Size() int {
+	return xxx_messageInfo_GetRequest.Size(m)
+}
+func (m *GetRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetRequest proto.InternalMessageInfo
+
+const Default_GetRequest_AllowDeferred bool = false
+
+func (m *GetRequest) GetHeader() *InternalHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *GetRequest) GetKey() []*Reference {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *GetRequest) GetTransaction() *Transaction {
+	if m != nil {
+		return m.Transaction
+	}
+	return nil
+}
+
+func (m *GetRequest) GetFailoverMs() int64 {
+	if m != nil && m.FailoverMs != nil {
+		return *m.FailoverMs
+	}
+	return 0
+}
+
+func (m *GetRequest) GetStrong() bool {
+	if m != nil && m.Strong != nil {
+		return *m.Strong
+	}
+	return false
+}
+
+func (m *GetRequest) GetAllowDeferred() bool {
+	if m != nil && m.AllowDeferred != nil {
+		return *m.AllowDeferred
+	}
+	return Default_GetRequest_AllowDeferred
+}
+
+type GetResponse struct {
+	Entity               []*GetResponse_Entity `protobuf:"group,1,rep,name=Entity,json=entity" json:"entity,omitempty"`
+	Deferred             []*Reference          `protobuf:"bytes,5,rep,name=deferred" json:"deferred,omitempty"`
+	InOrder              *bool                 `protobuf:"varint,6,opt,name=in_order,json=inOrder,def=1" json:"in_order,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
+	XXX_unrecognized     []byte                `json:"-"`
+	XXX_sizecache        int32                 `json:"-"`
+}
+
+func (m *GetResponse) Reset()         { *m = GetResponse{} }
+func (m *GetResponse) String() string { return proto.CompactTextString(m) }
+func (*GetResponse) ProtoMessage()    {}
+func (*GetResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{22}
+}
+func (m *GetResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetResponse.Unmarshal(m, b)
+}
+func (m *GetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetResponse.Merge(dst, src)
+}
+func (m *GetResponse) XXX_Size() int {
+	return xxx_messageInfo_GetResponse.Size(m)
+}
+func (m *GetResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetResponse proto.InternalMessageInfo
+
+const Default_GetResponse_InOrder bool = true
+
+func (m *GetResponse) GetEntity() []*GetResponse_Entity {
+	if m != nil {
+		return m.Entity
+	}
+	return nil
+}
+
+func (m *GetResponse) GetDeferred() []*Reference {
+	if m != nil {
+		return m.Deferred
+	}
+	return nil
+}
+
+func (m *GetResponse) GetInOrder() bool {
+	if m != nil && m.InOrder != nil {
+		return *m.InOrder
+	}
+	return Default_GetResponse_InOrder
+}
+
+type GetResponse_Entity struct {
+	Entity               *EntityProto `protobuf:"bytes,2,opt,name=entity" json:"entity,omitempty"`
+	Key                  *Reference   `protobuf:"bytes,4,opt,name=key" json:"key,omitempty"`
+	Version              *int64       `protobuf:"varint,3,opt,name=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}     `json:"-"`
+	XXX_unrecognized     []byte       `json:"-"`
+	XXX_sizecache        int32        `json:"-"`
+}
+
+func (m *GetResponse_Entity) Reset()         { *m = GetResponse_Entity{} }
+func (m *GetResponse_Entity) String() string { return proto.CompactTextString(m) }
+func (*GetResponse_Entity) ProtoMessage()    {}
+func (*GetResponse_Entity) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{22, 0}
+}
+func (m *GetResponse_Entity) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetResponse_Entity.Unmarshal(m, b)
+}
+func (m *GetResponse_Entity) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetResponse_Entity.Marshal(b, m, deterministic)
+}
+func (dst *GetResponse_Entity) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetResponse_Entity.Merge(dst, src)
+}
+func (m *GetResponse_Entity) XXX_Size() int {
+	return xxx_messageInfo_GetResponse_Entity.Size(m)
+}
+func (m *GetResponse_Entity) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetResponse_Entity.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetResponse_Entity proto.InternalMessageInfo
+
+func (m *GetResponse_Entity) GetEntity() *EntityProto {
+	if m != nil {
+		return m.Entity
+	}
+	return nil
+}
+
+func (m *GetResponse_Entity) GetKey() *Reference {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *GetResponse_Entity) GetVersion() int64 {
+	if m != nil && m.Version != nil {
+		return *m.Version
+	}
+	return 0
+}
+
+type PutRequest struct {
+	Header               *InternalHeader          `protobuf:"bytes,11,opt,name=header" json:"header,omitempty"`
+	Entity               []*EntityProto           `protobuf:"bytes,1,rep,name=entity" json:"entity,omitempty"`
+	Transaction          *Transaction             `protobuf:"bytes,2,opt,name=transaction" json:"transaction,omitempty"`
+	CompositeIndex       []*CompositeIndex        `protobuf:"bytes,3,rep,name=composite_index,json=compositeIndex" json:"composite_index,omitempty"`
+	Trusted              *bool                    `protobuf:"varint,4,opt,name=trusted,def=0" json:"trusted,omitempty"`
+	Force                *bool                    `protobuf:"varint,7,opt,name=force,def=0" json:"force,omitempty"`
+	MarkChanges          *bool                    `protobuf:"varint,8,opt,name=mark_changes,json=markChanges,def=0" json:"mark_changes,omitempty"`
+	Snapshot             []*Snapshot              `protobuf:"bytes,9,rep,name=snapshot" json:"snapshot,omitempty"`
+	AutoIdPolicy         *PutRequest_AutoIdPolicy `protobuf:"varint,10,opt,name=auto_id_policy,json=autoIdPolicy,enum=appengine.PutRequest_AutoIdPolicy,def=0" json:"auto_id_policy,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                 `json:"-"`
+	XXX_unrecognized     []byte                   `json:"-"`
+	XXX_sizecache        int32                    `json:"-"`
+}
+
+func (m *PutRequest) Reset()         { *m = PutRequest{} }
+func (m *PutRequest) String() string { return proto.CompactTextString(m) }
+func (*PutRequest) ProtoMessage()    {}
+func (*PutRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{23}
+}
+func (m *PutRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_PutRequest.Unmarshal(m, b)
+}
+func (m *PutRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_PutRequest.Marshal(b, m, deterministic)
+}
+func (dst *PutRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_PutRequest.Merge(dst, src)
+}
+func (m *PutRequest) XXX_Size() int {
+	return xxx_messageInfo_PutRequest.Size(m)
+}
+func (m *PutRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_PutRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PutRequest proto.InternalMessageInfo
+
+const Default_PutRequest_Trusted bool = false
+const Default_PutRequest_Force bool = false
+const Default_PutRequest_MarkChanges bool = false
+const Default_PutRequest_AutoIdPolicy PutRequest_AutoIdPolicy = PutRequest_CURRENT
+
+func (m *PutRequest) GetHeader() *InternalHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *PutRequest) GetEntity() []*EntityProto {
+	if m != nil {
+		return m.Entity
+	}
+	return nil
+}
+
+func (m *PutRequest) GetTransaction() *Transaction {
+	if m != nil {
+		return m.Transaction
+	}
+	return nil
+}
+
+func (m *PutRequest) GetCompositeIndex() []*CompositeIndex {
+	if m != nil {
+		return m.CompositeIndex
+	}
+	return nil
+}
+
+func (m *PutRequest) GetTrusted() bool {
+	if m != nil && m.Trusted != nil {
+		return *m.Trusted
+	}
+	return Default_PutRequest_Trusted
+}
+
+func (m *PutRequest) GetForce() bool {
+	if m != nil && m.Force != nil {
+		return *m.Force
+	}
+	return Default_PutRequest_Force
+}
+
+func (m *PutRequest) GetMarkChanges() bool {
+	if m != nil && m.MarkChanges != nil {
+		return *m.MarkChanges
+	}
+	return Default_PutRequest_MarkChanges
+}
+
+func (m *PutRequest) GetSnapshot() []*Snapshot {
+	if m != nil {
+		return m.Snapshot
+	}
+	return nil
+}
+
+func (m *PutRequest) GetAutoIdPolicy() PutRequest_AutoIdPolicy {
+	if m != nil && m.AutoIdPolicy != nil {
+		return *m.AutoIdPolicy
+	}
+	return Default_PutRequest_AutoIdPolicy
+}
+
+type PutResponse struct {
+	Key                  []*Reference `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
+	Cost                 *Cost        `protobuf:"bytes,2,opt,name=cost" json:"cost,omitempty"`
+	Version              []int64      `protobuf:"varint,3,rep,name=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}     `json:"-"`
+	XXX_unrecognized     []byte       `json:"-"`
+	XXX_sizecache        int32        `json:"-"`
+}
+
+func (m *PutResponse) Reset()         { *m = PutResponse{} }
+func (m *PutResponse) String() string { return proto.CompactTextString(m) }
+func (*PutResponse) ProtoMessage()    {}
+func (*PutResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{24}
+}
+func (m *PutResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_PutResponse.Unmarshal(m, b)
+}
+func (m *PutResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_PutResponse.Marshal(b, m, deterministic)
+}
+func (dst *PutResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_PutResponse.Merge(dst, src)
+}
+func (m *PutResponse) XXX_Size() int {
+	return xxx_messageInfo_PutResponse.Size(m)
+}
+func (m *PutResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_PutResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_PutResponse proto.InternalMessageInfo
+
+func (m *PutResponse) GetKey() []*Reference {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *PutResponse) GetCost() *Cost {
+	if m != nil {
+		return m.Cost
+	}
+	return nil
+}
+
+func (m *PutResponse) GetVersion() []int64 {
+	if m != nil {
+		return m.Version
+	}
+	return nil
+}
+
+type TouchRequest struct {
+	Header               *InternalHeader   `protobuf:"bytes,10,opt,name=header" json:"header,omitempty"`
+	Key                  []*Reference      `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
+	CompositeIndex       []*CompositeIndex `protobuf:"bytes,2,rep,name=composite_index,json=compositeIndex" json:"composite_index,omitempty"`
+	Force                *bool             `protobuf:"varint,3,opt,name=force,def=0" json:"force,omitempty"`
+	Snapshot             []*Snapshot       `protobuf:"bytes,9,rep,name=snapshot" json:"snapshot,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}          `json:"-"`
+	XXX_unrecognized     []byte            `json:"-"`
+	XXX_sizecache        int32             `json:"-"`
+}
+
+func (m *TouchRequest) Reset()         { *m = TouchRequest{} }
+func (m *TouchRequest) String() string { return proto.CompactTextString(m) }
+func (*TouchRequest) ProtoMessage()    {}
+func (*TouchRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{25}
+}
+func (m *TouchRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TouchRequest.Unmarshal(m, b)
+}
+func (m *TouchRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TouchRequest.Marshal(b, m, deterministic)
+}
+func (dst *TouchRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TouchRequest.Merge(dst, src)
+}
+func (m *TouchRequest) XXX_Size() int {
+	return xxx_messageInfo_TouchRequest.Size(m)
+}
+func (m *TouchRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TouchRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TouchRequest proto.InternalMessageInfo
+
+const Default_TouchRequest_Force bool = false
+
+func (m *TouchRequest) GetHeader() *InternalHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *TouchRequest) GetKey() []*Reference {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *TouchRequest) GetCompositeIndex() []*CompositeIndex {
+	if m != nil {
+		return m.CompositeIndex
+	}
+	return nil
+}
+
+func (m *TouchRequest) GetForce() bool {
+	if m != nil && m.Force != nil {
+		return *m.Force
+	}
+	return Default_TouchRequest_Force
+}
+
+func (m *TouchRequest) GetSnapshot() []*Snapshot {
+	if m != nil {
+		return m.Snapshot
+	}
+	return nil
+}
+
+type TouchResponse struct {
+	Cost                 *Cost    `protobuf:"bytes,1,opt,name=cost" json:"cost,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TouchResponse) Reset()         { *m = TouchResponse{} }
+func (m *TouchResponse) String() string { return proto.CompactTextString(m) }
+func (*TouchResponse) ProtoMessage()    {}
+func (*TouchResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{26}
+}
+func (m *TouchResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TouchResponse.Unmarshal(m, b)
+}
+func (m *TouchResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TouchResponse.Marshal(b, m, deterministic)
+}
+func (dst *TouchResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TouchResponse.Merge(dst, src)
+}
+func (m *TouchResponse) XXX_Size() int {
+	return xxx_messageInfo_TouchResponse.Size(m)
+}
+func (m *TouchResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TouchResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TouchResponse proto.InternalMessageInfo
+
+func (m *TouchResponse) GetCost() *Cost {
+	if m != nil {
+		return m.Cost
+	}
+	return nil
+}
+
+type DeleteRequest struct {
+	Header               *InternalHeader `protobuf:"bytes,10,opt,name=header" json:"header,omitempty"`
+	Key                  []*Reference    `protobuf:"bytes,6,rep,name=key" json:"key,omitempty"`
+	Transaction          *Transaction    `protobuf:"bytes,5,opt,name=transaction" json:"transaction,omitempty"`
+	Trusted              *bool           `protobuf:"varint,4,opt,name=trusted,def=0" json:"trusted,omitempty"`
+	Force                *bool           `protobuf:"varint,7,opt,name=force,def=0" json:"force,omitempty"`
+	MarkChanges          *bool           `protobuf:"varint,8,opt,name=mark_changes,json=markChanges,def=0" json:"mark_changes,omitempty"`
+	Snapshot             []*Snapshot     `protobuf:"bytes,9,rep,name=snapshot" json:"snapshot,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
+	XXX_unrecognized     []byte          `json:"-"`
+	XXX_sizecache        int32           `json:"-"`
+}
+
+func (m *DeleteRequest) Reset()         { *m = DeleteRequest{} }
+func (m *DeleteRequest) String() string { return proto.CompactTextString(m) }
+func (*DeleteRequest) ProtoMessage()    {}
+func (*DeleteRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{27}
+}
+func (m *DeleteRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_DeleteRequest.Unmarshal(m, b)
+}
+func (m *DeleteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_DeleteRequest.Marshal(b, m, deterministic)
+}
+func (dst *DeleteRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_DeleteRequest.Merge(dst, src)
+}
+func (m *DeleteRequest) XXX_Size() int {
+	return xxx_messageInfo_DeleteRequest.Size(m)
+}
+func (m *DeleteRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_DeleteRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_DeleteRequest proto.InternalMessageInfo
+
+const Default_DeleteRequest_Trusted bool = false
+const Default_DeleteRequest_Force bool = false
+const Default_DeleteRequest_MarkChanges bool = false
+
+func (m *DeleteRequest) GetHeader() *InternalHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *DeleteRequest) GetKey() []*Reference {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *DeleteRequest) GetTransaction() *Transaction {
+	if m != nil {
+		return m.Transaction
+	}
+	return nil
+}
+
+func (m *DeleteRequest) GetTrusted() bool {
+	if m != nil && m.Trusted != nil {
+		return *m.Trusted
+	}
+	return Default_DeleteRequest_Trusted
+}
+
+func (m *DeleteRequest) GetForce() bool {
+	if m != nil && m.Force != nil {
+		return *m.Force
+	}
+	return Default_DeleteRequest_Force
+}
+
+func (m *DeleteRequest) GetMarkChanges() bool {
+	if m != nil && m.MarkChanges != nil {
+		return *m.MarkChanges
+	}
+	return Default_DeleteRequest_MarkChanges
+}
+
+func (m *DeleteRequest) GetSnapshot() []*Snapshot {
+	if m != nil {
+		return m.Snapshot
+	}
+	return nil
+}
+
+type DeleteResponse struct {
+	Cost                 *Cost    `protobuf:"bytes,1,opt,name=cost" json:"cost,omitempty"`
+	Version              []int64  `protobuf:"varint,3,rep,name=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *DeleteResponse) Reset()         { *m = DeleteResponse{} }
+func (m *DeleteResponse) String() string { return proto.CompactTextString(m) }
+func (*DeleteResponse) ProtoMessage()    {}
+func (*DeleteResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{28}
+}
+func (m *DeleteResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_DeleteResponse.Unmarshal(m, b)
+}
+func (m *DeleteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_DeleteResponse.Marshal(b, m, deterministic)
+}
+func (dst *DeleteResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_DeleteResponse.Merge(dst, src)
+}
+func (m *DeleteResponse) XXX_Size() int {
+	return xxx_messageInfo_DeleteResponse.Size(m)
+}
+func (m *DeleteResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_DeleteResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_DeleteResponse proto.InternalMessageInfo
+
+func (m *DeleteResponse) GetCost() *Cost {
+	if m != nil {
+		return m.Cost
+	}
+	return nil
+}
+
+func (m *DeleteResponse) GetVersion() []int64 {
+	if m != nil {
+		return m.Version
+	}
+	return nil
+}
+
+type NextRequest struct {
+	Header               *InternalHeader `protobuf:"bytes,5,opt,name=header" json:"header,omitempty"`
+	Cursor               *Cursor         `protobuf:"bytes,1,req,name=cursor" json:"cursor,omitempty"`
+	Count                *int32          `protobuf:"varint,2,opt,name=count" json:"count,omitempty"`
+	Offset               *int32          `protobuf:"varint,4,opt,name=offset,def=0" json:"offset,omitempty"`
+	Compile              *bool           `protobuf:"varint,3,opt,name=compile,def=0" json:"compile,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
+	XXX_unrecognized     []byte          `json:"-"`
+	XXX_sizecache        int32           `json:"-"`
+}
+
+func (m *NextRequest) Reset()         { *m = NextRequest{} }
+func (m *NextRequest) String() string { return proto.CompactTextString(m) }
+func (*NextRequest) ProtoMessage()    {}
+func (*NextRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{29}
+}
+func (m *NextRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_NextRequest.Unmarshal(m, b)
+}
+func (m *NextRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_NextRequest.Marshal(b, m, deterministic)
+}
+func (dst *NextRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_NextRequest.Merge(dst, src)
+}
+func (m *NextRequest) XXX_Size() int {
+	return xxx_messageInfo_NextRequest.Size(m)
+}
+func (m *NextRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_NextRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_NextRequest proto.InternalMessageInfo
+
+const Default_NextRequest_Offset int32 = 0
+const Default_NextRequest_Compile bool = false
+
+func (m *NextRequest) GetHeader() *InternalHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *NextRequest) GetCursor() *Cursor {
+	if m != nil {
+		return m.Cursor
+	}
+	return nil
+}
+
+func (m *NextRequest) GetCount() int32 {
+	if m != nil && m.Count != nil {
+		return *m.Count
+	}
+	return 0
+}
+
+func (m *NextRequest) GetOffset() int32 {
+	if m != nil && m.Offset != nil {
+		return *m.Offset
+	}
+	return Default_NextRequest_Offset
+}
+
+func (m *NextRequest) GetCompile() bool {
+	if m != nil && m.Compile != nil {
+		return *m.Compile
+	}
+	return Default_NextRequest_Compile
+}
+
+type QueryResult struct {
+	Cursor               *Cursor           `protobuf:"bytes,1,opt,name=cursor" json:"cursor,omitempty"`
+	Result               []*EntityProto    `protobuf:"bytes,2,rep,name=result" json:"result,omitempty"`
+	SkippedResults       *int32            `protobuf:"varint,7,opt,name=skipped_results,json=skippedResults" json:"skipped_results,omitempty"`
+	MoreResults          *bool             `protobuf:"varint,3,req,name=more_results,json=moreResults" json:"more_results,omitempty"`
+	KeysOnly             *bool             `protobuf:"varint,4,opt,name=keys_only,json=keysOnly" json:"keys_only,omitempty"`
+	IndexOnly            *bool             `protobuf:"varint,9,opt,name=index_only,json=indexOnly" json:"index_only,omitempty"`
+	SmallOps             *bool             `protobuf:"varint,10,opt,name=small_ops,json=smallOps" json:"small_ops,omitempty"`
+	CompiledQuery        *CompiledQuery    `protobuf:"bytes,5,opt,name=compiled_query,json=compiledQuery" json:"compiled_query,omitempty"`
+	CompiledCursor       *CompiledCursor   `protobuf:"bytes,6,opt,name=compiled_cursor,json=compiledCursor" json:"compiled_cursor,omitempty"`
+	Index                []*CompositeIndex `protobuf:"bytes,8,rep,name=index" json:"index,omitempty"`
+	Version              []int64           `protobuf:"varint,11,rep,name=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}          `json:"-"`
+	XXX_unrecognized     []byte            `json:"-"`
+	XXX_sizecache        int32             `json:"-"`
+}
+
+func (m *QueryResult) Reset()         { *m = QueryResult{} }
+func (m *QueryResult) String() string { return proto.CompactTextString(m) }
+func (*QueryResult) ProtoMessage()    {}
+func (*QueryResult) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{30}
+}
+func (m *QueryResult) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_QueryResult.Unmarshal(m, b)
+}
+func (m *QueryResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_QueryResult.Marshal(b, m, deterministic)
+}
+func (dst *QueryResult) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_QueryResult.Merge(dst, src)
+}
+func (m *QueryResult) XXX_Size() int {
+	return xxx_messageInfo_QueryResult.Size(m)
+}
+func (m *QueryResult) XXX_DiscardUnknown() {
+	xxx_messageInfo_QueryResult.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_QueryResult proto.InternalMessageInfo
+
+func (m *QueryResult) GetCursor() *Cursor {
+	if m != nil {
+		return m.Cursor
+	}
+	return nil
+}
+
+func (m *QueryResult) GetResult() []*EntityProto {
+	if m != nil {
+		return m.Result
+	}
+	return nil
+}
+
+func (m *QueryResult) GetSkippedResults() int32 {
+	if m != nil && m.SkippedResults != nil {
+		return *m.SkippedResults
+	}
+	return 0
+}
+
+func (m *QueryResult) GetMoreResults() bool {
+	if m != nil && m.MoreResults != nil {
+		return *m.MoreResults
+	}
+	return false
+}
+
+func (m *QueryResult) GetKeysOnly() bool {
+	if m != nil && m.KeysOnly != nil {
+		return *m.KeysOnly
+	}
+	return false
+}
+
+func (m *QueryResult) GetIndexOnly() bool {
+	if m != nil && m.IndexOnly != nil {
+		return *m.IndexOnly
+	}
+	return false
+}
+
+func (m *QueryResult) GetSmallOps() bool {
+	if m != nil && m.SmallOps != nil {
+		return *m.SmallOps
+	}
+	return false
+}
+
+func (m *QueryResult) GetCompiledQuery() *CompiledQuery {
+	if m != nil {
+		return m.CompiledQuery
+	}
+	return nil
+}
+
+func (m *QueryResult) GetCompiledCursor() *CompiledCursor {
+	if m != nil {
+		return m.CompiledCursor
+	}
+	return nil
+}
+
+func (m *QueryResult) GetIndex() []*CompositeIndex {
+	if m != nil {
+		return m.Index
+	}
+	return nil
+}
+
+func (m *QueryResult) GetVersion() []int64 {
+	if m != nil {
+		return m.Version
+	}
+	return nil
+}
+
+type AllocateIdsRequest struct {
+	Header               *InternalHeader `protobuf:"bytes,4,opt,name=header" json:"header,omitempty"`
+	ModelKey             *Reference      `protobuf:"bytes,1,opt,name=model_key,json=modelKey" json:"model_key,omitempty"`
+	Size                 *int64          `protobuf:"varint,2,opt,name=size" json:"size,omitempty"`
+	Max                  *int64          `protobuf:"varint,3,opt,name=max" json:"max,omitempty"`
+	Reserve              []*Reference    `protobuf:"bytes,5,rep,name=reserve" json:"reserve,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
+	XXX_unrecognized     []byte          `json:"-"`
+	XXX_sizecache        int32           `json:"-"`
+}
+
+func (m *AllocateIdsRequest) Reset()         { *m = AllocateIdsRequest{} }
+func (m *AllocateIdsRequest) String() string { return proto.CompactTextString(m) }
+func (*AllocateIdsRequest) ProtoMessage()    {}
+func (*AllocateIdsRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{31}
+}
+func (m *AllocateIdsRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_AllocateIdsRequest.Unmarshal(m, b)
+}
+func (m *AllocateIdsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_AllocateIdsRequest.Marshal(b, m, deterministic)
+}
+func (dst *AllocateIdsRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_AllocateIdsRequest.Merge(dst, src)
+}
+func (m *AllocateIdsRequest) XXX_Size() int {
+	return xxx_messageInfo_AllocateIdsRequest.Size(m)
+}
+func (m *AllocateIdsRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_AllocateIdsRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AllocateIdsRequest proto.InternalMessageInfo
+
+func (m *AllocateIdsRequest) GetHeader() *InternalHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *AllocateIdsRequest) GetModelKey() *Reference {
+	if m != nil {
+		return m.ModelKey
+	}
+	return nil
+}
+
+func (m *AllocateIdsRequest) GetSize() int64 {
+	if m != nil && m.Size != nil {
+		return *m.Size
+	}
+	return 0
+}
+
+func (m *AllocateIdsRequest) GetMax() int64 {
+	if m != nil && m.Max != nil {
+		return *m.Max
+	}
+	return 0
+}
+
+func (m *AllocateIdsRequest) GetReserve() []*Reference {
+	if m != nil {
+		return m.Reserve
+	}
+	return nil
+}
+
+type AllocateIdsResponse struct {
+	Start                *int64   `protobuf:"varint,1,req,name=start" json:"start,omitempty"`
+	End                  *int64   `protobuf:"varint,2,req,name=end" json:"end,omitempty"`
+	Cost                 *Cost    `protobuf:"bytes,3,opt,name=cost" json:"cost,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *AllocateIdsResponse) Reset()         { *m = AllocateIdsResponse{} }
+func (m *AllocateIdsResponse) String() string { return proto.CompactTextString(m) }
+func (*AllocateIdsResponse) ProtoMessage()    {}
+func (*AllocateIdsResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{32}
+}
+func (m *AllocateIdsResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_AllocateIdsResponse.Unmarshal(m, b)
+}
+func (m *AllocateIdsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_AllocateIdsResponse.Marshal(b, m, deterministic)
+}
+func (dst *AllocateIdsResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_AllocateIdsResponse.Merge(dst, src)
+}
+func (m *AllocateIdsResponse) XXX_Size() int {
+	return xxx_messageInfo_AllocateIdsResponse.Size(m)
+}
+func (m *AllocateIdsResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_AllocateIdsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AllocateIdsResponse proto.InternalMessageInfo
+
+func (m *AllocateIdsResponse) GetStart() int64 {
+	if m != nil && m.Start != nil {
+		return *m.Start
+	}
+	return 0
+}
+
+func (m *AllocateIdsResponse) GetEnd() int64 {
+	if m != nil && m.End != nil {
+		return *m.End
+	}
+	return 0
+}
+
+func (m *AllocateIdsResponse) GetCost() *Cost {
+	if m != nil {
+		return m.Cost
+	}
+	return nil
+}
+
+type CompositeIndices struct {
+	Index                []*CompositeIndex `protobuf:"bytes,1,rep,name=index" json:"index,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}          `json:"-"`
+	XXX_unrecognized     []byte            `json:"-"`
+	XXX_sizecache        int32             `json:"-"`
+}
+
+func (m *CompositeIndices) Reset()         { *m = CompositeIndices{} }
+func (m *CompositeIndices) String() string { return proto.CompactTextString(m) }
+func (*CompositeIndices) ProtoMessage()    {}
+func (*CompositeIndices) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{33}
+}
+func (m *CompositeIndices) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompositeIndices.Unmarshal(m, b)
+}
+func (m *CompositeIndices) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompositeIndices.Marshal(b, m, deterministic)
+}
+func (dst *CompositeIndices) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompositeIndices.Merge(dst, src)
+}
+func (m *CompositeIndices) XXX_Size() int {
+	return xxx_messageInfo_CompositeIndices.Size(m)
+}
+func (m *CompositeIndices) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompositeIndices.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompositeIndices proto.InternalMessageInfo
+
+func (m *CompositeIndices) GetIndex() []*CompositeIndex {
+	if m != nil {
+		return m.Index
+	}
+	return nil
+}
+
+type AddActionsRequest struct {
+	Header               *InternalHeader `protobuf:"bytes,3,opt,name=header" json:"header,omitempty"`
+	Transaction          *Transaction    `protobuf:"bytes,1,req,name=transaction" json:"transaction,omitempty"`
+	Action               []*Action       `protobuf:"bytes,2,rep,name=action" json:"action,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
+	XXX_unrecognized     []byte          `json:"-"`
+	XXX_sizecache        int32           `json:"-"`
+}
+
+func (m *AddActionsRequest) Reset()         { *m = AddActionsRequest{} }
+func (m *AddActionsRequest) String() string { return proto.CompactTextString(m) }
+func (*AddActionsRequest) ProtoMessage()    {}
+func (*AddActionsRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{34}
+}
+func (m *AddActionsRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_AddActionsRequest.Unmarshal(m, b)
+}
+func (m *AddActionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_AddActionsRequest.Marshal(b, m, deterministic)
+}
+func (dst *AddActionsRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_AddActionsRequest.Merge(dst, src)
+}
+func (m *AddActionsRequest) XXX_Size() int {
+	return xxx_messageInfo_AddActionsRequest.Size(m)
+}
+func (m *AddActionsRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_AddActionsRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AddActionsRequest proto.InternalMessageInfo
+
+func (m *AddActionsRequest) GetHeader() *InternalHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *AddActionsRequest) GetTransaction() *Transaction {
+	if m != nil {
+		return m.Transaction
+	}
+	return nil
+}
+
+func (m *AddActionsRequest) GetAction() []*Action {
+	if m != nil {
+		return m.Action
+	}
+	return nil
+}
+
+type AddActionsResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *AddActionsResponse) Reset()         { *m = AddActionsResponse{} }
+func (m *AddActionsResponse) String() string { return proto.CompactTextString(m) }
+func (*AddActionsResponse) ProtoMessage()    {}
+func (*AddActionsResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{35}
+}
+func (m *AddActionsResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_AddActionsResponse.Unmarshal(m, b)
+}
+func (m *AddActionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_AddActionsResponse.Marshal(b, m, deterministic)
+}
+func (dst *AddActionsResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_AddActionsResponse.Merge(dst, src)
+}
+func (m *AddActionsResponse) XXX_Size() int {
+	return xxx_messageInfo_AddActionsResponse.Size(m)
+}
+func (m *AddActionsResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_AddActionsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AddActionsResponse proto.InternalMessageInfo
+
+type BeginTransactionRequest struct {
+	Header               *InternalHeader                          `protobuf:"bytes,3,opt,name=header" json:"header,omitempty"`
+	App                  *string                                  `protobuf:"bytes,1,req,name=app" json:"app,omitempty"`
+	AllowMultipleEg      *bool                                    `protobuf:"varint,2,opt,name=allow_multiple_eg,json=allowMultipleEg,def=0" json:"allow_multiple_eg,omitempty"`
+	DatabaseId           *string                                  `protobuf:"bytes,4,opt,name=database_id,json=databaseId" json:"database_id,omitempty"`
+	Mode                 *BeginTransactionRequest_TransactionMode `protobuf:"varint,5,opt,name=mode,enum=appengine.BeginTransactionRequest_TransactionMode,def=0" json:"mode,omitempty"`
+	PreviousTransaction  *Transaction                             `protobuf:"bytes,7,opt,name=previous_transaction,json=previousTransaction" json:"previous_transaction,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                                 `json:"-"`
+	XXX_unrecognized     []byte                                   `json:"-"`
+	XXX_sizecache        int32                                    `json:"-"`
+}
+
+func (m *BeginTransactionRequest) Reset()         { *m = BeginTransactionRequest{} }
+func (m *BeginTransactionRequest) String() string { return proto.CompactTextString(m) }
+func (*BeginTransactionRequest) ProtoMessage()    {}
+func (*BeginTransactionRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{36}
+}
+func (m *BeginTransactionRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_BeginTransactionRequest.Unmarshal(m, b)
+}
+func (m *BeginTransactionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_BeginTransactionRequest.Marshal(b, m, deterministic)
+}
+func (dst *BeginTransactionRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_BeginTransactionRequest.Merge(dst, src)
+}
+func (m *BeginTransactionRequest) XXX_Size() int {
+	return xxx_messageInfo_BeginTransactionRequest.Size(m)
+}
+func (m *BeginTransactionRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_BeginTransactionRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_BeginTransactionRequest proto.InternalMessageInfo
+
+const Default_BeginTransactionRequest_AllowMultipleEg bool = false
+const Default_BeginTransactionRequest_Mode BeginTransactionRequest_TransactionMode = BeginTransactionRequest_UNKNOWN
+
+func (m *BeginTransactionRequest) GetHeader() *InternalHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *BeginTransactionRequest) GetApp() string {
+	if m != nil && m.App != nil {
+		return *m.App
+	}
+	return ""
+}
+
+func (m *BeginTransactionRequest) GetAllowMultipleEg() bool {
+	if m != nil && m.AllowMultipleEg != nil {
+		return *m.AllowMultipleEg
+	}
+	return Default_BeginTransactionRequest_AllowMultipleEg
+}
+
+func (m *BeginTransactionRequest) GetDatabaseId() string {
+	if m != nil && m.DatabaseId != nil {
+		return *m.DatabaseId
+	}
+	return ""
+}
+
+func (m *BeginTransactionRequest) GetMode() BeginTransactionRequest_TransactionMode {
+	if m != nil && m.Mode != nil {
+		return *m.Mode
+	}
+	return Default_BeginTransactionRequest_Mode
+}
+
+func (m *BeginTransactionRequest) GetPreviousTransaction() *Transaction {
+	if m != nil {
+		return m.PreviousTransaction
+	}
+	return nil
+}
+
+type CommitResponse struct {
+	Cost                 *Cost                     `protobuf:"bytes,1,opt,name=cost" json:"cost,omitempty"`
+	Version              []*CommitResponse_Version `protobuf:"group,3,rep,name=Version,json=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                  `json:"-"`
+	XXX_unrecognized     []byte                    `json:"-"`
+	XXX_sizecache        int32                     `json:"-"`
+}
+
+func (m *CommitResponse) Reset()         { *m = CommitResponse{} }
+func (m *CommitResponse) String() string { return proto.CompactTextString(m) }
+func (*CommitResponse) ProtoMessage()    {}
+func (*CommitResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{37}
+}
+func (m *CommitResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CommitResponse.Unmarshal(m, b)
+}
+func (m *CommitResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CommitResponse.Marshal(b, m, deterministic)
+}
+func (dst *CommitResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CommitResponse.Merge(dst, src)
+}
+func (m *CommitResponse) XXX_Size() int {
+	return xxx_messageInfo_CommitResponse.Size(m)
+}
+func (m *CommitResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_CommitResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CommitResponse proto.InternalMessageInfo
+
+func (m *CommitResponse) GetCost() *Cost {
+	if m != nil {
+		return m.Cost
+	}
+	return nil
+}
+
+func (m *CommitResponse) GetVersion() []*CommitResponse_Version {
+	if m != nil {
+		return m.Version
+	}
+	return nil
+}
+
+type CommitResponse_Version struct {
+	RootEntityKey        *Reference `protobuf:"bytes,4,req,name=root_entity_key,json=rootEntityKey" json:"root_entity_key,omitempty"`
+	Version              *int64     `protobuf:"varint,5,req,name=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}   `json:"-"`
+	XXX_unrecognized     []byte     `json:"-"`
+	XXX_sizecache        int32      `json:"-"`
+}
+
+func (m *CommitResponse_Version) Reset()         { *m = CommitResponse_Version{} }
+func (m *CommitResponse_Version) String() string { return proto.CompactTextString(m) }
+func (*CommitResponse_Version) ProtoMessage()    {}
+func (*CommitResponse_Version) Descriptor() ([]byte, []int) {
+	return fileDescriptor_datastore_v3_83b17b80c34f6179, []int{37, 0}
+}
+func (m *CommitResponse_Version) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CommitResponse_Version.Unmarshal(m, b)
+}
+func (m *CommitResponse_Version) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CommitResponse_Version.Marshal(b, m, deterministic)
+}
+func (dst *CommitResponse_Version) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CommitResponse_Version.Merge(dst, src)
+}
+func (m *CommitResponse_Version) XXX_Size() int {
+	return xxx_messageInfo_CommitResponse_Version.Size(m)
+}
+func (m *CommitResponse_Version) XXX_DiscardUnknown() {
+	xxx_messageInfo_CommitResponse_Version.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CommitResponse_Version proto.InternalMessageInfo
+
+func (m *CommitResponse_Version) GetRootEntityKey() *Reference {
+	if m != nil {
+		return m.RootEntityKey
+	}
+	return nil
+}
+
+func (m *CommitResponse_Version) GetVersion() int64 {
+	if m != nil && m.Version != nil {
+		return *m.Version
+	}
+	return 0
+}
+
+func init() {
+	proto.RegisterType((*Action)(nil), "appengine.Action")
+	proto.RegisterType((*PropertyValue)(nil), "appengine.PropertyValue")
+	proto.RegisterType((*PropertyValue_PointValue)(nil), "appengine.PropertyValue.PointValue")
+	proto.RegisterType((*PropertyValue_UserValue)(nil), "appengine.PropertyValue.UserValue")
+	proto.RegisterType((*PropertyValue_ReferenceValue)(nil), "appengine.PropertyValue.ReferenceValue")
+	proto.RegisterType((*PropertyValue_ReferenceValue_PathElement)(nil), "appengine.PropertyValue.ReferenceValue.PathElement")
+	proto.RegisterType((*Property)(nil), "appengine.Property")
+	proto.RegisterType((*Path)(nil), "appengine.Path")
+	proto.RegisterType((*Path_Element)(nil), "appengine.Path.Element")
+	proto.RegisterType((*Reference)(nil), "appengine.Reference")
+	proto.RegisterType((*User)(nil), "appengine.User")
+	proto.RegisterType((*EntityProto)(nil), "appengine.EntityProto")
+	proto.RegisterType((*CompositeProperty)(nil), "appengine.CompositeProperty")
+	proto.RegisterType((*Index)(nil), "appengine.Index")
+	proto.RegisterType((*Index_Property)(nil), "appengine.Index.Property")
+	proto.RegisterType((*CompositeIndex)(nil), "appengine.CompositeIndex")
+	proto.RegisterType((*IndexPostfix)(nil), "appengine.IndexPostfix")
+	proto.RegisterType((*IndexPostfix_IndexValue)(nil), "appengine.IndexPostfix.IndexValue")
+	proto.RegisterType((*IndexPosition)(nil), "appengine.IndexPosition")
+	proto.RegisterType((*Snapshot)(nil), "appengine.Snapshot")
+	proto.RegisterType((*InternalHeader)(nil), "appengine.InternalHeader")
+	proto.RegisterType((*Transaction)(nil), "appengine.Transaction")
+	proto.RegisterType((*Query)(nil), "appengine.Query")
+	proto.RegisterType((*Query_Filter)(nil), "appengine.Query.Filter")
+	proto.RegisterType((*Query_Order)(nil), "appengine.Query.Order")
+	proto.RegisterType((*CompiledQuery)(nil), "appengine.CompiledQuery")
+	proto.RegisterType((*CompiledQuery_PrimaryScan)(nil), "appengine.CompiledQuery.PrimaryScan")
+	proto.RegisterType((*CompiledQuery_MergeJoinScan)(nil), "appengine.CompiledQuery.MergeJoinScan")
+	proto.RegisterType((*CompiledQuery_EntityFilter)(nil), "appengine.CompiledQuery.EntityFilter")
+	proto.RegisterType((*CompiledCursor)(nil), "appengine.CompiledCursor")
+	proto.RegisterType((*CompiledCursor_Position)(nil), "appengine.CompiledCursor.Position")
+	proto.RegisterType((*CompiledCursor_Position_IndexValue)(nil), "appengine.CompiledCursor.Position.IndexValue")
+	proto.RegisterType((*Cursor)(nil), "appengine.Cursor")
+	proto.RegisterType((*Error)(nil), "appengine.Error")
+	proto.RegisterType((*Cost)(nil), "appengine.Cost")
+	proto.RegisterType((*Cost_CommitCost)(nil), "appengine.Cost.CommitCost")
+	proto.RegisterType((*GetRequest)(nil), "appengine.GetRequest")
+	proto.RegisterType((*GetResponse)(nil), "appengine.GetResponse")
+	proto.RegisterType((*GetResponse_Entity)(nil), "appengine.GetResponse.Entity")
+	proto.RegisterType((*PutRequest)(nil), "appengine.PutRequest")
+	proto.RegisterType((*PutResponse)(nil), "appengine.PutResponse")
+	proto.RegisterType((*TouchRequest)(nil), "appengine.TouchRequest")
+	proto.RegisterType((*TouchResponse)(nil), "appengine.TouchResponse")
+	proto.RegisterType((*DeleteRequest)(nil), "appengine.DeleteRequest")
+	proto.RegisterType((*DeleteResponse)(nil), "appengine.DeleteResponse")
+	proto.RegisterType((*NextRequest)(nil), "appengine.NextRequest")
+	proto.RegisterType((*QueryResult)(nil), "appengine.QueryResult")
+	proto.RegisterType((*AllocateIdsRequest)(nil), "appengine.AllocateIdsRequest")
+	proto.RegisterType((*AllocateIdsResponse)(nil), "appengine.AllocateIdsResponse")
+	proto.RegisterType((*CompositeIndices)(nil), "appengine.CompositeIndices")
+	proto.RegisterType((*AddActionsRequest)(nil), "appengine.AddActionsRequest")
+	proto.RegisterType((*AddActionsResponse)(nil), "appengine.AddActionsResponse")
+	proto.RegisterType((*BeginTransactionRequest)(nil), "appengine.BeginTransactionRequest")
+	proto.RegisterType((*CommitResponse)(nil), "appengine.CommitResponse")
+	proto.RegisterType((*CommitResponse_Version)(nil), "appengine.CommitResponse.Version")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/datastore/datastore_v3.proto", fileDescriptor_datastore_v3_83b17b80c34f6179)
+}
+
+var fileDescriptor_datastore_v3_83b17b80c34f6179 = []byte{
+	// 4156 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x5a, 0xcd, 0x73, 0xe3, 0x46,
+	0x76, 0x37, 0xc1, 0xef, 0x47, 0x89, 0x82, 0x5a, 0xf3, 0xc1, 0xa1, 0x3f, 0x46, 0xc6, 0xac, 0x6d,
+	0xd9, 0x6b, 0x73, 0x6c, 0xf9, 0x23, 0x5b, 0x4a, 0x76, 0x1d, 0x4a, 0xc4, 0x68, 0x90, 0xa1, 0x48,
+	0xb9, 0x09, 0xd9, 0x9e, 0x5c, 0x50, 0x18, 0xa2, 0x29, 0x21, 0x43, 0x02, 0x30, 0x00, 0x6a, 0x46,
+	0x93, 0xe4, 0x90, 0x4b, 0x2a, 0x55, 0x5b, 0xa9, 0x1c, 0x92, 0x4a, 0x25, 0xf9, 0x07, 0x72, 0xc8,
+	0x39, 0x95, 0xaa, 0x54, 0xf6, 0x98, 0x5b, 0x0e, 0x7b, 0xc9, 0x31, 0x95, 0x73, 0xf2, 0x27, 0x24,
+	0x39, 0xa4, 0xfa, 0x75, 0x03, 0x02, 0x28, 0x4a, 0x23, 0x6d, 0xf6, 0x90, 0x13, 0xd1, 0xef, 0xfd,
+	0xba, 0xf1, 0xfa, 0xf5, 0xfb, 0x6c, 0x10, 0xba, 0xc7, 0xbe, 0x7f, 0x3c, 0x65, 0x9d, 0x63, 0x7f,
+	0x6a, 0x7b, 0xc7, 0x1d, 0x3f, 0x3c, 0x7e, 0x68, 0x07, 0x01, 0xf3, 0x8e, 0x5d, 0x8f, 0x3d, 0x74,
+	0xbd, 0x98, 0x85, 0x9e, 0x3d, 0x7d, 0xe8, 0xd8, 0xb1, 0x1d, 0xc5, 0x7e, 0xc8, 0xce, 0x9f, 0xac,
+	0xd3, 0xcf, 0x3b, 0x41, 0xe8, 0xc7, 0x3e, 0xa9, 0xa7, 0x13, 0xb4, 0x1a, 0x54, 0xba, 0xe3, 0xd8,
+	0xf5, 0x3d, 0xed, 0x1f, 0x2b, 0xb0, 0x7a, 0x18, 0xfa, 0x01, 0x0b, 0xe3, 0xb3, 0x6f, 0xed, 0xe9,
+	0x9c, 0x91, 0x77, 0x00, 0x5c, 0x2f, 0xfe, 0xea, 0x0b, 0x1c, 0xb5, 0x0a, 0x9b, 0x85, 0xad, 0x22,
+	0xcd, 0x50, 0x88, 0x06, 0x2b, 0xcf, 0x7c, 0x7f, 0xca, 0x6c, 0x4f, 0x20, 0x94, 0xcd, 0xc2, 0x56,
+	0x8d, 0xe6, 0x68, 0x64, 0x13, 0x1a, 0x51, 0x1c, 0xba, 0xde, 0xb1, 0x80, 0x14, 0x37, 0x0b, 0x5b,
+	0x75, 0x9a, 0x25, 0x71, 0x84, 0xe3, 0xcf, 0x9f, 0x4d, 0x99, 0x40, 0x94, 0x36, 0x0b, 0x5b, 0x05,
+	0x9a, 0x25, 0x91, 0x3d, 0x80, 0xc0, 0x77, 0xbd, 0xf8, 0x14, 0x01, 0xe5, 0xcd, 0xc2, 0x16, 0x6c,
+	0x3f, 0xe8, 0xa4, 0x7b, 0xe8, 0xe4, 0xa4, 0xee, 0x1c, 0x72, 0x28, 0x3e, 0xd2, 0xcc, 0x34, 0xf2,
+	0xdb, 0x50, 0x9f, 0x47, 0x2c, 0x14, 0x6b, 0xd4, 0x70, 0x0d, 0xed, 0xd2, 0x35, 0x8e, 0x22, 0x16,
+	0x8a, 0x25, 0xce, 0x27, 0x91, 0x21, 0x34, 0x43, 0x36, 0x61, 0x21, 0xf3, 0xc6, 0x4c, 0x2c, 0xb3,
+	0x82, 0xcb, 0x7c, 0x70, 0xe9, 0x32, 0x34, 0x81, 0x8b, 0xb5, 0x16, 0xa6, 0xb7, 0xb7, 0x00, 0xce,
+	0x85, 0x25, 0x2b, 0x50, 0x78, 0xd9, 0xaa, 0x6c, 0x2a, 0x5b, 0x05, 0x5a, 0x78, 0xc9, 0x47, 0x67,
+	0xad, 0xaa, 0x18, 0x9d, 0xb5, 0xff, 0xa9, 0x00, 0xf5, 0x54, 0x26, 0x72, 0x0b, 0xca, 0x6c, 0x66,
+	0xbb, 0xd3, 0x56, 0x7d, 0x53, 0xd9, 0xaa, 0x53, 0x31, 0x20, 0xf7, 0xa1, 0x61, 0xcf, 0xe3, 0x13,
+	0xcb, 0xf1, 0x67, 0xb6, 0xeb, 0xb5, 0x00, 0x79, 0xc0, 0x49, 0x3d, 0xa4, 0x90, 0x36, 0xd4, 0x3c,
+	0x77, 0xfc, 0xdc, 0xb3, 0x67, 0xac, 0xd5, 0xc0, 0x73, 0x48, 0xc7, 0xe4, 0x13, 0x20, 0x13, 0xe6,
+	0xb0, 0xd0, 0x8e, 0x99, 0x63, 0xb9, 0x0e, 0xf3, 0x62, 0x37, 0x3e, 0x6b, 0xdd, 0x46, 0xd4, 0x7a,
+	0xca, 0x31, 0x24, 0x23, 0x0f, 0x0f, 0x42, 0xff, 0xd4, 0x75, 0x58, 0xd8, 0xba, 0xb3, 0x00, 0x3f,
+	0x94, 0x8c, 0xf6, 0xbf, 0x17, 0xa0, 0x99, 0xd7, 0x05, 0x51, 0xa1, 0x68, 0x07, 0x41, 0x6b, 0x15,
+	0xa5, 0xe4, 0x8f, 0xe4, 0x6d, 0x00, 0x2e, 0x8a, 0x15, 0x05, 0xf6, 0x98, 0xb5, 0x6e, 0xe1, 0x5a,
+	0x75, 0x4e, 0x19, 0x71, 0x02, 0x39, 0x82, 0x46, 0x60, 0xc7, 0x27, 0x6c, 0xca, 0x66, 0xcc, 0x8b,
+	0x5b, 0xcd, 0xcd, 0xe2, 0x16, 0x6c, 0x7f, 0x7e, 0x4d, 0xd5, 0x77, 0x0e, 0xed, 0xf8, 0x44, 0x17,
+	0x53, 0x69, 0x76, 0x9d, 0xb6, 0x0e, 0x8d, 0x0c, 0x8f, 0x10, 0x28, 0xc5, 0x67, 0x01, 0x6b, 0xad,
+	0xa1, 0x5c, 0xf8, 0x4c, 0x9a, 0xa0, 0xb8, 0x4e, 0x4b, 0x45, 0xf3, 0x57, 0x5c, 0x87, 0x63, 0x50,
+	0x87, 0xeb, 0x28, 0x22, 0x3e, 0x6b, 0xff, 0x51, 0x86, 0x5a, 0x22, 0x00, 0xe9, 0x42, 0x75, 0xc6,
+	0x6c, 0xcf, 0xf5, 0x8e, 0xd1, 0x69, 0x9a, 0xdb, 0x6f, 0x2e, 0x11, 0xb3, 0x73, 0x20, 0x20, 0x3b,
+	0x30, 0x18, 0x5a, 0x07, 0x7a, 0x77, 0x60, 0x0c, 0xf6, 0x69, 0x32, 0x8f, 0x1f, 0xa6, 0x7c, 0xb4,
+	0xe6, 0xa1, 0x8b, 0x9e, 0x55, 0xa7, 0x20, 0x49, 0x47, 0xa1, 0x9b, 0x0a, 0x51, 0x14, 0x82, 0xe2,
+	0x21, 0x76, 0xa0, 0x9c, 0xb8, 0x88, 0xb2, 0xd5, 0xd8, 0x6e, 0x5d, 0xa6, 0x1c, 0x2a, 0x60, 0xdc,
+	0x20, 0x66, 0xf3, 0x69, 0xec, 0x06, 0x53, 0xee, 0x76, 0xca, 0x56, 0x8d, 0xa6, 0x63, 0xf2, 0x1e,
+	0x40, 0xc4, 0xec, 0x70, 0x7c, 0x62, 0x3f, 0x9b, 0xb2, 0x56, 0x85, 0x7b, 0xf6, 0x4e, 0x79, 0x62,
+	0x4f, 0x23, 0x46, 0x33, 0x0c, 0x62, 0xc3, 0xdd, 0x49, 0x1c, 0x59, 0xb1, 0xff, 0x9c, 0x79, 0xee,
+	0x2b, 0x9b, 0x07, 0x12, 0xcb, 0x0f, 0xf8, 0x0f, 0xfa, 0x58, 0x73, 0xfb, 0xc3, 0x65, 0x5b, 0x7f,
+	0x14, 0x47, 0x66, 0x66, 0xc6, 0x10, 0x27, 0xd0, 0xdb, 0x93, 0x65, 0x64, 0xd2, 0x86, 0xca, 0xd4,
+	0x1f, 0xdb, 0x53, 0xd6, 0xaa, 0x73, 0x2d, 0xec, 0x28, 0xcc, 0xa3, 0x92, 0xa2, 0xfd, 0xb3, 0x02,
+	0x55, 0xa9, 0x47, 0xd2, 0x84, 0x8c, 0x26, 0xd5, 0x37, 0x48, 0x0d, 0x4a, 0xbb, 0xfd, 0xe1, 0xae,
+	0xda, 0xe4, 0x4f, 0xa6, 0xfe, 0xbd, 0xa9, 0xae, 0x71, 0xcc, 0xee, 0x53, 0x53, 0x1f, 0x99, 0x94,
+	0x63, 0x54, 0xb2, 0x0e, 0xab, 0x5d, 0x73, 0x78, 0x60, 0xed, 0x75, 0x4d, 0x7d, 0x7f, 0x48, 0x9f,
+	0xaa, 0x05, 0xb2, 0x0a, 0x75, 0x24, 0xf5, 0x8d, 0xc1, 0x13, 0x55, 0xe1, 0x33, 0x70, 0x68, 0x1a,
+	0x66, 0x5f, 0x57, 0x8b, 0x44, 0x85, 0x15, 0x31, 0x63, 0x38, 0x30, 0xf5, 0x81, 0xa9, 0x96, 0x52,
+	0xca, 0xe8, 0xe8, 0xe0, 0xa0, 0x4b, 0x9f, 0xaa, 0x65, 0xb2, 0x06, 0x0d, 0xa4, 0x74, 0x8f, 0xcc,
+	0xc7, 0x43, 0xaa, 0x56, 0x48, 0x03, 0xaa, 0xfb, 0x3d, 0xeb, 0xbb, 0xc7, 0xfa, 0x40, 0xad, 0x92,
+	0x15, 0xa8, 0xed, 0xf7, 0x2c, 0xfd, 0xa0, 0x6b, 0xf4, 0xd5, 0x1a, 0x9f, 0xbd, 0xaf, 0x0f, 0xe9,
+	0x68, 0x64, 0x1d, 0x0e, 0x8d, 0x81, 0xa9, 0xd6, 0x49, 0x1d, 0xca, 0xfb, 0x3d, 0xcb, 0x38, 0x50,
+	0x81, 0x10, 0x68, 0xee, 0xf7, 0xac, 0xc3, 0xc7, 0xc3, 0x81, 0x3e, 0x38, 0x3a, 0xd8, 0xd5, 0xa9,
+	0xda, 0x20, 0xb7, 0x40, 0xe5, 0xb4, 0xe1, 0xc8, 0xec, 0xf6, 0xbb, 0xbd, 0x1e, 0xd5, 0x47, 0x23,
+	0x75, 0x85, 0x4b, 0xbd, 0xdf, 0xb3, 0x68, 0xd7, 0xe4, 0xfb, 0x5a, 0xe5, 0x2f, 0xe4, 0x7b, 0x7f,
+	0xa2, 0x3f, 0x55, 0xd7, 0xf9, 0x2b, 0xf4, 0x81, 0x69, 0x98, 0x4f, 0xad, 0x43, 0x3a, 0x34, 0x87,
+	0xea, 0x06, 0x17, 0xd0, 0x18, 0xf4, 0xf4, 0xef, 0xad, 0x6f, 0xbb, 0xfd, 0x23, 0x5d, 0x25, 0xda,
+	0x8f, 0xe1, 0xf6, 0xd2, 0x33, 0xe1, 0xaa, 0x7b, 0x6c, 0x1e, 0xf4, 0xd5, 0x02, 0x7f, 0xe2, 0x9b,
+	0x52, 0x15, 0xed, 0x0f, 0xa0, 0xc4, 0x5d, 0x86, 0x7c, 0x06, 0xd5, 0xc4, 0x1b, 0x0b, 0xe8, 0x8d,
+	0x77, 0xb3, 0x67, 0x6d, 0xc7, 0x27, 0x9d, 0xc4, 0xe3, 0x12, 0x5c, 0xbb, 0x0b, 0xd5, 0x45, 0x4f,
+	0x53, 0x2e, 0x78, 0x5a, 0xf1, 0x82, 0xa7, 0x95, 0x32, 0x9e, 0x66, 0x43, 0x3d, 0xf5, 0xed, 0x9b,
+	0x47, 0x91, 0x07, 0x50, 0xe2, 0xde, 0xdf, 0x6a, 0xa2, 0x87, 0xac, 0x2d, 0x08, 0x4c, 0x91, 0xa9,
+	0xfd, 0x43, 0x01, 0x4a, 0x3c, 0xda, 0x9e, 0x07, 0xda, 0xc2, 0x15, 0x81, 0x56, 0xb9, 0x32, 0xd0,
+	0x16, 0xaf, 0x15, 0x68, 0x2b, 0x37, 0x0b, 0xb4, 0xd5, 0x4b, 0x02, 0xad, 0xf6, 0x67, 0x45, 0x68,
+	0xe8, 0x38, 0xf3, 0x10, 0x13, 0xfd, 0xfb, 0x50, 0x7c, 0xce, 0xce, 0x50, 0x3f, 0x8d, 0xed, 0x5b,
+	0x99, 0xdd, 0xa6, 0x2a, 0xa4, 0x1c, 0x40, 0xb6, 0x61, 0x45, 0xbc, 0xd0, 0x3a, 0x0e, 0xfd, 0x79,
+	0xd0, 0x52, 0x97, 0xab, 0xa7, 0x21, 0x40, 0xfb, 0x1c, 0x43, 0xde, 0x83, 0xb2, 0xff, 0xc2, 0x63,
+	0x21, 0xc6, 0xc1, 0x3c, 0x98, 0x2b, 0x8f, 0x0a, 0x2e, 0x79, 0x08, 0xa5, 0xe7, 0xae, 0xe7, 0xe0,
+	0x19, 0xe6, 0x23, 0x61, 0x46, 0xd0, 0xce, 0x13, 0xd7, 0x73, 0x28, 0x02, 0xc9, 0x3d, 0xa8, 0xf1,
+	0x5f, 0x8c, 0x7b, 0x65, 0xdc, 0x68, 0x95, 0x8f, 0x79, 0xd0, 0x7b, 0x08, 0xb5, 0x40, 0xc6, 0x10,
+	0x4c, 0x00, 0x8d, 0xed, 0x8d, 0x25, 0xe1, 0x85, 0xa6, 0x20, 0xf2, 0x15, 0xac, 0x84, 0xf6, 0x0b,
+	0x2b, 0x9d, 0xb4, 0x76, 0xf9, 0xa4, 0x46, 0x68, 0xbf, 0x48, 0x23, 0x38, 0x81, 0x52, 0x68, 0x7b,
+	0xcf, 0x5b, 0x64, 0xb3, 0xb0, 0x55, 0xa6, 0xf8, 0xac, 0x7d, 0x01, 0x25, 0x2e, 0x25, 0x8f, 0x08,
+	0xfb, 0x3d, 0xf4, 0xff, 0xee, 0x9e, 0xa9, 0x16, 0x12, 0x7f, 0xfe, 0x96, 0x47, 0x03, 0x45, 0x72,
+	0x0f, 0xf4, 0xd1, 0xa8, 0xbb, 0xaf, 0xab, 0x45, 0xad, 0x07, 0xeb, 0x7b, 0xfe, 0x2c, 0xf0, 0x23,
+	0x37, 0x66, 0xe9, 0xf2, 0xf7, 0xa0, 0xe6, 0x7a, 0x0e, 0x7b, 0x69, 0xb9, 0x0e, 0x9a, 0x56, 0x91,
+	0x56, 0x71, 0x6c, 0x38, 0xdc, 0xe4, 0x4e, 0x65, 0x31, 0x55, 0xe4, 0x26, 0x87, 0x03, 0xed, 0x2f,
+	0x15, 0x28, 0x1b, 0x1c, 0xc1, 0x8d, 0x4f, 0x9e, 0x14, 0x7a, 0x8f, 0x30, 0x4c, 0x10, 0x24, 0x93,
+	0xfb, 0x50, 0x1b, 0x6a, 0xb6, 0x37, 0x66, 0xbc, 0xe2, 0xc3, 0x3c, 0x50, 0xa3, 0xe9, 0x98, 0x7c,
+	0x99, 0xd1, 0x9f, 0x82, 0x2e, 0x7b, 0x2f, 0xa3, 0x0a, 0x7c, 0xc1, 0x12, 0x2d, 0xb6, 0xff, 0xaa,
+	0x90, 0x49, 0x6e, 0xcb, 0x12, 0x4f, 0x1f, 0xea, 0x8e, 0x1b, 0x32, 0xac, 0x23, 0xe5, 0x41, 0x3f,
+	0xb8, 0x74, 0xe1, 0x4e, 0x2f, 0x81, 0xee, 0xd4, 0xbb, 0xa3, 0x3d, 0x7d, 0xd0, 0xe3, 0x99, 0xef,
+	0x7c, 0x01, 0xed, 0x23, 0xa8, 0xa7, 0x10, 0x0c, 0xc7, 0x09, 0x48, 0x2d, 0x70, 0xf5, 0xf6, 0xf4,
+	0x74, 0xac, 0x68, 0x7f, 0xad, 0x40, 0x33, 0xd5, 0xaf, 0xd0, 0xd0, 0x6d, 0xa8, 0xd8, 0x41, 0x90,
+	0xa8, 0xb6, 0x4e, 0xcb, 0x76, 0x10, 0x18, 0x8e, 0x8c, 0x2d, 0x0a, 0x6a, 0x9b, 0xc7, 0x96, 0x4f,
+	0x01, 0x1c, 0x36, 0x71, 0x3d, 0x17, 0x85, 0x2e, 0xa2, 0xc1, 0xab, 0x8b, 0x42, 0xd3, 0x0c, 0x86,
+	0x7c, 0x09, 0xe5, 0x28, 0xb6, 0x63, 0x91, 0x2b, 0x9b, 0xdb, 0xf7, 0x33, 0xe0, 0xbc, 0x08, 0x9d,
+	0x11, 0x87, 0x51, 0x81, 0x26, 0x5f, 0xc1, 0x2d, 0xdf, 0x9b, 0x9e, 0x59, 0xf3, 0x88, 0x59, 0xee,
+	0xc4, 0x0a, 0xd9, 0x0f, 0x73, 0x37, 0x64, 0x4e, 0x3e, 0xa7, 0xae, 0x73, 0xc8, 0x51, 0xc4, 0x8c,
+	0x09, 0x95, 0x7c, 0xed, 0x6b, 0x28, 0xe3, 0x3a, 0x7c, 0xcf, 0xdf, 0x51, 0xc3, 0xd4, 0xad, 0xe1,
+	0xa0, 0xff, 0x54, 0xe8, 0x80, 0xea, 0xdd, 0x9e, 0x85, 0x44, 0x55, 0xe1, 0xc1, 0xbe, 0xa7, 0xf7,
+	0x75, 0x53, 0xef, 0xa9, 0x45, 0x9e, 0x3d, 0x74, 0x4a, 0x87, 0x54, 0x2d, 0x69, 0xff, 0x53, 0x80,
+	0x15, 0x94, 0xe7, 0xd0, 0x8f, 0xe2, 0x89, 0xfb, 0x92, 0xec, 0x41, 0x43, 0x98, 0xdd, 0xa9, 0x2c,
+	0xe8, 0xb9, 0x33, 0x68, 0x8b, 0x7b, 0x96, 0x68, 0x31, 0x90, 0x75, 0xb4, 0x9b, 0x3e, 0x27, 0x21,
+	0x45, 0x41, 0xa7, 0xbf, 0x22, 0xa4, 0xbc, 0x05, 0x95, 0x67, 0x6c, 0xe2, 0x87, 0x22, 0x04, 0xd6,
+	0x76, 0x4a, 0x71, 0x38, 0x67, 0x54, 0xd2, 0xda, 0x36, 0xc0, 0xf9, 0xfa, 0xe4, 0x01, 0xac, 0x26,
+	0xc6, 0x66, 0xa1, 0x71, 0x89, 0x93, 0x5b, 0x49, 0x88, 0x83, 0x5c, 0x75, 0xa3, 0x5c, 0xab, 0xba,
+	0xd1, 0xbe, 0x86, 0xd5, 0x64, 0x3f, 0xe2, 0xfc, 0x54, 0x21, 0x79, 0x01, 0x63, 0xca, 0x82, 0x8c,
+	0xca, 0x45, 0x19, 0xb5, 0x9f, 0x41, 0x6d, 0xe4, 0xd9, 0x41, 0x74, 0xe2, 0xc7, 0xdc, 0x7a, 0xe2,
+	0x48, 0xfa, 0xaa, 0x12, 0x47, 0x9a, 0x06, 0x15, 0x7e, 0x38, 0xf3, 0x88, 0xbb, 0xbf, 0x31, 0xe8,
+	0xee, 0x99, 0xc6, 0xb7, 0xba, 0xfa, 0x06, 0x01, 0xa8, 0xc8, 0xe7, 0x82, 0xa6, 0x41, 0xd3, 0x90,
+	0xed, 0xd8, 0x63, 0x66, 0x3b, 0x2c, 0xe4, 0x12, 0xfc, 0xe0, 0x47, 0x89, 0x04, 0x3f, 0xf8, 0x91,
+	0xf6, 0x17, 0x05, 0x68, 0x98, 0xa1, 0xed, 0x45, 0xb6, 0x30, 0xf7, 0xcf, 0xa0, 0x72, 0x82, 0x58,
+	0x74, 0xa3, 0xc6, 0x82, 0x7f, 0x66, 0x17, 0xa3, 0x12, 0x48, 0xee, 0x40, 0xe5, 0xc4, 0xf6, 0x9c,
+	0xa9, 0xd0, 0x5a, 0x85, 0xca, 0x51, 0x92, 0x1b, 0x95, 0xf3, 0xdc, 0xb8, 0x05, 0x2b, 0x33, 0x3b,
+	0x7c, 0x6e, 0x8d, 0x4f, 0x6c, 0xef, 0x98, 0x45, 0xf2, 0x60, 0xa4, 0x05, 0x36, 0x38, 0x6b, 0x4f,
+	0x70, 0xb4, 0xbf, 0x5f, 0x81, 0xf2, 0x37, 0x73, 0x16, 0x9e, 0x65, 0x04, 0xfa, 0xe0, 0xba, 0x02,
+	0xc9, 0x17, 0x17, 0x2e, 0x4b, 0xca, 0x6f, 0x2f, 0x26, 0x65, 0x22, 0x53, 0x84, 0xc8, 0x95, 0x22,
+	0x0b, 0x7c, 0x9a, 0x09, 0x63, 0xeb, 0x57, 0xd8, 0xda, 0x79, 0x70, 0x7b, 0x08, 0x95, 0x89, 0x3b,
+	0x8d, 0x51, 0x75, 0x8b, 0xd5, 0x08, 0xee, 0xa5, 0xf3, 0x08, 0xd9, 0x54, 0xc2, 0xc8, 0xbb, 0xb0,
+	0x22, 0x2a, 0x59, 0xeb, 0x07, 0xce, 0xc6, 0x82, 0x95, 0xf7, 0xa6, 0x48, 0x13, 0xbb, 0xff, 0x18,
+	0xca, 0x7e, 0xc8, 0x37, 0x5f, 0xc7, 0x25, 0xef, 0x5c, 0x58, 0x72, 0xc8, 0xb9, 0x54, 0x80, 0xc8,
+	0x87, 0x50, 0x3a, 0x71, 0xbd, 0x18, 0xb3, 0x46, 0x73, 0xfb, 0xf6, 0x05, 0xf0, 0x63, 0xd7, 0x8b,
+	0x29, 0x42, 0x78, 0x98, 0x1f, 0xfb, 0x73, 0x2f, 0x6e, 0xdd, 0xc5, 0x0c, 0x23, 0x06, 0xe4, 0x1e,
+	0x54, 0xfc, 0xc9, 0x24, 0x62, 0x31, 0x76, 0x96, 0xe5, 0x9d, 0xc2, 0xa7, 0x54, 0x12, 0xf8, 0x84,
+	0xa9, 0x3b, 0x73, 0x63, 0xec, 0x43, 0xca, 0x54, 0x0c, 0xc8, 0x2e, 0xac, 0x8d, 0xfd, 0x59, 0xe0,
+	0x4e, 0x99, 0x63, 0x8d, 0xe7, 0x61, 0xe4, 0x87, 0xad, 0x77, 0x2e, 0x1c, 0xd3, 0x9e, 0x44, 0xec,
+	0x21, 0x80, 0x36, 0xc7, 0xb9, 0x31, 0x31, 0x60, 0x83, 0x79, 0x8e, 0xb5, 0xb8, 0xce, 0xfd, 0xd7,
+	0xad, 0xb3, 0xce, 0x3c, 0x27, 0x4f, 0x4a, 0xc4, 0xc1, 0x48, 0x68, 0x61, 0xcc, 0x68, 0x6d, 0x60,
+	0x90, 0xb9, 0x77, 0x69, 0xac, 0x14, 0xe2, 0x64, 0xc2, 0xf7, 0x6f, 0xc0, 0x2d, 0x19, 0x22, 0xad,
+	0x80, 0x85, 0x13, 0x36, 0x8e, 0xad, 0x60, 0x6a, 0x7b, 0x58, 0xca, 0xa5, 0xc6, 0x4a, 0x24, 0xe4,
+	0x50, 0x20, 0x0e, 0xa7, 0xb6, 0x47, 0x34, 0xa8, 0x3f, 0x67, 0x67, 0x91, 0xc5, 0x23, 0x29, 0x76,
+	0xae, 0x29, 0xba, 0xc6, 0xe9, 0x43, 0x6f, 0x7a, 0x46, 0x7e, 0x02, 0x8d, 0xf8, 0xdc, 0xdb, 0xb0,
+	0x61, 0x6d, 0xe4, 0x4e, 0x35, 0xe3, 0x8b, 0x34, 0x0b, 0x25, 0xf7, 0xa1, 0x2a, 0x35, 0xd4, 0xba,
+	0x97, 0x5d, 0x3b, 0xa1, 0xf2, 0xc4, 0x3c, 0xb1, 0xdd, 0xa9, 0x7f, 0xca, 0x42, 0x6b, 0x16, 0xb5,
+	0xda, 0xe2, 0xb6, 0x24, 0x21, 0x1d, 0x44, 0xdc, 0x4f, 0xa3, 0x38, 0xf4, 0xbd, 0xe3, 0xd6, 0x26,
+	0xde, 0x93, 0xc8, 0xd1, 0xc5, 0xe0, 0xf7, 0x2e, 0x66, 0xfe, 0x7c, 0xf0, 0xfb, 0x1c, 0xee, 0x60,
+	0x65, 0x66, 0x3d, 0x3b, 0xb3, 0xf2, 0x68, 0x0d, 0xd1, 0x1b, 0xc8, 0xdd, 0x3d, 0x3b, 0xcc, 0x4e,
+	0x6a, 0x43, 0xcd, 0x71, 0xa3, 0xd8, 0xf5, 0xc6, 0x71, 0xab, 0x85, 0xef, 0x4c, 0xc7, 0xe4, 0x33,
+	0xb8, 0x3d, 0x73, 0x3d, 0x2b, 0xb2, 0x27, 0xcc, 0x8a, 0x5d, 0xee, 0x9b, 0x6c, 0xec, 0x7b, 0x4e,
+	0xd4, 0x7a, 0x80, 0x82, 0x93, 0x99, 0xeb, 0x8d, 0xec, 0x09, 0x33, 0xdd, 0x19, 0x1b, 0x09, 0x0e,
+	0xf9, 0x08, 0xd6, 0x11, 0x1e, 0xb2, 0x60, 0xea, 0x8e, 0x6d, 0xf1, 0xfa, 0x1f, 0xe1, 0xeb, 0xd7,
+	0x38, 0x83, 0x0a, 0x3a, 0xbe, 0xfa, 0x63, 0x68, 0x06, 0x2c, 0x8c, 0xdc, 0x28, 0xb6, 0xa4, 0x45,
+	0xbf, 0x97, 0xd5, 0xda, 0xaa, 0x64, 0x0e, 0x91, 0xd7, 0xfe, 0xcf, 0x02, 0x54, 0x84, 0x73, 0x92,
+	0x4f, 0x41, 0xf1, 0x03, 0xbc, 0x06, 0x69, 0x6e, 0x6f, 0x5e, 0xe2, 0xc1, 0x9d, 0x61, 0xc0, 0xeb,
+	0x5e, 0x3f, 0xa4, 0x8a, 0x1f, 0xdc, 0xb8, 0x28, 0xd4, 0xfe, 0x10, 0x6a, 0xc9, 0x02, 0xbc, 0xbc,
+	0xe8, 0xeb, 0xa3, 0x91, 0x65, 0x3e, 0xee, 0x0e, 0xd4, 0x02, 0xb9, 0x03, 0x24, 0x1d, 0x5a, 0x43,
+	0x6a, 0xe9, 0xdf, 0x1c, 0x75, 0xfb, 0xaa, 0x82, 0x5d, 0x1a, 0xd5, 0xbb, 0xa6, 0x4e, 0x05, 0xb2,
+	0x48, 0xee, 0xc1, 0xed, 0x2c, 0xe5, 0x1c, 0x5c, 0xc2, 0x14, 0x8c, 0x8f, 0x65, 0x52, 0x01, 0xc5,
+	0x18, 0xa8, 0x15, 0x9e, 0x16, 0xf4, 0xef, 0x8d, 0x91, 0x39, 0x52, 0xab, 0xed, 0xbf, 0x29, 0x40,
+	0x19, 0xc3, 0x06, 0x3f, 0x9f, 0x54, 0x72, 0x71, 0x5d, 0x73, 0x5e, 0xb9, 0x1a, 0xd9, 0x92, 0xaa,
+	0x81, 0x01, 0x65, 0x73, 0x79, 0xf4, 0xf9, 0xb5, 0xd6, 0x53, 0x3f, 0x85, 0x12, 0x8f, 0x52, 0xbc,
+	0x43, 0x1c, 0xd2, 0x9e, 0x4e, 0xad, 0x47, 0x06, 0x1d, 0xf1, 0x2a, 0x97, 0x40, 0xb3, 0x3b, 0xd8,
+	0xd3, 0x47, 0xe6, 0x30, 0xa1, 0xa1, 0x56, 0x1e, 0x19, 0x7d, 0x33, 0x45, 0x15, 0xb5, 0x9f, 0xd7,
+	0x60, 0x35, 0x89, 0x09, 0x22, 0x82, 0x3e, 0x82, 0x46, 0x10, 0xba, 0x33, 0x3b, 0x3c, 0x8b, 0xc6,
+	0xb6, 0x87, 0x49, 0x01, 0xb6, 0x7f, 0xb4, 0x24, 0xaa, 0x88, 0x1d, 0x1d, 0x0a, 0xec, 0x68, 0x6c,
+	0x7b, 0x34, 0x3b, 0x91, 0xf4, 0x61, 0x75, 0xc6, 0xc2, 0x63, 0xf6, 0x7b, 0xbe, 0xeb, 0xe1, 0x4a,
+	0x55, 0x8c, 0xc8, 0xef, 0x5f, 0xba, 0xd2, 0x01, 0x47, 0xff, 0x8e, 0xef, 0x7a, 0xb8, 0x56, 0x7e,
+	0x32, 0xf9, 0x04, 0xea, 0xa2, 0x12, 0x72, 0xd8, 0x04, 0x63, 0xc5, 0xb2, 0xda, 0x4f, 0xd4, 0xe8,
+	0x3d, 0x36, 0xc9, 0xc4, 0x65, 0xb8, 0x34, 0x2e, 0x37, 0xb2, 0x71, 0xf9, 0xcd, 0x6c, 0x2c, 0x5a,
+	0x11, 0x55, 0x78, 0x1a, 0x84, 0x2e, 0x38, 0x7c, 0x6b, 0x89, 0xc3, 0x77, 0x60, 0x23, 0xf1, 0x55,
+	0xcb, 0xf5, 0x26, 0xee, 0x4b, 0x2b, 0x72, 0x5f, 0x89, 0xd8, 0x53, 0xa6, 0xeb, 0x09, 0xcb, 0xe0,
+	0x9c, 0x91, 0xfb, 0x8a, 0x11, 0x23, 0xe9, 0xe0, 0x64, 0x0e, 0x5c, 0xc5, 0xab, 0xc9, 0xf7, 0x2e,
+	0x55, 0x8f, 0x68, 0xbe, 0x64, 0x46, 0xcc, 0x4d, 0x6d, 0xff, 0x52, 0x81, 0x46, 0xe6, 0x1c, 0x78,
+	0xf6, 0x16, 0xca, 0x42, 0x61, 0xc5, 0x55, 0x94, 0x50, 0x1f, 0x4a, 0xfa, 0x26, 0xd4, 0xa3, 0xd8,
+	0x0e, 0x63, 0x8b, 0x17, 0x57, 0xb2, 0xdd, 0x45, 0xc2, 0x13, 0x76, 0x46, 0x3e, 0x80, 0x35, 0xc1,
+	0x74, 0xbd, 0xf1, 0x74, 0x1e, 0xb9, 0xa7, 0xa2, 0x99, 0xaf, 0xd1, 0x26, 0x92, 0x8d, 0x84, 0x4a,
+	0xee, 0x42, 0x95, 0x67, 0x21, 0xbe, 0x86, 0x68, 0xfa, 0x2a, 0xcc, 0x73, 0xf8, 0x0a, 0x0f, 0x60,
+	0x95, 0x33, 0xce, 0xe7, 0x57, 0xc4, 0x2d, 0x33, 0xf3, 0x9c, 0xf3, 0xd9, 0x1d, 0xd8, 0x10, 0xaf,
+	0x09, 0x44, 0xf1, 0x2a, 0x2b, 0xdc, 0x3b, 0xa8, 0xd8, 0x75, 0x64, 0xc9, 0xb2, 0x56, 0x14, 0x9c,
+	0x1f, 0x01, 0xcf, 0x5e, 0x0b, 0xe8, 0xbb, 0x22, 0x94, 0x31, 0xcf, 0xc9, 0x61, 0x77, 0xe1, 0x1d,
+	0x8e, 0x9d, 0x7b, 0x76, 0x10, 0x4c, 0x5d, 0xe6, 0x58, 0x53, 0xff, 0x18, 0x43, 0x66, 0x14, 0xdb,
+	0xb3, 0xc0, 0x9a, 0x47, 0xad, 0x0d, 0x0c, 0x99, 0x6d, 0xe6, 0x39, 0x47, 0x09, 0xa8, 0xef, 0x1f,
+	0x9b, 0x09, 0xe4, 0x28, 0x6a, 0xff, 0x3e, 0xac, 0xe6, 0xec, 0x71, 0x41, 0xa7, 0x35, 0x74, 0xfe,
+	0x8c, 0x4e, 0xdf, 0x85, 0x95, 0x20, 0x64, 0xe7, 0xa2, 0xd5, 0x51, 0xb4, 0x86, 0xa0, 0x09, 0xb1,
+	0xb6, 0x60, 0x05, 0x79, 0x96, 0x20, 0xe6, 0xf3, 0x63, 0x03, 0x59, 0x87, 0xc8, 0x69, 0xbf, 0x80,
+	0x95, 0xec, 0x69, 0x93, 0x77, 0x33, 0x69, 0xa1, 0x99, 0xcb, 0x93, 0x69, 0x76, 0x48, 0x2a, 0xb2,
+	0xf5, 0x4b, 0x2a, 0x32, 0x72, 0x9d, 0x8a, 0x4c, 0xfb, 0x2f, 0xd9, 0x9c, 0x65, 0x2a, 0x84, 0x9f,
+	0x41, 0x2d, 0x90, 0xf5, 0x38, 0x5a, 0x52, 0xfe, 0x12, 0x3e, 0x0f, 0xee, 0x24, 0x95, 0x3b, 0x4d,
+	0xe7, 0xb4, 0xff, 0x56, 0x81, 0x5a, 0x5a, 0xd0, 0xe7, 0x2c, 0xef, 0xcd, 0x05, 0xcb, 0x3b, 0x90,
+	0x1a, 0x16, 0x0a, 0x7c, 0x1b, 0xa3, 0xc5, 0x27, 0xaf, 0x7f, 0xd7, 0xc5, 0xb6, 0xe7, 0x34, 0xdb,
+	0xf6, 0x6c, 0xbe, 0xae, 0xed, 0xf9, 0xe4, 0xa2, 0xc1, 0xbf, 0x95, 0xe9, 0x2d, 0x16, 0xcc, 0xbe,
+	0xfd, 0x7d, 0xae, 0x0f, 0xca, 0x26, 0x84, 0x77, 0xc4, 0x7e, 0xd2, 0x84, 0x90, 0xb6, 0x3f, 0xf7,
+	0xaf, 0xd7, 0xfe, 0x6c, 0x43, 0x45, 0xea, 0xfc, 0x0e, 0x54, 0x64, 0x4d, 0x27, 0x1b, 0x04, 0x31,
+	0x3a, 0x6f, 0x10, 0x0a, 0xb2, 0x4e, 0xd7, 0x7e, 0xae, 0x40, 0x59, 0x0f, 0x43, 0x3f, 0xd4, 0xfe,
+	0x48, 0x81, 0x3a, 0x3e, 0xed, 0xf9, 0x0e, 0xe3, 0xd9, 0x60, 0xb7, 0xdb, 0xb3, 0xa8, 0xfe, 0xcd,
+	0x91, 0x8e, 0xd9, 0xa0, 0x0d, 0x77, 0xf6, 0x86, 0x83, 0xbd, 0x23, 0x4a, 0xf5, 0x81, 0x69, 0x99,
+	0xb4, 0x3b, 0x18, 0xf1, 0xb6, 0x67, 0x38, 0x50, 0x15, 0x9e, 0x29, 0x8c, 0x81, 0xa9, 0xd3, 0x41,
+	0xb7, 0x6f, 0x89, 0x56, 0xb4, 0x88, 0x77, 0xb3, 0xba, 0xde, 0xb3, 0xf0, 0xd6, 0x51, 0x2d, 0xf1,
+	0x96, 0xd5, 0x34, 0x0e, 0xf4, 0xe1, 0x91, 0xa9, 0x96, 0xc9, 0x6d, 0x58, 0x3f, 0xd4, 0xe9, 0x81,
+	0x31, 0x1a, 0x19, 0xc3, 0x81, 0xd5, 0xd3, 0x07, 0x86, 0xde, 0x53, 0x2b, 0x7c, 0x9d, 0x5d, 0x63,
+	0xdf, 0xec, 0xee, 0xf6, 0x75, 0xb9, 0x4e, 0x95, 0x6c, 0xc2, 0x5b, 0x7b, 0xc3, 0x83, 0x03, 0xc3,
+	0x34, 0xf5, 0x9e, 0xb5, 0x7b, 0x64, 0x5a, 0x23, 0xd3, 0xe8, 0xf7, 0xad, 0xee, 0xe1, 0x61, 0xff,
+	0x29, 0x4f, 0x60, 0x35, 0x72, 0x17, 0x36, 0xf6, 0xba, 0x87, 0xdd, 0x5d, 0xa3, 0x6f, 0x98, 0x4f,
+	0xad, 0x9e, 0x31, 0xe2, 0xf3, 0x7b, 0x6a, 0x9d, 0x27, 0x6c, 0x93, 0x3e, 0xb5, 0xba, 0x7d, 0x14,
+	0xcd, 0xd4, 0xad, 0xdd, 0xee, 0xde, 0x13, 0x7d, 0xd0, 0x53, 0x81, 0x0b, 0x30, 0xea, 0x3e, 0xd2,
+	0x2d, 0x2e, 0x92, 0x65, 0x0e, 0x87, 0xd6, 0xb0, 0xdf, 0x53, 0x1b, 0xda, 0xbf, 0x14, 0xa1, 0xb4,
+	0xe7, 0x47, 0x31, 0xf7, 0x46, 0xe1, 0xac, 0x2f, 0x42, 0x37, 0x66, 0xa2, 0x7f, 0x2b, 0x53, 0xd1,
+	0x4b, 0x7f, 0x87, 0x24, 0x1e, 0x50, 0x32, 0x10, 0xeb, 0xd9, 0x19, 0xc7, 0x29, 0x88, 0x5b, 0x3b,
+	0xc7, 0xed, 0x72, 0xb2, 0x88, 0x68, 0x78, 0x85, 0x23, 0xd7, 0x2b, 0x22, 0x4e, 0x06, 0x61, 0xb9,
+	0xe0, 0xc7, 0x40, 0xb2, 0x20, 0xb9, 0x62, 0x09, 0x91, 0x6a, 0x06, 0x29, 0x96, 0xdc, 0x01, 0x18,
+	0xfb, 0xb3, 0x99, 0x1b, 0x8f, 0xfd, 0x28, 0x96, 0x5f, 0xc8, 0xda, 0x39, 0x63, 0x8f, 0x62, 0x6e,
+	0xf1, 0x33, 0x37, 0xe6, 0x8f, 0x34, 0x83, 0x26, 0x3b, 0x70, 0xcf, 0x0e, 0x82, 0xd0, 0x7f, 0xe9,
+	0xce, 0xec, 0x98, 0x59, 0xdc, 0x73, 0xed, 0x63, 0x66, 0x39, 0x6c, 0x1a, 0xdb, 0xd8, 0x13, 0x95,
+	0xe9, 0xdd, 0x0c, 0x60, 0x24, 0xf8, 0x3d, 0xce, 0xe6, 0x71, 0xd7, 0x75, 0xac, 0x88, 0xfd, 0x30,
+	0xe7, 0x1e, 0x60, 0xcd, 0x03, 0xc7, 0xe6, 0x62, 0xd6, 0x45, 0x96, 0x72, 0x9d, 0x91, 0xe4, 0x1c,
+	0x09, 0x46, 0xfb, 0x15, 0xc0, 0xb9, 0x14, 0x64, 0x1b, 0x6e, 0xf3, 0x3a, 0x9e, 0x45, 0x31, 0x73,
+	0x2c, 0xb9, 0xdb, 0x60, 0x1e, 0x47, 0x18, 0xe2, 0xcb, 0x74, 0x23, 0x65, 0xca, 0x9b, 0xc2, 0x79,
+	0x1c, 0x91, 0x9f, 0x40, 0xeb, 0xc2, 0x1c, 0x87, 0x4d, 0x19, 0x7f, 0x6d, 0x15, 0xa7, 0xdd, 0x59,
+	0x98, 0xd6, 0x13, 0x5c, 0xed, 0x4f, 0x14, 0x80, 0x7d, 0x16, 0x53, 0xc1, 0xcd, 0x34, 0xb6, 0x95,
+	0xeb, 0x36, 0xb6, 0xef, 0x27, 0x17, 0x08, 0xc5, 0xab, 0x63, 0xc0, 0x42, 0x97, 0xa1, 0xdc, 0xa4,
+	0xcb, 0xc8, 0x35, 0x11, 0xc5, 0x2b, 0x9a, 0x88, 0x52, 0xae, 0x89, 0xf8, 0x18, 0x9a, 0xf6, 0x74,
+	0xea, 0xbf, 0xe0, 0x05, 0x0d, 0x0b, 0x43, 0xe6, 0xa0, 0x11, 0x9c, 0xd7, 0xdb, 0xc8, 0xec, 0x49,
+	0x9e, 0xf6, 0xe7, 0x0a, 0x34, 0x50, 0x15, 0x51, 0xe0, 0x7b, 0x11, 0x23, 0x5f, 0x42, 0x45, 0x5e,
+	0x44, 0x8b, 0x8b, 0xfc, 0xb7, 0x33, 0xb2, 0x66, 0x70, 0xb2, 0x68, 0xa0, 0x12, 0xcc, 0x33, 0x42,
+	0xe6, 0x75, 0x97, 0x2b, 0x25, 0x45, 0x91, 0xfb, 0x50, 0x73, 0x3d, 0x4b, 0xb4, 0xd4, 0x95, 0x4c,
+	0x58, 0xac, 0xba, 0x1e, 0xd6, 0xb2, 0xed, 0x57, 0x50, 0x11, 0x2f, 0x21, 0x9d, 0x54, 0xa6, 0x8b,
+	0xfa, 0xcb, 0xdc, 0x1c, 0xa7, 0xc2, 0xc8, 0xc3, 0x29, 0xbd, 0x2e, 0x40, 0xb7, 0xa0, 0x7a, 0xca,
+	0x9b, 0x0f, 0xbc, 0xf4, 0xe3, 0xea, 0x4d, 0x86, 0xda, 0x1f, 0x97, 0x00, 0x0e, 0xe7, 0x4b, 0x0c,
+	0xa4, 0x71, 0x5d, 0x03, 0xe9, 0xe4, 0xf4, 0xf8, 0x7a, 0x99, 0x7f, 0x75, 0x43, 0x59, 0xd2, 0x69,
+	0x17, 0x6f, 0xda, 0x69, 0xdf, 0x87, 0x6a, 0x1c, 0xce, 0xb9, 0xa3, 0x08, 0x63, 0x4a, 0x5b, 0x5a,
+	0x49, 0x25, 0x6f, 0x42, 0x79, 0xe2, 0x87, 0x63, 0x86, 0x8e, 0x95, 0xb2, 0x05, 0xed, 0xc2, 0x65,
+	0x52, 0xed, 0xb2, 0xcb, 0x24, 0xde, 0xa0, 0x45, 0xf2, 0x1e, 0x0d, 0x0b, 0x99, 0x7c, 0x83, 0x96,
+	0x5c, 0xb1, 0xd1, 0x14, 0x44, 0xbe, 0x81, 0xa6, 0x3d, 0x8f, 0x7d, 0xcb, 0xe5, 0x15, 0xda, 0xd4,
+	0x1d, 0x9f, 0x61, 0xd9, 0xdd, 0xcc, 0x7f, 0xaf, 0x4f, 0x0f, 0xaa, 0xd3, 0x9d, 0xc7, 0xbe, 0xe1,
+	0x1c, 0x22, 0x72, 0xa7, 0x2a, 0x93, 0x12, 0x5d, 0xb1, 0x33, 0x64, 0xed, 0xc7, 0xb0, 0x92, 0x85,
+	0xf1, 0x04, 0x24, 0x81, 0xea, 0x1b, 0x3c, 0x3b, 0x8d, 0x78, 0x6a, 0x1b, 0x98, 0x46, 0xb7, 0xaf,
+	0x16, 0xb4, 0x18, 0x1a, 0xb8, 0xbc, 0xf4, 0x8e, 0xeb, 0xba, 0xfd, 0x03, 0x28, 0x61, 0xf8, 0x55,
+	0x2e, 0x7c, 0x0f, 0xc1, 0x98, 0x8b, 0xcc, 0xbc, 0xf9, 0x15, 0xb3, 0xe6, 0xf7, 0xdf, 0x05, 0x58,
+	0x31, 0xfd, 0xf9, 0xf8, 0xe4, 0xa2, 0x01, 0xc2, 0xaf, 0x3b, 0x42, 0x2d, 0x31, 0x1f, 0xe5, 0xa6,
+	0xe6, 0x93, 0x5a, 0x47, 0x71, 0x89, 0x75, 0xdc, 0xf4, 0xcc, 0xb5, 0x2f, 0x60, 0x55, 0x6e, 0x5e,
+	0x6a, 0x3d, 0xd1, 0x66, 0xe1, 0x0a, 0x6d, 0x6a, 0xbf, 0x50, 0x60, 0x55, 0xc4, 0xf7, 0xff, 0xbb,
+	0xd2, 0x2a, 0x37, 0x0c, 0xeb, 0xe5, 0x1b, 0x5d, 0x1e, 0xfd, 0xbf, 0xf4, 0x34, 0x6d, 0x08, 0xcd,
+	0x44, 0x7d, 0x37, 0x50, 0xfb, 0x15, 0x46, 0xfc, 0x8b, 0x02, 0x34, 0x06, 0xec, 0xe5, 0x92, 0x20,
+	0x5a, 0xbe, 0xee, 0x71, 0x7c, 0x98, 0x2b, 0x57, 0x1b, 0xdb, 0xeb, 0x59, 0x19, 0xc4, 0xd5, 0x63,
+	0x52, 0xc1, 0xa6, 0xb7, 0xa8, 0xca, 0xf2, 0x5b, 0xd4, 0xd2, 0x62, 0xb7, 0x9e, 0xb9, 0xc5, 0x2b,
+	0x2e, 0xbb, 0xc5, 0xd3, 0xfe, 0xad, 0x08, 0x0d, 0x6c, 0x90, 0x29, 0x8b, 0xe6, 0xd3, 0x38, 0x27,
+	0x4c, 0xe1, 0x6a, 0x61, 0x3a, 0x50, 0x09, 0x71, 0x92, 0x74, 0xa5, 0x4b, 0x83, 0xbf, 0x40, 0x61,
+	0x6b, 0xfc, 0xdc, 0x0d, 0x02, 0xe6, 0x58, 0x82, 0x92, 0x14, 0x30, 0x4d, 0x49, 0x16, 0x22, 0x44,
+	0xbc, 0xfc, 0x9c, 0xf9, 0x21, 0x4b, 0x51, 0x45, 0xbc, 0x4f, 0x68, 0x70, 0x5a, 0x02, 0xc9, 0xdd,
+	0x37, 0x88, 0xca, 0xe0, 0xfc, 0xbe, 0x21, 0xed, 0x35, 0x91, 0x5b, 0x47, 0xae, 0xe8, 0x35, 0x91,
+	0xcd, 0xbb, 0xa8, 0x99, 0x3d, 0x9d, 0x5a, 0x7e, 0x10, 0xa1, 0xd3, 0xd4, 0x68, 0x0d, 0x09, 0xc3,
+	0x20, 0x22, 0x5f, 0x43, 0x7a, 0x5d, 0x2c, 0x6f, 0xc9, 0xc5, 0x39, 0xb6, 0x2e, 0xbb, 0x58, 0xa0,
+	0xab, 0xe3, 0xdc, 0xfd, 0xcf, 0x92, 0x1b, 0xea, 0xca, 0x4d, 0x6f, 0xa8, 0x1f, 0x42, 0x59, 0xc4,
+	0xa8, 0xda, 0xeb, 0x62, 0x94, 0xc0, 0x65, 0xed, 0xb3, 0x91, 0xb7, 0xcf, 0x5f, 0x16, 0x80, 0x74,
+	0xa7, 0x53, 0x7f, 0x6c, 0xc7, 0xcc, 0x70, 0xa2, 0x8b, 0x66, 0x7a, 0xed, 0xcf, 0x2e, 0x9f, 0x41,
+	0x7d, 0xe6, 0x3b, 0x6c, 0x6a, 0x25, 0xdf, 0x94, 0x2e, 0xad, 0x7e, 0x10, 0xc6, 0x5b, 0x52, 0x02,
+	0x25, 0xbc, 0xc4, 0x51, 0xb0, 0xee, 0xc0, 0x67, 0xde, 0x84, 0xcd, 0xec, 0x97, 0xb2, 0x14, 0xe1,
+	0x8f, 0xa4, 0x03, 0xd5, 0x90, 0x45, 0x2c, 0x3c, 0x65, 0x57, 0x16, 0x55, 0x09, 0x48, 0x7b, 0x06,
+	0x1b, 0xb9, 0x1d, 0x49, 0x47, 0xbe, 0x85, 0x5f, 0x2b, 0xc3, 0x58, 0x7e, 0xb4, 0x12, 0x03, 0xfe,
+	0x3a, 0xe6, 0x25, 0x9f, 0x41, 0xf9, 0x63, 0xea, 0xf0, 0xc5, 0xab, 0xe2, 0xec, 0x1e, 0xa8, 0x59,
+	0x4d, 0xbb, 0x63, 0x0c, 0x36, 0xf2, 0x54, 0x0a, 0xd7, 0x3b, 0x15, 0xed, 0xef, 0x0a, 0xb0, 0xde,
+	0x75, 0x1c, 0xf1, 0x77, 0xc3, 0x25, 0xaa, 0x2f, 0x5e, 0x57, 0xf5, 0x0b, 0x81, 0x58, 0x84, 0x89,
+	0x6b, 0x05, 0xe2, 0x0f, 0xa1, 0x92, 0xd6, 0x5a, 0xc5, 0x05, 0x77, 0x16, 0x72, 0x51, 0x09, 0xd0,
+	0x6e, 0x01, 0xc9, 0x0a, 0x2b, 0xb4, 0xaa, 0xfd, 0x69, 0x11, 0xee, 0xee, 0xb2, 0x63, 0xd7, 0xcb,
+	0xbe, 0xe2, 0x57, 0xdf, 0xc9, 0xc5, 0x4f, 0x65, 0x9f, 0xc1, 0xba, 0x28, 0xe4, 0x93, 0x7f, 0x62,
+	0x59, 0xec, 0x58, 0x7e, 0x9d, 0x94, 0xb1, 0x6a, 0x0d, 0xf9, 0x07, 0x92, 0xad, 0xe3, 0x7f, 0xc5,
+	0x1c, 0x3b, 0xb6, 0x9f, 0xd9, 0x11, 0xb3, 0x5c, 0x47, 0xfe, 0x59, 0x06, 0x12, 0x92, 0xe1, 0x90,
+	0x21, 0x94, 0xb8, 0x0d, 0xa2, 0xeb, 0x36, 0xb7, 0xb7, 0x33, 0x62, 0x5d, 0xb2, 0x95, 0xac, 0x02,
+	0x0f, 0x7c, 0x87, 0xed, 0x54, 0x8f, 0x06, 0x4f, 0x06, 0xc3, 0xef, 0x06, 0x14, 0x17, 0x22, 0x06,
+	0xdc, 0x0a, 0x42, 0x76, 0xea, 0xfa, 0xf3, 0xc8, 0xca, 0x9e, 0x44, 0xf5, 0xca, 0x94, 0xb8, 0x91,
+	0xcc, 0xc9, 0x10, 0xb5, 0x9f, 0xc2, 0xda, 0xc2, 0xcb, 0x78, 0x6d, 0x26, 0x5f, 0xa7, 0xbe, 0x41,
+	0x56, 0xa1, 0x8e, 0x1f, 0xbb, 0x97, 0x7f, 0xfb, 0xd6, 0xfe, 0xb5, 0x80, 0x57, 0x4c, 0x33, 0x37,
+	0xbe, 0x59, 0x06, 0xfb, 0xcd, 0x7c, 0x06, 0x83, 0xed, 0x77, 0xf3, 0xe6, 0x9b, 0x59, 0xb0, 0xf3,
+	0xad, 0x00, 0xa6, 0x41, 0xa4, 0x6d, 0x43, 0x55, 0xd2, 0xc8, 0x6f, 0xc1, 0x5a, 0xe8, 0xfb, 0x71,
+	0xd2, 0x89, 0x8a, 0x0e, 0xe4, 0xf2, 0x3f, 0xdb, 0xac, 0x72, 0xb0, 0x48, 0x06, 0x4f, 0xf2, 0xbd,
+	0x48, 0x59, 0xfc, 0x0d, 0x44, 0x0e, 0x77, 0x1b, 0xbf, 0x5b, 0x4f, 0xff, 0xb7, 0xfb, 0xbf, 0x01,
+	0x00, 0x00, 0xff, 0xff, 0x35, 0x9f, 0x30, 0x98, 0xf2, 0x2b, 0x00, 0x00,
+}
diff --git a/v2/internal/datastore/datastore_v3.proto b/v2/internal/datastore/datastore_v3.proto
new file mode 100755
index 0000000..497b4d9
--- /dev/null
+++ b/v2/internal/datastore/datastore_v3.proto
@@ -0,0 +1,551 @@
+syntax = "proto2";
+option go_package = "datastore";
+
+package appengine;
+
+message Action{}
+
+message PropertyValue {
+  optional int64 int64Value = 1;
+  optional bool booleanValue = 2;
+  optional string stringValue = 3;
+  optional double doubleValue = 4;
+
+  optional group PointValue = 5 {
+    required double x = 6;
+    required double y = 7;
+  }
+
+  optional group UserValue = 8 {
+    required string email = 9;
+    required string auth_domain = 10;
+    optional string nickname = 11;
+    optional string federated_identity = 21;
+    optional string federated_provider = 22;
+  }
+
+  optional group ReferenceValue = 12 {
+    required string app = 13;
+    optional string name_space = 20;
+    repeated group PathElement = 14 {
+      required string type = 15;
+      optional int64 id = 16;
+      optional string name = 17;
+    }
+  }
+}
+
+message Property {
+  enum Meaning {
+    NO_MEANING = 0;
+    BLOB = 14;
+    TEXT = 15;
+    BYTESTRING = 16;
+
+    ATOM_CATEGORY = 1;
+    ATOM_LINK = 2;
+    ATOM_TITLE = 3;
+    ATOM_CONTENT = 4;
+    ATOM_SUMMARY = 5;
+    ATOM_AUTHOR = 6;
+
+    GD_WHEN = 7;
+    GD_EMAIL = 8;
+    GEORSS_POINT = 9;
+    GD_IM = 10;
+
+    GD_PHONENUMBER = 11;
+    GD_POSTALADDRESS = 12;
+
+    GD_RATING = 13;
+
+    BLOBKEY = 17;
+    ENTITY_PROTO = 19;
+
+    INDEX_VALUE = 18;
+  };
+
+  optional Meaning meaning = 1 [default = NO_MEANING];
+  optional string meaning_uri = 2;
+
+  required string name = 3;
+
+  required PropertyValue value = 5;
+
+  required bool multiple = 4;
+
+  optional bool searchable = 6 [default=false];
+
+  enum FtsTokenizationOption {
+    HTML = 1;
+    ATOM = 2;
+  }
+
+  optional FtsTokenizationOption fts_tokenization_option = 8;
+
+  optional string locale = 9 [default = "en"];
+}
+
+message Path {
+  repeated group Element = 1 {
+    required string type = 2;
+    optional int64 id = 3;
+    optional string name = 4;
+  }
+}
+
+message Reference {
+  required string app = 13;
+  optional string name_space = 20;
+  required Path path = 14;
+}
+
+message User {
+  required string email = 1;
+  required string auth_domain = 2;
+  optional string nickname = 3;
+  optional string federated_identity = 6;
+  optional string federated_provider = 7;
+}
+
+message EntityProto {
+  required Reference key = 13;
+  required Path entity_group = 16;
+  optional User owner = 17;
+
+  enum Kind {
+    GD_CONTACT = 1;
+    GD_EVENT = 2;
+    GD_MESSAGE = 3;
+  }
+  optional Kind kind = 4;
+  optional string kind_uri = 5;
+
+  repeated Property property = 14;
+  repeated Property raw_property = 15;
+
+  optional int32 rank = 18;
+}
+
+message CompositeProperty {
+  required int64 index_id = 1;
+  repeated string value = 2;
+}
+
+message Index {
+  required string entity_type = 1;
+  required bool ancestor = 5;
+  repeated group Property = 2 {
+    required string name = 3;
+    enum Direction {
+      ASCENDING = 1;
+      DESCENDING = 2;
+    }
+    optional Direction direction = 4 [default = ASCENDING];
+  }
+}
+
+message CompositeIndex {
+  required string app_id = 1;
+  required int64 id = 2;
+  required Index definition = 3;
+
+  enum State {
+    WRITE_ONLY = 1;
+    READ_WRITE = 2;
+    DELETED = 3;
+    ERROR = 4;
+  }
+  required State state = 4;
+
+  optional bool only_use_if_required = 6 [default = false];
+}
+
+message IndexPostfix {
+  message IndexValue {
+    required string property_name = 1;
+    required PropertyValue value = 2;
+  }
+
+  repeated IndexValue index_value = 1;
+
+  optional Reference key = 2;
+
+  optional bool before = 3 [default=true];
+}
+
+message IndexPosition {
+  optional string key = 1;
+
+  optional bool before = 2 [default=true];
+}
+
+message Snapshot {
+  enum Status {
+    INACTIVE = 0;
+    ACTIVE = 1;
+  }
+
+  required int64 ts = 1;
+}
+
+message InternalHeader {
+  optional string qos = 1;
+}
+
+message Transaction {
+  optional InternalHeader header = 4;
+  required fixed64 handle = 1;
+  required string app = 2;
+  optional bool mark_changes = 3 [default = false];
+}
+
+message Query {
+  optional InternalHeader header = 39;
+
+  required string app = 1;
+  optional string name_space = 29;
+
+  optional string kind = 3;
+  optional Reference ancestor = 17;
+
+  repeated group Filter = 4 {
+    enum Operator {
+      LESS_THAN = 1;
+      LESS_THAN_OR_EQUAL = 2;
+      GREATER_THAN = 3;
+      GREATER_THAN_OR_EQUAL = 4;
+      EQUAL = 5;
+      IN = 6;
+      EXISTS = 7;
+    }
+
+    required Operator op = 6;
+    repeated Property property = 14;
+  }
+
+  optional string search_query = 8;
+
+  repeated group Order = 9 {
+    enum Direction {
+      ASCENDING = 1;
+      DESCENDING = 2;
+    }
+
+    required string property = 10;
+    optional Direction direction = 11 [default = ASCENDING];
+  }
+
+  enum Hint {
+    ORDER_FIRST = 1;
+    ANCESTOR_FIRST = 2;
+    FILTER_FIRST = 3;
+  }
+  optional Hint hint = 18;
+
+  optional int32 count = 23;
+
+  optional int32 offset = 12 [default = 0];
+
+  optional int32 limit = 16;
+
+  optional CompiledCursor compiled_cursor = 30;
+  optional CompiledCursor end_compiled_cursor = 31;
+
+  repeated CompositeIndex composite_index = 19;
+
+  optional bool require_perfect_plan = 20 [default = false];
+
+  optional bool keys_only = 21 [default = false];
+
+  optional Transaction transaction = 22;
+
+  optional bool compile = 25 [default = false];
+
+  optional int64 failover_ms = 26;
+
+  optional bool strong = 32;
+
+  repeated string property_name = 33;
+
+  repeated string group_by_property_name = 34;
+
+  optional bool distinct = 24;
+
+  optional int64 min_safe_time_seconds = 35;
+
+  repeated string safe_replica_name = 36;
+
+  optional bool persist_offset = 37 [default=false];
+}
+
+message CompiledQuery {
+  required group PrimaryScan = 1 {
+    optional string index_name = 2;
+
+    optional string start_key = 3;
+    optional bool start_inclusive = 4;
+    optional string end_key = 5;
+    optional bool end_inclusive = 6;
+
+    repeated string start_postfix_value = 22;
+    repeated string end_postfix_value = 23;
+
+    optional int64 end_unapplied_log_timestamp_us = 19;
+  }
+
+  repeated group MergeJoinScan = 7 {
+    required string index_name = 8;
+
+    repeated string prefix_value = 9;
+
+    optional bool value_prefix = 20 [default=false];
+  }
+
+  optional Index index_def = 21;
+
+  optional int32 offset = 10 [default = 0];
+
+  optional int32 limit = 11;
+
+  required bool keys_only = 12;
+
+  repeated string property_name = 24;
+
+  optional int32 distinct_infix_size = 25;
+
+  optional group EntityFilter = 13 {
+    optional bool distinct = 14 [default=false];
+
+    optional string kind = 17;
+    optional Reference ancestor = 18;
+  }
+}
+
+message CompiledCursor {
+  optional group Position = 2 {
+    optional string start_key = 27;
+
+    repeated group IndexValue = 29 {
+      optional string property = 30;
+      required PropertyValue value = 31;
+    }
+
+    optional Reference key = 32;
+
+    optional bool start_inclusive = 28 [default=true];
+  }
+}
+
+message Cursor {
+  required fixed64 cursor = 1;
+
+  optional string app = 2;
+}
+
+message Error {
+  enum ErrorCode {
+    BAD_REQUEST = 1;
+    CONCURRENT_TRANSACTION = 2;
+    INTERNAL_ERROR = 3;
+    NEED_INDEX = 4;
+    TIMEOUT = 5;
+    PERMISSION_DENIED = 6;
+    BIGTABLE_ERROR = 7;
+    COMMITTED_BUT_STILL_APPLYING = 8;
+    CAPABILITY_DISABLED = 9;
+    TRY_ALTERNATE_BACKEND = 10;
+    SAFE_TIME_TOO_OLD = 11;
+  }
+}
+
+message Cost {
+  optional int32 index_writes = 1;
+  optional int32 index_write_bytes = 2;
+  optional int32 entity_writes = 3;
+  optional int32 entity_write_bytes = 4;
+  optional group CommitCost = 5 {
+    optional int32 requested_entity_puts = 6;
+    optional int32 requested_entity_deletes = 7;
+  };
+  optional int32 approximate_storage_delta = 8;
+  optional int32 id_sequence_updates = 9;
+}
+
+message GetRequest {
+  optional InternalHeader header = 6;
+
+  repeated Reference key = 1;
+  optional Transaction transaction = 2;
+
+  optional int64 failover_ms = 3;
+
+  optional bool strong = 4;
+
+  optional bool allow_deferred = 5 [default=false];
+}
+
+message GetResponse {
+  repeated group Entity = 1 {
+    optional EntityProto entity = 2;
+    optional Reference key = 4;
+
+    optional int64 version = 3;
+  }
+
+  repeated Reference deferred = 5;
+
+  optional bool in_order = 6 [default=true];
+}
+
+message PutRequest {
+  optional InternalHeader header = 11;
+
+  repeated EntityProto entity = 1;
+  optional Transaction transaction = 2;
+  repeated CompositeIndex composite_index = 3;
+
+  optional bool trusted = 4 [default = false];
+
+  optional bool force = 7 [default = false];
+
+  optional bool mark_changes = 8 [default = false];
+  repeated Snapshot snapshot = 9;
+
+  enum AutoIdPolicy {
+    CURRENT = 0;
+    SEQUENTIAL = 1;
+  }
+  optional AutoIdPolicy auto_id_policy = 10 [default = CURRENT];
+}
+
+message PutResponse {
+  repeated Reference key = 1;
+  optional Cost cost = 2;
+  repeated int64 version = 3;
+}
+
+message TouchRequest {
+  optional InternalHeader header = 10;
+
+  repeated Reference key = 1;
+  repeated CompositeIndex composite_index = 2;
+  optional bool force = 3 [default = false];
+  repeated Snapshot snapshot = 9;
+}
+
+message TouchResponse {
+  optional Cost cost = 1;
+}
+
+message DeleteRequest {
+  optional InternalHeader header = 10;
+
+  repeated Reference key = 6;
+  optional Transaction transaction = 5;
+
+  optional bool trusted = 4 [default = false];
+
+  optional bool force = 7 [default = false];
+
+  optional bool mark_changes = 8 [default = false];
+  repeated Snapshot snapshot = 9;
+}
+
+message DeleteResponse {
+  optional Cost cost = 1;
+  repeated int64 version = 3;
+}
+
+message NextRequest {
+  optional InternalHeader header = 5;
+
+  required Cursor cursor = 1;
+  optional int32 count = 2;
+
+  optional int32 offset = 4 [default = 0];
+
+  optional bool compile = 3 [default = false];
+}
+
+message QueryResult {
+  optional Cursor cursor = 1;
+
+  repeated EntityProto result = 2;
+
+  optional int32 skipped_results = 7;
+
+  required bool more_results = 3;
+
+  optional bool keys_only = 4;
+
+  optional bool index_only = 9;
+
+  optional bool small_ops = 10;
+
+  optional CompiledQuery compiled_query = 5;
+
+  optional CompiledCursor compiled_cursor = 6;
+
+  repeated CompositeIndex index = 8;
+
+  repeated int64 version = 11;
+}
+
+message AllocateIdsRequest {
+  optional InternalHeader header = 4;
+
+  optional Reference model_key = 1;
+
+  optional int64 size = 2;
+
+  optional int64 max = 3;
+
+  repeated Reference reserve = 5;
+}
+
+message AllocateIdsResponse {
+  required int64 start = 1;
+  required int64 end = 2;
+  optional Cost cost = 3;
+}
+
+message CompositeIndices {
+  repeated CompositeIndex index = 1;
+}
+
+message AddActionsRequest {
+  optional InternalHeader header = 3;
+
+  required Transaction transaction = 1;
+  repeated Action action = 2;
+}
+
+message AddActionsResponse {
+}
+
+message BeginTransactionRequest {
+  optional InternalHeader header = 3;
+
+  required string app = 1;
+  optional bool allow_multiple_eg = 2 [default = false];
+  optional string database_id = 4;
+
+  enum TransactionMode {
+    UNKNOWN = 0;
+    READ_ONLY = 1;
+    READ_WRITE = 2;
+  }
+  optional TransactionMode mode = 5 [default = UNKNOWN];
+
+  optional Transaction previous_transaction = 7;
+}
+
+message CommitResponse {
+  optional Cost cost = 1;
+
+  repeated group Version = 3 {
+    required Reference root_entity_key = 4;
+    required int64 version = 5;
+  }
+}
diff --git a/v2/internal/identity.go b/v2/internal/identity.go
new file mode 100644
index 0000000..ea1dbb9
--- /dev/null
+++ b/v2/internal/identity.go
@@ -0,0 +1,171 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	netcontext "context"
+	"log"
+	"net/http"
+	"os"
+	"strings"
+)
+
+// These functions are implementations of the wrapper functions
+// in ../appengine/identity.go. See that file for commentary.
+
+const (
+	hDefaultVersionHostname = "X-AppEngine-Default-Version-Hostname"
+	hRequestLogId           = "X-AppEngine-Request-Log-Id"
+	hDatacenter             = "X-AppEngine-Datacenter"
+)
+
+var (
+	// This is set to true in identity_flex.go, which is behind the appenginevm build tag.
+	appengineFlex bool
+)
+
+// AppID is the implementation of the wrapper function of the same name in
+// ../identity.go. See that file for commentary.
+func AppID(c netcontext.Context) string {
+	return appID(FullyQualifiedAppID(c))
+}
+
+// IsStandard is the implementation of the wrapper function of the same name in
+// ../appengine.go. See that file for commentary.
+func IsStandard() bool {
+	return IsSecondGen()
+}
+
+// IsSecondGen is the implementation of the wrapper function of the same name in
+// ../appengine.go. See that file for commentary.
+func IsSecondGen() bool {
+	// Second-gen runtimes set $GAE_ENV so we use that to check if we're on a second-gen runtime.
+	return os.Getenv("GAE_ENV") == "standard"
+}
+
+// IsFlex is the implementation of the wrapper function of the same name in
+// ../appengine.go. See that file for commentary.
+func IsFlex() bool {
+	return appengineFlex
+}
+
+// IsAppEngine is the implementation of the wrapper function of the same name in
+// ../appengine.go. See that file for commentary.
+func IsAppEngine() bool {
+	return IsStandard() || IsFlex()
+}
+
+func ctxHeaders(ctx netcontext.Context) http.Header {
+	c := fromContext(ctx)
+	if c == nil {
+		return nil
+	}
+	return c.Request().Header
+}
+
+func DefaultVersionHostname(ctx netcontext.Context) string {
+	return ctxHeaders(ctx).Get(hDefaultVersionHostname)
+}
+
+func RequestID(ctx netcontext.Context) string {
+	return ctxHeaders(ctx).Get(hRequestLogId)
+}
+
+func Datacenter(ctx netcontext.Context) string {
+	if dc := ctxHeaders(ctx).Get(hDatacenter); dc != "" {
+		return dc
+	}
+	// If the header isn't set, read zone from the metadata service.
+	// It has the format projects/[NUMERIC_PROJECT_ID]/zones/[ZONE]
+	zone, err := getMetadata("instance/zone")
+	if err != nil {
+		log.Printf("Datacenter: %v", err)
+		return ""
+	}
+	parts := strings.Split(string(zone), "/")
+	if len(parts) == 0 {
+		return ""
+	}
+	return parts[len(parts)-1]
+}
+
+func ServerSoftware() string {
+	// TODO(dsymonds): Remove fallback when we've verified this.
+	if s := os.Getenv("SERVER_SOFTWARE"); s != "" {
+		return s
+	}
+	if s := os.Getenv("GAE_ENV"); s != "" {
+		return s
+	}
+	return "Google App Engine/1.x.x"
+}
+
+// TODO(dsymonds): Remove the metadata fetches.
+
+func ModuleName(_ netcontext.Context) string {
+	if s := os.Getenv("GAE_MODULE_NAME"); s != "" {
+		return s
+	}
+	if s := os.Getenv("GAE_SERVICE"); s != "" {
+		return s
+	}
+	return string(mustGetMetadata("instance/attributes/gae_backend_name"))
+}
+
+func VersionID(_ netcontext.Context) string {
+	if s1, s2 := os.Getenv("GAE_MODULE_VERSION"), os.Getenv("GAE_MINOR_VERSION"); s1 != "" && s2 != "" {
+		return s1 + "." + s2
+	}
+	if s1, s2 := os.Getenv("GAE_VERSION"), os.Getenv("GAE_DEPLOYMENT_ID"); s1 != "" && s2 != "" {
+		return s1 + "." + s2
+	}
+	return string(mustGetMetadata("instance/attributes/gae_backend_version")) + "." + string(mustGetMetadata("instance/attributes/gae_backend_minor_version"))
+}
+
+func InstanceID() string {
+	if s := os.Getenv("GAE_MODULE_INSTANCE"); s != "" {
+		return s
+	}
+	if s := os.Getenv("GAE_INSTANCE"); s != "" {
+		return s
+	}
+	return string(mustGetMetadata("instance/attributes/gae_backend_instance"))
+}
+
+func partitionlessAppID() string {
+	// gae_project has everything except the partition prefix.
+	if appID := os.Getenv("GAE_LONG_APP_ID"); appID != "" {
+		return appID
+	}
+	return projectID()
+}
+
+func projectID() string {
+	if project := os.Getenv("GOOGLE_CLOUD_PROJECT"); project != "" {
+		return project
+	}
+	return string(mustGetMetadata("instance/attributes/gae_project"))
+}
+
+func fullyQualifiedAppID(_ netcontext.Context) string {
+	if s := os.Getenv("GAE_APPLICATION"); s != "" {
+		return s
+	}
+	appID := partitionlessAppID()
+
+	part := os.Getenv("GAE_PARTITION")
+	if part == "" {
+		part = string(mustGetMetadata("instance/attributes/gae_partition"))
+	}
+
+	if part != "" {
+		appID = part + "~" + appID
+	}
+	return appID
+}
+
+func IsDevAppServer() bool {
+	return os.Getenv("RUN_WITH_DEVAPPSERVER") != "" || os.Getenv("GAE_ENV") == "localdev"
+}
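
The checks in the new `v2/internal/identity.go` above back exported helpers of the same names, per the "wrapper function" comments in the file. A minimal sketch of branching on the runtime environment, assuming the public wrappers live in `google.golang.org/appengine/v2` as those comments suggest (not verified here):

```go
package main

import (
	"log"

	appengine "google.golang.org/appengine/v2" // assumed: exported counterparts of the internal checks above
)

// logRuntime reports which App Engine environment the binary is running in.
func logRuntime() {
	switch {
	case appengine.IsDevAppServer():
		log.Print("local dev_appserver (RUN_WITH_DEVAPPSERVER or GAE_ENV=localdev)")
	case appengine.IsSecondGen():
		log.Print("App Engine standard, second generation (GAE_ENV=standard)")
	case appengine.IsFlex():
		log.Print("App Engine flexible environment")
	default:
		log.Print("not running on App Engine")
	}
}

func main() { logRuntime() }
```
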
diff --git a/v2/internal/identity_flex.go b/v2/internal/identity_flex.go
new file mode 100644
index 0000000..4201b6b
--- /dev/null
+++ b/v2/internal/identity_flex.go
@@ -0,0 +1,12 @@
+// Copyright 2018 Google LLC. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+//go:build appenginevm
+// +build appenginevm
+
+package internal
+
+func init() {
+	appengineFlex = true
+}
diff --git a/v2/internal/identity_test.go b/v2/internal/identity_test.go
new file mode 100644
index 0000000..d58d6b9
--- /dev/null
+++ b/v2/internal/identity_test.go
@@ -0,0 +1,47 @@
+package internal
+
+import (
+	"os"
+	"testing"
+)
+
+func TestIsDevAppServer(t *testing.T) {
+	tests := []struct {
+		desc string // See http://go/gotip/episodes/25 for naming guidance.
+		env  map[string]string
+		want bool
+	}{
+		{desc: "empty", env: map[string]string{}, want: false},
+		{desc: "legacy", env: map[string]string{"RUN_WITH_DEVAPPSERVER": "1"}, want: true},
+		{desc: "new", env: map[string]string{"GAE_ENV": "localdev"}, want: true},
+	}
+	for _, test := range tests {
+		t.Run(test.desc, func(t *testing.T) {
+			for key, value := range test.env {
+				defer setenv(t, key, value)()
+			}
+			if got := IsDevAppServer(); got != test.want {
+				t.Errorf("env=%v IsDevAppServer() got %v, want %v", test.env, got, test.want)
+			}
+		})
+	}
+}
+
+// setenv is a backport of https://pkg.go.dev/testing#T.Setenv
+func setenv(t *testing.T, key, value string) func() {
+	t.Helper()
+	prevValue, ok := os.LookupEnv(key)
+
+	if err := os.Setenv(key, value); err != nil {
+		t.Fatalf("cannot set environment variable: %v", err)
+	}
+
+	if ok {
+		return func() {
+			os.Setenv(key, prevValue)
+		}
+	}
+	return func() {
+		os.Unsetenv(key)
+	}
+}
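
The `setenv` helper added in this test file backports `testing.T.Setenv`. On Go 1.17 and later the same table entry could rely on the built-in helper, which restores the variable automatically; a minimal sketch (hypothetical test name, not part of the upstream change):

```go
package internal

import "testing"

// TestIsDevAppServerLocaldev mirrors the "new" case of the table-driven test
// above, using testing.T.Setenv (Go 1.17+) instead of the backported helper.
func TestIsDevAppServerLocaldev(t *testing.T) {
	t.Setenv("GAE_ENV", "localdev") // unset/restored when the test ends
	if !IsDevAppServer() {
		t.Error("IsDevAppServer() = false, want true")
	}
}
```
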
diff --git a/v2/internal/image/images_service.pb.go b/v2/internal/image/images_service.pb.go
new file mode 100644
index 0000000..d57230e
--- /dev/null
+++ b/v2/internal/image/images_service.pb.go
@@ -0,0 +1,1375 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/image/images_service.proto
+
+package image
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type ImagesServiceError_ErrorCode int32
+
+const (
+	ImagesServiceError_UNSPECIFIED_ERROR  ImagesServiceError_ErrorCode = 1
+	ImagesServiceError_BAD_TRANSFORM_DATA ImagesServiceError_ErrorCode = 2
+	ImagesServiceError_NOT_IMAGE          ImagesServiceError_ErrorCode = 3
+	ImagesServiceError_BAD_IMAGE_DATA     ImagesServiceError_ErrorCode = 4
+	ImagesServiceError_IMAGE_TOO_LARGE    ImagesServiceError_ErrorCode = 5
+	ImagesServiceError_INVALID_BLOB_KEY   ImagesServiceError_ErrorCode = 6
+	ImagesServiceError_ACCESS_DENIED      ImagesServiceError_ErrorCode = 7
+	ImagesServiceError_OBJECT_NOT_FOUND   ImagesServiceError_ErrorCode = 8
+)
+
+var ImagesServiceError_ErrorCode_name = map[int32]string{
+	1: "UNSPECIFIED_ERROR",
+	2: "BAD_TRANSFORM_DATA",
+	3: "NOT_IMAGE",
+	4: "BAD_IMAGE_DATA",
+	5: "IMAGE_TOO_LARGE",
+	6: "INVALID_BLOB_KEY",
+	7: "ACCESS_DENIED",
+	8: "OBJECT_NOT_FOUND",
+}
+var ImagesServiceError_ErrorCode_value = map[string]int32{
+	"UNSPECIFIED_ERROR":  1,
+	"BAD_TRANSFORM_DATA": 2,
+	"NOT_IMAGE":          3,
+	"BAD_IMAGE_DATA":     4,
+	"IMAGE_TOO_LARGE":    5,
+	"INVALID_BLOB_KEY":   6,
+	"ACCESS_DENIED":      7,
+	"OBJECT_NOT_FOUND":   8,
+}
+
+func (x ImagesServiceError_ErrorCode) Enum() *ImagesServiceError_ErrorCode {
+	p := new(ImagesServiceError_ErrorCode)
+	*p = x
+	return p
+}
+func (x ImagesServiceError_ErrorCode) String() string {
+	return proto.EnumName(ImagesServiceError_ErrorCode_name, int32(x))
+}
+func (x *ImagesServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(ImagesServiceError_ErrorCode_value, data, "ImagesServiceError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = ImagesServiceError_ErrorCode(value)
+	return nil
+}
+func (ImagesServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{0, 0}
+}
+
+type ImagesServiceTransform_Type int32
+
+const (
+	ImagesServiceTransform_RESIZE           ImagesServiceTransform_Type = 1
+	ImagesServiceTransform_ROTATE           ImagesServiceTransform_Type = 2
+	ImagesServiceTransform_HORIZONTAL_FLIP  ImagesServiceTransform_Type = 3
+	ImagesServiceTransform_VERTICAL_FLIP    ImagesServiceTransform_Type = 4
+	ImagesServiceTransform_CROP             ImagesServiceTransform_Type = 5
+	ImagesServiceTransform_IM_FEELING_LUCKY ImagesServiceTransform_Type = 6
+)
+
+var ImagesServiceTransform_Type_name = map[int32]string{
+	1: "RESIZE",
+	2: "ROTATE",
+	3: "HORIZONTAL_FLIP",
+	4: "VERTICAL_FLIP",
+	5: "CROP",
+	6: "IM_FEELING_LUCKY",
+}
+var ImagesServiceTransform_Type_value = map[string]int32{
+	"RESIZE":           1,
+	"ROTATE":           2,
+	"HORIZONTAL_FLIP":  3,
+	"VERTICAL_FLIP":    4,
+	"CROP":             5,
+	"IM_FEELING_LUCKY": 6,
+}
+
+func (x ImagesServiceTransform_Type) Enum() *ImagesServiceTransform_Type {
+	p := new(ImagesServiceTransform_Type)
+	*p = x
+	return p
+}
+func (x ImagesServiceTransform_Type) String() string {
+	return proto.EnumName(ImagesServiceTransform_Type_name, int32(x))
+}
+func (x *ImagesServiceTransform_Type) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(ImagesServiceTransform_Type_value, data, "ImagesServiceTransform_Type")
+	if err != nil {
+		return err
+	}
+	*x = ImagesServiceTransform_Type(value)
+	return nil
+}
+func (ImagesServiceTransform_Type) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{1, 0}
+}
+
+type InputSettings_ORIENTATION_CORRECTION_TYPE int32
+
+const (
+	InputSettings_UNCHANGED_ORIENTATION InputSettings_ORIENTATION_CORRECTION_TYPE = 0
+	InputSettings_CORRECT_ORIENTATION   InputSettings_ORIENTATION_CORRECTION_TYPE = 1
+)
+
+var InputSettings_ORIENTATION_CORRECTION_TYPE_name = map[int32]string{
+	0: "UNCHANGED_ORIENTATION",
+	1: "CORRECT_ORIENTATION",
+}
+var InputSettings_ORIENTATION_CORRECTION_TYPE_value = map[string]int32{
+	"UNCHANGED_ORIENTATION": 0,
+	"CORRECT_ORIENTATION":   1,
+}
+
+func (x InputSettings_ORIENTATION_CORRECTION_TYPE) Enum() *InputSettings_ORIENTATION_CORRECTION_TYPE {
+	p := new(InputSettings_ORIENTATION_CORRECTION_TYPE)
+	*p = x
+	return p
+}
+func (x InputSettings_ORIENTATION_CORRECTION_TYPE) String() string {
+	return proto.EnumName(InputSettings_ORIENTATION_CORRECTION_TYPE_name, int32(x))
+}
+func (x *InputSettings_ORIENTATION_CORRECTION_TYPE) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(InputSettings_ORIENTATION_CORRECTION_TYPE_value, data, "InputSettings_ORIENTATION_CORRECTION_TYPE")
+	if err != nil {
+		return err
+	}
+	*x = InputSettings_ORIENTATION_CORRECTION_TYPE(value)
+	return nil
+}
+func (InputSettings_ORIENTATION_CORRECTION_TYPE) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{4, 0}
+}
+
+type OutputSettings_MIME_TYPE int32
+
+const (
+	OutputSettings_PNG  OutputSettings_MIME_TYPE = 0
+	OutputSettings_JPEG OutputSettings_MIME_TYPE = 1
+	OutputSettings_WEBP OutputSettings_MIME_TYPE = 2
+)
+
+var OutputSettings_MIME_TYPE_name = map[int32]string{
+	0: "PNG",
+	1: "JPEG",
+	2: "WEBP",
+}
+var OutputSettings_MIME_TYPE_value = map[string]int32{
+	"PNG":  0,
+	"JPEG": 1,
+	"WEBP": 2,
+}
+
+func (x OutputSettings_MIME_TYPE) Enum() *OutputSettings_MIME_TYPE {
+	p := new(OutputSettings_MIME_TYPE)
+	*p = x
+	return p
+}
+func (x OutputSettings_MIME_TYPE) String() string {
+	return proto.EnumName(OutputSettings_MIME_TYPE_name, int32(x))
+}
+func (x *OutputSettings_MIME_TYPE) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(OutputSettings_MIME_TYPE_value, data, "OutputSettings_MIME_TYPE")
+	if err != nil {
+		return err
+	}
+	*x = OutputSettings_MIME_TYPE(value)
+	return nil
+}
+func (OutputSettings_MIME_TYPE) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{5, 0}
+}
+
+type CompositeImageOptions_ANCHOR int32
+
+const (
+	CompositeImageOptions_TOP_LEFT     CompositeImageOptions_ANCHOR = 0
+	CompositeImageOptions_TOP          CompositeImageOptions_ANCHOR = 1
+	CompositeImageOptions_TOP_RIGHT    CompositeImageOptions_ANCHOR = 2
+	CompositeImageOptions_LEFT         CompositeImageOptions_ANCHOR = 3
+	CompositeImageOptions_CENTER       CompositeImageOptions_ANCHOR = 4
+	CompositeImageOptions_RIGHT        CompositeImageOptions_ANCHOR = 5
+	CompositeImageOptions_BOTTOM_LEFT  CompositeImageOptions_ANCHOR = 6
+	CompositeImageOptions_BOTTOM       CompositeImageOptions_ANCHOR = 7
+	CompositeImageOptions_BOTTOM_RIGHT CompositeImageOptions_ANCHOR = 8
+)
+
+var CompositeImageOptions_ANCHOR_name = map[int32]string{
+	0: "TOP_LEFT",
+	1: "TOP",
+	2: "TOP_RIGHT",
+	3: "LEFT",
+	4: "CENTER",
+	5: "RIGHT",
+	6: "BOTTOM_LEFT",
+	7: "BOTTOM",
+	8: "BOTTOM_RIGHT",
+}
+var CompositeImageOptions_ANCHOR_value = map[string]int32{
+	"TOP_LEFT":     0,
+	"TOP":          1,
+	"TOP_RIGHT":    2,
+	"LEFT":         3,
+	"CENTER":       4,
+	"RIGHT":        5,
+	"BOTTOM_LEFT":  6,
+	"BOTTOM":       7,
+	"BOTTOM_RIGHT": 8,
+}
+
+func (x CompositeImageOptions_ANCHOR) Enum() *CompositeImageOptions_ANCHOR {
+	p := new(CompositeImageOptions_ANCHOR)
+	*p = x
+	return p
+}
+func (x CompositeImageOptions_ANCHOR) String() string {
+	return proto.EnumName(CompositeImageOptions_ANCHOR_name, int32(x))
+}
+func (x *CompositeImageOptions_ANCHOR) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(CompositeImageOptions_ANCHOR_value, data, "CompositeImageOptions_ANCHOR")
+	if err != nil {
+		return err
+	}
+	*x = CompositeImageOptions_ANCHOR(value)
+	return nil
+}
+func (CompositeImageOptions_ANCHOR) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{8, 0}
+}
+
+type ImagesServiceError struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *ImagesServiceError) Reset()         { *m = ImagesServiceError{} }
+func (m *ImagesServiceError) String() string { return proto.CompactTextString(m) }
+func (*ImagesServiceError) ProtoMessage()    {}
+func (*ImagesServiceError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{0}
+}
+func (m *ImagesServiceError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesServiceError.Unmarshal(m, b)
+}
+func (m *ImagesServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesServiceError.Marshal(b, m, deterministic)
+}
+func (dst *ImagesServiceError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesServiceError.Merge(dst, src)
+}
+func (m *ImagesServiceError) XXX_Size() int {
+	return xxx_messageInfo_ImagesServiceError.Size(m)
+}
+func (m *ImagesServiceError) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesServiceError proto.InternalMessageInfo
+
+type ImagesServiceTransform struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *ImagesServiceTransform) Reset()         { *m = ImagesServiceTransform{} }
+func (m *ImagesServiceTransform) String() string { return proto.CompactTextString(m) }
+func (*ImagesServiceTransform) ProtoMessage()    {}
+func (*ImagesServiceTransform) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{1}
+}
+func (m *ImagesServiceTransform) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesServiceTransform.Unmarshal(m, b)
+}
+func (m *ImagesServiceTransform) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesServiceTransform.Marshal(b, m, deterministic)
+}
+func (dst *ImagesServiceTransform) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesServiceTransform.Merge(dst, src)
+}
+func (m *ImagesServiceTransform) XXX_Size() int {
+	return xxx_messageInfo_ImagesServiceTransform.Size(m)
+}
+func (m *ImagesServiceTransform) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesServiceTransform.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesServiceTransform proto.InternalMessageInfo
+
+type Transform struct {
+	Width                *int32   `protobuf:"varint,1,opt,name=width" json:"width,omitempty"`
+	Height               *int32   `protobuf:"varint,2,opt,name=height" json:"height,omitempty"`
+	CropToFit            *bool    `protobuf:"varint,11,opt,name=crop_to_fit,json=cropToFit,def=0" json:"crop_to_fit,omitempty"`
+	CropOffsetX          *float32 `protobuf:"fixed32,12,opt,name=crop_offset_x,json=cropOffsetX,def=0.5" json:"crop_offset_x,omitempty"`
+	CropOffsetY          *float32 `protobuf:"fixed32,13,opt,name=crop_offset_y,json=cropOffsetY,def=0.5" json:"crop_offset_y,omitempty"`
+	Rotate               *int32   `protobuf:"varint,3,opt,name=rotate,def=0" json:"rotate,omitempty"`
+	HorizontalFlip       *bool    `protobuf:"varint,4,opt,name=horizontal_flip,json=horizontalFlip,def=0" json:"horizontal_flip,omitempty"`
+	VerticalFlip         *bool    `protobuf:"varint,5,opt,name=vertical_flip,json=verticalFlip,def=0" json:"vertical_flip,omitempty"`
+	CropLeftX            *float32 `protobuf:"fixed32,6,opt,name=crop_left_x,json=cropLeftX,def=0" json:"crop_left_x,omitempty"`
+	CropTopY             *float32 `protobuf:"fixed32,7,opt,name=crop_top_y,json=cropTopY,def=0" json:"crop_top_y,omitempty"`
+	CropRightX           *float32 `protobuf:"fixed32,8,opt,name=crop_right_x,json=cropRightX,def=1" json:"crop_right_x,omitempty"`
+	CropBottomY          *float32 `protobuf:"fixed32,9,opt,name=crop_bottom_y,json=cropBottomY,def=1" json:"crop_bottom_y,omitempty"`
+	Autolevels           *bool    `protobuf:"varint,10,opt,name=autolevels,def=0" json:"autolevels,omitempty"`
+	AllowStretch         *bool    `protobuf:"varint,14,opt,name=allow_stretch,json=allowStretch,def=0" json:"allow_stretch,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Transform) Reset()         { *m = Transform{} }
+func (m *Transform) String() string { return proto.CompactTextString(m) }
+func (*Transform) ProtoMessage()    {}
+func (*Transform) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{2}
+}
+func (m *Transform) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Transform.Unmarshal(m, b)
+}
+func (m *Transform) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Transform.Marshal(b, m, deterministic)
+}
+func (dst *Transform) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Transform.Merge(dst, src)
+}
+func (m *Transform) XXX_Size() int {
+	return xxx_messageInfo_Transform.Size(m)
+}
+func (m *Transform) XXX_DiscardUnknown() {
+	xxx_messageInfo_Transform.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Transform proto.InternalMessageInfo
+
+const Default_Transform_CropToFit bool = false
+const Default_Transform_CropOffsetX float32 = 0.5
+const Default_Transform_CropOffsetY float32 = 0.5
+const Default_Transform_Rotate int32 = 0
+const Default_Transform_HorizontalFlip bool = false
+const Default_Transform_VerticalFlip bool = false
+const Default_Transform_CropLeftX float32 = 0
+const Default_Transform_CropTopY float32 = 0
+const Default_Transform_CropRightX float32 = 1
+const Default_Transform_CropBottomY float32 = 1
+const Default_Transform_Autolevels bool = false
+const Default_Transform_AllowStretch bool = false
+
+func (m *Transform) GetWidth() int32 {
+	if m != nil && m.Width != nil {
+		return *m.Width
+	}
+	return 0
+}
+
+func (m *Transform) GetHeight() int32 {
+	if m != nil && m.Height != nil {
+		return *m.Height
+	}
+	return 0
+}
+
+func (m *Transform) GetCropToFit() bool {
+	if m != nil && m.CropToFit != nil {
+		return *m.CropToFit
+	}
+	return Default_Transform_CropToFit
+}
+
+func (m *Transform) GetCropOffsetX() float32 {
+	if m != nil && m.CropOffsetX != nil {
+		return *m.CropOffsetX
+	}
+	return Default_Transform_CropOffsetX
+}
+
+func (m *Transform) GetCropOffsetY() float32 {
+	if m != nil && m.CropOffsetY != nil {
+		return *m.CropOffsetY
+	}
+	return Default_Transform_CropOffsetY
+}
+
+func (m *Transform) GetRotate() int32 {
+	if m != nil && m.Rotate != nil {
+		return *m.Rotate
+	}
+	return Default_Transform_Rotate
+}
+
+func (m *Transform) GetHorizontalFlip() bool {
+	if m != nil && m.HorizontalFlip != nil {
+		return *m.HorizontalFlip
+	}
+	return Default_Transform_HorizontalFlip
+}
+
+func (m *Transform) GetVerticalFlip() bool {
+	if m != nil && m.VerticalFlip != nil {
+		return *m.VerticalFlip
+	}
+	return Default_Transform_VerticalFlip
+}
+
+func (m *Transform) GetCropLeftX() float32 {
+	if m != nil && m.CropLeftX != nil {
+		return *m.CropLeftX
+	}
+	return Default_Transform_CropLeftX
+}
+
+func (m *Transform) GetCropTopY() float32 {
+	if m != nil && m.CropTopY != nil {
+		return *m.CropTopY
+	}
+	return Default_Transform_CropTopY
+}
+
+func (m *Transform) GetCropRightX() float32 {
+	if m != nil && m.CropRightX != nil {
+		return *m.CropRightX
+	}
+	return Default_Transform_CropRightX
+}
+
+func (m *Transform) GetCropBottomY() float32 {
+	if m != nil && m.CropBottomY != nil {
+		return *m.CropBottomY
+	}
+	return Default_Transform_CropBottomY
+}
+
+func (m *Transform) GetAutolevels() bool {
+	if m != nil && m.Autolevels != nil {
+		return *m.Autolevels
+	}
+	return Default_Transform_Autolevels
+}
+
+func (m *Transform) GetAllowStretch() bool {
+	if m != nil && m.AllowStretch != nil {
+		return *m.AllowStretch
+	}
+	return Default_Transform_AllowStretch
+}
+
+type ImageData struct {
+	Content              []byte   `protobuf:"bytes,1,req,name=content" json:"content,omitempty"`
+	BlobKey              *string  `protobuf:"bytes,2,opt,name=blob_key,json=blobKey" json:"blob_key,omitempty"`
+	Width                *int32   `protobuf:"varint,3,opt,name=width" json:"width,omitempty"`
+	Height               *int32   `protobuf:"varint,4,opt,name=height" json:"height,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *ImageData) Reset()         { *m = ImageData{} }
+func (m *ImageData) String() string { return proto.CompactTextString(m) }
+func (*ImageData) ProtoMessage()    {}
+func (*ImageData) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{3}
+}
+func (m *ImageData) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImageData.Unmarshal(m, b)
+}
+func (m *ImageData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImageData.Marshal(b, m, deterministic)
+}
+func (dst *ImageData) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImageData.Merge(dst, src)
+}
+func (m *ImageData) XXX_Size() int {
+	return xxx_messageInfo_ImageData.Size(m)
+}
+func (m *ImageData) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImageData.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImageData proto.InternalMessageInfo
+
+func (m *ImageData) GetContent() []byte {
+	if m != nil {
+		return m.Content
+	}
+	return nil
+}
+
+func (m *ImageData) GetBlobKey() string {
+	if m != nil && m.BlobKey != nil {
+		return *m.BlobKey
+	}
+	return ""
+}
+
+func (m *ImageData) GetWidth() int32 {
+	if m != nil && m.Width != nil {
+		return *m.Width
+	}
+	return 0
+}
+
+func (m *ImageData) GetHeight() int32 {
+	if m != nil && m.Height != nil {
+		return *m.Height
+	}
+	return 0
+}
+
+type InputSettings struct {
+	CorrectExifOrientation     *InputSettings_ORIENTATION_CORRECTION_TYPE `protobuf:"varint,1,opt,name=correct_exif_orientation,json=correctExifOrientation,enum=appengine.InputSettings_ORIENTATION_CORRECTION_TYPE,def=0" json:"correct_exif_orientation,omitempty"`
+	ParseMetadata              *bool                                      `protobuf:"varint,2,opt,name=parse_metadata,json=parseMetadata,def=0" json:"parse_metadata,omitempty"`
+	TransparentSubstitutionRgb *int32                                     `protobuf:"varint,3,opt,name=transparent_substitution_rgb,json=transparentSubstitutionRgb" json:"transparent_substitution_rgb,omitempty"`
+	XXX_NoUnkeyedLiteral       struct{}                                   `json:"-"`
+	XXX_unrecognized           []byte                                     `json:"-"`
+	XXX_sizecache              int32                                      `json:"-"`
+}
+
+func (m *InputSettings) Reset()         { *m = InputSettings{} }
+func (m *InputSettings) String() string { return proto.CompactTextString(m) }
+func (*InputSettings) ProtoMessage()    {}
+func (*InputSettings) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{4}
+}
+func (m *InputSettings) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_InputSettings.Unmarshal(m, b)
+}
+func (m *InputSettings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_InputSettings.Marshal(b, m, deterministic)
+}
+func (dst *InputSettings) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_InputSettings.Merge(dst, src)
+}
+func (m *InputSettings) XXX_Size() int {
+	return xxx_messageInfo_InputSettings.Size(m)
+}
+func (m *InputSettings) XXX_DiscardUnknown() {
+	xxx_messageInfo_InputSettings.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_InputSettings proto.InternalMessageInfo
+
+const Default_InputSettings_CorrectExifOrientation InputSettings_ORIENTATION_CORRECTION_TYPE = InputSettings_UNCHANGED_ORIENTATION
+const Default_InputSettings_ParseMetadata bool = false
+
+func (m *InputSettings) GetCorrectExifOrientation() InputSettings_ORIENTATION_CORRECTION_TYPE {
+	if m != nil && m.CorrectExifOrientation != nil {
+		return *m.CorrectExifOrientation
+	}
+	return Default_InputSettings_CorrectExifOrientation
+}
+
+func (m *InputSettings) GetParseMetadata() bool {
+	if m != nil && m.ParseMetadata != nil {
+		return *m.ParseMetadata
+	}
+	return Default_InputSettings_ParseMetadata
+}
+
+func (m *InputSettings) GetTransparentSubstitutionRgb() int32 {
+	if m != nil && m.TransparentSubstitutionRgb != nil {
+		return *m.TransparentSubstitutionRgb
+	}
+	return 0
+}
+
+type OutputSettings struct {
+	MimeType             *OutputSettings_MIME_TYPE `protobuf:"varint,1,opt,name=mime_type,json=mimeType,enum=appengine.OutputSettings_MIME_TYPE,def=0" json:"mime_type,omitempty"`
+	Quality              *int32                    `protobuf:"varint,2,opt,name=quality" json:"quality,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                  `json:"-"`
+	XXX_unrecognized     []byte                    `json:"-"`
+	XXX_sizecache        int32                     `json:"-"`
+}
+
+func (m *OutputSettings) Reset()         { *m = OutputSettings{} }
+func (m *OutputSettings) String() string { return proto.CompactTextString(m) }
+func (*OutputSettings) ProtoMessage()    {}
+func (*OutputSettings) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{5}
+}
+func (m *OutputSettings) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_OutputSettings.Unmarshal(m, b)
+}
+func (m *OutputSettings) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_OutputSettings.Marshal(b, m, deterministic)
+}
+func (dst *OutputSettings) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_OutputSettings.Merge(dst, src)
+}
+func (m *OutputSettings) XXX_Size() int {
+	return xxx_messageInfo_OutputSettings.Size(m)
+}
+func (m *OutputSettings) XXX_DiscardUnknown() {
+	xxx_messageInfo_OutputSettings.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_OutputSettings proto.InternalMessageInfo
+
+const Default_OutputSettings_MimeType OutputSettings_MIME_TYPE = OutputSettings_PNG
+
+func (m *OutputSettings) GetMimeType() OutputSettings_MIME_TYPE {
+	if m != nil && m.MimeType != nil {
+		return *m.MimeType
+	}
+	return Default_OutputSettings_MimeType
+}
+
+func (m *OutputSettings) GetQuality() int32 {
+	if m != nil && m.Quality != nil {
+		return *m.Quality
+	}
+	return 0
+}
+
+type ImagesTransformRequest struct {
+	Image                *ImageData      `protobuf:"bytes,1,req,name=image" json:"image,omitempty"`
+	Transform            []*Transform    `protobuf:"bytes,2,rep,name=transform" json:"transform,omitempty"`
+	Output               *OutputSettings `protobuf:"bytes,3,req,name=output" json:"output,omitempty"`
+	Input                *InputSettings  `protobuf:"bytes,4,opt,name=input" json:"input,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
+	XXX_unrecognized     []byte          `json:"-"`
+	XXX_sizecache        int32           `json:"-"`
+}
+
+func (m *ImagesTransformRequest) Reset()         { *m = ImagesTransformRequest{} }
+func (m *ImagesTransformRequest) String() string { return proto.CompactTextString(m) }
+func (*ImagesTransformRequest) ProtoMessage()    {}
+func (*ImagesTransformRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{6}
+}
+func (m *ImagesTransformRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesTransformRequest.Unmarshal(m, b)
+}
+func (m *ImagesTransformRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesTransformRequest.Marshal(b, m, deterministic)
+}
+func (dst *ImagesTransformRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesTransformRequest.Merge(dst, src)
+}
+func (m *ImagesTransformRequest) XXX_Size() int {
+	return xxx_messageInfo_ImagesTransformRequest.Size(m)
+}
+func (m *ImagesTransformRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesTransformRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesTransformRequest proto.InternalMessageInfo
+
+func (m *ImagesTransformRequest) GetImage() *ImageData {
+	if m != nil {
+		return m.Image
+	}
+	return nil
+}
+
+func (m *ImagesTransformRequest) GetTransform() []*Transform {
+	if m != nil {
+		return m.Transform
+	}
+	return nil
+}
+
+func (m *ImagesTransformRequest) GetOutput() *OutputSettings {
+	if m != nil {
+		return m.Output
+	}
+	return nil
+}
+
+func (m *ImagesTransformRequest) GetInput() *InputSettings {
+	if m != nil {
+		return m.Input
+	}
+	return nil
+}
+
+type ImagesTransformResponse struct {
+	Image                *ImageData `protobuf:"bytes,1,req,name=image" json:"image,omitempty"`
+	SourceMetadata       *string    `protobuf:"bytes,2,opt,name=source_metadata,json=sourceMetadata" json:"source_metadata,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}   `json:"-"`
+	XXX_unrecognized     []byte     `json:"-"`
+	XXX_sizecache        int32      `json:"-"`
+}
+
+func (m *ImagesTransformResponse) Reset()         { *m = ImagesTransformResponse{} }
+func (m *ImagesTransformResponse) String() string { return proto.CompactTextString(m) }
+func (*ImagesTransformResponse) ProtoMessage()    {}
+func (*ImagesTransformResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{7}
+}
+func (m *ImagesTransformResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesTransformResponse.Unmarshal(m, b)
+}
+func (m *ImagesTransformResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesTransformResponse.Marshal(b, m, deterministic)
+}
+func (dst *ImagesTransformResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesTransformResponse.Merge(dst, src)
+}
+func (m *ImagesTransformResponse) XXX_Size() int {
+	return xxx_messageInfo_ImagesTransformResponse.Size(m)
+}
+func (m *ImagesTransformResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesTransformResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesTransformResponse proto.InternalMessageInfo
+
+func (m *ImagesTransformResponse) GetImage() *ImageData {
+	if m != nil {
+		return m.Image
+	}
+	return nil
+}
+
+func (m *ImagesTransformResponse) GetSourceMetadata() string {
+	if m != nil && m.SourceMetadata != nil {
+		return *m.SourceMetadata
+	}
+	return ""
+}
+
+type CompositeImageOptions struct {
+	SourceIndex          *int32                        `protobuf:"varint,1,req,name=source_index,json=sourceIndex" json:"source_index,omitempty"`
+	XOffset              *int32                        `protobuf:"varint,2,req,name=x_offset,json=xOffset" json:"x_offset,omitempty"`
+	YOffset              *int32                        `protobuf:"varint,3,req,name=y_offset,json=yOffset" json:"y_offset,omitempty"`
+	Opacity              *float32                      `protobuf:"fixed32,4,req,name=opacity" json:"opacity,omitempty"`
+	Anchor               *CompositeImageOptions_ANCHOR `protobuf:"varint,5,req,name=anchor,enum=appengine.CompositeImageOptions_ANCHOR" json:"anchor,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                      `json:"-"`
+	XXX_unrecognized     []byte                        `json:"-"`
+	XXX_sizecache        int32                         `json:"-"`
+}
+
+func (m *CompositeImageOptions) Reset()         { *m = CompositeImageOptions{} }
+func (m *CompositeImageOptions) String() string { return proto.CompactTextString(m) }
+func (*CompositeImageOptions) ProtoMessage()    {}
+func (*CompositeImageOptions) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{8}
+}
+func (m *CompositeImageOptions) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CompositeImageOptions.Unmarshal(m, b)
+}
+func (m *CompositeImageOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CompositeImageOptions.Marshal(b, m, deterministic)
+}
+func (dst *CompositeImageOptions) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CompositeImageOptions.Merge(dst, src)
+}
+func (m *CompositeImageOptions) XXX_Size() int {
+	return xxx_messageInfo_CompositeImageOptions.Size(m)
+}
+func (m *CompositeImageOptions) XXX_DiscardUnknown() {
+	xxx_messageInfo_CompositeImageOptions.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CompositeImageOptions proto.InternalMessageInfo
+
+func (m *CompositeImageOptions) GetSourceIndex() int32 {
+	if m != nil && m.SourceIndex != nil {
+		return *m.SourceIndex
+	}
+	return 0
+}
+
+func (m *CompositeImageOptions) GetXOffset() int32 {
+	if m != nil && m.XOffset != nil {
+		return *m.XOffset
+	}
+	return 0
+}
+
+func (m *CompositeImageOptions) GetYOffset() int32 {
+	if m != nil && m.YOffset != nil {
+		return *m.YOffset
+	}
+	return 0
+}
+
+func (m *CompositeImageOptions) GetOpacity() float32 {
+	if m != nil && m.Opacity != nil {
+		return *m.Opacity
+	}
+	return 0
+}
+
+func (m *CompositeImageOptions) GetAnchor() CompositeImageOptions_ANCHOR {
+	if m != nil && m.Anchor != nil {
+		return *m.Anchor
+	}
+	return CompositeImageOptions_TOP_LEFT
+}
+
+type ImagesCanvas struct {
+	Width                *int32          `protobuf:"varint,1,req,name=width" json:"width,omitempty"`
+	Height               *int32          `protobuf:"varint,2,req,name=height" json:"height,omitempty"`
+	Output               *OutputSettings `protobuf:"bytes,3,req,name=output" json:"output,omitempty"`
+	Color                *int32          `protobuf:"varint,4,opt,name=color,def=-1" json:"color,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}        `json:"-"`
+	XXX_unrecognized     []byte          `json:"-"`
+	XXX_sizecache        int32           `json:"-"`
+}
+
+func (m *ImagesCanvas) Reset()         { *m = ImagesCanvas{} }
+func (m *ImagesCanvas) String() string { return proto.CompactTextString(m) }
+func (*ImagesCanvas) ProtoMessage()    {}
+func (*ImagesCanvas) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{9}
+}
+func (m *ImagesCanvas) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesCanvas.Unmarshal(m, b)
+}
+func (m *ImagesCanvas) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesCanvas.Marshal(b, m, deterministic)
+}
+func (dst *ImagesCanvas) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesCanvas.Merge(dst, src)
+}
+func (m *ImagesCanvas) XXX_Size() int {
+	return xxx_messageInfo_ImagesCanvas.Size(m)
+}
+func (m *ImagesCanvas) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesCanvas.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesCanvas proto.InternalMessageInfo
+
+const Default_ImagesCanvas_Color int32 = -1
+
+func (m *ImagesCanvas) GetWidth() int32 {
+	if m != nil && m.Width != nil {
+		return *m.Width
+	}
+	return 0
+}
+
+func (m *ImagesCanvas) GetHeight() int32 {
+	if m != nil && m.Height != nil {
+		return *m.Height
+	}
+	return 0
+}
+
+func (m *ImagesCanvas) GetOutput() *OutputSettings {
+	if m != nil {
+		return m.Output
+	}
+	return nil
+}
+
+func (m *ImagesCanvas) GetColor() int32 {
+	if m != nil && m.Color != nil {
+		return *m.Color
+	}
+	return Default_ImagesCanvas_Color
+}
+
+type ImagesCompositeRequest struct {
+	Image                []*ImageData             `protobuf:"bytes,1,rep,name=image" json:"image,omitempty"`
+	Options              []*CompositeImageOptions `protobuf:"bytes,2,rep,name=options" json:"options,omitempty"`
+	Canvas               *ImagesCanvas            `protobuf:"bytes,3,req,name=canvas" json:"canvas,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                 `json:"-"`
+	XXX_unrecognized     []byte                   `json:"-"`
+	XXX_sizecache        int32                    `json:"-"`
+}
+
+func (m *ImagesCompositeRequest) Reset()         { *m = ImagesCompositeRequest{} }
+func (m *ImagesCompositeRequest) String() string { return proto.CompactTextString(m) }
+func (*ImagesCompositeRequest) ProtoMessage()    {}
+func (*ImagesCompositeRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{10}
+}
+func (m *ImagesCompositeRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesCompositeRequest.Unmarshal(m, b)
+}
+func (m *ImagesCompositeRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesCompositeRequest.Marshal(b, m, deterministic)
+}
+func (dst *ImagesCompositeRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesCompositeRequest.Merge(dst, src)
+}
+func (m *ImagesCompositeRequest) XXX_Size() int {
+	return xxx_messageInfo_ImagesCompositeRequest.Size(m)
+}
+func (m *ImagesCompositeRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesCompositeRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesCompositeRequest proto.InternalMessageInfo
+
+func (m *ImagesCompositeRequest) GetImage() []*ImageData {
+	if m != nil {
+		return m.Image
+	}
+	return nil
+}
+
+func (m *ImagesCompositeRequest) GetOptions() []*CompositeImageOptions {
+	if m != nil {
+		return m.Options
+	}
+	return nil
+}
+
+func (m *ImagesCompositeRequest) GetCanvas() *ImagesCanvas {
+	if m != nil {
+		return m.Canvas
+	}
+	return nil
+}
+
+type ImagesCompositeResponse struct {
+	Image                *ImageData `protobuf:"bytes,1,req,name=image" json:"image,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}   `json:"-"`
+	XXX_unrecognized     []byte     `json:"-"`
+	XXX_sizecache        int32      `json:"-"`
+}
+
+func (m *ImagesCompositeResponse) Reset()         { *m = ImagesCompositeResponse{} }
+func (m *ImagesCompositeResponse) String() string { return proto.CompactTextString(m) }
+func (*ImagesCompositeResponse) ProtoMessage()    {}
+func (*ImagesCompositeResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{11}
+}
+func (m *ImagesCompositeResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesCompositeResponse.Unmarshal(m, b)
+}
+func (m *ImagesCompositeResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesCompositeResponse.Marshal(b, m, deterministic)
+}
+func (dst *ImagesCompositeResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesCompositeResponse.Merge(dst, src)
+}
+func (m *ImagesCompositeResponse) XXX_Size() int {
+	return xxx_messageInfo_ImagesCompositeResponse.Size(m)
+}
+func (m *ImagesCompositeResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesCompositeResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesCompositeResponse proto.InternalMessageInfo
+
+func (m *ImagesCompositeResponse) GetImage() *ImageData {
+	if m != nil {
+		return m.Image
+	}
+	return nil
+}
+
+type ImagesHistogramRequest struct {
+	Image                *ImageData `protobuf:"bytes,1,req,name=image" json:"image,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}   `json:"-"`
+	XXX_unrecognized     []byte     `json:"-"`
+	XXX_sizecache        int32      `json:"-"`
+}
+
+func (m *ImagesHistogramRequest) Reset()         { *m = ImagesHistogramRequest{} }
+func (m *ImagesHistogramRequest) String() string { return proto.CompactTextString(m) }
+func (*ImagesHistogramRequest) ProtoMessage()    {}
+func (*ImagesHistogramRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{12}
+}
+func (m *ImagesHistogramRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesHistogramRequest.Unmarshal(m, b)
+}
+func (m *ImagesHistogramRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesHistogramRequest.Marshal(b, m, deterministic)
+}
+func (dst *ImagesHistogramRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesHistogramRequest.Merge(dst, src)
+}
+func (m *ImagesHistogramRequest) XXX_Size() int {
+	return xxx_messageInfo_ImagesHistogramRequest.Size(m)
+}
+func (m *ImagesHistogramRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesHistogramRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesHistogramRequest proto.InternalMessageInfo
+
+func (m *ImagesHistogramRequest) GetImage() *ImageData {
+	if m != nil {
+		return m.Image
+	}
+	return nil
+}
+
+type ImagesHistogram struct {
+	Red                  []int32  `protobuf:"varint,1,rep,name=red" json:"red,omitempty"`
+	Green                []int32  `protobuf:"varint,2,rep,name=green" json:"green,omitempty"`
+	Blue                 []int32  `protobuf:"varint,3,rep,name=blue" json:"blue,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *ImagesHistogram) Reset()         { *m = ImagesHistogram{} }
+func (m *ImagesHistogram) String() string { return proto.CompactTextString(m) }
+func (*ImagesHistogram) ProtoMessage()    {}
+func (*ImagesHistogram) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{13}
+}
+func (m *ImagesHistogram) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesHistogram.Unmarshal(m, b)
+}
+func (m *ImagesHistogram) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesHistogram.Marshal(b, m, deterministic)
+}
+func (dst *ImagesHistogram) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesHistogram.Merge(dst, src)
+}
+func (m *ImagesHistogram) XXX_Size() int {
+	return xxx_messageInfo_ImagesHistogram.Size(m)
+}
+func (m *ImagesHistogram) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesHistogram.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesHistogram proto.InternalMessageInfo
+
+func (m *ImagesHistogram) GetRed() []int32 {
+	if m != nil {
+		return m.Red
+	}
+	return nil
+}
+
+func (m *ImagesHistogram) GetGreen() []int32 {
+	if m != nil {
+		return m.Green
+	}
+	return nil
+}
+
+func (m *ImagesHistogram) GetBlue() []int32 {
+	if m != nil {
+		return m.Blue
+	}
+	return nil
+}
+
+type ImagesHistogramResponse struct {
+	Histogram            *ImagesHistogram `protobuf:"bytes,1,req,name=histogram" json:"histogram,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}         `json:"-"`
+	XXX_unrecognized     []byte           `json:"-"`
+	XXX_sizecache        int32            `json:"-"`
+}
+
+func (m *ImagesHistogramResponse) Reset()         { *m = ImagesHistogramResponse{} }
+func (m *ImagesHistogramResponse) String() string { return proto.CompactTextString(m) }
+func (*ImagesHistogramResponse) ProtoMessage()    {}
+func (*ImagesHistogramResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{14}
+}
+func (m *ImagesHistogramResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesHistogramResponse.Unmarshal(m, b)
+}
+func (m *ImagesHistogramResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesHistogramResponse.Marshal(b, m, deterministic)
+}
+func (dst *ImagesHistogramResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesHistogramResponse.Merge(dst, src)
+}
+func (m *ImagesHistogramResponse) XXX_Size() int {
+	return xxx_messageInfo_ImagesHistogramResponse.Size(m)
+}
+func (m *ImagesHistogramResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesHistogramResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesHistogramResponse proto.InternalMessageInfo
+
+func (m *ImagesHistogramResponse) GetHistogram() *ImagesHistogram {
+	if m != nil {
+		return m.Histogram
+	}
+	return nil
+}
+
+type ImagesGetUrlBaseRequest struct {
+	BlobKey              *string  `protobuf:"bytes,1,req,name=blob_key,json=blobKey" json:"blob_key,omitempty"`
+	CreateSecureUrl      *bool    `protobuf:"varint,2,opt,name=create_secure_url,json=createSecureUrl,def=0" json:"create_secure_url,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *ImagesGetUrlBaseRequest) Reset()         { *m = ImagesGetUrlBaseRequest{} }
+func (m *ImagesGetUrlBaseRequest) String() string { return proto.CompactTextString(m) }
+func (*ImagesGetUrlBaseRequest) ProtoMessage()    {}
+func (*ImagesGetUrlBaseRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{15}
+}
+func (m *ImagesGetUrlBaseRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesGetUrlBaseRequest.Unmarshal(m, b)
+}
+func (m *ImagesGetUrlBaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesGetUrlBaseRequest.Marshal(b, m, deterministic)
+}
+func (dst *ImagesGetUrlBaseRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesGetUrlBaseRequest.Merge(dst, src)
+}
+func (m *ImagesGetUrlBaseRequest) XXX_Size() int {
+	return xxx_messageInfo_ImagesGetUrlBaseRequest.Size(m)
+}
+func (m *ImagesGetUrlBaseRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesGetUrlBaseRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesGetUrlBaseRequest proto.InternalMessageInfo
+
+const Default_ImagesGetUrlBaseRequest_CreateSecureUrl bool = false
+
+func (m *ImagesGetUrlBaseRequest) GetBlobKey() string {
+	if m != nil && m.BlobKey != nil {
+		return *m.BlobKey
+	}
+	return ""
+}
+
+func (m *ImagesGetUrlBaseRequest) GetCreateSecureUrl() bool {
+	if m != nil && m.CreateSecureUrl != nil {
+		return *m.CreateSecureUrl
+	}
+	return Default_ImagesGetUrlBaseRequest_CreateSecureUrl
+}
+
+type ImagesGetUrlBaseResponse struct {
+	Url                  *string  `protobuf:"bytes,1,req,name=url" json:"url,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *ImagesGetUrlBaseResponse) Reset()         { *m = ImagesGetUrlBaseResponse{} }
+func (m *ImagesGetUrlBaseResponse) String() string { return proto.CompactTextString(m) }
+func (*ImagesGetUrlBaseResponse) ProtoMessage()    {}
+func (*ImagesGetUrlBaseResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{16}
+}
+func (m *ImagesGetUrlBaseResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesGetUrlBaseResponse.Unmarshal(m, b)
+}
+func (m *ImagesGetUrlBaseResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesGetUrlBaseResponse.Marshal(b, m, deterministic)
+}
+func (dst *ImagesGetUrlBaseResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesGetUrlBaseResponse.Merge(dst, src)
+}
+func (m *ImagesGetUrlBaseResponse) XXX_Size() int {
+	return xxx_messageInfo_ImagesGetUrlBaseResponse.Size(m)
+}
+func (m *ImagesGetUrlBaseResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesGetUrlBaseResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesGetUrlBaseResponse proto.InternalMessageInfo
+
+func (m *ImagesGetUrlBaseResponse) GetUrl() string {
+	if m != nil && m.Url != nil {
+		return *m.Url
+	}
+	return ""
+}
+
+type ImagesDeleteUrlBaseRequest struct {
+	BlobKey              *string  `protobuf:"bytes,1,req,name=blob_key,json=blobKey" json:"blob_key,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *ImagesDeleteUrlBaseRequest) Reset()         { *m = ImagesDeleteUrlBaseRequest{} }
+func (m *ImagesDeleteUrlBaseRequest) String() string { return proto.CompactTextString(m) }
+func (*ImagesDeleteUrlBaseRequest) ProtoMessage()    {}
+func (*ImagesDeleteUrlBaseRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{17}
+}
+func (m *ImagesDeleteUrlBaseRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesDeleteUrlBaseRequest.Unmarshal(m, b)
+}
+func (m *ImagesDeleteUrlBaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesDeleteUrlBaseRequest.Marshal(b, m, deterministic)
+}
+func (dst *ImagesDeleteUrlBaseRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesDeleteUrlBaseRequest.Merge(dst, src)
+}
+func (m *ImagesDeleteUrlBaseRequest) XXX_Size() int {
+	return xxx_messageInfo_ImagesDeleteUrlBaseRequest.Size(m)
+}
+func (m *ImagesDeleteUrlBaseRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesDeleteUrlBaseRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesDeleteUrlBaseRequest proto.InternalMessageInfo
+
+func (m *ImagesDeleteUrlBaseRequest) GetBlobKey() string {
+	if m != nil && m.BlobKey != nil {
+		return *m.BlobKey
+	}
+	return ""
+}
+
+type ImagesDeleteUrlBaseResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *ImagesDeleteUrlBaseResponse) Reset()         { *m = ImagesDeleteUrlBaseResponse{} }
+func (m *ImagesDeleteUrlBaseResponse) String() string { return proto.CompactTextString(m) }
+func (*ImagesDeleteUrlBaseResponse) ProtoMessage()    {}
+func (*ImagesDeleteUrlBaseResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_images_service_42a9d451721edce4, []int{18}
+}
+func (m *ImagesDeleteUrlBaseResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ImagesDeleteUrlBaseResponse.Unmarshal(m, b)
+}
+func (m *ImagesDeleteUrlBaseResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ImagesDeleteUrlBaseResponse.Marshal(b, m, deterministic)
+}
+func (dst *ImagesDeleteUrlBaseResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ImagesDeleteUrlBaseResponse.Merge(dst, src)
+}
+func (m *ImagesDeleteUrlBaseResponse) XXX_Size() int {
+	return xxx_messageInfo_ImagesDeleteUrlBaseResponse.Size(m)
+}
+func (m *ImagesDeleteUrlBaseResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_ImagesDeleteUrlBaseResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ImagesDeleteUrlBaseResponse proto.InternalMessageInfo
+
+func init() {
+	proto.RegisterType((*ImagesServiceError)(nil), "appengine.ImagesServiceError")
+	proto.RegisterType((*ImagesServiceTransform)(nil), "appengine.ImagesServiceTransform")
+	proto.RegisterType((*Transform)(nil), "appengine.Transform")
+	proto.RegisterType((*ImageData)(nil), "appengine.ImageData")
+	proto.RegisterType((*InputSettings)(nil), "appengine.InputSettings")
+	proto.RegisterType((*OutputSettings)(nil), "appengine.OutputSettings")
+	proto.RegisterType((*ImagesTransformRequest)(nil), "appengine.ImagesTransformRequest")
+	proto.RegisterType((*ImagesTransformResponse)(nil), "appengine.ImagesTransformResponse")
+	proto.RegisterType((*CompositeImageOptions)(nil), "appengine.CompositeImageOptions")
+	proto.RegisterType((*ImagesCanvas)(nil), "appengine.ImagesCanvas")
+	proto.RegisterType((*ImagesCompositeRequest)(nil), "appengine.ImagesCompositeRequest")
+	proto.RegisterType((*ImagesCompositeResponse)(nil), "appengine.ImagesCompositeResponse")
+	proto.RegisterType((*ImagesHistogramRequest)(nil), "appengine.ImagesHistogramRequest")
+	proto.RegisterType((*ImagesHistogram)(nil), "appengine.ImagesHistogram")
+	proto.RegisterType((*ImagesHistogramResponse)(nil), "appengine.ImagesHistogramResponse")
+	proto.RegisterType((*ImagesGetUrlBaseRequest)(nil), "appengine.ImagesGetUrlBaseRequest")
+	proto.RegisterType((*ImagesGetUrlBaseResponse)(nil), "appengine.ImagesGetUrlBaseResponse")
+	proto.RegisterType((*ImagesDeleteUrlBaseRequest)(nil), "appengine.ImagesDeleteUrlBaseRequest")
+	proto.RegisterType((*ImagesDeleteUrlBaseResponse)(nil), "appengine.ImagesDeleteUrlBaseResponse")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/image/images_service.proto", fileDescriptor_images_service_42a9d451721edce4)
+}
+
+var fileDescriptor_images_service_42a9d451721edce4 = []byte{
+	// 1460 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xdd, 0x6e, 0xe3, 0xc6,
+	0x15, 0x5e, 0x52, 0xff, 0xc7, 0xb2, 0xcc, 0x9d, 0xec, 0x0f, 0x77, 0x93, 0xa2, 0x0a, 0x83, 0xc5,
+	0x1a, 0x41, 0x2a, 0xaf, 0x8d, 0x16, 0x2d, 0x7c, 0x93, 0xea, 0x87, 0x92, 0x99, 0x95, 0x44, 0x75,
+	0x44, 0xa7, 0xeb, 0xbd, 0x19, 0xd0, 0xf2, 0x48, 0x26, 0x4a, 0x73, 0x98, 0xe1, 0xc8, 0xb1, 0x7a,
+	0x51, 0xf4, 0xa6, 0x17, 0x05, 0xfa, 0x06, 0x7d, 0x8a, 0xbe, 0x45, 0x81, 0xbe, 0x41, 0xfb, 0x32,
+	0xc5, 0x0c, 0x49, 0x99, 0xf6, 0x3a, 0x4d, 0xb3, 0x37, 0xc2, 0xcc, 0x39, 0xdf, 0xf9, 0x9d, 0x8f,
+	0xe7, 0x08, 0xbe, 0x5e, 0x31, 0xb6, 0x0a, 0x69, 0x67, 0xc5, 0x42, 0x3f, 0x5a, 0x75, 0x18, 0x5f,
+	0x1d, 0xf8, 0x71, 0x4c, 0xa3, 0x55, 0x10, 0xd1, 0x83, 0x20, 0x12, 0x94, 0x47, 0x7e, 0x78, 0x10,
+	0x5c, 0xf9, 0x2b, 0x9a, 0xfe, 0x26, 0x24, 0xa1, 0xfc, 0x3a, 0x58, 0xd0, 0x4e, 0xcc, 0x99, 0x60,
+	0xa8, 0xb1, 0x85, 0x5b, 0xff, 0xd4, 0x00, 0x39, 0x0a, 0x33, 0x4f, 0x21, 0x36, 0xe7, 0x8c, 0x5b,
+	0xff, 0xd0, 0xa0, 0xa1, 0x4e, 0x7d, 0x76, 0x41, 0xd1, 0x53, 0x78, 0x7c, 0x3a, 0x9d, 0xcf, 0xec,
+	0xbe, 0x33, 0x74, 0xec, 0x01, 0xb1, 0x31, 0x76, 0xb1, 0xa1, 0xa1, 0x67, 0x80, 0x7a, 0xdd, 0x01,
+	0xf1, 0x70, 0x77, 0x3a, 0x1f, 0xba, 0x78, 0x42, 0x06, 0x5d, 0xaf, 0x6b, 0xe8, 0x68, 0x17, 0x1a,
+	0x53, 0xd7, 0x23, 0xce, 0xa4, 0x3b, 0xb2, 0x8d, 0x12, 0x42, 0xd0, 0x92, 0x30, 0x75, 0x4d, 0x21,
+	0x65, 0xf4, 0x09, 0xec, 0xa5, 0x77, 0xcf, 0x75, 0xc9, 0xb8, 0x8b, 0x47, 0xb6, 0x51, 0x41, 0x4f,
+	0xc0, 0x70, 0xa6, 0xdf, 0x76, 0xc7, 0xce, 0x80, 0xf4, 0xc6, 0x6e, 0x8f, 0xbc, 0xb5, 0xcf, 0x8c,
+	0x2a, 0x7a, 0x0c, 0xbb, 0xdd, 0x7e, 0xdf, 0x9e, 0xcf, 0xc9, 0xc0, 0x9e, 0x3a, 0xf6, 0xc0, 0xa8,
+	0x49, 0xa0, 0xdb, 0xfb, 0xc6, 0xee, 0x7b, 0x44, 0xc6, 0x19, 0xba, 0xa7, 0xd3, 0x81, 0x51, 0xb7,
+	0xfe, 0xac, 0xc1, 0xb3, 0x3b, 0xa5, 0x78, 0xdc, 0x8f, 0x92, 0x25, 0xe3, 0x57, 0xd6, 0x12, 0xca,
+	0xde, 0x26, 0xa6, 0x08, 0xa0, 0x8a, 0xed, 0xb9, 0xf3, 0xde, 0x36, 0x34, 0x75, 0x76, 0xbd, 0xae,
+	0x67, 0x1b, 0xba, 0x4c, 0xe7, 0xc4, 0xc5, 0xce, 0x7b, 0x77, 0xea, 0x75, 0xc7, 0x64, 0x38, 0x76,
+	0x66, 0x46, 0x49, 0x06, 0xfe, 0xd6, 0xc6, 0x9e, 0xd3, 0xcf, 0x45, 0x65, 0x54, 0x87, 0x72, 0x1f,
+	0xbb, 0xb3, 0x2c, 0xd7, 0x09, 0x19, 0xda, 0xf6, 0xd8, 0x99, 0x8e, 0xc8, 0xf8, 0xb4, 0xff, 0xf6,
+	0xcc, 0xa8, 0x5a, 0x7f, 0x2b, 0x43, 0x63, 0x1b, 0x15, 0x3d, 0x81, 0xca, 0xf7, 0xc1, 0x85, 0xb8,
+	0x34, 0xb5, 0xb6, 0xb6, 0x5f, 0xc1, 0xe9, 0x05, 0x3d, 0x83, 0xea, 0x25, 0x0d, 0x56, 0x97, 0xc2,
+	0xd4, 0x95, 0x38, 0xbb, 0xa1, 0x57, 0xb0, 0xb3, 0xe0, 0x2c, 0x26, 0x82, 0x91, 0x65, 0x20, 0xcc,
+	0x9d, 0xb6, 0xb6, 0x5f, 0x3f, 0xae, 0x2c, 0xfd, 0x30, 0xa1, 0xb8, 0x21, 0x35, 0x1e, 0x1b, 0x06,
+	0x02, 0xbd, 0x86, 0x5d, 0x05, 0x63, 0xcb, 0x65, 0x42, 0x05, 0xb9, 0x31, 0x9b, 0x6d, 0x6d, 0x5f,
+	0x3f, 0x2e, 0xbd, 0xe9, 0xfc, 0x0a, 0x2b, 0x07, 0xae, 0x52, 0xbc, 0xbb, 0x0f, 0xdc, 0x98, 0xbb,
+	0x0f, 0x02, 0xcf, 0xd0, 0x0b, 0xa8, 0x72, 0x26, 0x7c, 0x41, 0xcd, 0x92, 0x4c, 0xe8, 0x58, 0x7b,
+	0x83, 0x33, 0x01, 0xea, 0xc0, 0xde, 0x25, 0xe3, 0xc1, 0x1f, 0x59, 0x24, 0xfc, 0x90, 0x2c, 0xc3,
+	0x20, 0x36, 0xcb, 0xc5, 0xbc, 0x5a, 0xb7, 0xda, 0x61, 0x18, 0xc4, 0xe8, 0x4b, 0xd8, 0xbd, 0xa6,
+	0x5c, 0x04, 0x8b, 0x1c, 0x5d, 0x29, 0xa2, 0x9b, 0xb9, 0x4e, 0x61, 0x3f, 0xcf, 0xea, 0x0d, 0xe9,
+	0x52, 0x96, 0x51, 0x55, 0xd9, 0x69, 0x6f, 0xd2, 0x5a, 0xc7, 0x74, 0x29, 0xde, 0xa1, 0x9f, 0x03,
+	0x64, 0x2d, 0x89, 0xc9, 0xc6, 0xac, 0xe5, 0x88, 0x7a, 0xda, 0x8d, 0xf8, 0x0c, 0x7d, 0x01, 0x4d,
+	0x05, 0xe0, 0xb2, 0x83, 0xe4, 0xc6, 0xac, 0xa7, 0x90, 0x43, 0xac, 0xec, 0xb0, 0x94, 0xbe, 0x43,
+	0xaf, 0xb2, 0x46, 0x9c, 0x33, 0x21, 0xd8, 0x15, 0xd9, 0x98, 0x8d, 0x1c, 0xa5, 0x12, 0xe8, 0x29,
+	0xf1, 0x19, 0x7a, 0x05, 0xe0, 0xaf, 0x05, 0x0b, 0xe9, 0x35, 0x0d, 0x13, 0x13, 0x8a, 0x89, 0x17,
+	0x14, 0xb2, 0x44, 0x3f, 0x0c, 0xd9, 0xf7, 0x24, 0x11, 0x9c, 0x8a, 0xc5, 0xa5, 0xd9, 0xba, 0x53,
+	0xa2, 0xd2, 0xcd, 0x53, 0x95, 0xc5, 0xa1, 0xa1, 0x08, 0x39, 0xf0, 0x85, 0x8f, 0x3e, 0x83, 0xda,
+	0x82, 0x45, 0x82, 0x46, 0xc2, 0xd4, 0xda, 0xfa, 0x7e, 0xb3, 0xa7, 0xd7, 0x35, 0x9c, 0x8b, 0xd0,
+	0x0b, 0xa8, 0x9f, 0x87, 0xec, 0x9c, 0xfc, 0x81, 0x6e, 0x14, 0x2f, 0x1a, 0xb8, 0x26, 0xef, 0x6f,
+	0xe9, 0xe6, 0x96, 0x46, 0xa5, 0x87, 0x69, 0x54, 0x2e, 0xd2, 0xc8, 0xfa, 0xb7, 0x0e, 0xbb, 0x4e,
+	0x14, 0xaf, 0xc5, 0x9c, 0x0a, 0x11, 0x44, 0xab, 0x04, 0xfd, 0x45, 0x03, 0x73, 0xc1, 0x38, 0xa7,
+	0x0b, 0x41, 0xe8, 0x4d, 0xb0, 0x24, 0x8c, 0x07, 0x34, 0x12, 0xbe, 0x08, 0x58, 0xa4, 0xa8, 0xd9,
+	0x3a, 0xfa, 0x65, 0x67, 0x3b, 0x11, 0x3a, 0x77, 0x8c, 0x3b, 0x2e, 0x76, 0xec, 0xa9, 0xd7, 0xf5,
+	0x1c, 0x77, 0x4a, 0xfa, 0x2e, 0xc6, 0x76, 0x5f, 0x1d, 0xbd, 0xb3, 0x99, 0x7d, 0xfc, 0xf4, 0x74,
+	0xda, 0x3f, 0xe9, 0x4e, 0x47, 0xf6, 0x80, 0x14, 0x60, 0xf8, 0x59, 0x16, 0xcc, 0xbe, 0x09, 0x96,
+	0xee, 0x6d, 0x28, 0xf4, 0x15, 0xb4, 0x62, 0x9f, 0x27, 0x94, 0x5c, 0x51, 0xe1, 0x5f, 0xf8, 0xc2,
+	0x57, 0x85, 0x6e, 0x5b, 0xb7, 0xab, 0x94, 0x93, 0x4c, 0x87, 0x7e, 0x0b, 0x9f, 0x09, 0xf9, 0x25,
+	0xc5, 0x3e, 0xa7, 0x91, 0x20, 0xc9, 0xfa, 0x3c, 0x11, 0x81, 0x58, 0x4b, 0x4f, 0x84, 0xaf, 0xce,
+	0xb3, 0x66, 0xbc, 0x2c, 0x60, 0xe6, 0x05, 0x08, 0x5e, 0x9d, 0x5b, 0xbf, 0x83, 0x4f, 0xff, 0x47,
+	0xf6, 0xe8, 0x05, 0x3c, 0x9c, 0xbf, 0xf1, 0x08, 0x3d, 0x87, 0x4f, 0x32, 0xf4, 0x1d, 0x85, 0x66,
+	0xfd, 0x5d, 0x83, 0x96, 0xbb, 0x16, 0xc5, 0xee, 0xda, 0xd0, 0xb8, 0x0a, 0xae, 0x28, 0x11, 0x9b,
+	0x98, 0x66, 0xdd, 0xfc, 0xa2, 0xd0, 0xcd, 0xbb, 0xe8, 0xce, 0xc4, 0x99, 0xd8, 0x69, 0xf3, 0x4a,
+	0xb3, 0xe9, 0x08, 0xd7, 0xa5, 0xa9, 0x9a, 0x4c, 0x26, 0xd4, 0xbe, 0x5b, 0xfb, 0x61, 0x20, 0x36,
+	0xd9, 0x58, 0xc8, 0xaf, 0xd6, 0x3e, 0x34, 0xb6, 0x56, 0xa8, 0x06, 0xd2, 0xce, 0x78, 0x24, 0x27,
+	0xd1, 0x37, 0x33, 0x7b, 0x64, 0x68, 0xf2, 0xf4, 0x7b, 0xbb, 0x37, 0x33, 0x74, 0xeb, 0x3f, 0xdb,
+	0x01, 0xb8, 0x9d, 0x41, 0x98, 0x7e, 0xb7, 0xa6, 0x89, 0x40, 0x5f, 0x42, 0x45, 0x6d, 0x02, 0x45,
+	0xbd, 0x9d, 0xa3, 0x27, 0xc5, 0xf7, 0xce, 0x19, 0x8a, 0x53, 0x08, 0x3a, 0x82, 0x86, 0xc8, 0xed,
+	0x4d, 0xbd, 0x5d, 0xba, 0x87, 0xbf, 0xf5, 0x7d, 0x0b, 0x43, 0x87, 0x50, 0x65, 0xaa, 0x52, 0xb3,
+	0xa4, 0x02, 0xbc, 0xf8, 0xc1, 0x16, 0xe0, 0x0c, 0x88, 0x3a, 0x50, 0x09, 0x24, 0xd5, 0x14, 0x7f,
+	0x77, 0x8e, 0xcc, 0x1f, 0xa2, 0x20, 0x4e, 0x61, 0x56, 0x04, 0xcf, 0x3f, 0x28, 0x2e, 0x89, 0x59,
+	0x94, 0xd0, 0x9f, 0x54, 0xdd, 0x6b, 0xd8, 0x4b, 0xd8, 0x9a, 0x2f, 0xee, 0xd1, 0xb0, 0x81, 0x5b,
+	0xa9, 0x38, 0x27, 0xa0, 0xf5, 0x2f, 0x1d, 0x9e, 0xf6, 0xd9, 0x55, 0xcc, 0x92, 0x40, 0x50, 0xe5,
+	0xc6, 0x8d, 0x25, 0xb5, 0x12, 0xf4, 0x39, 0x34, 0x33, 0x17, 0x41, 0x74, 0x41, 0x6f, 0x54, 0xd4,
+	0x0a, 0xde, 0x49, 0x65, 0x8e, 0x14, 0xc9, 0xcf, 0xf9, 0x26, 0x9b, 0xbc, 0xa6, 0xae, 0xd4, 0xb5,
+	0x9b, 0x74, 0xde, 0x4a, 0xd5, 0x26, 0x57, 0x95, 0x52, 0xd5, 0x26, 0x53, 0x99, 0x50, 0x63, 0xb1,
+	0xbf, 0x90, 0x24, 0x28, 0xb7, 0xf5, 0x7d, 0x1d, 0xe7, 0x57, 0xf4, 0x35, 0x54, 0xfd, 0x68, 0x71,
+	0xc9, 0xb8, 0x59, 0x69, 0xeb, 0xfb, 0xad, 0xa3, 0xd7, 0x85, 0x12, 0x1f, 0x4c, 0xb2, 0xd3, 0x9d,
+	0xf6, 0x4f, 0x5c, 0x8c, 0x33, 0x33, 0xeb, 0x4f, 0x50, 0x4d, 0x25, 0xa8, 0x09, 0x75, 0xcf, 0x9d,
+	0x91, 0xb1, 0x3d, 0xf4, 0x8c, 0x47, 0x92, 0x50, 0x9e, 0x3b, 0x33, 0x34, 0xb9, 0xb4, 0xa5, 0x18,
+	0x3b, 0xa3, 0x13, 0xcf, 0xd0, 0x25, 0xab, 0x14, 0xa2, 0x24, 0xf7, 0x64, 0xdf, 0x9e, 0x7a, 0x36,
+	0x36, 0xca, 0xa8, 0x01, 0x95, 0x14, 0x50, 0x41, 0x7b, 0xb0, 0xd3, 0x73, 0x3d, 0xcf, 0x9d, 0xa4,
+	0x9e, 0xaa, 0x12, 0x97, 0x0a, 0x8c, 0x1a, 0x32, 0xa0, 0x99, 0x29, 0x53, 0x78, 0xdd, 0xfa, 0xab,
+	0x06, 0xcd, 0xf4, 0xf9, 0xfa, 0x7e, 0x74, 0xed, 0x27, 0xc5, 0xe5, 0xa8, 0x3f, 0xbc, 0x1c, 0xf5,
+	0xc2, 0x72, 0xfc, 0x08, 0x7e, 0x99, 0x50, 0x59, 0xb0, 0x90, 0xf1, 0x74, 0x3e, 0x1e, 0xeb, 0xbf,
+	0x38, 0xc4, 0xa9, 0x40, 0xfe, 0xb9, 0xc9, 0xbe, 0x93, 0x6d, 0xeb, 0x1e, 0xf8, 0x4e, 0x4a, 0x3f,
+	0xc6, 0xa4, 0x63, 0xf9, 0x5a, 0xaa, 0xd9, 0xd9, 0x57, 0xd2, 0xfe, 0xb1, 0x47, 0xc1, 0xb9, 0x01,
+	0x3a, 0x80, 0xea, 0x42, 0xf5, 0x21, 0xab, 0xe7, 0xf9, 0xfd, 0x40, 0x59, 0x9b, 0x70, 0x06, 0xb3,
+	0xec, 0x9c, 0xfd, 0x85, 0x94, 0x7f, 0x3a, 0xfb, 0xad, 0x41, 0x5e, 0xf9, 0x49, 0x90, 0x08, 0xb6,
+	0xe2, 0xfe, 0xc7, 0x4c, 0x08, 0x6b, 0x02, 0x7b, 0xf7, 0xbc, 0x20, 0x03, 0x4a, 0x9c, 0x5e, 0xa8,
+	0xb6, 0x55, 0xb0, 0x3c, 0xca, 0x07, 0x5e, 0x71, 0x4a, 0x23, 0xd5, 0x9c, 0x0a, 0x4e, 0x2f, 0x08,
+	0x41, 0xf9, 0x3c, 0x5c, 0xcb, 0xbf, 0x1a, 0x52, 0xa8, 0xce, 0xd6, 0x3c, 0xaf, 0xad, 0x90, 0x54,
+	0x56, 0xdb, 0x6f, 0xa0, 0x71, 0x99, 0x0b, 0xb3, 0xcc, 0x5e, 0x7e, 0xd0, 0xaa, 0x5b, 0xb3, 0x5b,
+	0xb0, 0xb5, 0xca, 0x9d, 0x8e, 0xa8, 0x38, 0xe5, 0x61, 0xcf, 0x4f, 0xb6, 0x8f, 0x5c, 0xdc, 0xb5,
+	0xd2, 0x67, 0x61, 0xd7, 0x1e, 0xc2, 0xe3, 0x05, 0xa7, 0xbe, 0xa0, 0x24, 0xa1, 0x8b, 0x35, 0xa7,
+	0x64, 0xcd, 0xc3, 0xbb, 0x6b, 0x6a, 0x2f, 0xd5, 0xcf, 0x95, 0xfa, 0x94, 0x87, 0xd6, 0x57, 0x60,
+	0x7e, 0x18, 0x28, 0x4b, 0xdf, 0x80, 0x92, 0x74, 0x90, 0x06, 0x91, 0x47, 0xeb, 0xd7, 0xf0, 0x32,
+	0x45, 0x0f, 0x68, 0x48, 0x05, 0xfd, 0xbf, 0x33, 0xb3, 0x7e, 0x06, 0x9f, 0x3e, 0x68, 0x98, 0x46,
+	0xea, 0xd5, 0xde, 0xa7, 0x6f, 0xf3, 0xdf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xfa, 0x74, 0x30, 0x89,
+	0x1d, 0x0c, 0x00, 0x00,
+}
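
The paired init functions above wire the generated images messages into the legacy github.com/golang/protobuf registry under their full proto names (for example appengine.ImagesCompositeRequest) and register the gzipped file descriptor. A small hedged sketch of what that lookup allows, assuming a hypothetical helper placed somewhere under the module's v2/ tree, since Go's internal-package rule blocks imports from elsewhere:

// Hypothetical sketch, not part of this diff.
package registration_sketch

import (
	"reflect"

	"github.com/golang/protobuf/proto"
	_ "google.golang.org/appengine/v2/internal/image" // blank import runs the init() registrations above
)

// registeredCompositeRequest returns the Go type that was registered for the
// message's full proto name, or nil if the registration did not run.
func registeredCompositeRequest() reflect.Type {
	return proto.MessageType("appengine.ImagesCompositeRequest")
}
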
diff --git a/v2/internal/image/images_service.proto b/v2/internal/image/images_service.proto
new file mode 100644
index 0000000..f0d2ed5
--- /dev/null
+++ b/v2/internal/image/images_service.proto
@@ -0,0 +1,162 @@
+syntax = "proto2";
+option go_package = "image";
+
+package appengine;
+
+message ImagesServiceError {
+  enum ErrorCode {
+    UNSPECIFIED_ERROR = 1;
+    BAD_TRANSFORM_DATA = 2;
+    NOT_IMAGE = 3;
+    BAD_IMAGE_DATA = 4;
+    IMAGE_TOO_LARGE = 5;
+    INVALID_BLOB_KEY = 6;
+    ACCESS_DENIED = 7;
+    OBJECT_NOT_FOUND = 8;
+  }
+}
+
+message ImagesServiceTransform {
+  enum Type {
+    RESIZE = 1;
+    ROTATE = 2;
+    HORIZONTAL_FLIP = 3;
+    VERTICAL_FLIP = 4;
+    CROP = 5;
+    IM_FEELING_LUCKY = 6;
+  }
+}
+
+message Transform {
+  optional int32 width = 1;
+  optional int32 height = 2;
+  optional bool crop_to_fit = 11 [default = false];
+  optional float crop_offset_x = 12 [default = 0.5];
+  optional float crop_offset_y = 13 [default = 0.5];
+
+  optional int32 rotate = 3 [default = 0];
+
+  optional bool horizontal_flip = 4 [default = false];
+
+  optional bool vertical_flip = 5 [default = false];
+
+  optional float crop_left_x = 6 [default = 0.0];
+  optional float crop_top_y = 7 [default = 0.0];
+  optional float crop_right_x = 8 [default = 1.0];
+  optional float crop_bottom_y = 9 [default = 1.0];
+
+  optional bool autolevels = 10 [default = false];
+
+  optional bool allow_stretch = 14 [default = false];
+}
+
+message ImageData {
+  required bytes content = 1 [ctype=CORD];
+  optional string blob_key = 2;
+
+  optional int32 width = 3;
+  optional int32 height = 4;
+}
+
+message InputSettings {
+  enum ORIENTATION_CORRECTION_TYPE {
+    UNCHANGED_ORIENTATION = 0;
+    CORRECT_ORIENTATION = 1;
+  }
+  optional ORIENTATION_CORRECTION_TYPE correct_exif_orientation = 1
+      [default=UNCHANGED_ORIENTATION];
+  optional bool parse_metadata = 2 [default=false];
+  optional int32 transparent_substitution_rgb = 3;
+}
+
+message OutputSettings {
+  enum MIME_TYPE {
+    PNG = 0;
+    JPEG = 1;
+    WEBP = 2;
+  }
+
+  optional MIME_TYPE mime_type = 1 [default=PNG];
+  optional int32 quality = 2;
+}
+
+message ImagesTransformRequest {
+  required ImageData image = 1;
+  repeated Transform transform = 2;
+  required OutputSettings output = 3;
+  optional InputSettings input = 4;
+}
+
+message ImagesTransformResponse {
+  required ImageData image = 1;
+  optional string source_metadata = 2;
+}
+
+message CompositeImageOptions {
+  required int32 source_index = 1;
+  required int32 x_offset = 2;
+  required int32 y_offset = 3;
+  required float opacity = 4;
+
+  enum ANCHOR {
+    TOP_LEFT = 0;
+    TOP = 1;
+    TOP_RIGHT = 2;
+    LEFT = 3;
+    CENTER = 4;
+    RIGHT = 5;
+    BOTTOM_LEFT = 6;
+    BOTTOM = 7;
+    BOTTOM_RIGHT = 8;
+  }
+
+  required ANCHOR anchor = 5;
+}
+
+message ImagesCanvas {
+  required int32 width = 1;
+  required int32 height = 2;
+  required OutputSettings output = 3;
+  optional int32 color = 4 [default=-1];
+}
+
+message ImagesCompositeRequest {
+  repeated ImageData image = 1;
+  repeated CompositeImageOptions options = 2;
+  required ImagesCanvas canvas = 3;
+}
+
+message ImagesCompositeResponse {
+  required ImageData image = 1;
+}
+
+message ImagesHistogramRequest {
+  required ImageData image = 1;
+}
+
+message ImagesHistogram {
+  repeated int32 red = 1;
+  repeated int32 green = 2;
+  repeated int32 blue = 3;
+}
+
+message ImagesHistogramResponse {
+  required ImagesHistogram histogram = 1;
+}
+
+message ImagesGetUrlBaseRequest {
+  required string blob_key = 1;
+
+  optional bool create_secure_url = 2 [default = false];
+}
+
+message ImagesGetUrlBaseResponse {
+  required string url = 1;
+}
+
+message ImagesDeleteUrlBaseRequest {
+  required string blob_key = 1;
+}
+
+message ImagesDeleteUrlBaseResponse {
+}
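
As a hedged illustration of the proto2 definitions above, the sketch below fills in an ImagesTransformRequest using the generated image package, assuming the usual protoc-gen-go conventions (pointer-valued optional fields, proto.Int32/proto.Bool helpers, nested enum constants such as OutputSettings_JPEG); the byte slice, dimensions and quality are placeholder values:

// Hypothetical external test package inside the module; not part of this diff.
package image_test

import (
	"github.com/golang/protobuf/proto"

	pb "google.golang.org/appengine/v2/internal/image"
)

// buildResizeRequest sketches a transform request that resizes an image and
// re-encodes it as JPEG. The dimensions and quality are illustrative only.
func buildResizeRequest(jpeg []byte) *pb.ImagesTransformRequest {
	return &pb.ImagesTransformRequest{
		Image: &pb.ImageData{Content: jpeg}, // required bytes content = 1
		Transform: []*pb.Transform{{
			Width:     proto.Int32(800),
			Height:    proto.Int32(600),
			CropToFit: proto.Bool(true),
		}},
		Output: &pb.OutputSettings{
			MimeType: pb.OutputSettings_JPEG.Enum(),
			Quality:  proto.Int32(85),
		},
	}
}
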
diff --git a/v2/internal/internal.go b/v2/internal/internal.go
new file mode 100644
index 0000000..36316bb
--- /dev/null
+++ b/v2/internal/internal.go
@@ -0,0 +1,110 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package internal provides support for package appengine.
+//
+// Programs should not use this package directly. Its API is not stable.
+// Use packages appengine and appengine/* instead.
+package internal
+
+import (
+	"fmt"
+
+	"github.com/golang/protobuf/proto"
+
+	remotepb "google.golang.org/appengine/v2/internal/remote_api"
+)
+
+// errorCodeMaps is a map of service name to the error code map for the service.
+var errorCodeMaps = make(map[string]map[int32]string)
+
+// RegisterErrorCodeMap is called from API implementations to register their
+// error code map. This should only be called from init functions.
+func RegisterErrorCodeMap(service string, m map[int32]string) {
+	errorCodeMaps[service] = m
+}
+
+type timeoutCodeKey struct {
+	service string
+	code    int32
+}
+
+// timeoutCodes is the set of service+code pairs that represent timeouts.
+var timeoutCodes = make(map[timeoutCodeKey]bool)
+
+func RegisterTimeoutErrorCode(service string, code int32) {
+	timeoutCodes[timeoutCodeKey{service, code}] = true
+}
+
+// APIError is the type returned by appengine.Context's Call method
+// when an API call fails in an API-specific way. This may be, for instance,
+// a taskqueue API call failing with TaskQueueServiceError::UNKNOWN_QUEUE.
+type APIError struct {
+	Service string
+	Detail  string
+	Code    int32 // API-specific error code
+}
+
+func (e *APIError) Error() string {
+	if e.Code == 0 {
+		if e.Detail == "" {
+			return "APIError <empty>"
+		}
+		return e.Detail
+	}
+	s := fmt.Sprintf("API error %d", e.Code)
+	if m, ok := errorCodeMaps[e.Service]; ok {
+		s += " (" + e.Service + ": " + m[e.Code] + ")"
+	} else {
+		// Shouldn't happen, but provide a bit more detail if it does.
+		s = e.Service + " " + s
+	}
+	if e.Detail != "" {
+		s += ": " + e.Detail
+	}
+	return s
+}
+
+func (e *APIError) IsTimeout() bool {
+	return timeoutCodes[timeoutCodeKey{e.Service, e.Code}]
+}
+
+// CallError is the type returned by appengine.Context's Call method when an
+// API call fails in a generic way, such as RpcError::CAPABILITY_DISABLED.
+type CallError struct {
+	Detail string
+	Code   int32
+	// TODO: Remove this if we get a distinguishable error code.
+	Timeout bool
+}
+
+func (e *CallError) Error() string {
+	var msg string
+	switch remotepb.RpcError_ErrorCode(e.Code) {
+	case remotepb.RpcError_UNKNOWN:
+		return e.Detail
+	case remotepb.RpcError_OVER_QUOTA:
+		msg = "Over quota"
+	case remotepb.RpcError_CAPABILITY_DISABLED:
+		msg = "Capability disabled"
+	case remotepb.RpcError_CANCELLED:
+		msg = "Canceled"
+	default:
+		msg = fmt.Sprintf("Call error %d", e.Code)
+	}
+	s := msg + ": " + e.Detail
+	if e.Timeout {
+		s += " (timeout)"
+	}
+	return s
+}
+
+func (e *CallError) IsTimeout() bool {
+	return e.Timeout
+}
+
+// NamespaceMods is a map from API service to a function that will mutate an RPC request to attach a namespace.
+// The function should be prepared to be called on the same message more than once; it should only modify the
+// RPC request the first time.
+var NamespaceMods = make(map[string]func(m proto.Message, namespace string))
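
The error formatting above is easiest to read with a concrete value. The following hypothetical example (not in the diff) shows the string APIError.Error produces once a service has registered its code map; the "images" map mirrors the NOT_IMAGE = 3 code from the images proto, and registration would normally happen in that service package's init:

package internal

import "fmt"

func ExampleAPIError() {
	// Normally a service package registers its map in an init func.
	RegisterErrorCodeMap("images", map[int32]string{3: "NOT_IMAGE"})

	err := &APIError{Service: "images", Code: 3, Detail: "payload is not an image"}
	fmt.Println(err)
	// Output: API error 3 (images: NOT_IMAGE): payload is not an image
}
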
diff --git a/v2/internal/internal_vm_test.go b/v2/internal/internal_vm_test.go
new file mode 100644
index 0000000..dc684f5
--- /dev/null
+++ b/v2/internal/internal_vm_test.go
@@ -0,0 +1,58 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	"io"
+	"io/ioutil"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+)
+
+func TestInstallingHealthChecker(t *testing.T) {
+	try := func(desc string, mux *http.ServeMux, wantCode int, wantBody string) {
+		installHealthChecker(mux)
+		srv := httptest.NewServer(mux)
+		defer srv.Close()
+
+		resp, err := http.Get(srv.URL + "/_ah/health")
+		if err != nil {
+			t.Errorf("%s: http.Get: %v", desc, err)
+			return
+		}
+		defer resp.Body.Close()
+		body, err := ioutil.ReadAll(resp.Body)
+		if err != nil {
+			t.Errorf("%s: reading body: %v", desc, err)
+			return
+		}
+
+		if resp.StatusCode != wantCode {
+			t.Errorf("%s: got HTTP %d, want %d", desc, resp.StatusCode, wantCode)
+			return
+		}
+		if wantBody != "" && string(body) != wantBody {
+			t.Errorf("%s: got HTTP body %q, want %q", desc, body, wantBody)
+			return
+		}
+	}
+
+	// If there are no handlers, or only a root handler, a health checker should be installed.

+	try("empty mux", http.NewServeMux(), 200, "ok")
+	mux := http.NewServeMux()
+	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
+		io.WriteString(w, "root handler")
+	})
+	try("mux with root handler", mux, 200, "ok")
+
+	// If there's a custom health check handler, one should not be installed.
+	mux = http.NewServeMux()
+	mux.HandleFunc("/_ah/health", func(w http.ResponseWriter, r *http.Request) {
+		w.WriteHeader(418)
+		io.WriteString(w, "I'm short and stout!")
+	})
+	try("mux with custom health checker", mux, 418, "I'm short and stout!")
+}
diff --git a/v2/internal/log.go b/v2/internal/log.go
new file mode 100644
index 0000000..210df59
--- /dev/null
+++ b/v2/internal/log.go
@@ -0,0 +1,123 @@
+// Copyright 2021 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	"encoding/json"
+	"fmt"
+	"regexp"
+	"strings"
+)
+
+var (
+	logLevelName = map[int64]string{
+		0: "DEBUG",
+		1: "INFO",
+		2: "WARNING",
+		3: "ERROR",
+		4: "CRITICAL",
+	}
+	traceContextRe = regexp.MustCompile(`^(\w+)/(\d+)(?:;o=[01])?$`)
+
+	// maxLogMessage is the largest message that will be logged without chunking, reserving room for prefixes.
+	// See http://cloud/logging/quotas#log-limits
+	maxLogMessage = 255000
+)
+
+func logf(c *context, level int64, format string, args ...interface{}) {
+	if c == nil {
+		panic("not an App Engine context")
+	}
+
+	if !IsStandard() {
+		s := strings.TrimRight(fmt.Sprintf(format, args...), "\n")
+		now := timeNow().UTC()
+		timestamp := fmt.Sprintf("%d/%02d/%02d %02d:%02d:%02d", now.Year(), now.Month(), now.Day(), now.Hour(), now.Minute(), now.Second())
+		fmt.Fprintf(logStream, "%s %s: %s\n", timestamp, logLevelName[level], s)
+		return
+	}
+
+	eol := func(s string) string {
+		if strings.HasSuffix(s, "\n") {
+			return ""
+		}
+		return "\n"
+	}
+
+	msg := fmt.Sprintf(format, args...)
+
+	if strings.HasPrefix(msg, "{") {
+		// Assume the message is already structured, leave as-is unless it is too long.
+		// Note: chunking destroys the structure; developers will have to ensure their structured log
+		// is small enough to fit in a single message.
+		for _, m := range chunkLog(msg) {
+			fmt.Fprint(logStream, m, eol(m))
+		}
+		return
+	}
+
+	// First chunk the message, then structure each chunk.
+	traceID, spanID := traceAndSpan(c)
+	for _, m := range chunkLog(msg) {
+		sl := structuredLog{
+			Message:  m,
+			Severity: logLevelName[level],
+			TraceID:  traceID,
+			SpanID:   spanID,
+		}
+		if b, err := json.Marshal(sl); err != nil {
+			// Write raw message if error.
+			fmt.Fprint(logStream, m, eol(m))
+		} else {
+			s := string(b)
+			fmt.Fprint(logStream, s, eol(s))
+		}
+	}
+}
+
+type structuredLog struct {
+	Message  string `json:"message"`
+	Severity string `json:"severity"`
+	TraceID  string `json:"logging.googleapis.com/trace,omitempty"`
+	SpanID   string `json:"logging.googleapis.com/spanId,omitempty"`
+}
+
+func chunkLog(msg string) []string {
+	if len(msg) <= maxLogMessage {
+		return []string{msg}
+	}
+	var chunks []string
+	i := 0
+	for {
+		if i == len(msg) {
+			break
+		}
+		if i+maxLogMessage > len(msg) {
+			chunks = append(chunks, msg[i:])
+			break
+		}
+		chunks = append(chunks, msg[i:i+maxLogMessage])
+		i += maxLogMessage
+	}
+	for i, c := range chunks {
+		chunks[i] = fmt.Sprintf("Part %d/%d: ", i+1, len(chunks)) + c
+	}
+	return chunks
+}
+
+func traceAndSpan(c *context) (string, string) {
+	headers := c.req.Header["X-Cloud-Trace-Context"]
+	if len(headers) < 1 {
+		return "", ""
+	}
+	matches := traceContextRe.FindAllStringSubmatch(headers[0], -1)
+	if len(matches) < 1 || len(matches[0]) < 3 {
+		return "", ""
+	}
+	traceID := matches[0][1]
+	spanID := matches[0][2]
+	projectID := projectID()
+	return fmt.Sprintf("projects/%s/traces/%s", projectID, traceID), spanID
+}
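
To make the structured output concrete: each chunk is marshalled through structuredLog, so a request whose X-Cloud-Trace-Context header parses as trace abc123def456 and span 789 (with GOOGLE_CLOUD_PROJECT=my-project) yields one JSON line per chunk in exactly the field order shown below. A hypothetical in-package snippet, not part of the diff:

package internal

import (
	"encoding/json"
	"fmt"
)

// structuredLineSketch shows the JSON shape of a single chunk; the message,
// project, trace and span values are illustrative placeholders.
func structuredLineSketch() {
	sl := structuredLog{
		Message:  "upload failed: example detail",
		Severity: logLevelName[3], // "ERROR"
		TraceID:  "projects/my-project/traces/abc123def456",
		SpanID:   "789",
	}
	b, _ := json.Marshal(sl)
	fmt.Println(string(b))
	// {"message":"upload failed: example detail","severity":"ERROR","logging.googleapis.com/trace":"projects/my-project/traces/abc123def456","logging.googleapis.com/spanId":"789"}
}
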
diff --git a/v2/internal/log_test.go b/v2/internal/log_test.go
new file mode 100644
index 0000000..b724153
--- /dev/null
+++ b/v2/internal/log_test.go
@@ -0,0 +1,443 @@
+// Copyright 2021 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"os"
+	"reflect"
+	"testing"
+	"time"
+)
+
+func TestLogf(t *testing.T) {
+	testCases := []struct {
+		name          string
+		deployed      bool
+		level         int64
+		format        string
+		header        string
+		args          []interface{}
+		maxLogMessage int
+		want          string
+		wantJSON      bool
+	}{
+		{
+			name:   "local-debug",
+			level:  0,
+			format: "my %s %d",
+			args:   []interface{}{"abc", 1},
+			want:   "2021/05/12 16:09:52 DEBUG: my abc 1\n",
+		},
+		{
+			name:   "local-info",
+			level:  1,
+			format: "my %s %d",
+			args:   []interface{}{"abc", 1},
+			want:   "2021/05/12 16:09:52 INFO: my abc 1\n",
+		},
+		{
+			name:   "local-warning",
+			level:  2,
+			format: "my %s %d",
+			args:   []interface{}{"abc", 1},
+			want:   "2021/05/12 16:09:52 WARNING: my abc 1\n",
+		},
+		{
+			name:   "local-error",
+			level:  3,
+			format: "my %s %d",
+			args:   []interface{}{"abc", 1},
+			want:   "2021/05/12 16:09:52 ERROR: my abc 1\n",
+		},
+		{
+			name:   "local-critical",
+			level:  4,
+			format: "my %s %d",
+			args:   []interface{}{"abc", 1},
+			want:   "2021/05/12 16:09:52 CRITICAL: my abc 1\n",
+		},
+		{
+			name:   "local-multiline",
+			level:  0,
+			format: "my \n multiline\n\n",
+			want:   "2021/05/12 16:09:52 DEBUG: my \n multiline\n",
+		},
+		{
+			name:          "local-long-lines-not-split",
+			maxLogMessage: 10,
+			format:        "0123456789a123",
+			want:          "2021/05/12 16:09:52 DEBUG: 0123456789a123\n",
+		},
+		{
+			name:     "deployed-plain-debug",
+			deployed: true,
+			level:    0,
+			format:   "my %s %d",
+			args:     []interface{}{"abc", 1},
+			want:     `{"message":"my abc 1","severity":"DEBUG"}` + "\n",
+			wantJSON: true,
+		},
+		{
+			name:     "deployed-plain-info",
+			deployed: true,
+			level:    1,
+			format:   "my %s %d",
+			args:     []interface{}{"abc", 1},
+			want:     `{"message":"my abc 1","severity":"INFO"}` + "\n",
+			wantJSON: true,
+		},
+		{
+			name:     "deployed-plain-warning",
+			deployed: true,
+			level:    2,
+			format:   "my %s %d",
+			args:     []interface{}{"abc", 1},
+			want:     `{"message":"my abc 1","severity":"WARNING"}` + "\n",
+			wantJSON: true,
+		},
+		{
+			name:     "deployed-plain-error",
+			deployed: true,
+			level:    3,
+			format:   "my %s %d",
+			args:     []interface{}{"abc", 1},
+			want:     `{"message":"my abc 1","severity":"ERROR"}` + "\n",
+			wantJSON: true,
+		},
+		{
+			name:     "deployed-plain-critical",
+			deployed: true,
+			level:    4,
+			format:   "my %s %d",
+			args:     []interface{}{"abc", 1},
+			want:     `{"message":"my abc 1","severity":"CRITICAL"}` + "\n",
+			wantJSON: true,
+		},
+		{
+			name:     "deployed-plain-multiline",
+			deployed: true,
+			level:    0,
+			format:   "my \n multiline\n\n",
+			want:     "{\"message\":\"my \\n multiline\\n\\n\",\"severity\":\"DEBUG\"}\n",
+			wantJSON: true,
+		},
+		{
+			name:     "deployed-plain-megaquote",
+			deployed: true,
+			level:    0,
+			format:   `my "megaquote" %q`,
+			args:     []interface{}{`internal "quote"`},
+			want:     "{\"message\":\"my \\\"megaquote\\\" \\\"internal \\\\\\\"quote\\\\\\\"\\\"\",\"severity\":\"DEBUG\"}\n",
+			wantJSON: true,
+		},
+		{
+			name:          "deployed-too-long",
+			deployed:      true,
+			format:        "0123456789a123",
+			maxLogMessage: 10,
+			want:          "{\"message\":\"Part 1/2: 0123456789\",\"severity\":\"DEBUG\"}\n{\"message\":\"Part 2/2: a123\",\"severity\":\"DEBUG\"}\n",
+		},
+		{
+			name:     "deployed-with-trace-header",
+			deployed: true,
+			format:   "my message",
+			header:   "abc123/1234",
+			want:     "{\"message\":\"my message\",\"severity\":\"DEBUG\",\"logging.googleapis.com/trace\":\"projects/my-project/traces/abc123\",\"logging.googleapis.com/spanId\":\"1234\"}\n",
+		},
+		{
+			name:     "deployed-structured-debug",
+			deployed: true,
+			level:    0,
+			format:   `{"some": "message %s %d"}`,
+			args:     []interface{}{"abc", 1},
+			want:     `{"some": "message abc 1"}` + "\n",
+		},
+		{
+			name:     "deployed-structured-info",
+			deployed: true,
+			level:    1,
+			format:   `{"some": "message %s %d"}`,
+			args:     []interface{}{"abc", 1},
+			want:     `{"some": "message abc 1"}` + "\n",
+		},
+		{
+			name:     "deployed-structured-warning",
+			deployed: true,
+			level:    2,
+			format:   `{"some": "message %s %d"}`,
+			args:     []interface{}{"abc", 1},
+			want:     `{"some": "message abc 1"}` + "\n",
+		},
+		{
+			name:     "deployed-structured-error",
+			deployed: true,
+			level:    3,
+			format:   `{"some": "message %s %d"}`,
+			args:     []interface{}{"abc", 1},
+			want:     `{"some": "message abc 1"}` + "\n",
+		},
+		{
+			name:     "deployed-structured-critical",
+			deployed: true,
+			level:    4,
+			format:   `{"some": "message %s %d"}`,
+			args:     []interface{}{"abc", 1},
+			want:     `{"some": "message abc 1"}` + "\n",
+		},
+		{
+			// The leading "{" assumes this is already a structured log, so no alteration is performed.
+			name:     "deployed-structured-multiline",
+			deployed: true,
+			level:    4,
+			// This is not even valid JSON; we don't attempt to validate and only use the first character.
+			format: "{\"some\": \"message\n%s %d\"",
+			args:   []interface{}{"abc", 1},
+			want:   "{\"some\": \"message\nabc 1\"\n",
+		},
+		{
+			name:          "deployed-structured-too-long",
+			deployed:      true,
+			format:        `{"message": "abc", "severity": "DEBUG"}`,
+			maxLogMessage: 25,
+			// User-structured logs must manually chunk; here we can see the structured message is (knowingly) broken.
+			want: "Part 1/2: {\"message\": \"abc\", \"sever\nPart 2/2: ity\": \"DEBUG\"}\n",
+		},
+		{
+			name:     "deployed-structured-with-trace-header",
+			deployed: true,
+			format:   `{"message": "abc", "severity": "DEBUG"}`,
+			header:   "abc123/1234",
+			want:     "{\"message\": \"abc\", \"severity\": \"DEBUG\"}\n",
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			env := ""
+			if tc.deployed {
+				env = "standard"
+			}
+			defer setEnvVar(t, "GAE_ENV", env)()
+			defer setEnvVar(t, "GOOGLE_CLOUD_PROJECT", "my-project")()
+			var buf bytes.Buffer
+			defer overrideLogStream(t, &buf)()
+			defer overrideTimeNow(t, time.Date(2021, 5, 12, 16, 9, 52, 0, time.UTC))()
+			if tc.maxLogMessage > 0 {
+				defer overrideMaxLogMessage(t, tc.maxLogMessage)()
+			}
+			var headers []string
+			if tc.header != "" {
+				headers = []string{tc.header}
+			}
+			c := buildContextWithTraceHeaders(t, headers)
+
+			logf(c, tc.level, tc.format, tc.args...)
+
+			if got, want := buf.String(), tc.want; got != want {
+				t.Errorf("incorrect log got=%q want=%q", got, want)
+			}
+
+			if tc.wantJSON {
+				var e struct {
+					Message  string `json:"message"`
+					Severity string `json:"severity"`
+				}
+				if err := json.Unmarshal(buf.Bytes(), &e); err != nil {
+					t.Fatalf("invalid JSON: %v", err)
+				}
+				if gotMsg, wantMsg := e.Message, fmt.Sprintf(tc.format, tc.args...); gotMsg != wantMsg {
+					t.Errorf("JSON-encoded message incorrect got=%q want=%q", gotMsg, wantMsg)
+				}
+				if gotSev, wantSev := e.Severity, logLevelName[tc.level]; gotSev != wantSev {
+					t.Errorf("JSON-encoded severity incorrect got=%q want=%q", gotSev, wantSev)
+				}
+			}
+		})
+	}
+}
+
+func TestChunkLog(t *testing.T) {
+	testCases := []struct {
+		name string
+		msg  string
+		want []string
+	}{
+		{
+			name: "empty",
+			msg:  "",
+			want: []string{""},
+		},
+		{
+			name: "short",
+			msg:  "short msg",
+			want: []string{"short msg"},
+		},
+		{
+			name: "exactly max",
+			msg:  "0123456789",
+			want: []string{"0123456789"},
+		},
+		{
+			name: "too long",
+			msg:  "0123456789a123",
+			want: []string{
+				"Part 1/2: 0123456789",
+				"Part 2/2: a123",
+			},
+		},
+		{
+			name: "too long exactly max",
+			msg:  "0123456789a123456789",
+			want: []string{
+				"Part 1/2: 0123456789",
+				"Part 2/2: a123456789",
+			},
+		},
+		{
+			name: "longer",
+			msg:  "0123456789a123456789b123456789c",
+			want: []string{
+				"Part 1/4: 0123456789",
+				"Part 2/4: a123456789",
+				"Part 3/4: b123456789",
+				"Part 4/4: c",
+			},
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			defer overrideMaxLogMessage(t, 10)()
+
+			got := chunkLog(tc.msg)
+
+			if !reflect.DeepEqual(got, tc.want) {
+				t.Errorf("chunkLog() got=%q want=%q", got, tc.want)
+			}
+		})
+	}
+}
+
+func TestTraceAndSpan(t *testing.T) {
+	testCases := []struct {
+		name        string
+		header      []string
+		wantTraceID string
+		wantSpanID  string
+	}{
+		{
+			name: "empty",
+		},
+		{
+			name:   "header present, but empty",
+			header: []string{""},
+		},
+		{
+			name:        "trace and span",
+			header:      []string{"abc1234/456"},
+			wantTraceID: "projects/my-project/traces/abc1234",
+			wantSpanID:  "456",
+		},
+		{
+			name:        "trace and span with suffix",
+			header:      []string{"abc1234/456;o=0"},
+			wantTraceID: "projects/my-project/traces/abc1234",
+			wantSpanID:  "456",
+		},
+		{
+			name: "multiple headers, first taken",
+			header: []string{
+				"abc1234/456;o=1",
+				"zzzzzzz/999;o=0",
+			},
+			wantTraceID: "projects/my-project/traces/abc1234",
+			wantSpanID:  "456",
+		},
+		{
+			name:   "missing trace",
+			header: []string{"/456"},
+		},
+		{
+			name:   "missing span",
+			header: []string{"abc1234/"},
+		},
+		{
+			name:   "random",
+			header: []string{"somestring"},
+		},
+	}
+
+	for _, tc := range testCases {
+		t.Run(tc.name, func(t *testing.T) {
+			defer setEnvVar(t, "GOOGLE_CLOUD_PROJECT", "my-project")()
+			c := buildContextWithTraceHeaders(t, tc.header)
+
+			gotTraceID, gotSpanID := traceAndSpan(c)
+
+			if got, want := gotTraceID, tc.wantTraceID; got != want {
+				t.Errorf("Incorrect traceID got=%q want=%q", got, want)
+			}
+			if got, want := gotSpanID, tc.wantSpanID; got != want {
+				t.Errorf("Incorrect spanID got=%q want=%q", got, want)
+			}
+		})
+	}
+}
+
+func setEnvVar(t *testing.T, key, value string) func() {
+	t.Helper()
+	old, present := os.LookupEnv(key)
+	if err := os.Setenv(key, value); err != nil {
+		t.Fatal(err)
+	}
+	return func() {
+		if present {
+			if err := os.Setenv(key, old); err != nil {
+				t.Fatal(err)
+			}
+		} else {
+			if err := os.Unsetenv(key); err != nil {
+				t.Fatal(err)
+			}
+		}
+	}
+}
+
+func overrideLogStream(t *testing.T, writer io.Writer) func() {
+	t.Helper()
+	old := logStream
+	logStream = writer
+	return func() { logStream = old }
+}
+
+func overrideTimeNow(t *testing.T, now time.Time) func() {
+	t.Helper()
+	old := timeNow
+	timeNow = func() time.Time { return now }
+	return func() { timeNow = old }
+}
+
+func overrideMaxLogMessage(t *testing.T, max int) func() {
+	t.Helper()
+	old := maxLogMessage
+	maxLogMessage = max
+	return func() { maxLogMessage = old }
+}
+
+func buildContextWithTraceHeaders(t *testing.T, headers []string) *context {
+	t.Helper()
+	req, err := http.NewRequest("GET", "/", nil)
+	if err != nil {
+		t.Fatal(err)
+	}
+	for _, h := range headers {
+		req.Header.Add("X-Cloud-Trace-Context", h)
+	}
+	return fromContext(ContextForTesting(req))
+}
diff --git a/v2/internal/mail/mail_service.pb.go b/v2/internal/mail/mail_service.pb.go
new file mode 100644
index 0000000..5d532e2
--- /dev/null
+++ b/v2/internal/mail/mail_service.pb.go
@@ -0,0 +1,355 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/mail/mail_service.proto
+
+package mail
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type MailServiceError_ErrorCode int32
+
+const (
+	MailServiceError_OK                      MailServiceError_ErrorCode = 0
+	MailServiceError_INTERNAL_ERROR          MailServiceError_ErrorCode = 1
+	MailServiceError_BAD_REQUEST             MailServiceError_ErrorCode = 2
+	MailServiceError_UNAUTHORIZED_SENDER     MailServiceError_ErrorCode = 3
+	MailServiceError_INVALID_ATTACHMENT_TYPE MailServiceError_ErrorCode = 4
+	MailServiceError_INVALID_HEADER_NAME     MailServiceError_ErrorCode = 5
+	MailServiceError_INVALID_CONTENT_ID      MailServiceError_ErrorCode = 6
+)
+
+var MailServiceError_ErrorCode_name = map[int32]string{
+	0: "OK",
+	1: "INTERNAL_ERROR",
+	2: "BAD_REQUEST",
+	3: "UNAUTHORIZED_SENDER",
+	4: "INVALID_ATTACHMENT_TYPE",
+	5: "INVALID_HEADER_NAME",
+	6: "INVALID_CONTENT_ID",
+}
+var MailServiceError_ErrorCode_value = map[string]int32{
+	"OK":                      0,
+	"INTERNAL_ERROR":          1,
+	"BAD_REQUEST":             2,
+	"UNAUTHORIZED_SENDER":     3,
+	"INVALID_ATTACHMENT_TYPE": 4,
+	"INVALID_HEADER_NAME":     5,
+	"INVALID_CONTENT_ID":      6,
+}
+
+func (x MailServiceError_ErrorCode) Enum() *MailServiceError_ErrorCode {
+	p := new(MailServiceError_ErrorCode)
+	*p = x
+	return p
+}
+func (x MailServiceError_ErrorCode) String() string {
+	return proto.EnumName(MailServiceError_ErrorCode_name, int32(x))
+}
+func (x *MailServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(MailServiceError_ErrorCode_value, data, "MailServiceError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = MailServiceError_ErrorCode(value)
+	return nil
+}
+func (MailServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_mail_service_78722be3c4c01d17, []int{0, 0}
+}
+
+type MailServiceError struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *MailServiceError) Reset()         { *m = MailServiceError{} }
+func (m *MailServiceError) String() string { return proto.CompactTextString(m) }
+func (*MailServiceError) ProtoMessage()    {}
+func (*MailServiceError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_mail_service_78722be3c4c01d17, []int{0}
+}
+func (m *MailServiceError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MailServiceError.Unmarshal(m, b)
+}
+func (m *MailServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MailServiceError.Marshal(b, m, deterministic)
+}
+func (dst *MailServiceError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MailServiceError.Merge(dst, src)
+}
+func (m *MailServiceError) XXX_Size() int {
+	return xxx_messageInfo_MailServiceError.Size(m)
+}
+func (m *MailServiceError) XXX_DiscardUnknown() {
+	xxx_messageInfo_MailServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MailServiceError proto.InternalMessageInfo
+
+type MailAttachment struct {
+	FileName             *string  `protobuf:"bytes,1,req,name=FileName" json:"FileName,omitempty"`
+	Data                 []byte   `protobuf:"bytes,2,req,name=Data" json:"Data,omitempty"`
+	ContentID            *string  `protobuf:"bytes,3,opt,name=ContentID" json:"ContentID,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *MailAttachment) Reset()         { *m = MailAttachment{} }
+func (m *MailAttachment) String() string { return proto.CompactTextString(m) }
+func (*MailAttachment) ProtoMessage()    {}
+func (*MailAttachment) Descriptor() ([]byte, []int) {
+	return fileDescriptor_mail_service_78722be3c4c01d17, []int{1}
+}
+func (m *MailAttachment) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MailAttachment.Unmarshal(m, b)
+}
+func (m *MailAttachment) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MailAttachment.Marshal(b, m, deterministic)
+}
+func (dst *MailAttachment) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MailAttachment.Merge(dst, src)
+}
+func (m *MailAttachment) XXX_Size() int {
+	return xxx_messageInfo_MailAttachment.Size(m)
+}
+func (m *MailAttachment) XXX_DiscardUnknown() {
+	xxx_messageInfo_MailAttachment.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MailAttachment proto.InternalMessageInfo
+
+func (m *MailAttachment) GetFileName() string {
+	if m != nil && m.FileName != nil {
+		return *m.FileName
+	}
+	return ""
+}
+
+func (m *MailAttachment) GetData() []byte {
+	if m != nil {
+		return m.Data
+	}
+	return nil
+}
+
+func (m *MailAttachment) GetContentID() string {
+	if m != nil && m.ContentID != nil {
+		return *m.ContentID
+	}
+	return ""
+}
+
+type MailHeader struct {
+	Name                 *string  `protobuf:"bytes,1,req,name=name" json:"name,omitempty"`
+	Value                *string  `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *MailHeader) Reset()         { *m = MailHeader{} }
+func (m *MailHeader) String() string { return proto.CompactTextString(m) }
+func (*MailHeader) ProtoMessage()    {}
+func (*MailHeader) Descriptor() ([]byte, []int) {
+	return fileDescriptor_mail_service_78722be3c4c01d17, []int{2}
+}
+func (m *MailHeader) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MailHeader.Unmarshal(m, b)
+}
+func (m *MailHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MailHeader.Marshal(b, m, deterministic)
+}
+func (dst *MailHeader) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MailHeader.Merge(dst, src)
+}
+func (m *MailHeader) XXX_Size() int {
+	return xxx_messageInfo_MailHeader.Size(m)
+}
+func (m *MailHeader) XXX_DiscardUnknown() {
+	xxx_messageInfo_MailHeader.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MailHeader proto.InternalMessageInfo
+
+func (m *MailHeader) GetName() string {
+	if m != nil && m.Name != nil {
+		return *m.Name
+	}
+	return ""
+}
+
+func (m *MailHeader) GetValue() string {
+	if m != nil && m.Value != nil {
+		return *m.Value
+	}
+	return ""
+}
+
+type MailMessage struct {
+	Sender               *string           `protobuf:"bytes,1,req,name=Sender" json:"Sender,omitempty"`
+	ReplyTo              *string           `protobuf:"bytes,2,opt,name=ReplyTo" json:"ReplyTo,omitempty"`
+	To                   []string          `protobuf:"bytes,3,rep,name=To" json:"To,omitempty"`
+	Cc                   []string          `protobuf:"bytes,4,rep,name=Cc" json:"Cc,omitempty"`
+	Bcc                  []string          `protobuf:"bytes,5,rep,name=Bcc" json:"Bcc,omitempty"`
+	Subject              *string           `protobuf:"bytes,6,req,name=Subject" json:"Subject,omitempty"`
+	TextBody             *string           `protobuf:"bytes,7,opt,name=TextBody" json:"TextBody,omitempty"`
+	HtmlBody             *string           `protobuf:"bytes,8,opt,name=HtmlBody" json:"HtmlBody,omitempty"`
+	Attachment           []*MailAttachment `protobuf:"bytes,9,rep,name=Attachment" json:"Attachment,omitempty"`
+	Header               []*MailHeader     `protobuf:"bytes,10,rep,name=Header" json:"Header,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}          `json:"-"`
+	XXX_unrecognized     []byte            `json:"-"`
+	XXX_sizecache        int32             `json:"-"`
+}
+
+func (m *MailMessage) Reset()         { *m = MailMessage{} }
+func (m *MailMessage) String() string { return proto.CompactTextString(m) }
+func (*MailMessage) ProtoMessage()    {}
+func (*MailMessage) Descriptor() ([]byte, []int) {
+	return fileDescriptor_mail_service_78722be3c4c01d17, []int{3}
+}
+func (m *MailMessage) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MailMessage.Unmarshal(m, b)
+}
+func (m *MailMessage) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MailMessage.Marshal(b, m, deterministic)
+}
+func (dst *MailMessage) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MailMessage.Merge(dst, src)
+}
+func (m *MailMessage) XXX_Size() int {
+	return xxx_messageInfo_MailMessage.Size(m)
+}
+func (m *MailMessage) XXX_DiscardUnknown() {
+	xxx_messageInfo_MailMessage.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MailMessage proto.InternalMessageInfo
+
+func (m *MailMessage) GetSender() string {
+	if m != nil && m.Sender != nil {
+		return *m.Sender
+	}
+	return ""
+}
+
+func (m *MailMessage) GetReplyTo() string {
+	if m != nil && m.ReplyTo != nil {
+		return *m.ReplyTo
+	}
+	return ""
+}
+
+func (m *MailMessage) GetTo() []string {
+	if m != nil {
+		return m.To
+	}
+	return nil
+}
+
+func (m *MailMessage) GetCc() []string {
+	if m != nil {
+		return m.Cc
+	}
+	return nil
+}
+
+func (m *MailMessage) GetBcc() []string {
+	if m != nil {
+		return m.Bcc
+	}
+	return nil
+}
+
+func (m *MailMessage) GetSubject() string {
+	if m != nil && m.Subject != nil {
+		return *m.Subject
+	}
+	return ""
+}
+
+func (m *MailMessage) GetTextBody() string {
+	if m != nil && m.TextBody != nil {
+		return *m.TextBody
+	}
+	return ""
+}
+
+func (m *MailMessage) GetHtmlBody() string {
+	if m != nil && m.HtmlBody != nil {
+		return *m.HtmlBody
+	}
+	return ""
+}
+
+func (m *MailMessage) GetAttachment() []*MailAttachment {
+	if m != nil {
+		return m.Attachment
+	}
+	return nil
+}
+
+func (m *MailMessage) GetHeader() []*MailHeader {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func init() {
+	proto.RegisterType((*MailServiceError)(nil), "appengine.MailServiceError")
+	proto.RegisterType((*MailAttachment)(nil), "appengine.MailAttachment")
+	proto.RegisterType((*MailHeader)(nil), "appengine.MailHeader")
+	proto.RegisterType((*MailMessage)(nil), "appengine.MailMessage")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/mail/mail_service.proto", fileDescriptor_mail_service_78722be3c4c01d17)
+}
+
+var fileDescriptor_mail_service_78722be3c4c01d17 = []byte{
+	// 480 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0xcf, 0x6e, 0xd3, 0x40,
+	0x10, 0xc6, 0x89, 0x9d, 0xb8, 0xf5, 0x04, 0x05, 0x6b, 0x81, 0x76, 0xf9, 0x73, 0x88, 0x72, 0xca,
+	0x85, 0x44, 0xe2, 0x80, 0x84, 0xc4, 0xc5, 0xb1, 0x17, 0xc5, 0xa2, 0x71, 0x60, 0xb3, 0x41, 0xa2,
+	0x07, 0xac, 0xc5, 0x19, 0x19, 0x23, 0xc7, 0x1b, 0x39, 0xdb, 0x8a, 0x3e, 0x0d, 0x4f, 0xc0, 0x8d,
+	0x07, 0x44, 0x6b, 0xc7, 0x09, 0xf4, 0x62, 0xcd, 0x6f, 0xbf, 0xf9, 0x66, 0xac, 0x4f, 0x03, 0xef,
+	0x32, 0xa5, 0xb2, 0x02, 0x27, 0x99, 0x2a, 0x64, 0x99, 0x4d, 0x54, 0x95, 0x4d, 0xe5, 0x6e, 0x87,
+	0x65, 0x96, 0x97, 0x38, 0xcd, 0x4b, 0x8d, 0x55, 0x29, 0x8b, 0xe9, 0x56, 0xe6, 0xcd, 0x27, 0xd9,
+	0x63, 0x75, 0x9b, 0xa7, 0x38, 0xd9, 0x55, 0x4a, 0x2b, 0xe2, 0x1e, 0x7b, 0x47, 0x7f, 0x3a, 0xe0,
+	0x2d, 0x64, 0x5e, 0xac, 0x9a, 0x06, 0x56, 0x55, 0xaa, 0x1a, 0xfd, 0xea, 0x80, 0x5b, 0x57, 0x81,
+	0xda, 0x20, 0x71, 0xc0, 0x5a, 0x7e, 0xf0, 0x1e, 0x10, 0x02, 0x83, 0x28, 0x16, 0x8c, 0xc7, 0xfe,
+	0x55, 0xc2, 0x38, 0x5f, 0x72, 0xaf, 0x43, 0x1e, 0x41, 0x7f, 0xe6, 0x87, 0x09, 0x67, 0x9f, 0xd6,
+	0x6c, 0x25, 0x3c, 0x8b, 0x5c, 0xc2, 0xe3, 0x75, 0xec, 0xaf, 0xc5, 0x7c, 0xc9, 0xa3, 0x6b, 0x16,
+	0x26, 0x2b, 0x16, 0x87, 0x8c, 0x7b, 0x36, 0x79, 0x01, 0x97, 0x51, 0xfc, 0xd9, 0xbf, 0x8a, 0xc2,
+	0xc4, 0x17, 0xc2, 0x0f, 0xe6, 0x0b, 0x16, 0x8b, 0x44, 0x7c, 0xf9, 0xc8, 0xbc, 0xae, 0x71, 0xb5,
+	0xe2, 0x9c, 0xf9, 0x21, 0xe3, 0x49, 0xec, 0x2f, 0x98, 0xd7, 0x23, 0x17, 0x40, 0x5a, 0x21, 0x58,
+	0xc6, 0xc2, 0x58, 0xa2, 0xd0, 0x73, 0x46, 0x5f, 0x61, 0x60, 0xfe, 0xda, 0xd7, 0x5a, 0xa6, 0xdf,
+	0xb7, 0x58, 0x6a, 0xf2, 0x1c, 0xce, 0xdf, 0xe7, 0x05, 0xc6, 0x72, 0x8b, 0xb4, 0x33, 0xb4, 0xc6,
+	0x2e, 0x3f, 0x32, 0x21, 0xd0, 0x0d, 0xa5, 0x96, 0xd4, 0x1a, 0x5a, 0xe3, 0x87, 0xbc, 0xae, 0xc9,
+	0x4b, 0x70, 0x03, 0x55, 0x6a, 0x2c, 0x75, 0x14, 0x52, 0x7b, 0xd8, 0x19, 0xbb, 0xfc, 0xf4, 0x30,
+	0x7a, 0x03, 0x60, 0xe6, 0xcf, 0x51, 0x6e, 0xb0, 0x32, 0xfe, 0xf2, 0x34, 0xb7, 0xae, 0xc9, 0x13,
+	0xe8, 0xdd, 0xca, 0xe2, 0x06, 0xeb, 0xa1, 0x2e, 0x6f, 0x60, 0xf4, 0xdb, 0x82, 0xbe, 0x31, 0x2e,
+	0x70, 0xbf, 0x97, 0x19, 0x92, 0x0b, 0x70, 0x56, 0x58, 0x6e, 0xb0, 0x3a, 0x78, 0x0f, 0x44, 0x28,
+	0x9c, 0x71, 0xdc, 0x15, 0x77, 0x42, 0x51, 0xab, 0xde, 0xdd, 0x22, 0x19, 0x80, 0x25, 0x14, 0xb5,
+	0x87, 0xf6, 0xd8, 0xe5, 0x56, 0xc3, 0x41, 0x4a, 0xbb, 0x0d, 0x07, 0x29, 0xf1, 0xc0, 0x9e, 0xa5,
+	0x29, 0xed, 0xd5, 0x0f, 0xa6, 0x34, 0xb3, 0x56, 0x37, 0xdf, 0x7e, 0x60, 0xaa, 0xa9, 0x53, 0x2f,
+	0x69, 0xd1, 0x64, 0x22, 0xf0, 0xa7, 0x9e, 0xa9, 0xcd, 0x1d, 0x3d, 0xab, 0xd7, 0x1c, 0xd9, 0x68,
+	0x73, 0xbd, 0x2d, 0x6a, 0xed, 0xbc, 0xd1, 0x5a, 0x26, 0x6f, 0x01, 0x4e, 0xc9, 0x52, 0x77, 0x68,
+	0x8f, 0xfb, 0xaf, 0x9f, 0x4d, 0x8e, 0x47, 0x33, 0xf9, 0x3f, 0x7a, 0xfe, 0x4f, 0x33, 0x79, 0x05,
+	0x4e, 0x13, 0x1a, 0x85, 0xda, 0xf6, 0xf4, 0x9e, 0xad, 0x11, 0xf9, 0xa1, 0x69, 0xe6, 0x5c, 0x77,
+	0xcd, 0x7d, 0xfe, 0x0d, 0x00, 0x00, 0xff, 0xff, 0x4e, 0xd3, 0x01, 0x27, 0xd0, 0x02, 0x00, 0x00,
+}
diff --git a/v2/internal/mail/mail_service.proto b/v2/internal/mail/mail_service.proto
new file mode 100644
index 0000000..4e57b7a
--- /dev/null
+++ b/v2/internal/mail/mail_service.proto
@@ -0,0 +1,45 @@
+syntax = "proto2";
+option go_package = "mail";
+
+package appengine;
+
+message MailServiceError {
+  enum ErrorCode {
+    OK = 0;
+    INTERNAL_ERROR = 1;
+    BAD_REQUEST = 2;
+    UNAUTHORIZED_SENDER = 3;
+    INVALID_ATTACHMENT_TYPE = 4;
+    INVALID_HEADER_NAME = 5;
+    INVALID_CONTENT_ID = 6;
+  }
+}
+
+message MailAttachment {
+  required string FileName = 1;
+  required bytes Data = 2;
+  optional string ContentID = 3;
+}
+
+message MailHeader {
+  required string name = 1;
+  required string value = 2;
+}
+
+message MailMessage {
+  required string Sender = 1;
+  optional string ReplyTo = 2;
+
+  repeated string To = 3;
+  repeated string Cc = 4;
+  repeated string Bcc = 5;
+
+  required string Subject = 6;
+
+  optional string TextBody = 7;
+  optional string HtmlBody = 8;
+
+  repeated MailAttachment Attachment = 9;
+
+  repeated MailHeader Header = 10;
+}
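
A hedged sketch of filling in the generated MailMessage from Go, using the struct fields and proto2 pointer conventions shown in mail_service.pb.go above; the addresses, subject and attachment contents are placeholder values:

// Hypothetical external test package inside the module; not part of this diff.
package mail_test

import (
	"github.com/golang/protobuf/proto"

	pb "google.golang.org/appengine/v2/internal/mail"
)

// buildMessage assembles a minimal message with one CSV attachment.
func buildMessage() *pb.MailMessage {
	return &pb.MailMessage{
		Sender:   proto.String("sender@example.com"),  // required
		To:       []string{"recipient@example.com"},   // repeated fields are plain slices
		Subject:  proto.String("Weekly report"),       // required
		TextBody: proto.String("See the attachment."), // optional
		Attachment: []*pb.MailAttachment{{
			FileName: proto.String("report.csv"),
			Data:     []byte("a,b\n1,2\n"),
		}},
	}
}
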
diff --git a/v2/internal/main.go b/v2/internal/main.go
new file mode 100644
index 0000000..0abb8c6
--- /dev/null
+++ b/v2/internal/main.go
@@ -0,0 +1,70 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	"io"
+	"log"
+	"net/http"
+	"net/url"
+	"os"
+	"path/filepath"
+	"runtime"
+)
+
+// MainPath stores the file path of the main package.
+var MainPath string
+
+func Main() {
+	MainPath = filepath.Dir(findMainPath())
+	installHealthChecker(http.DefaultServeMux)
+
+	port := "8080"
+	if s := os.Getenv("PORT"); s != "" {
+		port = s
+	}
+
+	host := ""
+	if IsDevAppServer() {
+		host = "127.0.0.1"
+	}
+	if err := http.ListenAndServe(host+":"+port, Middleware(http.DefaultServeMux)); err != nil {
+		log.Fatalf("http.ListenAndServe: %v", err)
+	}
+}
+
+// Find the path to package main by looking at the root Caller.
+func findMainPath() string {
+	pc := make([]uintptr, 100)
+	n := runtime.Callers(2, pc)
+	frames := runtime.CallersFrames(pc[:n])
+	for {
+		frame, more := frames.Next()
+		// Tests won't have package main, instead they have testing.tRunner
+		if frame.Function == "main.main" || frame.Function == "testing.tRunner" {
+			return frame.File
+		}
+		if !more {
+			break
+		}
+	}
+	return ""
+}
+
+func installHealthChecker(mux *http.ServeMux) {
+	// If no health check handler has been installed by this point, add a trivial one.
+	const healthPath = "/_ah/health"
+	hreq := &http.Request{
+		Method: "GET",
+		URL: &url.URL{
+			Path: healthPath,
+		},
+	}
+	if _, pat := mux.Handler(hreq); pat != healthPath {
+		mux.HandleFunc(healthPath, func(w http.ResponseWriter, r *http.Request) {
+			io.WriteString(w, "ok")
+		})
+	}
+}
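
For context, the sketch below shows the kind of application entry point that ends up in Main above. It assumes the public google.golang.org/appengine/v2 package re-exports this as appengine.Main, which is not shown in this excerpt, so treat that wiring as an assumption:

// Hypothetical app code, not part of this diff.
package main

import (
	"fmt"
	"net/http"

	"google.golang.org/appengine/v2" // assumed public wrapper around internal.Main
)

func main() {
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "hello")
	})
	// Serves http.DefaultServeMux on $PORT (default 8080), binds to 127.0.0.1
	// only under the dev appserver, and installs /_ah/health unless the app
	// registered that exact pattern itself.
	appengine.Main()
}
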
diff --git a/v2/internal/main_test.go b/v2/internal/main_test.go
new file mode 100644
index 0000000..ce2f277
--- /dev/null
+++ b/v2/internal/main_test.go
@@ -0,0 +1,16 @@
+package internal
+
+import (
+	"go/build"
+	"path/filepath"
+	"testing"
+)
+
+func TestFindMainPath(t *testing.T) {
+	// Tests won't have package main, instead they have testing.tRunner
+	want := filepath.Join(build.Default.GOROOT, "src", "testing", "testing.go")
+	got := findMainPath()
+	if want != got {
+		t.Errorf("findMainPath: want %s, got %s", want, got)
+	}
+}
diff --git a/v2/internal/memcache/memcache_service.pb.go b/v2/internal/memcache/memcache_service.pb.go
new file mode 100644
index 0000000..5b231de
--- /dev/null
+++ b/v2/internal/memcache/memcache_service.pb.go
@@ -0,0 +1,1562 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/memcache/memcache_service.proto
+
+package memcache
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type MemcacheServiceError_ErrorCode int32
+
+const (
+	MemcacheServiceError_OK                MemcacheServiceError_ErrorCode = 0
+	MemcacheServiceError_UNSPECIFIED_ERROR MemcacheServiceError_ErrorCode = 1
+	MemcacheServiceError_NAMESPACE_NOT_SET MemcacheServiceError_ErrorCode = 2
+	MemcacheServiceError_PERMISSION_DENIED MemcacheServiceError_ErrorCode = 3
+	MemcacheServiceError_INVALID_VALUE     MemcacheServiceError_ErrorCode = 6
+)
+
+var MemcacheServiceError_ErrorCode_name = map[int32]string{
+	0: "OK",
+	1: "UNSPECIFIED_ERROR",
+	2: "NAMESPACE_NOT_SET",
+	3: "PERMISSION_DENIED",
+	6: "INVALID_VALUE",
+}
+var MemcacheServiceError_ErrorCode_value = map[string]int32{
+	"OK":                0,
+	"UNSPECIFIED_ERROR": 1,
+	"NAMESPACE_NOT_SET": 2,
+	"PERMISSION_DENIED": 3,
+	"INVALID_VALUE":     6,
+}
+
+func (x MemcacheServiceError_ErrorCode) Enum() *MemcacheServiceError_ErrorCode {
+	p := new(MemcacheServiceError_ErrorCode)
+	*p = x
+	return p
+}
+func (x MemcacheServiceError_ErrorCode) String() string {
+	return proto.EnumName(MemcacheServiceError_ErrorCode_name, int32(x))
+}
+func (x *MemcacheServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(MemcacheServiceError_ErrorCode_value, data, "MemcacheServiceError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = MemcacheServiceError_ErrorCode(value)
+	return nil
+}
+func (MemcacheServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{0, 0}
+}
+
+type MemcacheSetRequest_SetPolicy int32
+
+const (
+	MemcacheSetRequest_SET     MemcacheSetRequest_SetPolicy = 1
+	MemcacheSetRequest_ADD     MemcacheSetRequest_SetPolicy = 2
+	MemcacheSetRequest_REPLACE MemcacheSetRequest_SetPolicy = 3
+	MemcacheSetRequest_CAS     MemcacheSetRequest_SetPolicy = 4
+)
+
+var MemcacheSetRequest_SetPolicy_name = map[int32]string{
+	1: "SET",
+	2: "ADD",
+	3: "REPLACE",
+	4: "CAS",
+}
+var MemcacheSetRequest_SetPolicy_value = map[string]int32{
+	"SET":     1,
+	"ADD":     2,
+	"REPLACE": 3,
+	"CAS":     4,
+}
+
+func (x MemcacheSetRequest_SetPolicy) Enum() *MemcacheSetRequest_SetPolicy {
+	p := new(MemcacheSetRequest_SetPolicy)
+	*p = x
+	return p
+}
+func (x MemcacheSetRequest_SetPolicy) String() string {
+	return proto.EnumName(MemcacheSetRequest_SetPolicy_name, int32(x))
+}
+func (x *MemcacheSetRequest_SetPolicy) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(MemcacheSetRequest_SetPolicy_value, data, "MemcacheSetRequest_SetPolicy")
+	if err != nil {
+		return err
+	}
+	*x = MemcacheSetRequest_SetPolicy(value)
+	return nil
+}
+func (MemcacheSetRequest_SetPolicy) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{4, 0}
+}
+
+type MemcacheSetResponse_SetStatusCode int32
+
+const (
+	MemcacheSetResponse_STORED     MemcacheSetResponse_SetStatusCode = 1
+	MemcacheSetResponse_NOT_STORED MemcacheSetResponse_SetStatusCode = 2
+	MemcacheSetResponse_ERROR      MemcacheSetResponse_SetStatusCode = 3
+	MemcacheSetResponse_EXISTS     MemcacheSetResponse_SetStatusCode = 4
+)
+
+var MemcacheSetResponse_SetStatusCode_name = map[int32]string{
+	1: "STORED",
+	2: "NOT_STORED",
+	3: "ERROR",
+	4: "EXISTS",
+}
+var MemcacheSetResponse_SetStatusCode_value = map[string]int32{
+	"STORED":     1,
+	"NOT_STORED": 2,
+	"ERROR":      3,
+	"EXISTS":     4,
+}
+
+func (x MemcacheSetResponse_SetStatusCode) Enum() *MemcacheSetResponse_SetStatusCode {
+	p := new(MemcacheSetResponse_SetStatusCode)
+	*p = x
+	return p
+}
+func (x MemcacheSetResponse_SetStatusCode) String() string {
+	return proto.EnumName(MemcacheSetResponse_SetStatusCode_name, int32(x))
+}
+func (x *MemcacheSetResponse_SetStatusCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(MemcacheSetResponse_SetStatusCode_value, data, "MemcacheSetResponse_SetStatusCode")
+	if err != nil {
+		return err
+	}
+	*x = MemcacheSetResponse_SetStatusCode(value)
+	return nil
+}
+func (MemcacheSetResponse_SetStatusCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{5, 0}
+}
+
+type MemcacheDeleteResponse_DeleteStatusCode int32
+
+const (
+	MemcacheDeleteResponse_DELETED   MemcacheDeleteResponse_DeleteStatusCode = 1
+	MemcacheDeleteResponse_NOT_FOUND MemcacheDeleteResponse_DeleteStatusCode = 2
+)
+
+var MemcacheDeleteResponse_DeleteStatusCode_name = map[int32]string{
+	1: "DELETED",
+	2: "NOT_FOUND",
+}
+var MemcacheDeleteResponse_DeleteStatusCode_value = map[string]int32{
+	"DELETED":   1,
+	"NOT_FOUND": 2,
+}
+
+func (x MemcacheDeleteResponse_DeleteStatusCode) Enum() *MemcacheDeleteResponse_DeleteStatusCode {
+	p := new(MemcacheDeleteResponse_DeleteStatusCode)
+	*p = x
+	return p
+}
+func (x MemcacheDeleteResponse_DeleteStatusCode) String() string {
+	return proto.EnumName(MemcacheDeleteResponse_DeleteStatusCode_name, int32(x))
+}
+func (x *MemcacheDeleteResponse_DeleteStatusCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(MemcacheDeleteResponse_DeleteStatusCode_value, data, "MemcacheDeleteResponse_DeleteStatusCode")
+	if err != nil {
+		return err
+	}
+	*x = MemcacheDeleteResponse_DeleteStatusCode(value)
+	return nil
+}
+func (MemcacheDeleteResponse_DeleteStatusCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{7, 0}
+}
+
+type MemcacheIncrementRequest_Direction int32
+
+const (
+	MemcacheIncrementRequest_INCREMENT MemcacheIncrementRequest_Direction = 1
+	MemcacheIncrementRequest_DECREMENT MemcacheIncrementRequest_Direction = 2
+)
+
+var MemcacheIncrementRequest_Direction_name = map[int32]string{
+	1: "INCREMENT",
+	2: "DECREMENT",
+}
+var MemcacheIncrementRequest_Direction_value = map[string]int32{
+	"INCREMENT": 1,
+	"DECREMENT": 2,
+}
+
+func (x MemcacheIncrementRequest_Direction) Enum() *MemcacheIncrementRequest_Direction {
+	p := new(MemcacheIncrementRequest_Direction)
+	*p = x
+	return p
+}
+func (x MemcacheIncrementRequest_Direction) String() string {
+	return proto.EnumName(MemcacheIncrementRequest_Direction_name, int32(x))
+}
+func (x *MemcacheIncrementRequest_Direction) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(MemcacheIncrementRequest_Direction_value, data, "MemcacheIncrementRequest_Direction")
+	if err != nil {
+		return err
+	}
+	*x = MemcacheIncrementRequest_Direction(value)
+	return nil
+}
+func (MemcacheIncrementRequest_Direction) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{8, 0}
+}
+
+type MemcacheIncrementResponse_IncrementStatusCode int32
+
+const (
+	MemcacheIncrementResponse_OK          MemcacheIncrementResponse_IncrementStatusCode = 1
+	MemcacheIncrementResponse_NOT_CHANGED MemcacheIncrementResponse_IncrementStatusCode = 2
+	MemcacheIncrementResponse_ERROR       MemcacheIncrementResponse_IncrementStatusCode = 3
+)
+
+var MemcacheIncrementResponse_IncrementStatusCode_name = map[int32]string{
+	1: "OK",
+	2: "NOT_CHANGED",
+	3: "ERROR",
+}
+var MemcacheIncrementResponse_IncrementStatusCode_value = map[string]int32{
+	"OK":          1,
+	"NOT_CHANGED": 2,
+	"ERROR":       3,
+}
+
+func (x MemcacheIncrementResponse_IncrementStatusCode) Enum() *MemcacheIncrementResponse_IncrementStatusCode {
+	p := new(MemcacheIncrementResponse_IncrementStatusCode)
+	*p = x
+	return p
+}
+func (x MemcacheIncrementResponse_IncrementStatusCode) String() string {
+	return proto.EnumName(MemcacheIncrementResponse_IncrementStatusCode_name, int32(x))
+}
+func (x *MemcacheIncrementResponse_IncrementStatusCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(MemcacheIncrementResponse_IncrementStatusCode_value, data, "MemcacheIncrementResponse_IncrementStatusCode")
+	if err != nil {
+		return err
+	}
+	*x = MemcacheIncrementResponse_IncrementStatusCode(value)
+	return nil
+}
+func (MemcacheIncrementResponse_IncrementStatusCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{9, 0}
+}
+
+type MemcacheServiceError struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *MemcacheServiceError) Reset()         { *m = MemcacheServiceError{} }
+func (m *MemcacheServiceError) String() string { return proto.CompactTextString(m) }
+func (*MemcacheServiceError) ProtoMessage()    {}
+func (*MemcacheServiceError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{0}
+}
+func (m *MemcacheServiceError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheServiceError.Unmarshal(m, b)
+}
+func (m *MemcacheServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheServiceError.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheServiceError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheServiceError.Merge(dst, src)
+}
+func (m *MemcacheServiceError) XXX_Size() int {
+	return xxx_messageInfo_MemcacheServiceError.Size(m)
+}
+func (m *MemcacheServiceError) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheServiceError proto.InternalMessageInfo
+
+type AppOverride struct {
+	AppId                    *string  `protobuf:"bytes,1,req,name=app_id,json=appId" json:"app_id,omitempty"`
+	NumMemcachegBackends     *int32   `protobuf:"varint,2,opt,name=num_memcacheg_backends,json=numMemcachegBackends" json:"num_memcacheg_backends,omitempty"`            // Deprecated: Do not use.
+	IgnoreShardlock          *bool    `protobuf:"varint,3,opt,name=ignore_shardlock,json=ignoreShardlock" json:"ignore_shardlock,omitempty"`                             // Deprecated: Do not use.
+	MemcachePoolHint         *string  `protobuf:"bytes,4,opt,name=memcache_pool_hint,json=memcachePoolHint" json:"memcache_pool_hint,omitempty"`                         // Deprecated: Do not use.
+	MemcacheShardingStrategy []byte   `protobuf:"bytes,5,opt,name=memcache_sharding_strategy,json=memcacheShardingStrategy" json:"memcache_sharding_strategy,omitempty"` // Deprecated: Do not use.
+	XXX_NoUnkeyedLiteral     struct{} `json:"-"`
+	XXX_unrecognized         []byte   `json:"-"`
+	XXX_sizecache            int32    `json:"-"`
+}
+
+func (m *AppOverride) Reset()         { *m = AppOverride{} }
+func (m *AppOverride) String() string { return proto.CompactTextString(m) }
+func (*AppOverride) ProtoMessage()    {}
+func (*AppOverride) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{1}
+}
+func (m *AppOverride) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_AppOverride.Unmarshal(m, b)
+}
+func (m *AppOverride) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_AppOverride.Marshal(b, m, deterministic)
+}
+func (dst *AppOverride) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_AppOverride.Merge(dst, src)
+}
+func (m *AppOverride) XXX_Size() int {
+	return xxx_messageInfo_AppOverride.Size(m)
+}
+func (m *AppOverride) XXX_DiscardUnknown() {
+	xxx_messageInfo_AppOverride.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_AppOverride proto.InternalMessageInfo
+
+func (m *AppOverride) GetAppId() string {
+	if m != nil && m.AppId != nil {
+		return *m.AppId
+	}
+	return ""
+}
+
+// Deprecated: Do not use.
+func (m *AppOverride) GetNumMemcachegBackends() int32 {
+	if m != nil && m.NumMemcachegBackends != nil {
+		return *m.NumMemcachegBackends
+	}
+	return 0
+}
+
+// Deprecated: Do not use.
+func (m *AppOverride) GetIgnoreShardlock() bool {
+	if m != nil && m.IgnoreShardlock != nil {
+		return *m.IgnoreShardlock
+	}
+	return false
+}
+
+// Deprecated: Do not use.
+func (m *AppOverride) GetMemcachePoolHint() string {
+	if m != nil && m.MemcachePoolHint != nil {
+		return *m.MemcachePoolHint
+	}
+	return ""
+}
+
+// Deprecated: Do not use.
+func (m *AppOverride) GetMemcacheShardingStrategy() []byte {
+	if m != nil {
+		return m.MemcacheShardingStrategy
+	}
+	return nil
+}
+
+type MemcacheGetRequest struct {
+	Key                  [][]byte     `protobuf:"bytes,1,rep,name=key" json:"key,omitempty"`
+	NameSpace            *string      `protobuf:"bytes,2,opt,name=name_space,json=nameSpace,def=" json:"name_space,omitempty"`
+	ForCas               *bool        `protobuf:"varint,4,opt,name=for_cas,json=forCas" json:"for_cas,omitempty"`
+	Override             *AppOverride `protobuf:"bytes,5,opt,name=override" json:"override,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}     `json:"-"`
+	XXX_unrecognized     []byte       `json:"-"`
+	XXX_sizecache        int32        `json:"-"`
+}
+
+func (m *MemcacheGetRequest) Reset()         { *m = MemcacheGetRequest{} }
+func (m *MemcacheGetRequest) String() string { return proto.CompactTextString(m) }
+func (*MemcacheGetRequest) ProtoMessage()    {}
+func (*MemcacheGetRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{2}
+}
+func (m *MemcacheGetRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheGetRequest.Unmarshal(m, b)
+}
+func (m *MemcacheGetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheGetRequest.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheGetRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheGetRequest.Merge(dst, src)
+}
+func (m *MemcacheGetRequest) XXX_Size() int {
+	return xxx_messageInfo_MemcacheGetRequest.Size(m)
+}
+func (m *MemcacheGetRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheGetRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheGetRequest proto.InternalMessageInfo
+
+func (m *MemcacheGetRequest) GetKey() [][]byte {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *MemcacheGetRequest) GetNameSpace() string {
+	if m != nil && m.NameSpace != nil {
+		return *m.NameSpace
+	}
+	return ""
+}
+
+func (m *MemcacheGetRequest) GetForCas() bool {
+	if m != nil && m.ForCas != nil {
+		return *m.ForCas
+	}
+	return false
+}
+
+func (m *MemcacheGetRequest) GetOverride() *AppOverride {
+	if m != nil {
+		return m.Override
+	}
+	return nil
+}
+
+type MemcacheGetResponse struct {
+	Item                 []*MemcacheGetResponse_Item `protobuf:"group,1,rep,name=Item,json=item" json:"item,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                    `json:"-"`
+	XXX_unrecognized     []byte                      `json:"-"`
+	XXX_sizecache        int32                       `json:"-"`
+}
+
+func (m *MemcacheGetResponse) Reset()         { *m = MemcacheGetResponse{} }
+func (m *MemcacheGetResponse) String() string { return proto.CompactTextString(m) }
+func (*MemcacheGetResponse) ProtoMessage()    {}
+func (*MemcacheGetResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{3}
+}
+func (m *MemcacheGetResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheGetResponse.Unmarshal(m, b)
+}
+func (m *MemcacheGetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheGetResponse.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheGetResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheGetResponse.Merge(dst, src)
+}
+func (m *MemcacheGetResponse) XXX_Size() int {
+	return xxx_messageInfo_MemcacheGetResponse.Size(m)
+}
+func (m *MemcacheGetResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheGetResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheGetResponse proto.InternalMessageInfo
+
+func (m *MemcacheGetResponse) GetItem() []*MemcacheGetResponse_Item {
+	if m != nil {
+		return m.Item
+	}
+	return nil
+}
+
+type MemcacheGetResponse_Item struct {
+	Key                  []byte   `protobuf:"bytes,2,req,name=key" json:"key,omitempty"`
+	Value                []byte   `protobuf:"bytes,3,req,name=value" json:"value,omitempty"`
+	Flags                *uint32  `protobuf:"fixed32,4,opt,name=flags" json:"flags,omitempty"`
+	CasId                *uint64  `protobuf:"fixed64,5,opt,name=cas_id,json=casId" json:"cas_id,omitempty"`
+	ExpiresInSeconds     *int32   `protobuf:"varint,6,opt,name=expires_in_seconds,json=expiresInSeconds" json:"expires_in_seconds,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *MemcacheGetResponse_Item) Reset()         { *m = MemcacheGetResponse_Item{} }
+func (m *MemcacheGetResponse_Item) String() string { return proto.CompactTextString(m) }
+func (*MemcacheGetResponse_Item) ProtoMessage()    {}
+func (*MemcacheGetResponse_Item) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{3, 0}
+}
+func (m *MemcacheGetResponse_Item) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheGetResponse_Item.Unmarshal(m, b)
+}
+func (m *MemcacheGetResponse_Item) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheGetResponse_Item.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheGetResponse_Item) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheGetResponse_Item.Merge(dst, src)
+}
+func (m *MemcacheGetResponse_Item) XXX_Size() int {
+	return xxx_messageInfo_MemcacheGetResponse_Item.Size(m)
+}
+func (m *MemcacheGetResponse_Item) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheGetResponse_Item.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheGetResponse_Item proto.InternalMessageInfo
+
+func (m *MemcacheGetResponse_Item) GetKey() []byte {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *MemcacheGetResponse_Item) GetValue() []byte {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+func (m *MemcacheGetResponse_Item) GetFlags() uint32 {
+	if m != nil && m.Flags != nil {
+		return *m.Flags
+	}
+	return 0
+}
+
+func (m *MemcacheGetResponse_Item) GetCasId() uint64 {
+	if m != nil && m.CasId != nil {
+		return *m.CasId
+	}
+	return 0
+}
+
+func (m *MemcacheGetResponse_Item) GetExpiresInSeconds() int32 {
+	if m != nil && m.ExpiresInSeconds != nil {
+		return *m.ExpiresInSeconds
+	}
+	return 0
+}
+
+type MemcacheSetRequest struct {
+	Item                 []*MemcacheSetRequest_Item `protobuf:"group,1,rep,name=Item,json=item" json:"item,omitempty"`
+	NameSpace            *string                    `protobuf:"bytes,7,opt,name=name_space,json=nameSpace,def=" json:"name_space,omitempty"`
+	Override             *AppOverride               `protobuf:"bytes,10,opt,name=override" json:"override,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                   `json:"-"`
+	XXX_unrecognized     []byte                     `json:"-"`
+	XXX_sizecache        int32                      `json:"-"`
+}
+
+func (m *MemcacheSetRequest) Reset()         { *m = MemcacheSetRequest{} }
+func (m *MemcacheSetRequest) String() string { return proto.CompactTextString(m) }
+func (*MemcacheSetRequest) ProtoMessage()    {}
+func (*MemcacheSetRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{4}
+}
+func (m *MemcacheSetRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheSetRequest.Unmarshal(m, b)
+}
+func (m *MemcacheSetRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheSetRequest.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheSetRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheSetRequest.Merge(dst, src)
+}
+func (m *MemcacheSetRequest) XXX_Size() int {
+	return xxx_messageInfo_MemcacheSetRequest.Size(m)
+}
+func (m *MemcacheSetRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheSetRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheSetRequest proto.InternalMessageInfo
+
+func (m *MemcacheSetRequest) GetItem() []*MemcacheSetRequest_Item {
+	if m != nil {
+		return m.Item
+	}
+	return nil
+}
+
+func (m *MemcacheSetRequest) GetNameSpace() string {
+	if m != nil && m.NameSpace != nil {
+		return *m.NameSpace
+	}
+	return ""
+}
+
+func (m *MemcacheSetRequest) GetOverride() *AppOverride {
+	if m != nil {
+		return m.Override
+	}
+	return nil
+}
+
+type MemcacheSetRequest_Item struct {
+	Key                  []byte                        `protobuf:"bytes,2,req,name=key" json:"key,omitempty"`
+	Value                []byte                        `protobuf:"bytes,3,req,name=value" json:"value,omitempty"`
+	Flags                *uint32                       `protobuf:"fixed32,4,opt,name=flags" json:"flags,omitempty"`
+	SetPolicy            *MemcacheSetRequest_SetPolicy `protobuf:"varint,5,opt,name=set_policy,json=setPolicy,enum=appengine.MemcacheSetRequest_SetPolicy,def=1" json:"set_policy,omitempty"`
+	ExpirationTime       *uint32                       `protobuf:"fixed32,6,opt,name=expiration_time,json=expirationTime,def=0" json:"expiration_time,omitempty"`
+	CasId                *uint64                       `protobuf:"fixed64,8,opt,name=cas_id,json=casId" json:"cas_id,omitempty"`
+	ForCas               *bool                         `protobuf:"varint,9,opt,name=for_cas,json=forCas" json:"for_cas,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                      `json:"-"`
+	XXX_unrecognized     []byte                        `json:"-"`
+	XXX_sizecache        int32                         `json:"-"`
+}
+
+func (m *MemcacheSetRequest_Item) Reset()         { *m = MemcacheSetRequest_Item{} }
+func (m *MemcacheSetRequest_Item) String() string { return proto.CompactTextString(m) }
+func (*MemcacheSetRequest_Item) ProtoMessage()    {}
+func (*MemcacheSetRequest_Item) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{4, 0}
+}
+func (m *MemcacheSetRequest_Item) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheSetRequest_Item.Unmarshal(m, b)
+}
+func (m *MemcacheSetRequest_Item) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheSetRequest_Item.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheSetRequest_Item) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheSetRequest_Item.Merge(dst, src)
+}
+func (m *MemcacheSetRequest_Item) XXX_Size() int {
+	return xxx_messageInfo_MemcacheSetRequest_Item.Size(m)
+}
+func (m *MemcacheSetRequest_Item) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheSetRequest_Item.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheSetRequest_Item proto.InternalMessageInfo
+
+const Default_MemcacheSetRequest_Item_SetPolicy MemcacheSetRequest_SetPolicy = MemcacheSetRequest_SET
+const Default_MemcacheSetRequest_Item_ExpirationTime uint32 = 0
+
+func (m *MemcacheSetRequest_Item) GetKey() []byte {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *MemcacheSetRequest_Item) GetValue() []byte {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+func (m *MemcacheSetRequest_Item) GetFlags() uint32 {
+	if m != nil && m.Flags != nil {
+		return *m.Flags
+	}
+	return 0
+}
+
+func (m *MemcacheSetRequest_Item) GetSetPolicy() MemcacheSetRequest_SetPolicy {
+	if m != nil && m.SetPolicy != nil {
+		return *m.SetPolicy
+	}
+	return Default_MemcacheSetRequest_Item_SetPolicy
+}
+
+func (m *MemcacheSetRequest_Item) GetExpirationTime() uint32 {
+	if m != nil && m.ExpirationTime != nil {
+		return *m.ExpirationTime
+	}
+	return Default_MemcacheSetRequest_Item_ExpirationTime
+}
+
+func (m *MemcacheSetRequest_Item) GetCasId() uint64 {
+	if m != nil && m.CasId != nil {
+		return *m.CasId
+	}
+	return 0
+}
+
+func (m *MemcacheSetRequest_Item) GetForCas() bool {
+	if m != nil && m.ForCas != nil {
+		return *m.ForCas
+	}
+	return false
+}
+
+type MemcacheSetResponse struct {
+	SetStatus            []MemcacheSetResponse_SetStatusCode `protobuf:"varint,1,rep,name=set_status,json=setStatus,enum=appengine.MemcacheSetResponse_SetStatusCode" json:"set_status,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                            `json:"-"`
+	XXX_unrecognized     []byte                              `json:"-"`
+	XXX_sizecache        int32                               `json:"-"`
+}
+
+func (m *MemcacheSetResponse) Reset()         { *m = MemcacheSetResponse{} }
+func (m *MemcacheSetResponse) String() string { return proto.CompactTextString(m) }
+func (*MemcacheSetResponse) ProtoMessage()    {}
+func (*MemcacheSetResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{5}
+}
+func (m *MemcacheSetResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheSetResponse.Unmarshal(m, b)
+}
+func (m *MemcacheSetResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheSetResponse.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheSetResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheSetResponse.Merge(dst, src)
+}
+func (m *MemcacheSetResponse) XXX_Size() int {
+	return xxx_messageInfo_MemcacheSetResponse.Size(m)
+}
+func (m *MemcacheSetResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheSetResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheSetResponse proto.InternalMessageInfo
+
+func (m *MemcacheSetResponse) GetSetStatus() []MemcacheSetResponse_SetStatusCode {
+	if m != nil {
+		return m.SetStatus
+	}
+	return nil
+}
+
+type MemcacheDeleteRequest struct {
+	Item                 []*MemcacheDeleteRequest_Item `protobuf:"group,1,rep,name=Item,json=item" json:"item,omitempty"`
+	NameSpace            *string                       `protobuf:"bytes,4,opt,name=name_space,json=nameSpace,def=" json:"name_space,omitempty"`
+	Override             *AppOverride                  `protobuf:"bytes,5,opt,name=override" json:"override,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                      `json:"-"`
+	XXX_unrecognized     []byte                        `json:"-"`
+	XXX_sizecache        int32                         `json:"-"`
+}
+
+func (m *MemcacheDeleteRequest) Reset()         { *m = MemcacheDeleteRequest{} }
+func (m *MemcacheDeleteRequest) String() string { return proto.CompactTextString(m) }
+func (*MemcacheDeleteRequest) ProtoMessage()    {}
+func (*MemcacheDeleteRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{6}
+}
+func (m *MemcacheDeleteRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheDeleteRequest.Unmarshal(m, b)
+}
+func (m *MemcacheDeleteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheDeleteRequest.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheDeleteRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheDeleteRequest.Merge(dst, src)
+}
+func (m *MemcacheDeleteRequest) XXX_Size() int {
+	return xxx_messageInfo_MemcacheDeleteRequest.Size(m)
+}
+func (m *MemcacheDeleteRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheDeleteRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheDeleteRequest proto.InternalMessageInfo
+
+func (m *MemcacheDeleteRequest) GetItem() []*MemcacheDeleteRequest_Item {
+	if m != nil {
+		return m.Item
+	}
+	return nil
+}
+
+func (m *MemcacheDeleteRequest) GetNameSpace() string {
+	if m != nil && m.NameSpace != nil {
+		return *m.NameSpace
+	}
+	return ""
+}
+
+func (m *MemcacheDeleteRequest) GetOverride() *AppOverride {
+	if m != nil {
+		return m.Override
+	}
+	return nil
+}
+
+type MemcacheDeleteRequest_Item struct {
+	Key                  []byte   `protobuf:"bytes,2,req,name=key" json:"key,omitempty"`
+	DeleteTime           *uint32  `protobuf:"fixed32,3,opt,name=delete_time,json=deleteTime,def=0" json:"delete_time,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *MemcacheDeleteRequest_Item) Reset()         { *m = MemcacheDeleteRequest_Item{} }
+func (m *MemcacheDeleteRequest_Item) String() string { return proto.CompactTextString(m) }
+func (*MemcacheDeleteRequest_Item) ProtoMessage()    {}
+func (*MemcacheDeleteRequest_Item) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{6, 0}
+}
+func (m *MemcacheDeleteRequest_Item) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheDeleteRequest_Item.Unmarshal(m, b)
+}
+func (m *MemcacheDeleteRequest_Item) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheDeleteRequest_Item.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheDeleteRequest_Item) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheDeleteRequest_Item.Merge(dst, src)
+}
+func (m *MemcacheDeleteRequest_Item) XXX_Size() int {
+	return xxx_messageInfo_MemcacheDeleteRequest_Item.Size(m)
+}
+func (m *MemcacheDeleteRequest_Item) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheDeleteRequest_Item.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheDeleteRequest_Item proto.InternalMessageInfo
+
+const Default_MemcacheDeleteRequest_Item_DeleteTime uint32 = 0
+
+func (m *MemcacheDeleteRequest_Item) GetKey() []byte {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *MemcacheDeleteRequest_Item) GetDeleteTime() uint32 {
+	if m != nil && m.DeleteTime != nil {
+		return *m.DeleteTime
+	}
+	return Default_MemcacheDeleteRequest_Item_DeleteTime
+}
+
+type MemcacheDeleteResponse struct {
+	DeleteStatus         []MemcacheDeleteResponse_DeleteStatusCode `protobuf:"varint,1,rep,name=delete_status,json=deleteStatus,enum=appengine.MemcacheDeleteResponse_DeleteStatusCode" json:"delete_status,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                                  `json:"-"`
+	XXX_unrecognized     []byte                                    `json:"-"`
+	XXX_sizecache        int32                                     `json:"-"`
+}
+
+func (m *MemcacheDeleteResponse) Reset()         { *m = MemcacheDeleteResponse{} }
+func (m *MemcacheDeleteResponse) String() string { return proto.CompactTextString(m) }
+func (*MemcacheDeleteResponse) ProtoMessage()    {}
+func (*MemcacheDeleteResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{7}
+}
+func (m *MemcacheDeleteResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheDeleteResponse.Unmarshal(m, b)
+}
+func (m *MemcacheDeleteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheDeleteResponse.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheDeleteResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheDeleteResponse.Merge(dst, src)
+}
+func (m *MemcacheDeleteResponse) XXX_Size() int {
+	return xxx_messageInfo_MemcacheDeleteResponse.Size(m)
+}
+func (m *MemcacheDeleteResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheDeleteResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheDeleteResponse proto.InternalMessageInfo
+
+func (m *MemcacheDeleteResponse) GetDeleteStatus() []MemcacheDeleteResponse_DeleteStatusCode {
+	if m != nil {
+		return m.DeleteStatus
+	}
+	return nil
+}
+
+type MemcacheIncrementRequest struct {
+	Key                  []byte                              `protobuf:"bytes,1,req,name=key" json:"key,omitempty"`
+	NameSpace            *string                             `protobuf:"bytes,4,opt,name=name_space,json=nameSpace,def=" json:"name_space,omitempty"`
+	Delta                *uint64                             `protobuf:"varint,2,opt,name=delta,def=1" json:"delta,omitempty"`
+	Direction            *MemcacheIncrementRequest_Direction `protobuf:"varint,3,opt,name=direction,enum=appengine.MemcacheIncrementRequest_Direction,def=1" json:"direction,omitempty"`
+	InitialValue         *uint64                             `protobuf:"varint,5,opt,name=initial_value,json=initialValue" json:"initial_value,omitempty"`
+	InitialFlags         *uint32                             `protobuf:"fixed32,6,opt,name=initial_flags,json=initialFlags" json:"initial_flags,omitempty"`
+	Override             *AppOverride                        `protobuf:"bytes,7,opt,name=override" json:"override,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                            `json:"-"`
+	XXX_unrecognized     []byte                              `json:"-"`
+	XXX_sizecache        int32                               `json:"-"`
+}
+
+func (m *MemcacheIncrementRequest) Reset()         { *m = MemcacheIncrementRequest{} }
+func (m *MemcacheIncrementRequest) String() string { return proto.CompactTextString(m) }
+func (*MemcacheIncrementRequest) ProtoMessage()    {}
+func (*MemcacheIncrementRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{8}
+}
+func (m *MemcacheIncrementRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheIncrementRequest.Unmarshal(m, b)
+}
+func (m *MemcacheIncrementRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheIncrementRequest.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheIncrementRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheIncrementRequest.Merge(dst, src)
+}
+func (m *MemcacheIncrementRequest) XXX_Size() int {
+	return xxx_messageInfo_MemcacheIncrementRequest.Size(m)
+}
+func (m *MemcacheIncrementRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheIncrementRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheIncrementRequest proto.InternalMessageInfo
+
+const Default_MemcacheIncrementRequest_Delta uint64 = 1
+const Default_MemcacheIncrementRequest_Direction MemcacheIncrementRequest_Direction = MemcacheIncrementRequest_INCREMENT
+
+func (m *MemcacheIncrementRequest) GetKey() []byte {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *MemcacheIncrementRequest) GetNameSpace() string {
+	if m != nil && m.NameSpace != nil {
+		return *m.NameSpace
+	}
+	return ""
+}
+
+func (m *MemcacheIncrementRequest) GetDelta() uint64 {
+	if m != nil && m.Delta != nil {
+		return *m.Delta
+	}
+	return Default_MemcacheIncrementRequest_Delta
+}
+
+func (m *MemcacheIncrementRequest) GetDirection() MemcacheIncrementRequest_Direction {
+	if m != nil && m.Direction != nil {
+		return *m.Direction
+	}
+	return Default_MemcacheIncrementRequest_Direction
+}
+
+func (m *MemcacheIncrementRequest) GetInitialValue() uint64 {
+	if m != nil && m.InitialValue != nil {
+		return *m.InitialValue
+	}
+	return 0
+}
+
+func (m *MemcacheIncrementRequest) GetInitialFlags() uint32 {
+	if m != nil && m.InitialFlags != nil {
+		return *m.InitialFlags
+	}
+	return 0
+}
+
+func (m *MemcacheIncrementRequest) GetOverride() *AppOverride {
+	if m != nil {
+		return m.Override
+	}
+	return nil
+}
+
+type MemcacheIncrementResponse struct {
+	NewValue             *uint64                                        `protobuf:"varint,1,opt,name=new_value,json=newValue" json:"new_value,omitempty"`
+	IncrementStatus      *MemcacheIncrementResponse_IncrementStatusCode `protobuf:"varint,2,opt,name=increment_status,json=incrementStatus,enum=appengine.MemcacheIncrementResponse_IncrementStatusCode" json:"increment_status,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                                       `json:"-"`
+	XXX_unrecognized     []byte                                         `json:"-"`
+	XXX_sizecache        int32                                          `json:"-"`
+}
+
+func (m *MemcacheIncrementResponse) Reset()         { *m = MemcacheIncrementResponse{} }
+func (m *MemcacheIncrementResponse) String() string { return proto.CompactTextString(m) }
+func (*MemcacheIncrementResponse) ProtoMessage()    {}
+func (*MemcacheIncrementResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{9}
+}
+func (m *MemcacheIncrementResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheIncrementResponse.Unmarshal(m, b)
+}
+func (m *MemcacheIncrementResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheIncrementResponse.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheIncrementResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheIncrementResponse.Merge(dst, src)
+}
+func (m *MemcacheIncrementResponse) XXX_Size() int {
+	return xxx_messageInfo_MemcacheIncrementResponse.Size(m)
+}
+func (m *MemcacheIncrementResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheIncrementResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheIncrementResponse proto.InternalMessageInfo
+
+func (m *MemcacheIncrementResponse) GetNewValue() uint64 {
+	if m != nil && m.NewValue != nil {
+		return *m.NewValue
+	}
+	return 0
+}
+
+func (m *MemcacheIncrementResponse) GetIncrementStatus() MemcacheIncrementResponse_IncrementStatusCode {
+	if m != nil && m.IncrementStatus != nil {
+		return *m.IncrementStatus
+	}
+	return MemcacheIncrementResponse_OK
+}
+
+type MemcacheBatchIncrementRequest struct {
+	NameSpace            *string                     `protobuf:"bytes,1,opt,name=name_space,json=nameSpace,def=" json:"name_space,omitempty"`
+	Item                 []*MemcacheIncrementRequest `protobuf:"bytes,2,rep,name=item" json:"item,omitempty"`
+	Override             *AppOverride                `protobuf:"bytes,3,opt,name=override" json:"override,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                    `json:"-"`
+	XXX_unrecognized     []byte                      `json:"-"`
+	XXX_sizecache        int32                       `json:"-"`
+}
+
+func (m *MemcacheBatchIncrementRequest) Reset()         { *m = MemcacheBatchIncrementRequest{} }
+func (m *MemcacheBatchIncrementRequest) String() string { return proto.CompactTextString(m) }
+func (*MemcacheBatchIncrementRequest) ProtoMessage()    {}
+func (*MemcacheBatchIncrementRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{10}
+}
+func (m *MemcacheBatchIncrementRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheBatchIncrementRequest.Unmarshal(m, b)
+}
+func (m *MemcacheBatchIncrementRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheBatchIncrementRequest.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheBatchIncrementRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheBatchIncrementRequest.Merge(dst, src)
+}
+func (m *MemcacheBatchIncrementRequest) XXX_Size() int {
+	return xxx_messageInfo_MemcacheBatchIncrementRequest.Size(m)
+}
+func (m *MemcacheBatchIncrementRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheBatchIncrementRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheBatchIncrementRequest proto.InternalMessageInfo
+
+func (m *MemcacheBatchIncrementRequest) GetNameSpace() string {
+	if m != nil && m.NameSpace != nil {
+		return *m.NameSpace
+	}
+	return ""
+}
+
+func (m *MemcacheBatchIncrementRequest) GetItem() []*MemcacheIncrementRequest {
+	if m != nil {
+		return m.Item
+	}
+	return nil
+}
+
+func (m *MemcacheBatchIncrementRequest) GetOverride() *AppOverride {
+	if m != nil {
+		return m.Override
+	}
+	return nil
+}
+
+type MemcacheBatchIncrementResponse struct {
+	Item                 []*MemcacheIncrementResponse `protobuf:"bytes,1,rep,name=item" json:"item,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                     `json:"-"`
+	XXX_unrecognized     []byte                       `json:"-"`
+	XXX_sizecache        int32                        `json:"-"`
+}
+
+func (m *MemcacheBatchIncrementResponse) Reset()         { *m = MemcacheBatchIncrementResponse{} }
+func (m *MemcacheBatchIncrementResponse) String() string { return proto.CompactTextString(m) }
+func (*MemcacheBatchIncrementResponse) ProtoMessage()    {}
+func (*MemcacheBatchIncrementResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{11}
+}
+func (m *MemcacheBatchIncrementResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheBatchIncrementResponse.Unmarshal(m, b)
+}
+func (m *MemcacheBatchIncrementResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheBatchIncrementResponse.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheBatchIncrementResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheBatchIncrementResponse.Merge(dst, src)
+}
+func (m *MemcacheBatchIncrementResponse) XXX_Size() int {
+	return xxx_messageInfo_MemcacheBatchIncrementResponse.Size(m)
+}
+func (m *MemcacheBatchIncrementResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheBatchIncrementResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheBatchIncrementResponse proto.InternalMessageInfo
+
+func (m *MemcacheBatchIncrementResponse) GetItem() []*MemcacheIncrementResponse {
+	if m != nil {
+		return m.Item
+	}
+	return nil
+}
+
+type MemcacheFlushRequest struct {
+	Override             *AppOverride `protobuf:"bytes,1,opt,name=override" json:"override,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}     `json:"-"`
+	XXX_unrecognized     []byte       `json:"-"`
+	XXX_sizecache        int32        `json:"-"`
+}
+
+func (m *MemcacheFlushRequest) Reset()         { *m = MemcacheFlushRequest{} }
+func (m *MemcacheFlushRequest) String() string { return proto.CompactTextString(m) }
+func (*MemcacheFlushRequest) ProtoMessage()    {}
+func (*MemcacheFlushRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{12}
+}
+func (m *MemcacheFlushRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheFlushRequest.Unmarshal(m, b)
+}
+func (m *MemcacheFlushRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheFlushRequest.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheFlushRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheFlushRequest.Merge(dst, src)
+}
+func (m *MemcacheFlushRequest) XXX_Size() int {
+	return xxx_messageInfo_MemcacheFlushRequest.Size(m)
+}
+func (m *MemcacheFlushRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheFlushRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheFlushRequest proto.InternalMessageInfo
+
+func (m *MemcacheFlushRequest) GetOverride() *AppOverride {
+	if m != nil {
+		return m.Override
+	}
+	return nil
+}
+
+type MemcacheFlushResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *MemcacheFlushResponse) Reset()         { *m = MemcacheFlushResponse{} }
+func (m *MemcacheFlushResponse) String() string { return proto.CompactTextString(m) }
+func (*MemcacheFlushResponse) ProtoMessage()    {}
+func (*MemcacheFlushResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{13}
+}
+func (m *MemcacheFlushResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheFlushResponse.Unmarshal(m, b)
+}
+func (m *MemcacheFlushResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheFlushResponse.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheFlushResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheFlushResponse.Merge(dst, src)
+}
+func (m *MemcacheFlushResponse) XXX_Size() int {
+	return xxx_messageInfo_MemcacheFlushResponse.Size(m)
+}
+func (m *MemcacheFlushResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheFlushResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheFlushResponse proto.InternalMessageInfo
+
+type MemcacheStatsRequest struct {
+	Override             *AppOverride `protobuf:"bytes,1,opt,name=override" json:"override,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}     `json:"-"`
+	XXX_unrecognized     []byte       `json:"-"`
+	XXX_sizecache        int32        `json:"-"`
+}
+
+func (m *MemcacheStatsRequest) Reset()         { *m = MemcacheStatsRequest{} }
+func (m *MemcacheStatsRequest) String() string { return proto.CompactTextString(m) }
+func (*MemcacheStatsRequest) ProtoMessage()    {}
+func (*MemcacheStatsRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{14}
+}
+func (m *MemcacheStatsRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheStatsRequest.Unmarshal(m, b)
+}
+func (m *MemcacheStatsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheStatsRequest.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheStatsRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheStatsRequest.Merge(dst, src)
+}
+func (m *MemcacheStatsRequest) XXX_Size() int {
+	return xxx_messageInfo_MemcacheStatsRequest.Size(m)
+}
+func (m *MemcacheStatsRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheStatsRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheStatsRequest proto.InternalMessageInfo
+
+func (m *MemcacheStatsRequest) GetOverride() *AppOverride {
+	if m != nil {
+		return m.Override
+	}
+	return nil
+}
+
+type MergedNamespaceStats struct {
+	Hits                 *uint64  `protobuf:"varint,1,req,name=hits" json:"hits,omitempty"`
+	Misses               *uint64  `protobuf:"varint,2,req,name=misses" json:"misses,omitempty"`
+	ByteHits             *uint64  `protobuf:"varint,3,req,name=byte_hits,json=byteHits" json:"byte_hits,omitempty"`
+	Items                *uint64  `protobuf:"varint,4,req,name=items" json:"items,omitempty"`
+	Bytes                *uint64  `protobuf:"varint,5,req,name=bytes" json:"bytes,omitempty"`
+	OldestItemAge        *uint32  `protobuf:"fixed32,6,req,name=oldest_item_age,json=oldestItemAge" json:"oldest_item_age,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *MergedNamespaceStats) Reset()         { *m = MergedNamespaceStats{} }
+func (m *MergedNamespaceStats) String() string { return proto.CompactTextString(m) }
+func (*MergedNamespaceStats) ProtoMessage()    {}
+func (*MergedNamespaceStats) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{15}
+}
+func (m *MergedNamespaceStats) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MergedNamespaceStats.Unmarshal(m, b)
+}
+func (m *MergedNamespaceStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MergedNamespaceStats.Marshal(b, m, deterministic)
+}
+func (dst *MergedNamespaceStats) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MergedNamespaceStats.Merge(dst, src)
+}
+func (m *MergedNamespaceStats) XXX_Size() int {
+	return xxx_messageInfo_MergedNamespaceStats.Size(m)
+}
+func (m *MergedNamespaceStats) XXX_DiscardUnknown() {
+	xxx_messageInfo_MergedNamespaceStats.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MergedNamespaceStats proto.InternalMessageInfo
+
+func (m *MergedNamespaceStats) GetHits() uint64 {
+	if m != nil && m.Hits != nil {
+		return *m.Hits
+	}
+	return 0
+}
+
+func (m *MergedNamespaceStats) GetMisses() uint64 {
+	if m != nil && m.Misses != nil {
+		return *m.Misses
+	}
+	return 0
+}
+
+func (m *MergedNamespaceStats) GetByteHits() uint64 {
+	if m != nil && m.ByteHits != nil {
+		return *m.ByteHits
+	}
+	return 0
+}
+
+func (m *MergedNamespaceStats) GetItems() uint64 {
+	if m != nil && m.Items != nil {
+		return *m.Items
+	}
+	return 0
+}
+
+func (m *MergedNamespaceStats) GetBytes() uint64 {
+	if m != nil && m.Bytes != nil {
+		return *m.Bytes
+	}
+	return 0
+}
+
+func (m *MergedNamespaceStats) GetOldestItemAge() uint32 {
+	if m != nil && m.OldestItemAge != nil {
+		return *m.OldestItemAge
+	}
+	return 0
+}
+
+type MemcacheStatsResponse struct {
+	Stats                *MergedNamespaceStats `protobuf:"bytes,1,opt,name=stats" json:"stats,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}              `json:"-"`
+	XXX_unrecognized     []byte                `json:"-"`
+	XXX_sizecache        int32                 `json:"-"`
+}
+
+func (m *MemcacheStatsResponse) Reset()         { *m = MemcacheStatsResponse{} }
+func (m *MemcacheStatsResponse) String() string { return proto.CompactTextString(m) }
+func (*MemcacheStatsResponse) ProtoMessage()    {}
+func (*MemcacheStatsResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{16}
+}
+func (m *MemcacheStatsResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheStatsResponse.Unmarshal(m, b)
+}
+func (m *MemcacheStatsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheStatsResponse.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheStatsResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheStatsResponse.Merge(dst, src)
+}
+func (m *MemcacheStatsResponse) XXX_Size() int {
+	return xxx_messageInfo_MemcacheStatsResponse.Size(m)
+}
+func (m *MemcacheStatsResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheStatsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheStatsResponse proto.InternalMessageInfo
+
+func (m *MemcacheStatsResponse) GetStats() *MergedNamespaceStats {
+	if m != nil {
+		return m.Stats
+	}
+	return nil
+}
+
+type MemcacheGrabTailRequest struct {
+	ItemCount            *int32       `protobuf:"varint,1,req,name=item_count,json=itemCount" json:"item_count,omitempty"`
+	NameSpace            *string      `protobuf:"bytes,2,opt,name=name_space,json=nameSpace,def=" json:"name_space,omitempty"`
+	Override             *AppOverride `protobuf:"bytes,3,opt,name=override" json:"override,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}     `json:"-"`
+	XXX_unrecognized     []byte       `json:"-"`
+	XXX_sizecache        int32        `json:"-"`
+}
+
+func (m *MemcacheGrabTailRequest) Reset()         { *m = MemcacheGrabTailRequest{} }
+func (m *MemcacheGrabTailRequest) String() string { return proto.CompactTextString(m) }
+func (*MemcacheGrabTailRequest) ProtoMessage()    {}
+func (*MemcacheGrabTailRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{17}
+}
+func (m *MemcacheGrabTailRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheGrabTailRequest.Unmarshal(m, b)
+}
+func (m *MemcacheGrabTailRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheGrabTailRequest.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheGrabTailRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheGrabTailRequest.Merge(dst, src)
+}
+func (m *MemcacheGrabTailRequest) XXX_Size() int {
+	return xxx_messageInfo_MemcacheGrabTailRequest.Size(m)
+}
+func (m *MemcacheGrabTailRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheGrabTailRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheGrabTailRequest proto.InternalMessageInfo
+
+func (m *MemcacheGrabTailRequest) GetItemCount() int32 {
+	if m != nil && m.ItemCount != nil {
+		return *m.ItemCount
+	}
+	return 0
+}
+
+func (m *MemcacheGrabTailRequest) GetNameSpace() string {
+	if m != nil && m.NameSpace != nil {
+		return *m.NameSpace
+	}
+	return ""
+}
+
+func (m *MemcacheGrabTailRequest) GetOverride() *AppOverride {
+	if m != nil {
+		return m.Override
+	}
+	return nil
+}
+
+type MemcacheGrabTailResponse struct {
+	Item                 []*MemcacheGrabTailResponse_Item `protobuf:"group,1,rep,name=Item,json=item" json:"item,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                         `json:"-"`
+	XXX_unrecognized     []byte                           `json:"-"`
+	XXX_sizecache        int32                            `json:"-"`
+}
+
+func (m *MemcacheGrabTailResponse) Reset()         { *m = MemcacheGrabTailResponse{} }
+func (m *MemcacheGrabTailResponse) String() string { return proto.CompactTextString(m) }
+func (*MemcacheGrabTailResponse) ProtoMessage()    {}
+func (*MemcacheGrabTailResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{18}
+}
+func (m *MemcacheGrabTailResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheGrabTailResponse.Unmarshal(m, b)
+}
+func (m *MemcacheGrabTailResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheGrabTailResponse.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheGrabTailResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheGrabTailResponse.Merge(dst, src)
+}
+func (m *MemcacheGrabTailResponse) XXX_Size() int {
+	return xxx_messageInfo_MemcacheGrabTailResponse.Size(m)
+}
+func (m *MemcacheGrabTailResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheGrabTailResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheGrabTailResponse proto.InternalMessageInfo
+
+func (m *MemcacheGrabTailResponse) GetItem() []*MemcacheGrabTailResponse_Item {
+	if m != nil {
+		return m.Item
+	}
+	return nil
+}
+
+type MemcacheGrabTailResponse_Item struct {
+	Value                []byte   `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
+	Flags                *uint32  `protobuf:"fixed32,3,opt,name=flags" json:"flags,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *MemcacheGrabTailResponse_Item) Reset()         { *m = MemcacheGrabTailResponse_Item{} }
+func (m *MemcacheGrabTailResponse_Item) String() string { return proto.CompactTextString(m) }
+func (*MemcacheGrabTailResponse_Item) ProtoMessage()    {}
+func (*MemcacheGrabTailResponse_Item) Descriptor() ([]byte, []int) {
+	return fileDescriptor_memcache_service_e327a14e42649a60, []int{18, 0}
+}
+func (m *MemcacheGrabTailResponse_Item) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_MemcacheGrabTailResponse_Item.Unmarshal(m, b)
+}
+func (m *MemcacheGrabTailResponse_Item) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_MemcacheGrabTailResponse_Item.Marshal(b, m, deterministic)
+}
+func (dst *MemcacheGrabTailResponse_Item) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_MemcacheGrabTailResponse_Item.Merge(dst, src)
+}
+func (m *MemcacheGrabTailResponse_Item) XXX_Size() int {
+	return xxx_messageInfo_MemcacheGrabTailResponse_Item.Size(m)
+}
+func (m *MemcacheGrabTailResponse_Item) XXX_DiscardUnknown() {
+	xxx_messageInfo_MemcacheGrabTailResponse_Item.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_MemcacheGrabTailResponse_Item proto.InternalMessageInfo
+
+func (m *MemcacheGrabTailResponse_Item) GetValue() []byte {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+func (m *MemcacheGrabTailResponse_Item) GetFlags() uint32 {
+	if m != nil && m.Flags != nil {
+		return *m.Flags
+	}
+	return 0
+}
+
+func init() {
+	proto.RegisterType((*MemcacheServiceError)(nil), "appengine.MemcacheServiceError")
+	proto.RegisterType((*AppOverride)(nil), "appengine.AppOverride")
+	proto.RegisterType((*MemcacheGetRequest)(nil), "appengine.MemcacheGetRequest")
+	proto.RegisterType((*MemcacheGetResponse)(nil), "appengine.MemcacheGetResponse")
+	proto.RegisterType((*MemcacheGetResponse_Item)(nil), "appengine.MemcacheGetResponse.Item")
+	proto.RegisterType((*MemcacheSetRequest)(nil), "appengine.MemcacheSetRequest")
+	proto.RegisterType((*MemcacheSetRequest_Item)(nil), "appengine.MemcacheSetRequest.Item")
+	proto.RegisterType((*MemcacheSetResponse)(nil), "appengine.MemcacheSetResponse")
+	proto.RegisterType((*MemcacheDeleteRequest)(nil), "appengine.MemcacheDeleteRequest")
+	proto.RegisterType((*MemcacheDeleteRequest_Item)(nil), "appengine.MemcacheDeleteRequest.Item")
+	proto.RegisterType((*MemcacheDeleteResponse)(nil), "appengine.MemcacheDeleteResponse")
+	proto.RegisterType((*MemcacheIncrementRequest)(nil), "appengine.MemcacheIncrementRequest")
+	proto.RegisterType((*MemcacheIncrementResponse)(nil), "appengine.MemcacheIncrementResponse")
+	proto.RegisterType((*MemcacheBatchIncrementRequest)(nil), "appengine.MemcacheBatchIncrementRequest")
+	proto.RegisterType((*MemcacheBatchIncrementResponse)(nil), "appengine.MemcacheBatchIncrementResponse")
+	proto.RegisterType((*MemcacheFlushRequest)(nil), "appengine.MemcacheFlushRequest")
+	proto.RegisterType((*MemcacheFlushResponse)(nil), "appengine.MemcacheFlushResponse")
+	proto.RegisterType((*MemcacheStatsRequest)(nil), "appengine.MemcacheStatsRequest")
+	proto.RegisterType((*MergedNamespaceStats)(nil), "appengine.MergedNamespaceStats")
+	proto.RegisterType((*MemcacheStatsResponse)(nil), "appengine.MemcacheStatsResponse")
+	proto.RegisterType((*MemcacheGrabTailRequest)(nil), "appengine.MemcacheGrabTailRequest")
+	proto.RegisterType((*MemcacheGrabTailResponse)(nil), "appengine.MemcacheGrabTailResponse")
+	proto.RegisterType((*MemcacheGrabTailResponse_Item)(nil), "appengine.MemcacheGrabTailResponse.Item")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/memcache/memcache_service.proto", fileDescriptor_memcache_service_e327a14e42649a60)
+}
+
+var fileDescriptor_memcache_service_e327a14e42649a60 = []byte{
+	// 1379 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0xcd, 0x92, 0xdb, 0xc4,
+	0x16, 0x8e, 0x24, 0xff, 0xe9, 0x78, 0x7e, 0x94, 0xce, 0x64, 0xe2, 0x3b, 0xb7, 0x72, 0xe3, 0x52,
+	0xee, 0xbd, 0x18, 0x2a, 0x71, 0x82, 0x29, 0x20, 0x99, 0xca, 0x02, 0x8f, 0xad, 0x49, 0x44, 0x66,
+	0xec, 0xa9, 0x96, 0x33, 0x50, 0xd9, 0xa8, 0x3a, 0x72, 0x47, 0xa3, 0x1a, 0x59, 0x12, 0x6a, 0x39,
+	0x21, 0x4b, 0x8a, 0x15, 0x55, 0xb0, 0xe3, 0x05, 0xd8, 0xb0, 0x63, 0xc5, 0x3b, 0xf0, 0x0c, 0x14,
+	0x7b, 0x8a, 0x15, 0xef, 0x40, 0x75, 0x4b, 0xb2, 0x65, 0x8f, 0x67, 0x98, 0x02, 0x76, 0x3a, 0xa7,
+	0x4f, 0xab, 0xcf, 0x77, 0xbe, 0xaf, 0x4f, 0x1f, 0xe8, 0xbb, 0x61, 0xe8, 0xfa, 0xb4, 0xed, 0x86,
+	0x3e, 0x09, 0xdc, 0x76, 0x18, 0xbb, 0xf7, 0x48, 0x14, 0xd1, 0xc0, 0xf5, 0x02, 0x7a, 0xcf, 0x0b,
+	0x12, 0x1a, 0x07, 0xc4, 0xbf, 0x37, 0xa1, 0x13, 0x87, 0x38, 0x27, 0x74, 0xf6, 0x61, 0x33, 0x1a,
+	0xbf, 0xf2, 0x1c, 0xda, 0x8e, 0xe2, 0x30, 0x09, 0x91, 0x3a, 0xdb, 0xa3, 0x7f, 0x29, 0xc1, 0xd6,
+	0x61, 0x16, 0x65, 0xa5, 0x41, 0x46, 0x1c, 0x87, 0xb1, 0x7e, 0x0a, 0xaa, 0xf8, 0xe8, 0x85, 0x63,
+	0x8a, 0x2a, 0x20, 0x0f, 0x9f, 0x6a, 0x57, 0xd0, 0x75, 0xb8, 0xfa, 0x6c, 0x60, 0x1d, 0x19, 0x3d,
+	0x73, 0xdf, 0x34, 0xfa, 0xb6, 0x81, 0xf1, 0x10, 0x6b, 0x12, 0x77, 0x0f, 0xba, 0x87, 0x86, 0x75,
+	0xd4, 0xed, 0x19, 0xf6, 0x60, 0x38, 0xb2, 0x2d, 0x63, 0xa4, 0xc9, 0xdc, 0x7d, 0x64, 0xe0, 0x43,
+	0xd3, 0xb2, 0xcc, 0xe1, 0xc0, 0xee, 0x1b, 0x03, 0xd3, 0xe8, 0x6b, 0x0a, 0xba, 0x0a, 0xeb, 0xe6,
+	0xe0, 0xb8, 0x7b, 0x60, 0xf6, 0xed, 0xe3, 0xee, 0xc1, 0x33, 0x43, 0xab, 0xe8, 0x5f, 0xc8, 0x50,
+	0xef, 0x46, 0xd1, 0xf0, 0x15, 0x8d, 0x63, 0x6f, 0x4c, 0xd1, 0x75, 0xa8, 0x90, 0x28, 0xb2, 0xbd,
+	0x71, 0x43, 0x6a, 0xca, 0x2d, 0x15, 0x97, 0x49, 0x14, 0x99, 0x63, 0xf4, 0x00, 0xb6, 0x83, 0xe9,
+	0xc4, 0xce, 0x51, 0xb9, 0xf6, 0x0b, 0xe2, 0x9c, 0xd2, 0x60, 0xcc, 0x1a, 0x72, 0x53, 0x6a, 0x95,
+	0xf7, 0xe4, 0x86, 0x84, 0xb7, 0x82, 0xe9, 0x24, 0x07, 0xe4, 0xee, 0x65, 0xeb, 0xe8, 0x2e, 0x68,
+	0x9e, 0x1b, 0x84, 0x31, 0xb5, 0xd9, 0x09, 0x89, 0xc7, 0x7e, 0xe8, 0x9c, 0x36, 0x94, 0xa6, 0xd4,
+	0xaa, 0x89, 0x3d, 0x9b, 0xe9, 0x9a, 0x95, 0x2f, 0xa1, 0xfb, 0x80, 0x66, 0xa5, 0x8b, 0xc2, 0xd0,
+	0xb7, 0x4f, 0xbc, 0x20, 0x69, 0x94, 0x9a, 0x52, 0x4b, 0x15, 0x1b, 0xb4, 0x7c, 0xf5, 0x28, 0x0c,
+	0xfd, 0x27, 0x5e, 0x90, 0xa0, 0x8f, 0x60, 0x67, 0x5e, 0x6c, 0xfe, 0x1f, 0x2f, 0x70, 0x6d, 0x96,
+	0xc4, 0x24, 0xa1, 0xee, 0x9b, 0x46, 0xb9, 0x29, 0xb5, 0xd6, 0xc4, 0xce, 0x46, 0x1e, 0x65, 0x65,
+	0x41, 0x56, 0x16, 0xa3, 0x7f, 0x2b, 0x01, 0xca, 0x13, 0x7f, 0x4c, 0x13, 0x4c, 0x3f, 0x9b, 0x52,
+	0x96, 0x20, 0x0d, 0x94, 0x53, 0xfa, 0xa6, 0x21, 0x35, 0x95, 0xd6, 0x1a, 0xe6, 0x9f, 0xe8, 0x16,
+	0x40, 0x40, 0x26, 0xd4, 0x66, 0x11, 0x71, 0xa8, 0x40, 0xae, 0xee, 0x5e, 0xc1, 0x2a, 0xf7, 0x59,
+	0xdc, 0x85, 0x6e, 0x40, 0xf5, 0x65, 0x18, 0xdb, 0x0e, 0x61, 0x22, 0xe5, 0x1a, 0xae, 0xbc, 0x0c,
+	0xe3, 0x1e, 0x61, 0xa8, 0x03, 0xb5, 0x30, 0x2b, 0xb1, 0x48, 0xa9, 0xde, 0xd9, 0x6e, 0xcf, 0xa4,
+	0xd0, 0x2e, 0x10, 0x80, 0x67, 0x71, 0xfa, 0x2f, 0x12, 0x5c, 0x5b, 0x48, 0x8b, 0x45, 0x61, 0xc0,
+	0x28, 0xfa, 0x10, 0x4a, 0x5e, 0x42, 0x27, 0x22, 0x31, 0xe8, 0xdc, 0x2e, 0xfc, 0x67, 0x45, 0x74,
+	0xdb, 0x4c, 0xe8, 0x04, 0x8b, 0x0d, 0x3b, 0x5f, 0x49, 0x50, 0xe2, 0x66, 0x8e, 0x4c, 0x6e, 0xca,
+	0x39, 0xb2, 0x2d, 0x28, 0xbf, 0x22, 0xfe, 0x94, 0x36, 0x14, 0xe1, 0x4b, 0x0d, 0xee, 0x7d, 0xe9,
+	0x13, 0x37, 0x05, 0x53, 0xc5, 0xa9, 0xc1, 0x25, 0xe2, 0x10, 0xc6, 0x25, 0xc2, 0x91, 0x54, 0x70,
+	0xd9, 0x21, 0xcc, 0x1c, 0xa3, 0x3b, 0x80, 0xe8, 0xe7, 0x91, 0x17, 0x53, 0x66, 0x7b, 0x81, 0xcd,
+	0xa8, 0x13, 0x72, 0x79, 0x54, 0xb8, 0x3c, 0xb0, 0x96, 0xad, 0x98, 0x81, 0x95, 0xfa, 0xf5, 0x9f,
+	0x94, 0x79, 0xcd, 0xad, 0x79, 0xcd, 0x3f, 0x58, 0xc0, 0xa6, 0xaf, 0xc0, 0x36, 0x0f, 0x2e, 0x40,
+	0x5b, 0x62, 0xa6, 0x7a, 0x96, 0x99, 0x22, 0x01, 0x70, 0x39, 0x02, 0x76, 0x7e, 0xff, 0x67, 0xea,
+	0xf5, 0x14, 0x80, 0xd1, 0xc4, 0x8e, 0x42, 0xdf, 0x73, 0x52, 0x41, 0x6e, 0x74, 0xde, 0xba, 0x18,
+	0x99, 0x45, 0x93, 0x23, 0x11, 0xbe, 0xab, 0x58, 0xc6, 0x08, 0xab, 0x2c, 0xb7, 0xd1, 0x3b, 0xb0,
+	0x29, 0x6a, 0x49, 0x12, 0x2f, 0x0c, 0xec, 0xc4, 0x9b, 0x50, 0x51, 0xe2, 0xea, 0xae, 0x74, 0x1f,
+	0x6f, 0xcc, 0x57, 0x46, 0xde, 0x84, 0x16, 0x88, 0xaa, 0x15, 0x89, 0x2a, 0x88, 0x54, 0x2d, 0x8a,
+	0x54, 0x7f, 0x0f, 0xd4, 0xd9, 0xc1, 0xa8, 0x0a, 0xfc, 0x68, 0x4d, 0xe2, 0x1f, 0xdd, 0x7e, 0x5f,
+	0x93, 0x51, 0x1d, 0xaa, 0xd8, 0x38, 0x3a, 0xe8, 0xf6, 0x0c, 0x4d, 0xe1, 0xde, 0x5e, 0xd7, 0xd2,
+	0x4a, 0xfa, 0xf7, 0x05, 0x95, 0x5a, 0x05, 0x95, 0x66, 0xa8, 0x59, 0x42, 0x92, 0x29, 0x13, 0x7c,
+	0x6e, 0x74, 0xee, 0x9c, 0x87, 0x3a, 0xd3, 0xaa, 0x45, 0x13, 0x4b, 0xc4, 0xf3, 0xd6, 0x27, 0x50,
+	0xa7, 0xa6, 0xbe, 0x07, 0xeb, 0x0b, 0x6b, 0x08, 0xa0, 0x62, 0x8d, 0x86, 0xd8, 0xe8, 0x6b, 0x12,
+	0xda, 0x00, 0x10, 0x9d, 0x2f, 0xb5, 0x65, 0xa4, 0x42, 0x39, 0x6d, 0x8f, 0x0a, 0x0f, 0x33, 0x3e,
+	0x35, 0xad, 0x11, 0x4f, 0xf4, 0x57, 0x09, 0xae, 0xe7, 0x87, 0xf6, 0xa9, 0x4f, 0x13, 0x9a, 0x8b,
+	0xee, 0xe1, 0x82, 0xe8, 0xfe, 0xb7, 0x22, 0xc9, 0x85, 0xf8, 0xf3, 0x75, 0x57, 0xba, 0x58, 0x77,
+	0x97, 0xbc, 0xf8, 0x3b, 0x8f, 0xce, 0x95, 0x9d, 0x0e, 0xf5, 0xb1, 0x48, 0x25, 0x65, 0x5e, 0xc9,
+	0x99, 0x87, 0xd4, 0xcb, 0x59, 0xd7, 0xbf, 0x93, 0x60, 0x7b, 0x39, 0xef, 0x8c, 0x93, 0x4f, 0x60,
+	0x3d, 0xdb, 0xbe, 0x40, 0x4b, 0xe7, 0x02, 0xc4, 0x19, 0x33, 0xa9, 0x59, 0x20, 0x67, 0x6d, 0x5c,
+	0xf0, 0xe8, 0x6d, 0xd0, 0x96, 0x23, 0xb8, 0x5c, 0xfa, 0xc6, 0x81, 0x31, 0x12, 0x1c, 0xad, 0x83,
+	0xca, 0x39, 0xda, 0x1f, 0x3e, 0x1b, 0xf4, 0x35, 0x59, 0xff, 0x4d, 0x86, 0x46, 0x7e, 0x92, 0x19,
+	0x38, 0x31, 0x9d, 0xd0, 0xe0, 0x6c, 0xdf, 0x95, 0x57, 0xf7, 0xdd, 0xd2, 0xaa, 0xbe, 0x5b, 0x1e,
+	0x53, 0x3f, 0x21, 0xa2, 0x27, 0x97, 0x76, 0xa5, 0x77, 0x71, 0x6a, 0xa3, 0x63, 0x50, 0xc7, 0x5e,
+	0x4c, 0x1d, 0x7e, 0x27, 0x44, 0xb9, 0x36, 0x3a, 0x77, 0x57, 0xa0, 0x5d, 0xce, 0xa1, 0xdd, 0xcf,
+	0x37, 0xed, 0xaa, 0xe6, 0xa0, 0x87, 0x8d, 0x43, 0x63, 0x30, 0xc2, 0xf3, 0x5f, 0xa1, 0xdb, 0xb0,
+	0xee, 0x05, 0x5e, 0xe2, 0x11, 0xdf, 0x4e, 0xfb, 0x00, 0xe7, 0xb6, 0x84, 0xd7, 0x32, 0xe7, 0xb1,
+	0x68, 0x07, 0x85, 0xa0, 0xb4, 0x2d, 0x88, 0x9b, 0x3a, 0x0b, 0xda, 0x17, 0xdd, 0xa1, 0x28, 0x90,
+	0xea, 0x25, 0x5f, 0x86, 0xb7, 0x41, 0x9d, 0x25, 0xc8, 0x4b, 0x3b, 0x4b, 0x31, 0xad, 0x74, 0xdf,
+	0xc8, 0x4d, 0x59, 0xff, 0x59, 0x82, 0x7f, 0xad, 0x40, 0x99, 0x09, 0xe2, 0xdf, 0xa0, 0x06, 0xf4,
+	0x75, 0x06, 0x41, 0x12, 0x10, 0x6a, 0x01, 0x7d, 0x9d, 0xa6, 0xef, 0x80, 0xe6, 0xe5, 0x3b, 0x72,
+	0xc1, 0xc8, 0xa2, 0x84, 0x0f, 0x2e, 0x2e, 0x61, 0xfe, 0xf2, 0xe4, 0x9e, 0x82, 0x6c, 0x36, 0xbd,
+	0x45, 0xa7, 0xfe, 0x10, 0xae, 0xad, 0x88, 0xcb, 0xc6, 0x1e, 0x09, 0x6d, 0x42, 0x9d, 0xeb, 0xa6,
+	0xf7, 0xa4, 0x3b, 0x78, 0xbc, 0x74, 0xb9, 0xf5, 0x1f, 0x24, 0xb8, 0x99, 0x9f, 0xbe, 0x47, 0x12,
+	0xe7, 0xe4, 0x8c, 0x92, 0x16, 0x75, 0x23, 0x9d, 0xd5, 0x4d, 0xfe, 0x94, 0xca, 0x4d, 0xa5, 0x55,
+	0x5f, 0xf9, 0x94, 0x2e, 0xff, 0x33, 0xbb, 0xf7, 0x45, 0xd6, 0x94, 0x4b, 0xb2, 0xf6, 0x1c, 0xfe,
+	0x73, 0x5e, 0xba, 0x19, 0x1d, 0x0f, 0x0a, 0x8d, 0xa8, 0xde, 0xf9, 0xef, 0x65, 0xaa, 0x9c, 0xe6,
+	0xa3, 0x7f, 0x3c, 0x9f, 0x25, 0xf7, 0xfd, 0x29, 0x3b, 0xc9, 0x2b, 0x50, 0xcc, 0x53, 0xba, 0x64,
+	0x9e, 0x37, 0xe6, 0x7d, 0x32, 0xfb, 0x57, 0x7a, 0x54, 0xf1, 0x10, 0x4e, 0x15, 0xfb, 0x3b, 0x87,
+	0xfc, 0x28, 0xa6, 0xdf, 0xd8, 0xa5, 0xe3, 0x01, 0x99, 0x50, 0x41, 0x90, 0xf8, 0x27, 0x42, 0x50,
+	0x3a, 0xf1, 0x12, 0x26, 0xae, 0x7f, 0x09, 0x8b, 0x6f, 0xb4, 0x0d, 0x95, 0x89, 0xc7, 0x18, 0x65,
+	0xa2, 0x17, 0x96, 0x70, 0x66, 0x71, 0xf9, 0xbe, 0x78, 0x93, 0x50, 0x5b, 0x6c, 0x50, 0xc4, 0x52,
+	0x8d, 0x3b, 0x9e, 0xf0, 0x4d, 0x5b, 0x50, 0xe6, 0xa5, 0xe1, 0x8f, 0x31, 0x5f, 0x48, 0x0d, 0xee,
+	0xe5, 0x11, 0xac, 0x51, 0x4e, 0xbd, 0xc2, 0x40, 0xff, 0x87, 0xcd, 0xd0, 0x1f, 0x53, 0x96, 0xd8,
+	0x3c, 0xca, 0x26, 0x2e, 0x7f, 0x55, 0xe5, 0x56, 0x15, 0xaf, 0xa7, 0x6e, 0xde, 0x8e, 0xbb, 0x2e,
+	0xd5, 0x07, 0xf3, 0xd2, 0x64, 0x15, 0xc8, 0x98, 0x7b, 0x1f, 0xca, 0xfc, 0x86, 0xb0, 0x0c, 0xff,
+	0xad, 0x05, 0xea, 0xce, 0xa2, 0xc4, 0x69, 0xb4, 0xfe, 0x8d, 0x04, 0x37, 0x66, 0x43, 0x5b, 0x4c,
+	0x5e, 0x8c, 0x88, 0xe7, 0xe7, 0x55, 0xbd, 0x09, 0x20, 0x92, 0x71, 0xc2, 0x69, 0x90, 0x88, 0x72,
+	0x94, 0xb1, 0xca, 0x3d, 0x3d, 0xee, 0xf8, 0xf3, 0x59, 0xf4, 0xaf, 0x48, 0xf4, 0x6b, 0x69, 0xde,
+	0x97, 0xe7, 0xf9, 0x64, 0x18, 0x1f, 0x2d, 0x3c, 0x93, 0xad, 0x55, 0x73, 0xe7, 0xd2, 0x96, 0xe2,
+	0xf0, 0xd9, 0xc9, 0x1e, 0xb5, 0xd9, 0xe4, 0x24, 0xaf, 0x9c, 0x9c, 0x94, 0xc2, 0xe4, 0xb4, 0x07,
+	0xcf, 0x6b, 0xf9, 0xd0, 0xfe, 0x47, 0x00, 0x00, 0x00, 0xff, 0xff, 0x76, 0x8b, 0xe6, 0x6b, 0x80,
+	0x0d, 0x00, 0x00,
+}
diff --git a/v2/internal/memcache/memcache_service.proto b/v2/internal/memcache/memcache_service.proto
new file mode 100644
index 0000000..5f0edcd
--- /dev/null
+++ b/v2/internal/memcache/memcache_service.proto
@@ -0,0 +1,165 @@
+syntax = "proto2";
+option go_package = "memcache";
+
+package appengine;
+
+message MemcacheServiceError {
+  enum ErrorCode {
+    OK = 0;
+    UNSPECIFIED_ERROR = 1;
+    NAMESPACE_NOT_SET = 2;
+    PERMISSION_DENIED = 3;
+    INVALID_VALUE = 6;
+  }
+}
+
+message AppOverride {
+  required string app_id = 1;
+
+  optional int32 num_memcacheg_backends = 2 [deprecated=true];
+  optional bool ignore_shardlock = 3 [deprecated=true];
+  optional string memcache_pool_hint = 4 [deprecated=true];
+  optional bytes memcache_sharding_strategy = 5 [deprecated=true];
+}
+
+message MemcacheGetRequest {
+  repeated bytes key = 1;
+  optional string name_space = 2 [default = ""];
+  optional bool for_cas = 4;
+  optional AppOverride override = 5;
+}
+
+message MemcacheGetResponse {
+  repeated group Item = 1 {
+    required bytes key = 2;
+    required bytes value = 3;
+    optional fixed32 flags = 4;
+    optional fixed64 cas_id = 5;
+    optional int32 expires_in_seconds = 6;
+  }
+}
+
+message MemcacheSetRequest {
+  enum SetPolicy {
+    SET = 1;
+    ADD = 2;
+    REPLACE = 3;
+    CAS = 4;
+  }
+  repeated group Item = 1 {
+    required bytes key = 2;
+    required bytes value = 3;
+
+    optional fixed32 flags = 4;
+    optional SetPolicy set_policy = 5 [default = SET];
+    optional fixed32 expiration_time = 6 [default = 0];
+
+    optional fixed64 cas_id = 8;
+    optional bool for_cas = 9;
+  }
+  optional string name_space = 7 [default = ""];
+  optional AppOverride override = 10;
+}
+
+message MemcacheSetResponse {
+  enum SetStatusCode {
+    STORED = 1;
+    NOT_STORED = 2;
+    ERROR = 3;
+    EXISTS = 4;
+  }
+  repeated SetStatusCode set_status = 1;
+}
+
+message MemcacheDeleteRequest {
+  repeated group Item = 1 {
+    required bytes key = 2;
+    optional fixed32 delete_time = 3 [default = 0];
+  }
+  optional string name_space = 4 [default = ""];
+  optional AppOverride override = 5;
+}
+
+message MemcacheDeleteResponse {
+  enum DeleteStatusCode {
+    DELETED = 1;
+    NOT_FOUND = 2;
+  }
+  repeated DeleteStatusCode delete_status = 1;
+}
+
+message MemcacheIncrementRequest {
+  enum Direction {
+    INCREMENT = 1;
+    DECREMENT = 2;
+  }
+  required bytes key = 1;
+  optional string name_space = 4 [default = ""];
+
+  optional uint64 delta = 2 [default = 1];
+  optional Direction direction = 3 [default = INCREMENT];
+
+  optional uint64 initial_value = 5;
+  optional fixed32 initial_flags = 6;
+  optional AppOverride override = 7;
+}
+
+message MemcacheIncrementResponse {
+  enum IncrementStatusCode {
+    OK = 1;
+    NOT_CHANGED = 2;
+    ERROR = 3;
+  }
+
+  optional uint64 new_value = 1;
+  optional IncrementStatusCode increment_status = 2;
+}
+
+message MemcacheBatchIncrementRequest {
+  optional string name_space = 1 [default = ""];
+  repeated MemcacheIncrementRequest item = 2;
+  optional AppOverride override = 3;
+}
+
+message MemcacheBatchIncrementResponse {
+  repeated MemcacheIncrementResponse item = 1;
+}
+
+message MemcacheFlushRequest {
+  optional AppOverride override = 1;
+}
+
+message MemcacheFlushResponse {
+}
+
+message MemcacheStatsRequest {
+  optional AppOverride override = 1;
+}
+
+message MergedNamespaceStats {
+  required uint64 hits = 1;
+  required uint64 misses = 2;
+  required uint64 byte_hits = 3;
+
+  required uint64 items = 4;
+  required uint64 bytes = 5;
+
+  required fixed32 oldest_item_age = 6;
+}
+
+message MemcacheStatsResponse {
+  optional MergedNamespaceStats stats = 1;
+}
+
+message MemcacheGrabTailRequest {
+  required int32 item_count = 1;
+  optional string name_space = 2 [default = ""];
+  optional AppOverride override = 3;
+}
+
+message MemcacheGrabTailResponse {
+  repeated group Item = 1 {
+    required bytes value = 2;
+    optional fixed32 flags = 3;
+  }
+}
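The messages above correspond to the Go types in the generated memcache_service.pb.go earlier in this diff. As a rough, illustrative sketch of populating a lookup request — the field names (Key, NameSpace, ForCas) follow the usual protoc-gen-go conventions and are assumptions here, and internal packages are only importable from inside the appengine module:

```
// Illustrative only; assumes standard protoc-gen-go field names for the proto above.
package internal

import (
	"github.com/golang/protobuf/proto"
	pb "google.golang.org/appengine/v2/internal/memcache"
)

func exampleGetRequest() ([]byte, error) {
	req := &pb.MemcacheGetRequest{
		Key:       [][]byte{[]byte("greeting")}, // repeated bytes key = 1
		NameSpace: proto.String("demo"),         // optional string name_space = 2
		ForCas:    proto.Bool(true),             // optional bool for_cas = 4
	}
	return proto.Marshal(req)
}
```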
diff --git a/v2/internal/metadata.go b/v2/internal/metadata.go
new file mode 100644
index 0000000..c4ba63b
--- /dev/null
+++ b/v2/internal/metadata.go
@@ -0,0 +1,60 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+// This file has code for accessing metadata.
+//
+// References:
+//	https://cloud.google.com/compute/docs/metadata
+
+import (
+	"fmt"
+	"io/ioutil"
+	"net/http"
+	"net/url"
+)
+
+const (
+	metadataHost = "metadata"
+	metadataPath = "/computeMetadata/v1/"
+)
+
+var (
+	metadataRequestHeaders = http.Header{
+		"Metadata-Flavor": []string{"Google"},
+	}
+)
+
+// TODO(dsymonds): Do we need to support default values, like Python?
+func mustGetMetadata(key string) []byte {
+	b, err := getMetadata(key)
+	if err != nil {
+		panic(fmt.Sprintf("Metadata fetch failed for '%s': %v", key, err))
+	}
+	return b
+}
+
+func getMetadata(key string) ([]byte, error) {
+	// TODO(dsymonds): May need to use url.Parse to support keys with query args.
+	req := &http.Request{
+		Method: "GET",
+		URL: &url.URL{
+			Scheme: "http",
+			Host:   metadataHost,
+			Path:   metadataPath + key,
+		},
+		Header: metadataRequestHeaders,
+		Host:   metadataHost,
+	}
+	resp, err := http.DefaultClient.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer resp.Body.Close()
+	if resp.StatusCode != 200 {
+		return nil, fmt.Errorf("metadata server returned HTTP %d", resp.StatusCode)
+	}
+	return ioutil.ReadAll(resp.Body)
+}
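For context, a minimal sketch of how getMetadata is meant to be called from within this package. The key "project/project-id" comes from the Compute Engine metadata documentation referenced above and is an assumption, not something this diff defines:

```
// Hypothetical caller in package internal, shown only to illustrate getMetadata.
package internal

import "fmt"

func exampleProjectID() (string, error) {
	// Resolves to http://metadata/computeMetadata/v1/project/project-id
	// with the Metadata-Flavor: Google header set by getMetadata.
	b, err := getMetadata("project/project-id")
	if err != nil {
		return "", fmt.Errorf("fetching project id: %v", err)
	}
	return string(b), nil
}
```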
diff --git a/v2/internal/modules/modules_service.pb.go b/v2/internal/modules/modules_service.pb.go
new file mode 100644
index 0000000..25d7c2e
--- /dev/null
+++ b/v2/internal/modules/modules_service.pb.go
@@ -0,0 +1,786 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/modules/modules_service.proto
+
+package modules
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type ModulesServiceError_ErrorCode int32
+
+const (
+	ModulesServiceError_OK                ModulesServiceError_ErrorCode = 0
+	ModulesServiceError_INVALID_MODULE    ModulesServiceError_ErrorCode = 1
+	ModulesServiceError_INVALID_VERSION   ModulesServiceError_ErrorCode = 2
+	ModulesServiceError_INVALID_INSTANCES ModulesServiceError_ErrorCode = 3
+	ModulesServiceError_TRANSIENT_ERROR   ModulesServiceError_ErrorCode = 4
+	ModulesServiceError_UNEXPECTED_STATE  ModulesServiceError_ErrorCode = 5
+)
+
+var ModulesServiceError_ErrorCode_name = map[int32]string{
+	0: "OK",
+	1: "INVALID_MODULE",
+	2: "INVALID_VERSION",
+	3: "INVALID_INSTANCES",
+	4: "TRANSIENT_ERROR",
+	5: "UNEXPECTED_STATE",
+}
+var ModulesServiceError_ErrorCode_value = map[string]int32{
+	"OK":                0,
+	"INVALID_MODULE":    1,
+	"INVALID_VERSION":   2,
+	"INVALID_INSTANCES": 3,
+	"TRANSIENT_ERROR":   4,
+	"UNEXPECTED_STATE":  5,
+}
+
+func (x ModulesServiceError_ErrorCode) Enum() *ModulesServiceError_ErrorCode {
+	p := new(ModulesServiceError_ErrorCode)
+	*p = x
+	return p
+}
+func (x ModulesServiceError_ErrorCode) String() string {
+	return proto.EnumName(ModulesServiceError_ErrorCode_name, int32(x))
+}
+func (x *ModulesServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(ModulesServiceError_ErrorCode_value, data, "ModulesServiceError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = ModulesServiceError_ErrorCode(value)
+	return nil
+}
+func (ModulesServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{0, 0}
+}
+
+type ModulesServiceError struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *ModulesServiceError) Reset()         { *m = ModulesServiceError{} }
+func (m *ModulesServiceError) String() string { return proto.CompactTextString(m) }
+func (*ModulesServiceError) ProtoMessage()    {}
+func (*ModulesServiceError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{0}
+}
+func (m *ModulesServiceError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ModulesServiceError.Unmarshal(m, b)
+}
+func (m *ModulesServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ModulesServiceError.Marshal(b, m, deterministic)
+}
+func (dst *ModulesServiceError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ModulesServiceError.Merge(dst, src)
+}
+func (m *ModulesServiceError) XXX_Size() int {
+	return xxx_messageInfo_ModulesServiceError.Size(m)
+}
+func (m *ModulesServiceError) XXX_DiscardUnknown() {
+	xxx_messageInfo_ModulesServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ModulesServiceError proto.InternalMessageInfo
+
+type GetModulesRequest struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetModulesRequest) Reset()         { *m = GetModulesRequest{} }
+func (m *GetModulesRequest) String() string { return proto.CompactTextString(m) }
+func (*GetModulesRequest) ProtoMessage()    {}
+func (*GetModulesRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{1}
+}
+func (m *GetModulesRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetModulesRequest.Unmarshal(m, b)
+}
+func (m *GetModulesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetModulesRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetModulesRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetModulesRequest.Merge(dst, src)
+}
+func (m *GetModulesRequest) XXX_Size() int {
+	return xxx_messageInfo_GetModulesRequest.Size(m)
+}
+func (m *GetModulesRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetModulesRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetModulesRequest proto.InternalMessageInfo
+
+type GetModulesResponse struct {
+	Module               []string `protobuf:"bytes,1,rep,name=module" json:"module,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetModulesResponse) Reset()         { *m = GetModulesResponse{} }
+func (m *GetModulesResponse) String() string { return proto.CompactTextString(m) }
+func (*GetModulesResponse) ProtoMessage()    {}
+func (*GetModulesResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{2}
+}
+func (m *GetModulesResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetModulesResponse.Unmarshal(m, b)
+}
+func (m *GetModulesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetModulesResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetModulesResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetModulesResponse.Merge(dst, src)
+}
+func (m *GetModulesResponse) XXX_Size() int {
+	return xxx_messageInfo_GetModulesResponse.Size(m)
+}
+func (m *GetModulesResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetModulesResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetModulesResponse proto.InternalMessageInfo
+
+func (m *GetModulesResponse) GetModule() []string {
+	if m != nil {
+		return m.Module
+	}
+	return nil
+}
+
+type GetVersionsRequest struct {
+	Module               *string  `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetVersionsRequest) Reset()         { *m = GetVersionsRequest{} }
+func (m *GetVersionsRequest) String() string { return proto.CompactTextString(m) }
+func (*GetVersionsRequest) ProtoMessage()    {}
+func (*GetVersionsRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{3}
+}
+func (m *GetVersionsRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetVersionsRequest.Unmarshal(m, b)
+}
+func (m *GetVersionsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetVersionsRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetVersionsRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetVersionsRequest.Merge(dst, src)
+}
+func (m *GetVersionsRequest) XXX_Size() int {
+	return xxx_messageInfo_GetVersionsRequest.Size(m)
+}
+func (m *GetVersionsRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetVersionsRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetVersionsRequest proto.InternalMessageInfo
+
+func (m *GetVersionsRequest) GetModule() string {
+	if m != nil && m.Module != nil {
+		return *m.Module
+	}
+	return ""
+}
+
+type GetVersionsResponse struct {
+	Version              []string `protobuf:"bytes,1,rep,name=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetVersionsResponse) Reset()         { *m = GetVersionsResponse{} }
+func (m *GetVersionsResponse) String() string { return proto.CompactTextString(m) }
+func (*GetVersionsResponse) ProtoMessage()    {}
+func (*GetVersionsResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{4}
+}
+func (m *GetVersionsResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetVersionsResponse.Unmarshal(m, b)
+}
+func (m *GetVersionsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetVersionsResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetVersionsResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetVersionsResponse.Merge(dst, src)
+}
+func (m *GetVersionsResponse) XXX_Size() int {
+	return xxx_messageInfo_GetVersionsResponse.Size(m)
+}
+func (m *GetVersionsResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetVersionsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetVersionsResponse proto.InternalMessageInfo
+
+func (m *GetVersionsResponse) GetVersion() []string {
+	if m != nil {
+		return m.Version
+	}
+	return nil
+}
+
+type GetDefaultVersionRequest struct {
+	Module               *string  `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetDefaultVersionRequest) Reset()         { *m = GetDefaultVersionRequest{} }
+func (m *GetDefaultVersionRequest) String() string { return proto.CompactTextString(m) }
+func (*GetDefaultVersionRequest) ProtoMessage()    {}
+func (*GetDefaultVersionRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{5}
+}
+func (m *GetDefaultVersionRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetDefaultVersionRequest.Unmarshal(m, b)
+}
+func (m *GetDefaultVersionRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetDefaultVersionRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetDefaultVersionRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetDefaultVersionRequest.Merge(dst, src)
+}
+func (m *GetDefaultVersionRequest) XXX_Size() int {
+	return xxx_messageInfo_GetDefaultVersionRequest.Size(m)
+}
+func (m *GetDefaultVersionRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetDefaultVersionRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetDefaultVersionRequest proto.InternalMessageInfo
+
+func (m *GetDefaultVersionRequest) GetModule() string {
+	if m != nil && m.Module != nil {
+		return *m.Module
+	}
+	return ""
+}
+
+type GetDefaultVersionResponse struct {
+	Version              *string  `protobuf:"bytes,1,req,name=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetDefaultVersionResponse) Reset()         { *m = GetDefaultVersionResponse{} }
+func (m *GetDefaultVersionResponse) String() string { return proto.CompactTextString(m) }
+func (*GetDefaultVersionResponse) ProtoMessage()    {}
+func (*GetDefaultVersionResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{6}
+}
+func (m *GetDefaultVersionResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetDefaultVersionResponse.Unmarshal(m, b)
+}
+func (m *GetDefaultVersionResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetDefaultVersionResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetDefaultVersionResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetDefaultVersionResponse.Merge(dst, src)
+}
+func (m *GetDefaultVersionResponse) XXX_Size() int {
+	return xxx_messageInfo_GetDefaultVersionResponse.Size(m)
+}
+func (m *GetDefaultVersionResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetDefaultVersionResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetDefaultVersionResponse proto.InternalMessageInfo
+
+func (m *GetDefaultVersionResponse) GetVersion() string {
+	if m != nil && m.Version != nil {
+		return *m.Version
+	}
+	return ""
+}
+
+type GetNumInstancesRequest struct {
+	Module               *string  `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
+	Version              *string  `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetNumInstancesRequest) Reset()         { *m = GetNumInstancesRequest{} }
+func (m *GetNumInstancesRequest) String() string { return proto.CompactTextString(m) }
+func (*GetNumInstancesRequest) ProtoMessage()    {}
+func (*GetNumInstancesRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{7}
+}
+func (m *GetNumInstancesRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetNumInstancesRequest.Unmarshal(m, b)
+}
+func (m *GetNumInstancesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetNumInstancesRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetNumInstancesRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetNumInstancesRequest.Merge(dst, src)
+}
+func (m *GetNumInstancesRequest) XXX_Size() int {
+	return xxx_messageInfo_GetNumInstancesRequest.Size(m)
+}
+func (m *GetNumInstancesRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetNumInstancesRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetNumInstancesRequest proto.InternalMessageInfo
+
+func (m *GetNumInstancesRequest) GetModule() string {
+	if m != nil && m.Module != nil {
+		return *m.Module
+	}
+	return ""
+}
+
+func (m *GetNumInstancesRequest) GetVersion() string {
+	if m != nil && m.Version != nil {
+		return *m.Version
+	}
+	return ""
+}
+
+type GetNumInstancesResponse struct {
+	Instances            *int64   `protobuf:"varint,1,req,name=instances" json:"instances,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetNumInstancesResponse) Reset()         { *m = GetNumInstancesResponse{} }
+func (m *GetNumInstancesResponse) String() string { return proto.CompactTextString(m) }
+func (*GetNumInstancesResponse) ProtoMessage()    {}
+func (*GetNumInstancesResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{8}
+}
+func (m *GetNumInstancesResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetNumInstancesResponse.Unmarshal(m, b)
+}
+func (m *GetNumInstancesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetNumInstancesResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetNumInstancesResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetNumInstancesResponse.Merge(dst, src)
+}
+func (m *GetNumInstancesResponse) XXX_Size() int {
+	return xxx_messageInfo_GetNumInstancesResponse.Size(m)
+}
+func (m *GetNumInstancesResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetNumInstancesResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetNumInstancesResponse proto.InternalMessageInfo
+
+func (m *GetNumInstancesResponse) GetInstances() int64 {
+	if m != nil && m.Instances != nil {
+		return *m.Instances
+	}
+	return 0
+}
+
+type SetNumInstancesRequest struct {
+	Module               *string  `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
+	Version              *string  `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"`
+	Instances            *int64   `protobuf:"varint,3,req,name=instances" json:"instances,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *SetNumInstancesRequest) Reset()         { *m = SetNumInstancesRequest{} }
+func (m *SetNumInstancesRequest) String() string { return proto.CompactTextString(m) }
+func (*SetNumInstancesRequest) ProtoMessage()    {}
+func (*SetNumInstancesRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{9}
+}
+func (m *SetNumInstancesRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_SetNumInstancesRequest.Unmarshal(m, b)
+}
+func (m *SetNumInstancesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_SetNumInstancesRequest.Marshal(b, m, deterministic)
+}
+func (dst *SetNumInstancesRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_SetNumInstancesRequest.Merge(dst, src)
+}
+func (m *SetNumInstancesRequest) XXX_Size() int {
+	return xxx_messageInfo_SetNumInstancesRequest.Size(m)
+}
+func (m *SetNumInstancesRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_SetNumInstancesRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SetNumInstancesRequest proto.InternalMessageInfo
+
+func (m *SetNumInstancesRequest) GetModule() string {
+	if m != nil && m.Module != nil {
+		return *m.Module
+	}
+	return ""
+}
+
+func (m *SetNumInstancesRequest) GetVersion() string {
+	if m != nil && m.Version != nil {
+		return *m.Version
+	}
+	return ""
+}
+
+func (m *SetNumInstancesRequest) GetInstances() int64 {
+	if m != nil && m.Instances != nil {
+		return *m.Instances
+	}
+	return 0
+}
+
+type SetNumInstancesResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *SetNumInstancesResponse) Reset()         { *m = SetNumInstancesResponse{} }
+func (m *SetNumInstancesResponse) String() string { return proto.CompactTextString(m) }
+func (*SetNumInstancesResponse) ProtoMessage()    {}
+func (*SetNumInstancesResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{10}
+}
+func (m *SetNumInstancesResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_SetNumInstancesResponse.Unmarshal(m, b)
+}
+func (m *SetNumInstancesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_SetNumInstancesResponse.Marshal(b, m, deterministic)
+}
+func (dst *SetNumInstancesResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_SetNumInstancesResponse.Merge(dst, src)
+}
+func (m *SetNumInstancesResponse) XXX_Size() int {
+	return xxx_messageInfo_SetNumInstancesResponse.Size(m)
+}
+func (m *SetNumInstancesResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_SetNumInstancesResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SetNumInstancesResponse proto.InternalMessageInfo
+
+type StartModuleRequest struct {
+	Module               *string  `protobuf:"bytes,1,req,name=module" json:"module,omitempty"`
+	Version              *string  `protobuf:"bytes,2,req,name=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *StartModuleRequest) Reset()         { *m = StartModuleRequest{} }
+func (m *StartModuleRequest) String() string { return proto.CompactTextString(m) }
+func (*StartModuleRequest) ProtoMessage()    {}
+func (*StartModuleRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{11}
+}
+func (m *StartModuleRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_StartModuleRequest.Unmarshal(m, b)
+}
+func (m *StartModuleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_StartModuleRequest.Marshal(b, m, deterministic)
+}
+func (dst *StartModuleRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_StartModuleRequest.Merge(dst, src)
+}
+func (m *StartModuleRequest) XXX_Size() int {
+	return xxx_messageInfo_StartModuleRequest.Size(m)
+}
+func (m *StartModuleRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_StartModuleRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_StartModuleRequest proto.InternalMessageInfo
+
+func (m *StartModuleRequest) GetModule() string {
+	if m != nil && m.Module != nil {
+		return *m.Module
+	}
+	return ""
+}
+
+func (m *StartModuleRequest) GetVersion() string {
+	if m != nil && m.Version != nil {
+		return *m.Version
+	}
+	return ""
+}
+
+type StartModuleResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *StartModuleResponse) Reset()         { *m = StartModuleResponse{} }
+func (m *StartModuleResponse) String() string { return proto.CompactTextString(m) }
+func (*StartModuleResponse) ProtoMessage()    {}
+func (*StartModuleResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{12}
+}
+func (m *StartModuleResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_StartModuleResponse.Unmarshal(m, b)
+}
+func (m *StartModuleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_StartModuleResponse.Marshal(b, m, deterministic)
+}
+func (dst *StartModuleResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_StartModuleResponse.Merge(dst, src)
+}
+func (m *StartModuleResponse) XXX_Size() int {
+	return xxx_messageInfo_StartModuleResponse.Size(m)
+}
+func (m *StartModuleResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_StartModuleResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_StartModuleResponse proto.InternalMessageInfo
+
+type StopModuleRequest struct {
+	Module               *string  `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
+	Version              *string  `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *StopModuleRequest) Reset()         { *m = StopModuleRequest{} }
+func (m *StopModuleRequest) String() string { return proto.CompactTextString(m) }
+func (*StopModuleRequest) ProtoMessage()    {}
+func (*StopModuleRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{13}
+}
+func (m *StopModuleRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_StopModuleRequest.Unmarshal(m, b)
+}
+func (m *StopModuleRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_StopModuleRequest.Marshal(b, m, deterministic)
+}
+func (dst *StopModuleRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_StopModuleRequest.Merge(dst, src)
+}
+func (m *StopModuleRequest) XXX_Size() int {
+	return xxx_messageInfo_StopModuleRequest.Size(m)
+}
+func (m *StopModuleRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_StopModuleRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_StopModuleRequest proto.InternalMessageInfo
+
+func (m *StopModuleRequest) GetModule() string {
+	if m != nil && m.Module != nil {
+		return *m.Module
+	}
+	return ""
+}
+
+func (m *StopModuleRequest) GetVersion() string {
+	if m != nil && m.Version != nil {
+		return *m.Version
+	}
+	return ""
+}
+
+type StopModuleResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *StopModuleResponse) Reset()         { *m = StopModuleResponse{} }
+func (m *StopModuleResponse) String() string { return proto.CompactTextString(m) }
+func (*StopModuleResponse) ProtoMessage()    {}
+func (*StopModuleResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{14}
+}
+func (m *StopModuleResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_StopModuleResponse.Unmarshal(m, b)
+}
+func (m *StopModuleResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_StopModuleResponse.Marshal(b, m, deterministic)
+}
+func (dst *StopModuleResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_StopModuleResponse.Merge(dst, src)
+}
+func (m *StopModuleResponse) XXX_Size() int {
+	return xxx_messageInfo_StopModuleResponse.Size(m)
+}
+func (m *StopModuleResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_StopModuleResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_StopModuleResponse proto.InternalMessageInfo
+
+type GetHostnameRequest struct {
+	Module               *string  `protobuf:"bytes,1,opt,name=module" json:"module,omitempty"`
+	Version              *string  `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"`
+	Instance             *string  `protobuf:"bytes,3,opt,name=instance" json:"instance,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetHostnameRequest) Reset()         { *m = GetHostnameRequest{} }
+func (m *GetHostnameRequest) String() string { return proto.CompactTextString(m) }
+func (*GetHostnameRequest) ProtoMessage()    {}
+func (*GetHostnameRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{15}
+}
+func (m *GetHostnameRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetHostnameRequest.Unmarshal(m, b)
+}
+func (m *GetHostnameRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetHostnameRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetHostnameRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetHostnameRequest.Merge(dst, src)
+}
+func (m *GetHostnameRequest) XXX_Size() int {
+	return xxx_messageInfo_GetHostnameRequest.Size(m)
+}
+func (m *GetHostnameRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetHostnameRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetHostnameRequest proto.InternalMessageInfo
+
+func (m *GetHostnameRequest) GetModule() string {
+	if m != nil && m.Module != nil {
+		return *m.Module
+	}
+	return ""
+}
+
+func (m *GetHostnameRequest) GetVersion() string {
+	if m != nil && m.Version != nil {
+		return *m.Version
+	}
+	return ""
+}
+
+func (m *GetHostnameRequest) GetInstance() string {
+	if m != nil && m.Instance != nil {
+		return *m.Instance
+	}
+	return ""
+}
+
+type GetHostnameResponse struct {
+	Hostname             *string  `protobuf:"bytes,1,req,name=hostname" json:"hostname,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetHostnameResponse) Reset()         { *m = GetHostnameResponse{} }
+func (m *GetHostnameResponse) String() string { return proto.CompactTextString(m) }
+func (*GetHostnameResponse) ProtoMessage()    {}
+func (*GetHostnameResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_modules_service_9cd3bffe4e91c59a, []int{16}
+}
+func (m *GetHostnameResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetHostnameResponse.Unmarshal(m, b)
+}
+func (m *GetHostnameResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetHostnameResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetHostnameResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetHostnameResponse.Merge(dst, src)
+}
+func (m *GetHostnameResponse) XXX_Size() int {
+	return xxx_messageInfo_GetHostnameResponse.Size(m)
+}
+func (m *GetHostnameResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetHostnameResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetHostnameResponse proto.InternalMessageInfo
+
+func (m *GetHostnameResponse) GetHostname() string {
+	if m != nil && m.Hostname != nil {
+		return *m.Hostname
+	}
+	return ""
+}
+
+func init() {
+	proto.RegisterType((*ModulesServiceError)(nil), "appengine.ModulesServiceError")
+	proto.RegisterType((*GetModulesRequest)(nil), "appengine.GetModulesRequest")
+	proto.RegisterType((*GetModulesResponse)(nil), "appengine.GetModulesResponse")
+	proto.RegisterType((*GetVersionsRequest)(nil), "appengine.GetVersionsRequest")
+	proto.RegisterType((*GetVersionsResponse)(nil), "appengine.GetVersionsResponse")
+	proto.RegisterType((*GetDefaultVersionRequest)(nil), "appengine.GetDefaultVersionRequest")
+	proto.RegisterType((*GetDefaultVersionResponse)(nil), "appengine.GetDefaultVersionResponse")
+	proto.RegisterType((*GetNumInstancesRequest)(nil), "appengine.GetNumInstancesRequest")
+	proto.RegisterType((*GetNumInstancesResponse)(nil), "appengine.GetNumInstancesResponse")
+	proto.RegisterType((*SetNumInstancesRequest)(nil), "appengine.SetNumInstancesRequest")
+	proto.RegisterType((*SetNumInstancesResponse)(nil), "appengine.SetNumInstancesResponse")
+	proto.RegisterType((*StartModuleRequest)(nil), "appengine.StartModuleRequest")
+	proto.RegisterType((*StartModuleResponse)(nil), "appengine.StartModuleResponse")
+	proto.RegisterType((*StopModuleRequest)(nil), "appengine.StopModuleRequest")
+	proto.RegisterType((*StopModuleResponse)(nil), "appengine.StopModuleResponse")
+	proto.RegisterType((*GetHostnameRequest)(nil), "appengine.GetHostnameRequest")
+	proto.RegisterType((*GetHostnameResponse)(nil), "appengine.GetHostnameResponse")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/modules/modules_service.proto", fileDescriptor_modules_service_9cd3bffe4e91c59a)
+}
+
+var fileDescriptor_modules_service_9cd3bffe4e91c59a = []byte{
+	// 457 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x94, 0xc1, 0x6f, 0xd3, 0x30,
+	0x14, 0xc6, 0x69, 0x02, 0xdb, 0xf2, 0x0e, 0x90, 0x3a, 0x5b, 0xd7, 0x4d, 0x1c, 0x50, 0x4e, 0x1c,
+	0x50, 0x2b, 0x90, 0x10, 0xe7, 0xae, 0x35, 0x25, 0xb0, 0xa5, 0x28, 0xce, 0x2a, 0xc4, 0xa5, 0x0a,
+	0xdb, 0x23, 0x8b, 0x94, 0xda, 0xc1, 0x76, 0x77, 0xe4, 0xbf, 0xe0, 0xff, 0x45, 0x4b, 0xed, 0xb6,
+	0x81, 0x4e, 0x45, 0x68, 0xa7, 0xe4, 0x7d, 0xfe, 0xfc, 0x7b, 0x9f, 0x5f, 0xac, 0xc0, 0x59, 0x2e,
+	0x44, 0x5e, 0x62, 0x2f, 0x17, 0x65, 0xc6, 0xf3, 0x9e, 0x90, 0x79, 0x3f, 0xab, 0x2a, 0xe4, 0x79,
+	0xc1, 0xb1, 0x5f, 0x70, 0x8d, 0x92, 0x67, 0x65, 0x7f, 0x2e, 0xae, 0x17, 0x25, 0x2a, 0xfb, 0x9c,
+	0x29, 0x94, 0xb7, 0xc5, 0x15, 0xf6, 0x2a, 0x29, 0xb4, 0x20, 0xde, 0x6a, 0x47, 0xf8, 0xab, 0x05,
+	0xc1, 0xc5, 0xd2, 0xc4, 0x96, 0x1e, 0x2a, 0xa5, 0x90, 0xe1, 0x4f, 0xf0, 0xea, 0x97, 0xa1, 0xb8,
+	0x46, 0xb2, 0x07, 0xce, 0xe4, 0x93, 0xff, 0x88, 0x10, 0x78, 0x1a, 0xc5, 0xd3, 0xc1, 0x79, 0x34,
+	0x9a, 0x5d, 0x4c, 0x46, 0x97, 0xe7, 0xd4, 0x6f, 0x91, 0x00, 0x9e, 0x59, 0x6d, 0x4a, 0x13, 0x16,
+	0x4d, 0x62, 0xdf, 0x21, 0x47, 0xd0, 0xb6, 0x62, 0x14, 0xb3, 0x74, 0x10, 0x0f, 0x29, 0xf3, 0xdd,
+	0x3b, 0x6f, 0x9a, 0x0c, 0x62, 0x16, 0xd1, 0x38, 0x9d, 0xd1, 0x24, 0x99, 0x24, 0xfe, 0x63, 0x72,
+	0x08, 0xfe, 0x65, 0x4c, 0xbf, 0x7c, 0xa6, 0xc3, 0x94, 0x8e, 0x66, 0x2c, 0x1d, 0xa4, 0xd4, 0x7f,
+	0x12, 0x06, 0xd0, 0x1e, 0xa3, 0x36, 0xc9, 0x12, 0xfc, 0xb1, 0x40, 0xa5, 0xc3, 0x57, 0x40, 0x36,
+	0x45, 0x55, 0x09, 0xae, 0x90, 0x74, 0x60, 0x6f, 0x79, 0xcc, 0x6e, 0xeb, 0x85, 0xfb, 0xd2, 0x4b,
+	0x4c, 0x65, 0xdc, 0x53, 0x94, 0xaa, 0x10, 0xdc, 0x32, 0x1a, 0xee, 0xd6, 0x86, 0xbb, 0x0f, 0x41,
+	0xc3, 0x6d, 0xe0, 0x5d, 0xd8, 0xbf, 0x5d, 0x6a, 0x86, 0x6e, 0xcb, 0xf0, 0x0d, 0x74, 0xc7, 0xa8,
+	0x47, 0xf8, 0x3d, 0x5b, 0x94, 0x76, 0xdf, 0xae, 0x26, 0x6f, 0xe1, 0x64, 0xcb, 0x9e, 0x6d, 0xad,
+	0x9c, 0xcd, 0x56, 0x1f, 0xa1, 0x33, 0x46, 0x1d, 0x2f, 0xe6, 0x11, 0x57, 0x3a, 0xe3, 0x57, 0xb8,
+	0xeb, 0x34, 0x9b, 0x2c, 0xa7, 0x5e, 0x58, 0xb1, 0xde, 0xc1, 0xf1, 0x5f, 0x2c, 0x13, 0xe0, 0x39,
+	0x78, 0x85, 0x15, 0xeb, 0x08, 0x6e, 0xb2, 0x16, 0xc2, 0x1b, 0xe8, 0xb0, 0x07, 0x0a, 0xd1, 0xec,
+	0xe4, 0xfe, 0xd9, 0xe9, 0x04, 0x8e, 0xd9, 0xf6, 0x88, 0xe1, 0x7b, 0x20, 0x4c, 0x67, 0xd2, 0xdc,
+	0x81, 0x6d, 0x01, 0x9c, 0xfb, 0x02, 0x34, 0x26, 0x7a, 0x04, 0x41, 0x83, 0x63, 0xf0, 0x14, 0xda,
+	0x4c, 0x8b, 0xea, 0x7e, 0xfa, 0xbf, 0xcd, 0xf8, 0xf0, 0x2e, 0xe5, 0x1a, 0x63, 0xe0, 0xdf, 0xea,
+	0xfb, 0xf8, 0x41, 0x28, 0xcd, 0xb3, 0xf9, 0xff, 0xd3, 0xc9, 0x29, 0x1c, 0xd8, 0x59, 0x75, 0xdd,
+	0x7a, 0x69, 0x55, 0x87, 0xaf, 0xeb, 0x5b, 0xbc, 0xee, 0x61, 0xbe, 0xec, 0x29, 0x1c, 0xdc, 0x18,
+	0xcd, 0x8c, 0x68, 0x55, 0x9f, 0x79, 0x5f, 0xf7, 0xcd, 0x5f, 0xe2, 0x77, 0x00, 0x00, 0x00, 0xff,
+	0xff, 0x6e, 0xbc, 0xe0, 0x61, 0x5c, 0x04, 0x00, 0x00,
+}
diff --git a/v2/internal/modules/modules_service.proto b/v2/internal/modules/modules_service.proto
new file mode 100644
index 0000000..d29f006
--- /dev/null
+++ b/v2/internal/modules/modules_service.proto
@@ -0,0 +1,80 @@
+syntax = "proto2";
+option go_package = "modules";
+
+package appengine;
+
+message ModulesServiceError {
+  enum ErrorCode {
+    OK  = 0;
+    INVALID_MODULE = 1;
+    INVALID_VERSION = 2;
+    INVALID_INSTANCES = 3;
+    TRANSIENT_ERROR = 4;
+    UNEXPECTED_STATE = 5;
+  }
+}
+
+message GetModulesRequest {
+}
+
+message GetModulesResponse {
+  repeated string module = 1;
+}
+
+message GetVersionsRequest {
+  optional string module = 1;
+}
+
+message GetVersionsResponse {
+  repeated string version = 1;
+}
+
+message GetDefaultVersionRequest {
+  optional string module = 1;
+}
+
+message GetDefaultVersionResponse {
+  required string version = 1;
+}
+
+message GetNumInstancesRequest {
+  optional string module = 1;
+  optional string version = 2;
+}
+
+message GetNumInstancesResponse {
+  required int64 instances = 1;
+}
+
+message SetNumInstancesRequest {
+  optional string module = 1;
+  optional string version = 2;
+  required int64 instances = 3;
+}
+
+message SetNumInstancesResponse {}
+
+message StartModuleRequest {
+  required string module = 1;
+  required string version = 2;
+}
+
+message StartModuleResponse {}
+
+message StopModuleRequest {
+  optional string module = 1;
+  optional string version = 2;
+}
+
+message StopModuleResponse {}
+
+message GetHostnameRequest {
+  optional string module = 1;
+  optional string version = 2;
+  optional string instance = 3;
+}
+
+message GetHostnameResponse {
+  required string hostname = 1;
+}
+
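These request/response pairs are issued through the internal Call helper (its shape is visible in net_test.go later in this diff). A hedged sketch of a hostname lookup — the service name "modules" and method name "GetHostname" are assumptions about how the API server routes these calls:

```
// Sketch only: the routing strings are assumptions; the message types come from
// the generated modules_service.pb.go earlier in this diff.
package internal

import (
	"context"

	pb "google.golang.org/appengine/v2/internal/modules"
)

func exampleHostname(ctx context.Context, module, version string) (string, error) {
	req := &pb.GetHostnameRequest{
		Module:  &module,  // optional string module = 1
		Version: &version, // optional string version = 2
	}
	res := &pb.GetHostnameResponse{}
	if err := Call(ctx, "modules", "GetHostname", req, res); err != nil {
		return "", err
	}
	return res.GetHostname(), nil
}
```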
diff --git a/v2/internal/net.go b/v2/internal/net.go
new file mode 100644
index 0000000..fe42972
--- /dev/null
+++ b/v2/internal/net.go
@@ -0,0 +1,56 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+// This file implements a network dialer that limits the number of concurrent connections.
+// It is only used for API calls.
+
+import (
+	"log"
+	"net"
+	"runtime"
+	"sync"
+	"time"
+)
+
+var limitSem = make(chan int, 100) // TODO(dsymonds): Use environment variable.
+
+func limitRelease() {
+	// non-blocking
+	select {
+	case <-limitSem:
+	default:
+		// This should not normally happen.
+		log.Print("appengine: unbalanced limitSem release!")
+	}
+}
+
+func limitDial(network, addr string) (net.Conn, error) {
+	limitSem <- 1
+
+	// Dial with a timeout in case the API host is MIA.
+	// The connection should normally be very fast.
+	conn, err := net.DialTimeout(network, addr, 10*time.Second)
+	if err != nil {
+		limitRelease()
+		return nil, err
+	}
+	lc := &limitConn{Conn: conn}
+	runtime.SetFinalizer(lc, (*limitConn).Close) // shouldn't usually be required
+	return lc, nil
+}
+
+type limitConn struct {
+	close sync.Once
+	net.Conn
+}
+
+func (lc *limitConn) Close() error {
+	defer lc.close.Do(func() {
+		limitRelease()
+		runtime.SetFinalizer(lc, nil)
+	})
+	return lc.Conn.Close()
+}
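limitDial has the same shape as http.Transport's (legacy) Dial hook, so a connection-capped HTTP client can be built directly on top of it. A sketch, not necessarily how the package wires it up:

```
// Sketch: plugging the capped dialer into an http.Client.
package internal

import (
	"net/http"
	"time"
)

func exampleLimitedClient() *http.Client {
	return &http.Client{
		Transport: &http.Transport{
			Dial: limitDial, // func(network, addr string) (net.Conn, error)
		},
		Timeout: 30 * time.Second,
	}
}
```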
diff --git a/v2/internal/net_test.go b/v2/internal/net_test.go
new file mode 100644
index 0000000..f2d66a5
--- /dev/null
+++ b/v2/internal/net_test.go
@@ -0,0 +1,55 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+import (
+	netcontext "context"
+	"sync"
+	"testing"
+	"time"
+
+	basepb "google.golang.org/appengine/v2/internal/base"
+)
+
+func TestDialLimit(t *testing.T) {
+	// Fill up semaphore with false acquisitions to permit only two TCP connections at a time.
+	// We don't replace limitSem because that results in a data race when net/http lazily closes connections.
+	nFake := cap(limitSem) - 2
+	for i := 0; i < nFake; i++ {
+		limitSem <- 1
+	}
+	defer func() {
+		for i := 0; i < nFake; i++ {
+			<-limitSem
+		}
+	}()
+
+	f, c, cleanup := setup() // setup is in api_test.go
+	defer cleanup()
+	f.hang = make(chan int)
+
+	// If we make two RunSlowly RPCs (which will wait for f.hang to be strobed),
+	// then the simple Non200 RPC should hang.
+	var wg sync.WaitGroup
+	wg.Add(2)
+	for i := 0; i < 2; i++ {
+		go func() {
+			defer wg.Done()
+			Call(toContext(c), "errors", "RunSlowly", &basepb.VoidProto{}, &basepb.VoidProto{})
+		}()
+	}
+	time.Sleep(50 * time.Millisecond) // let those two RPCs start
+
+	ctx, _ := netcontext.WithTimeout(toContext(c), 50*time.Millisecond)
+	err := Call(ctx, "errors", "Non200", &basepb.VoidProto{}, &basepb.VoidProto{})
+	if err != errTimeout {
+		t.Errorf("Non200 RPC returned with err %v, want errTimeout", err)
+	}
+
+	// Drain the two RunSlowly calls.
+	f.hang <- 1
+	f.hang <- 1
+	wg.Wait()
+}
diff --git a/v2/internal/regen.sh b/v2/internal/regen.sh
new file mode 100755
index 0000000..2fdb546
--- /dev/null
+++ b/v2/internal/regen.sh
@@ -0,0 +1,40 @@
+#!/bin/bash -e
+#
+# This script rebuilds the generated code for the protocol buffers.
+# To run this you will need protoc and goprotobuf installed;
+# see https://github.com/golang/protobuf for instructions.
+
+PKG=google.golang.org/appengine
+
+function die() {
+	echo 1>&2 $*
+	exit 1
+}
+
+# Sanity check that the right tools are accessible.
+for tool in go protoc protoc-gen-go; do
+	q=$(which $tool) || die "didn't find $tool"
+	echo 1>&2 "$tool: $q"
+done
+
+echo -n 1>&2 "finding package dir... "
+pkgdir=$(go list -f '{{.Dir}}' $PKG)
+echo 1>&2 $pkgdir
+base=$(echo $pkgdir | sed "s,/$PKG\$,,")
+echo 1>&2 "base: $base"
+cd $base
+
+# Run protoc once per package.
+for dir in $(find $PKG/internal -name '*.proto' | xargs dirname | sort | uniq); do
+	echo 1>&2 "* $dir"
+	protoc --go_out=. $dir/*.proto
+done
+
+for f in $(find $PKG/internal -name '*.pb.go'); do
+  # Remove proto.RegisterEnum calls.
+  # These cause duplicate registration panics when these packages
+  # are used on classic App Engine. proto.RegisterEnum only affects
+  # parsing the text format; we don't care about that.
+  # https://code.google.com/p/googleappengine/issues/detail?id=11670#c17
+  sed -i '/proto.RegisterEnum/d' $f
+done
diff --git a/v2/internal/remote_api/remote_api.pb.go b/v2/internal/remote_api/remote_api.pb.go
new file mode 100644
index 0000000..1e4094a
--- /dev/null
+++ b/v2/internal/remote_api/remote_api.pb.go
@@ -0,0 +1,361 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/remote_api/remote_api.proto
+
+package remote_api
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type RpcError_ErrorCode int32
+
+const (
+	RpcError_UNKNOWN             RpcError_ErrorCode = 0
+	RpcError_CALL_NOT_FOUND      RpcError_ErrorCode = 1
+	RpcError_PARSE_ERROR         RpcError_ErrorCode = 2
+	RpcError_SECURITY_VIOLATION  RpcError_ErrorCode = 3
+	RpcError_OVER_QUOTA          RpcError_ErrorCode = 4
+	RpcError_REQUEST_TOO_LARGE   RpcError_ErrorCode = 5
+	RpcError_CAPABILITY_DISABLED RpcError_ErrorCode = 6
+	RpcError_FEATURE_DISABLED    RpcError_ErrorCode = 7
+	RpcError_BAD_REQUEST         RpcError_ErrorCode = 8
+	RpcError_RESPONSE_TOO_LARGE  RpcError_ErrorCode = 9
+	RpcError_CANCELLED           RpcError_ErrorCode = 10
+	RpcError_REPLAY_ERROR        RpcError_ErrorCode = 11
+	RpcError_DEADLINE_EXCEEDED   RpcError_ErrorCode = 12
+)
+
+var RpcError_ErrorCode_name = map[int32]string{
+	0:  "UNKNOWN",
+	1:  "CALL_NOT_FOUND",
+	2:  "PARSE_ERROR",
+	3:  "SECURITY_VIOLATION",
+	4:  "OVER_QUOTA",
+	5:  "REQUEST_TOO_LARGE",
+	6:  "CAPABILITY_DISABLED",
+	7:  "FEATURE_DISABLED",
+	8:  "BAD_REQUEST",
+	9:  "RESPONSE_TOO_LARGE",
+	10: "CANCELLED",
+	11: "REPLAY_ERROR",
+	12: "DEADLINE_EXCEEDED",
+}
+var RpcError_ErrorCode_value = map[string]int32{
+	"UNKNOWN":             0,
+	"CALL_NOT_FOUND":      1,
+	"PARSE_ERROR":         2,
+	"SECURITY_VIOLATION":  3,
+	"OVER_QUOTA":          4,
+	"REQUEST_TOO_LARGE":   5,
+	"CAPABILITY_DISABLED": 6,
+	"FEATURE_DISABLED":    7,
+	"BAD_REQUEST":         8,
+	"RESPONSE_TOO_LARGE":  9,
+	"CANCELLED":           10,
+	"REPLAY_ERROR":        11,
+	"DEADLINE_EXCEEDED":   12,
+}
+
+func (x RpcError_ErrorCode) Enum() *RpcError_ErrorCode {
+	p := new(RpcError_ErrorCode)
+	*p = x
+	return p
+}
+func (x RpcError_ErrorCode) String() string {
+	return proto.EnumName(RpcError_ErrorCode_name, int32(x))
+}
+func (x *RpcError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(RpcError_ErrorCode_value, data, "RpcError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = RpcError_ErrorCode(value)
+	return nil
+}
+func (RpcError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_remote_api_1978114ec33a273d, []int{2, 0}
+}
+
+type Request struct {
+	ServiceName          *string  `protobuf:"bytes,2,req,name=service_name,json=serviceName" json:"service_name,omitempty"`
+	Method               *string  `protobuf:"bytes,3,req,name=method" json:"method,omitempty"`
+	Request              []byte   `protobuf:"bytes,4,req,name=request" json:"request,omitempty"`
+	RequestId            *string  `protobuf:"bytes,5,opt,name=request_id,json=requestId" json:"request_id,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *Request) Reset()         { *m = Request{} }
+func (m *Request) String() string { return proto.CompactTextString(m) }
+func (*Request) ProtoMessage()    {}
+func (*Request) Descriptor() ([]byte, []int) {
+	return fileDescriptor_remote_api_1978114ec33a273d, []int{0}
+}
+func (m *Request) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Request.Unmarshal(m, b)
+}
+func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Request.Marshal(b, m, deterministic)
+}
+func (dst *Request) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Request.Merge(dst, src)
+}
+func (m *Request) XXX_Size() int {
+	return xxx_messageInfo_Request.Size(m)
+}
+func (m *Request) XXX_DiscardUnknown() {
+	xxx_messageInfo_Request.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Request proto.InternalMessageInfo
+
+func (m *Request) GetServiceName() string {
+	if m != nil && m.ServiceName != nil {
+		return *m.ServiceName
+	}
+	return ""
+}
+
+func (m *Request) GetMethod() string {
+	if m != nil && m.Method != nil {
+		return *m.Method
+	}
+	return ""
+}
+
+func (m *Request) GetRequest() []byte {
+	if m != nil {
+		return m.Request
+	}
+	return nil
+}
+
+func (m *Request) GetRequestId() string {
+	if m != nil && m.RequestId != nil {
+		return *m.RequestId
+	}
+	return ""
+}
+
+type ApplicationError struct {
+	Code                 *int32   `protobuf:"varint,1,req,name=code" json:"code,omitempty"`
+	Detail               *string  `protobuf:"bytes,2,req,name=detail" json:"detail,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *ApplicationError) Reset()         { *m = ApplicationError{} }
+func (m *ApplicationError) String() string { return proto.CompactTextString(m) }
+func (*ApplicationError) ProtoMessage()    {}
+func (*ApplicationError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_remote_api_1978114ec33a273d, []int{1}
+}
+func (m *ApplicationError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_ApplicationError.Unmarshal(m, b)
+}
+func (m *ApplicationError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_ApplicationError.Marshal(b, m, deterministic)
+}
+func (dst *ApplicationError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_ApplicationError.Merge(dst, src)
+}
+func (m *ApplicationError) XXX_Size() int {
+	return xxx_messageInfo_ApplicationError.Size(m)
+}
+func (m *ApplicationError) XXX_DiscardUnknown() {
+	xxx_messageInfo_ApplicationError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_ApplicationError proto.InternalMessageInfo
+
+func (m *ApplicationError) GetCode() int32 {
+	if m != nil && m.Code != nil {
+		return *m.Code
+	}
+	return 0
+}
+
+func (m *ApplicationError) GetDetail() string {
+	if m != nil && m.Detail != nil {
+		return *m.Detail
+	}
+	return ""
+}
+
+type RpcError struct {
+	Code                 *int32   `protobuf:"varint,1,req,name=code" json:"code,omitempty"`
+	Detail               *string  `protobuf:"bytes,2,opt,name=detail" json:"detail,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *RpcError) Reset()         { *m = RpcError{} }
+func (m *RpcError) String() string { return proto.CompactTextString(m) }
+func (*RpcError) ProtoMessage()    {}
+func (*RpcError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_remote_api_1978114ec33a273d, []int{2}
+}
+func (m *RpcError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_RpcError.Unmarshal(m, b)
+}
+func (m *RpcError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_RpcError.Marshal(b, m, deterministic)
+}
+func (dst *RpcError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_RpcError.Merge(dst, src)
+}
+func (m *RpcError) XXX_Size() int {
+	return xxx_messageInfo_RpcError.Size(m)
+}
+func (m *RpcError) XXX_DiscardUnknown() {
+	xxx_messageInfo_RpcError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_RpcError proto.InternalMessageInfo
+
+func (m *RpcError) GetCode() int32 {
+	if m != nil && m.Code != nil {
+		return *m.Code
+	}
+	return 0
+}
+
+func (m *RpcError) GetDetail() string {
+	if m != nil && m.Detail != nil {
+		return *m.Detail
+	}
+	return ""
+}
+
+type Response struct {
+	Response             []byte            `protobuf:"bytes,1,opt,name=response" json:"response,omitempty"`
+	Exception            []byte            `protobuf:"bytes,2,opt,name=exception" json:"exception,omitempty"`
+	ApplicationError     *ApplicationError `protobuf:"bytes,3,opt,name=application_error,json=applicationError" json:"application_error,omitempty"`
+	JavaException        []byte            `protobuf:"bytes,4,opt,name=java_exception,json=javaException" json:"java_exception,omitempty"`
+	RpcError             *RpcError         `protobuf:"bytes,5,opt,name=rpc_error,json=rpcError" json:"rpc_error,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}          `json:"-"`
+	XXX_unrecognized     []byte            `json:"-"`
+	XXX_sizecache        int32             `json:"-"`
+}
+
+func (m *Response) Reset()         { *m = Response{} }
+func (m *Response) String() string { return proto.CompactTextString(m) }
+func (*Response) ProtoMessage()    {}
+func (*Response) Descriptor() ([]byte, []int) {
+	return fileDescriptor_remote_api_1978114ec33a273d, []int{3}
+}
+func (m *Response) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_Response.Unmarshal(m, b)
+}
+func (m *Response) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_Response.Marshal(b, m, deterministic)
+}
+func (dst *Response) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_Response.Merge(dst, src)
+}
+func (m *Response) XXX_Size() int {
+	return xxx_messageInfo_Response.Size(m)
+}
+func (m *Response) XXX_DiscardUnknown() {
+	xxx_messageInfo_Response.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_Response proto.InternalMessageInfo
+
+func (m *Response) GetResponse() []byte {
+	if m != nil {
+		return m.Response
+	}
+	return nil
+}
+
+func (m *Response) GetException() []byte {
+	if m != nil {
+		return m.Exception
+	}
+	return nil
+}
+
+func (m *Response) GetApplicationError() *ApplicationError {
+	if m != nil {
+		return m.ApplicationError
+	}
+	return nil
+}
+
+func (m *Response) GetJavaException() []byte {
+	if m != nil {
+		return m.JavaException
+	}
+	return nil
+}
+
+func (m *Response) GetRpcError() *RpcError {
+	if m != nil {
+		return m.RpcError
+	}
+	return nil
+}
+
+func init() {
+	proto.RegisterType((*Request)(nil), "remote_api.Request")
+	proto.RegisterType((*ApplicationError)(nil), "remote_api.ApplicationError")
+	proto.RegisterType((*RpcError)(nil), "remote_api.RpcError")
+	proto.RegisterType((*Response)(nil), "remote_api.Response")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/remote_api/remote_api.proto", fileDescriptor_remote_api_1978114ec33a273d)
+}
+
+var fileDescriptor_remote_api_1978114ec33a273d = []byte{
+	// 531 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0x51, 0x6e, 0xd3, 0x40,
+	0x10, 0x86, 0xb1, 0x9b, 0x34, 0xf1, 0xc4, 0x2d, 0xdb, 0xa5, 0x14, 0x0b, 0x15, 0x29, 0x44, 0x42,
+	0xca, 0x53, 0x2a, 0x38, 0x00, 0x62, 0x63, 0x6f, 0x91, 0x85, 0x65, 0xa7, 0x6b, 0xbb, 0x50, 0x5e,
+	0x56, 0x2b, 0x67, 0x65, 0x8c, 0x12, 0xaf, 0xd9, 0x98, 0x8a, 0x17, 0x6e, 0xc0, 0xb5, 0x38, 0x0c,
+	0xb7, 0x40, 0x36, 0x6e, 0x63, 0xf5, 0x89, 0xb7, 0x7f, 0x7e, 0x7b, 0xe6, 0x1b, 0xcd, 0xcc, 0xc2,
+	0xbb, 0x5c, 0xa9, 0x7c, 0x23, 0x17, 0xb9, 0xda, 0x88, 0x32, 0x5f, 0x28, 0x9d, 0x5f, 0x88, 0xaa,
+	0x92, 0x65, 0x5e, 0x94, 0xf2, 0xa2, 0x28, 0x6b, 0xa9, 0x4b, 0xb1, 0xb9, 0xd0, 0x72, 0xab, 0x6a,
+	0xc9, 0x45, 0x55, 0xf4, 0xe4, 0xa2, 0xd2, 0xaa, 0x56, 0x18, 0xf6, 0xce, 0xec, 0x27, 0x8c, 0x98,
+	0xfc, 0xf6, 0x5d, 0xee, 0x6a, 0xfc, 0x12, 0xec, 0x9d, 0xd4, 0xb7, 0x45, 0x26, 0x79, 0x29, 0xb6,
+	0xd2, 0x31, 0xa7, 0xe6, 0xdc, 0x62, 0x93, 0xce, 0x0b, 0xc5, 0x56, 0xe2, 0x33, 0x38, 0xdc, 0xca,
+	0xfa, 0x8b, 0x5a, 0x3b, 0x07, 0xed, 0xc7, 0x2e, 0xc2, 0x0e, 0x8c, 0xf4, 0xbf, 0x2a, 0xce, 0x60,
+	0x6a, 0xce, 0x6d, 0x76, 0x17, 0xe2, 0x17, 0x00, 0x9d, 0xe4, 0xc5, 0xda, 0x19, 0x4e, 0x8d, 0xb9,
+	0xc5, 0xac, 0xce, 0xf1, 0xd7, 0xb3, 0xb7, 0x80, 0x48, 0x55, 0x6d, 0x8a, 0x4c, 0xd4, 0x85, 0x2a,
+	0xa9, 0xd6, 0x4a, 0x63, 0x0c, 0x83, 0x4c, 0xad, 0xa5, 0x63, 0x4c, 0xcd, 0xf9, 0x90, 0xb5, 0xba,
+	0x01, 0xaf, 0x65, 0x2d, 0x8a, 0x4d, 0xd7, 0x55, 0x17, 0xcd, 0x7e, 0x9b, 0x30, 0x66, 0x55, 0xf6,
+	0x7f, 0x89, 0x46, 0x2f, 0xf1, 0x97, 0x09, 0x56, 0x9b, 0xe5, 0x36, 0x7f, 0x4d, 0x60, 0x94, 0x86,
+	0x1f, 0xc2, 0xe8, 0x63, 0x88, 0x1e, 0x61, 0x0c, 0xc7, 0x2e, 0x09, 0x02, 0x1e, 0x46, 0x09, 0xbf,
+	0x8c, 0xd2, 0xd0, 0x43, 0x06, 0x7e, 0x0c, 0x93, 0x15, 0x61, 0x31, 0xe5, 0x94, 0xb1, 0x88, 0x21,
+	0x13, 0x9f, 0x01, 0x8e, 0xa9, 0x9b, 0x32, 0x3f, 0xb9, 0xe1, 0xd7, 0x7e, 0x14, 0x90, 0xc4, 0x8f,
+	0x42, 0x74, 0x80, 0x8f, 0x01, 0xa2, 0x6b, 0xca, 0xf8, 0x55, 0x1a, 0x25, 0x04, 0x0d, 0xf0, 0x53,
+	0x38, 0x61, 0xf4, 0x2a, 0xa5, 0x71, 0xc2, 0x93, 0x28, 0xe2, 0x01, 0x61, 0xef, 0x29, 0x1a, 0xe2,
+	0x67, 0xf0, 0xc4, 0x25, 0x2b, 0xb2, 0xf4, 0x83, 0xa6, 0x80, 0xe7, 0xc7, 0x64, 0x19, 0x50, 0x0f,
+	0x1d, 0xe2, 0x53, 0x40, 0x97, 0x94, 0x24, 0x29, 0xa3, 0x7b, 0x77, 0xd4, 0xe0, 0x97, 0xc4, 0xe3,
+	0x5d, 0x25, 0x34, 0x6e, 0xf0, 0x8c, 0xc6, 0xab, 0x28, 0x8c, 0x69, 0xaf, 0xae, 0x85, 0x8f, 0xc0,
+	0x72, 0x49, 0xe8, 0xd2, 0xa0, 0xc9, 0x03, 0x8c, 0xc0, 0x66, 0x74, 0x15, 0x90, 0x9b, 0xae, 0xef,
+	0x49, 0xd3, 0x8f, 0x47, 0x89, 0x17, 0xf8, 0x21, 0xe5, 0xf4, 0x93, 0x4b, 0xa9, 0x47, 0x3d, 0x64,
+	0xcf, 0xfe, 0x18, 0x30, 0x66, 0x72, 0x57, 0xa9, 0x72, 0x27, 0xf1, 0x73, 0x18, 0xeb, 0x4e, 0x3b,
+	0xc6, 0xd4, 0x98, 0xdb, 0xec, 0x3e, 0xc6, 0xe7, 0x60, 0xc9, 0x1f, 0x99, 0xac, 0x9a, 0x75, 0xb5,
+	0x23, 0xb5, 0xd9, 0xde, 0xc0, 0x3e, 0x9c, 0x88, 0xfd, 0x3a, 0xb9, 0x6c, 0x06, 0xec, 0x1c, 0x4c,
+	0x8d, 0xf9, 0xe4, 0xcd, 0xf9, 0xa2, 0x77, 0x87, 0x0f, 0x77, 0xce, 0x90, 0x78, 0x78, 0x05, 0xaf,
+	0xe0, 0xf8, 0xab, 0xb8, 0x15, 0x7c, 0x4f, 0x1b, 0xb4, 0xb4, 0xa3, 0xc6, 0xa5, 0xf7, 0xc4, 0xd7,
+	0x60, 0xe9, 0x2a, 0xeb, 0x48, 0xc3, 0x96, 0x74, 0xda, 0x27, 0xdd, 0x1d, 0x07, 0x1b, 0xeb, 0x4e,
+	0x2d, 0xed, 0xcf, 0xbd, 0x07, 0xf0, 0x37, 0x00, 0x00, 0xff, 0xff, 0x38, 0xd1, 0x0f, 0x22, 0x4f,
+	0x03, 0x00, 0x00,
+}
diff --git a/v2/internal/remote_api/remote_api.proto b/v2/internal/remote_api/remote_api.proto
new file mode 100644
index 0000000..f21763a
--- /dev/null
+++ b/v2/internal/remote_api/remote_api.proto
@@ -0,0 +1,44 @@
+syntax = "proto2";
+option go_package = "remote_api";
+
+package remote_api;
+
+message Request {
+  required string service_name = 2;
+  required string method = 3;
+  required bytes request = 4;
+  optional string request_id = 5;
+}
+
+message ApplicationError {
+  required int32 code = 1;
+  required string detail = 2;
+}
+
+message RpcError {
+  enum ErrorCode {
+    UNKNOWN = 0;
+    CALL_NOT_FOUND = 1;
+    PARSE_ERROR = 2;
+    SECURITY_VIOLATION = 3;
+    OVER_QUOTA = 4;
+    REQUEST_TOO_LARGE = 5;
+    CAPABILITY_DISABLED = 6;
+    FEATURE_DISABLED = 7;
+    BAD_REQUEST = 8;
+    RESPONSE_TOO_LARGE = 9;
+    CANCELLED = 10;
+    REPLAY_ERROR = 11;
+    DEADLINE_EXCEEDED = 12;
+  }
+  required int32 code = 1;
+  optional string detail = 2;
+}
+
+message Response {
+  optional bytes response = 1;
+  optional bytes exception = 2;
+  optional ApplicationError application_error = 3;
+  optional bytes java_exception = 4;
+  optional RpcError rpc_error = 5;
+}
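
A minimal, illustrative sketch (not part of the upstream diff) of how the generated proto2 `Response` type added above could be populated and round-tripped. Field presence in proto2 is pointer-based, hence the `proto.Int32`/`proto.String` helpers; the sample values are invented, and the import path is the internal package introduced by this snapshot, which application code would not normally import directly.

```go
package main

import (
	"fmt"
	"log"

	"github.com/golang/protobuf/proto"
	pb "google.golang.org/appengine/v2/internal/remote_api"
)

func main() {
	// Build a Response carrying an RPC-level error, mirroring the fields
	// defined in remote_api.proto.
	resp := &pb.Response{
		Response: []byte("serialized service response"),
		RpcError: &pb.RpcError{
			Code:   proto.Int32(int32(pb.RpcError_OVER_QUOTA)),
			Detail: proto.String("quota exhausted"),
		},
	}

	buf, err := proto.Marshal(resp)
	if err != nil {
		log.Fatal(err)
	}

	var decoded pb.Response
	if err := proto.Unmarshal(buf, &decoded); err != nil {
		log.Fatal(err)
	}
	// The generated getters are nil-safe and return zero values for unset fields.
	fmt.Println(len(decoded.GetResponse()), decoded.GetRpcError().GetCode())
}
```
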
diff --git a/v2/internal/system/system_service.pb.go b/v2/internal/system/system_service.pb.go
new file mode 100644
index 0000000..1eb2384
--- /dev/null
+++ b/v2/internal/system/system_service.pb.go
@@ -0,0 +1,362 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/system/system_service.proto
+
+package system
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type SystemServiceError_ErrorCode int32
+
+const (
+	SystemServiceError_OK               SystemServiceError_ErrorCode = 0
+	SystemServiceError_INTERNAL_ERROR   SystemServiceError_ErrorCode = 1
+	SystemServiceError_BACKEND_REQUIRED SystemServiceError_ErrorCode = 2
+	SystemServiceError_LIMIT_REACHED    SystemServiceError_ErrorCode = 3
+)
+
+var SystemServiceError_ErrorCode_name = map[int32]string{
+	0: "OK",
+	1: "INTERNAL_ERROR",
+	2: "BACKEND_REQUIRED",
+	3: "LIMIT_REACHED",
+}
+var SystemServiceError_ErrorCode_value = map[string]int32{
+	"OK":               0,
+	"INTERNAL_ERROR":   1,
+	"BACKEND_REQUIRED": 2,
+	"LIMIT_REACHED":    3,
+}
+
+func (x SystemServiceError_ErrorCode) Enum() *SystemServiceError_ErrorCode {
+	p := new(SystemServiceError_ErrorCode)
+	*p = x
+	return p
+}
+func (x SystemServiceError_ErrorCode) String() string {
+	return proto.EnumName(SystemServiceError_ErrorCode_name, int32(x))
+}
+func (x *SystemServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(SystemServiceError_ErrorCode_value, data, "SystemServiceError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = SystemServiceError_ErrorCode(value)
+	return nil
+}
+func (SystemServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_system_service_ccf41ec210fc59eb, []int{0, 0}
+}
+
+type SystemServiceError struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *SystemServiceError) Reset()         { *m = SystemServiceError{} }
+func (m *SystemServiceError) String() string { return proto.CompactTextString(m) }
+func (*SystemServiceError) ProtoMessage()    {}
+func (*SystemServiceError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_system_service_ccf41ec210fc59eb, []int{0}
+}
+func (m *SystemServiceError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_SystemServiceError.Unmarshal(m, b)
+}
+func (m *SystemServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_SystemServiceError.Marshal(b, m, deterministic)
+}
+func (dst *SystemServiceError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_SystemServiceError.Merge(dst, src)
+}
+func (m *SystemServiceError) XXX_Size() int {
+	return xxx_messageInfo_SystemServiceError.Size(m)
+}
+func (m *SystemServiceError) XXX_DiscardUnknown() {
+	xxx_messageInfo_SystemServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SystemServiceError proto.InternalMessageInfo
+
+type SystemStat struct {
+	// Instantaneous value of this stat.
+	Current *float64 `protobuf:"fixed64,1,opt,name=current" json:"current,omitempty"`
+	// Average over time, if this stat has an instantaneous value.
+	Average1M  *float64 `protobuf:"fixed64,3,opt,name=average1m" json:"average1m,omitempty"`
+	Average10M *float64 `protobuf:"fixed64,4,opt,name=average10m" json:"average10m,omitempty"`
+	// Total value, if the stat accumulates over time.
+	Total *float64 `protobuf:"fixed64,2,opt,name=total" json:"total,omitempty"`
+	// Rate over time, if this stat accumulates.
+	Rate1M               *float64 `protobuf:"fixed64,5,opt,name=rate1m" json:"rate1m,omitempty"`
+	Rate10M              *float64 `protobuf:"fixed64,6,opt,name=rate10m" json:"rate10m,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *SystemStat) Reset()         { *m = SystemStat{} }
+func (m *SystemStat) String() string { return proto.CompactTextString(m) }
+func (*SystemStat) ProtoMessage()    {}
+func (*SystemStat) Descriptor() ([]byte, []int) {
+	return fileDescriptor_system_service_ccf41ec210fc59eb, []int{1}
+}
+func (m *SystemStat) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_SystemStat.Unmarshal(m, b)
+}
+func (m *SystemStat) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_SystemStat.Marshal(b, m, deterministic)
+}
+func (dst *SystemStat) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_SystemStat.Merge(dst, src)
+}
+func (m *SystemStat) XXX_Size() int {
+	return xxx_messageInfo_SystemStat.Size(m)
+}
+func (m *SystemStat) XXX_DiscardUnknown() {
+	xxx_messageInfo_SystemStat.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_SystemStat proto.InternalMessageInfo
+
+func (m *SystemStat) GetCurrent() float64 {
+	if m != nil && m.Current != nil {
+		return *m.Current
+	}
+	return 0
+}
+
+func (m *SystemStat) GetAverage1M() float64 {
+	if m != nil && m.Average1M != nil {
+		return *m.Average1M
+	}
+	return 0
+}
+
+func (m *SystemStat) GetAverage10M() float64 {
+	if m != nil && m.Average10M != nil {
+		return *m.Average10M
+	}
+	return 0
+}
+
+func (m *SystemStat) GetTotal() float64 {
+	if m != nil && m.Total != nil {
+		return *m.Total
+	}
+	return 0
+}
+
+func (m *SystemStat) GetRate1M() float64 {
+	if m != nil && m.Rate1M != nil {
+		return *m.Rate1M
+	}
+	return 0
+}
+
+func (m *SystemStat) GetRate10M() float64 {
+	if m != nil && m.Rate10M != nil {
+		return *m.Rate10M
+	}
+	return 0
+}
+
+type GetSystemStatsRequest struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetSystemStatsRequest) Reset()         { *m = GetSystemStatsRequest{} }
+func (m *GetSystemStatsRequest) String() string { return proto.CompactTextString(m) }
+func (*GetSystemStatsRequest) ProtoMessage()    {}
+func (*GetSystemStatsRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_system_service_ccf41ec210fc59eb, []int{2}
+}
+func (m *GetSystemStatsRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetSystemStatsRequest.Unmarshal(m, b)
+}
+func (m *GetSystemStatsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetSystemStatsRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetSystemStatsRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetSystemStatsRequest.Merge(dst, src)
+}
+func (m *GetSystemStatsRequest) XXX_Size() int {
+	return xxx_messageInfo_GetSystemStatsRequest.Size(m)
+}
+func (m *GetSystemStatsRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetSystemStatsRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetSystemStatsRequest proto.InternalMessageInfo
+
+type GetSystemStatsResponse struct {
+	// CPU used by this instance, in mcycles.
+	Cpu *SystemStat `protobuf:"bytes,1,opt,name=cpu" json:"cpu,omitempty"`
+	// Physical memory (RAM) used by this instance, in megabytes.
+	Memory               *SystemStat `protobuf:"bytes,2,opt,name=memory" json:"memory,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}    `json:"-"`
+	XXX_unrecognized     []byte      `json:"-"`
+	XXX_sizecache        int32       `json:"-"`
+}
+
+func (m *GetSystemStatsResponse) Reset()         { *m = GetSystemStatsResponse{} }
+func (m *GetSystemStatsResponse) String() string { return proto.CompactTextString(m) }
+func (*GetSystemStatsResponse) ProtoMessage()    {}
+func (*GetSystemStatsResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_system_service_ccf41ec210fc59eb, []int{3}
+}
+func (m *GetSystemStatsResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetSystemStatsResponse.Unmarshal(m, b)
+}
+func (m *GetSystemStatsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetSystemStatsResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetSystemStatsResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetSystemStatsResponse.Merge(dst, src)
+}
+func (m *GetSystemStatsResponse) XXX_Size() int {
+	return xxx_messageInfo_GetSystemStatsResponse.Size(m)
+}
+func (m *GetSystemStatsResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetSystemStatsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetSystemStatsResponse proto.InternalMessageInfo
+
+func (m *GetSystemStatsResponse) GetCpu() *SystemStat {
+	if m != nil {
+		return m.Cpu
+	}
+	return nil
+}
+
+func (m *GetSystemStatsResponse) GetMemory() *SystemStat {
+	if m != nil {
+		return m.Memory
+	}
+	return nil
+}
+
+type StartBackgroundRequestRequest struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *StartBackgroundRequestRequest) Reset()         { *m = StartBackgroundRequestRequest{} }
+func (m *StartBackgroundRequestRequest) String() string { return proto.CompactTextString(m) }
+func (*StartBackgroundRequestRequest) ProtoMessage()    {}
+func (*StartBackgroundRequestRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_system_service_ccf41ec210fc59eb, []int{4}
+}
+func (m *StartBackgroundRequestRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_StartBackgroundRequestRequest.Unmarshal(m, b)
+}
+func (m *StartBackgroundRequestRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_StartBackgroundRequestRequest.Marshal(b, m, deterministic)
+}
+func (dst *StartBackgroundRequestRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_StartBackgroundRequestRequest.Merge(dst, src)
+}
+func (m *StartBackgroundRequestRequest) XXX_Size() int {
+	return xxx_messageInfo_StartBackgroundRequestRequest.Size(m)
+}
+func (m *StartBackgroundRequestRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_StartBackgroundRequestRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_StartBackgroundRequestRequest proto.InternalMessageInfo
+
+type StartBackgroundRequestResponse struct {
+	// Every /_ah/background request will have an X-AppEngine-BackgroundRequest
+	// header, whose value will be equal to this parameter, the request_id.
+	RequestId            *string  `protobuf:"bytes,1,opt,name=request_id,json=requestId" json:"request_id,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *StartBackgroundRequestResponse) Reset()         { *m = StartBackgroundRequestResponse{} }
+func (m *StartBackgroundRequestResponse) String() string { return proto.CompactTextString(m) }
+func (*StartBackgroundRequestResponse) ProtoMessage()    {}
+func (*StartBackgroundRequestResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_system_service_ccf41ec210fc59eb, []int{5}
+}
+func (m *StartBackgroundRequestResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_StartBackgroundRequestResponse.Unmarshal(m, b)
+}
+func (m *StartBackgroundRequestResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_StartBackgroundRequestResponse.Marshal(b, m, deterministic)
+}
+func (dst *StartBackgroundRequestResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_StartBackgroundRequestResponse.Merge(dst, src)
+}
+func (m *StartBackgroundRequestResponse) XXX_Size() int {
+	return xxx_messageInfo_StartBackgroundRequestResponse.Size(m)
+}
+func (m *StartBackgroundRequestResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_StartBackgroundRequestResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_StartBackgroundRequestResponse proto.InternalMessageInfo
+
+func (m *StartBackgroundRequestResponse) GetRequestId() string {
+	if m != nil && m.RequestId != nil {
+		return *m.RequestId
+	}
+	return ""
+}
+
+func init() {
+	proto.RegisterType((*SystemServiceError)(nil), "appengine.SystemServiceError")
+	proto.RegisterType((*SystemStat)(nil), "appengine.SystemStat")
+	proto.RegisterType((*GetSystemStatsRequest)(nil), "appengine.GetSystemStatsRequest")
+	proto.RegisterType((*GetSystemStatsResponse)(nil), "appengine.GetSystemStatsResponse")
+	proto.RegisterType((*StartBackgroundRequestRequest)(nil), "appengine.StartBackgroundRequestRequest")
+	proto.RegisterType((*StartBackgroundRequestResponse)(nil), "appengine.StartBackgroundRequestResponse")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/system/system_service.proto", fileDescriptor_system_service_ccf41ec210fc59eb)
+}
+
+var fileDescriptor_system_service_ccf41ec210fc59eb = []byte{
+	// 377 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0x4f, 0x8f, 0x93, 0x40,
+	0x18, 0xc6, 0xa5, 0x75, 0x51, 0x5e, 0xa3, 0xc1, 0xc9, 0xee, 0xca, 0xc1, 0x5d, 0x0d, 0x17, 0xbd,
+	0x48, 0x57, 0xbf, 0x80, 0xf6, 0xcf, 0x44, 0x49, 0x6b, 0xab, 0xd3, 0x7a, 0xf1, 0x42, 0x26, 0xf0,
+	0x3a, 0x21, 0xc2, 0x0c, 0x0e, 0x43, 0x93, 0x7e, 0x27, 0x3f, 0xa4, 0xe9, 0x30, 0x6d, 0xcd, 0x26,
+	0x3d, 0x31, 0xcf, 0xf3, 0xfc, 0x02, 0x3f, 0x08, 0xf0, 0x49, 0x28, 0x25, 0x2a, 0x4c, 0x84, 0xaa,
+	0xb8, 0x14, 0x89, 0xd2, 0x62, 0xc4, 0x9b, 0x06, 0xa5, 0x28, 0x25, 0x8e, 0x4a, 0x69, 0x50, 0x4b,
+	0x5e, 0x8d, 0xda, 0x5d, 0x6b, 0xb0, 0x76, 0x97, 0xac, 0x45, 0xbd, 0x2d, 0x73, 0x4c, 0x1a, 0xad,
+	0x8c, 0x22, 0xc1, 0x91, 0x8f, 0x7f, 0x01, 0x59, 0x5b, 0x64, 0xdd, 0x13, 0x54, 0x6b, 0xa5, 0xe3,
+	0x6f, 0x10, 0xd8, 0xc3, 0x54, 0x15, 0x48, 0x7c, 0x18, 0xac, 0xe6, 0xe1, 0x03, 0x42, 0xe0, 0x59,
+	0xba, 0xdc, 0x50, 0xb6, 0x1c, 0x2f, 0x32, 0xca, 0xd8, 0x8a, 0x85, 0x1e, 0xb9, 0x84, 0x70, 0x32,
+	0x9e, 0xce, 0xe9, 0x72, 0x96, 0x31, 0xfa, 0xfd, 0x47, 0xca, 0xe8, 0x2c, 0x1c, 0x90, 0xe7, 0xf0,
+	0x74, 0x91, 0x7e, 0x4d, 0x37, 0x19, 0xa3, 0xe3, 0xe9, 0x17, 0x3a, 0x0b, 0x87, 0xf1, 0x5f, 0x0f,
+	0xc0, 0x3d, 0xc8, 0x70, 0x43, 0x22, 0x78, 0x94, 0x77, 0x5a, 0xa3, 0x34, 0x91, 0xf7, 0xda, 0x7b,
+	0xeb, 0xb1, 0x43, 0x24, 0x2f, 0x21, 0xe0, 0x5b, 0xd4, 0x5c, 0xe0, 0xfb, 0x3a, 0x1a, 0xda, 0xed,
+	0x54, 0x90, 0x5b, 0x80, 0x43, 0xb8, 0xab, 0xa3, 0x87, 0x76, 0xfe, 0xaf, 0x21, 0x97, 0x70, 0x61,
+	0x94, 0xe1, 0x55, 0x34, 0xb0, 0x53, 0x1f, 0xc8, 0x35, 0xf8, 0x9a, 0x9b, 0xfd, 0x0d, 0x2f, 0x6c,
+	0xed, 0xd2, 0xde, 0xc2, 0x9e, 0xee, 0xea, 0xc8, 0xef, 0x2d, 0x5c, 0x8c, 0x5f, 0xc0, 0xd5, 0x67,
+	0x34, 0x27, 0xe1, 0x96, 0xe1, 0x9f, 0x0e, 0x5b, 0x13, 0x37, 0x70, 0x7d, 0x7f, 0x68, 0x1b, 0x25,
+	0x5b, 0x24, 0x6f, 0x60, 0x98, 0x37, 0x9d, 0x7d, 0x9d, 0x27, 0x1f, 0xae, 0x92, 0xe3, 0x27, 0x4e,
+	0x4e, 0x30, 0xdb, 0x13, 0xe4, 0x1d, 0xf8, 0x35, 0xd6, 0x4a, 0xef, 0xac, 0xe4, 0x59, 0xd6, 0x41,
+	0xf1, 0x2b, 0xb8, 0x59, 0x1b, 0xae, 0xcd, 0x84, 0xe7, 0xbf, 0x85, 0x56, 0x9d, 0x2c, 0x9c, 0xcb,
+	0x41, 0xe9, 0x23, 0xdc, 0x9e, 0x03, 0x9c, 0xda, 0x0d, 0x80, 0xee, 0xab, 0xac, 0x2c, 0xac, 0x61,
+	0xc0, 0x02, 0xd7, 0xa4, 0xc5, 0xe4, 0xf1, 0x4f, 0xbf, 0xff, 0x4d, 0xfe, 0x05, 0x00, 0x00, 0xff,
+	0xff, 0x56, 0x5d, 0x5e, 0xc3, 0x5b, 0x02, 0x00, 0x00,
+}
diff --git a/v2/internal/system/system_service.proto b/v2/internal/system/system_service.proto
new file mode 100644
index 0000000..32c0bf8
--- /dev/null
+++ b/v2/internal/system/system_service.proto
@@ -0,0 +1,49 @@
+syntax = "proto2";
+option go_package = "system";
+
+package appengine;
+
+message SystemServiceError {
+  enum ErrorCode {
+    OK = 0;
+    INTERNAL_ERROR = 1;
+    BACKEND_REQUIRED = 2;
+    LIMIT_REACHED = 3;
+  }
+}
+
+message SystemStat {
+  // Instantaneous value of this stat.
+  optional double current = 1;
+
+  // Average over time, if this stat has an instantaneous value.
+  optional double average1m = 3;
+  optional double average10m = 4;
+
+  // Total value, if the stat accumulates over time.
+  optional double total = 2;
+
+  // Rate over time, if this stat accumulates.
+  optional double rate1m = 5;
+  optional double rate10m = 6;
+}
+
+message GetSystemStatsRequest {
+}
+
+message GetSystemStatsResponse {
+  // CPU used by this instance, in mcycles.
+  optional SystemStat cpu = 1;
+
+  // Physical memory (RAM) used by this instance, in megabytes.
+  optional SystemStat memory = 2;
+}
+
+message StartBackgroundRequestRequest {
+}
+
+message StartBackgroundRequestResponse {
+  // Every /_ah/background request will have an X-AppEngine-BackgroundRequest
+  // header, whose value will be equal to this parameter, the request_id.
+  optional string request_id = 1;
+}
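
An illustrative sketch only (again, not part of the upstream diff): reading values out of the generated `GetSystemStatsResponse`/`SystemStat` types defined above. The response here is hand-built with made-up numbers to stand in for what the system service would return; the point is that the proto2 getters are nil-safe, so unset fields and sub-messages simply yield zero values.

```go
package main

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	pb "google.golang.org/appengine/v2/internal/system"
)

func main() {
	// Hypothetical stats response; real values would come from the service.
	stats := &pb.GetSystemStatsResponse{
		Cpu:    &pb.SystemStat{Total: proto.Float64(1234.5)},
		Memory: &pb.SystemStat{Current: proto.Float64(256.0)},
	}

	// GetCpu/GetMemory return the sub-message (or nil), and the scalar
	// getters return 0 when a field is unset.
	fmt.Printf("cpu mcycles total: %.1f\n", stats.GetCpu().GetTotal())
	fmt.Printf("memory MB current: %.1f\n", stats.GetMemory().GetCurrent())
	fmt.Printf("cpu 1m rate (unset): %.1f\n", stats.GetCpu().GetRate1M())
}
```
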
diff --git a/v2/internal/taskqueue/taskqueue_service.pb.go b/v2/internal/taskqueue/taskqueue_service.pb.go
new file mode 100644
index 0000000..b3f0be4
--- /dev/null
+++ b/v2/internal/taskqueue/taskqueue_service.pb.go
@@ -0,0 +1,3149 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/taskqueue/taskqueue_service.proto
+
+package taskqueue
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+import datastore "google.golang.org/appengine/v2/internal/datastore"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type TaskQueueServiceError_ErrorCode int32
+
+const (
+	TaskQueueServiceError_OK                              TaskQueueServiceError_ErrorCode = 0
+	TaskQueueServiceError_UNKNOWN_QUEUE                   TaskQueueServiceError_ErrorCode = 1
+	TaskQueueServiceError_TRANSIENT_ERROR                 TaskQueueServiceError_ErrorCode = 2
+	TaskQueueServiceError_INTERNAL_ERROR                  TaskQueueServiceError_ErrorCode = 3
+	TaskQueueServiceError_TASK_TOO_LARGE                  TaskQueueServiceError_ErrorCode = 4
+	TaskQueueServiceError_INVALID_TASK_NAME               TaskQueueServiceError_ErrorCode = 5
+	TaskQueueServiceError_INVALID_QUEUE_NAME              TaskQueueServiceError_ErrorCode = 6
+	TaskQueueServiceError_INVALID_URL                     TaskQueueServiceError_ErrorCode = 7
+	TaskQueueServiceError_INVALID_QUEUE_RATE              TaskQueueServiceError_ErrorCode = 8
+	TaskQueueServiceError_PERMISSION_DENIED               TaskQueueServiceError_ErrorCode = 9
+	TaskQueueServiceError_TASK_ALREADY_EXISTS             TaskQueueServiceError_ErrorCode = 10
+	TaskQueueServiceError_TOMBSTONED_TASK                 TaskQueueServiceError_ErrorCode = 11
+	TaskQueueServiceError_INVALID_ETA                     TaskQueueServiceError_ErrorCode = 12
+	TaskQueueServiceError_INVALID_REQUEST                 TaskQueueServiceError_ErrorCode = 13
+	TaskQueueServiceError_UNKNOWN_TASK                    TaskQueueServiceError_ErrorCode = 14
+	TaskQueueServiceError_TOMBSTONED_QUEUE                TaskQueueServiceError_ErrorCode = 15
+	TaskQueueServiceError_DUPLICATE_TASK_NAME             TaskQueueServiceError_ErrorCode = 16
+	TaskQueueServiceError_SKIPPED                         TaskQueueServiceError_ErrorCode = 17
+	TaskQueueServiceError_TOO_MANY_TASKS                  TaskQueueServiceError_ErrorCode = 18
+	TaskQueueServiceError_INVALID_PAYLOAD                 TaskQueueServiceError_ErrorCode = 19
+	TaskQueueServiceError_INVALID_RETRY_PARAMETERS        TaskQueueServiceError_ErrorCode = 20
+	TaskQueueServiceError_INVALID_QUEUE_MODE              TaskQueueServiceError_ErrorCode = 21
+	TaskQueueServiceError_ACL_LOOKUP_ERROR                TaskQueueServiceError_ErrorCode = 22
+	TaskQueueServiceError_TRANSACTIONAL_REQUEST_TOO_LARGE TaskQueueServiceError_ErrorCode = 23
+	TaskQueueServiceError_INCORRECT_CREATOR_NAME          TaskQueueServiceError_ErrorCode = 24
+	TaskQueueServiceError_TASK_LEASE_EXPIRED              TaskQueueServiceError_ErrorCode = 25
+	TaskQueueServiceError_QUEUE_PAUSED                    TaskQueueServiceError_ErrorCode = 26
+	TaskQueueServiceError_INVALID_TAG                     TaskQueueServiceError_ErrorCode = 27
+	// Reserved range for the Datastore error codes.
+	// Original Datastore error code is shifted by DATASTORE_ERROR offset.
+	TaskQueueServiceError_DATASTORE_ERROR TaskQueueServiceError_ErrorCode = 10000
+)
+
+var TaskQueueServiceError_ErrorCode_name = map[int32]string{
+	0:     "OK",
+	1:     "UNKNOWN_QUEUE",
+	2:     "TRANSIENT_ERROR",
+	3:     "INTERNAL_ERROR",
+	4:     "TASK_TOO_LARGE",
+	5:     "INVALID_TASK_NAME",
+	6:     "INVALID_QUEUE_NAME",
+	7:     "INVALID_URL",
+	8:     "INVALID_QUEUE_RATE",
+	9:     "PERMISSION_DENIED",
+	10:    "TASK_ALREADY_EXISTS",
+	11:    "TOMBSTONED_TASK",
+	12:    "INVALID_ETA",
+	13:    "INVALID_REQUEST",
+	14:    "UNKNOWN_TASK",
+	15:    "TOMBSTONED_QUEUE",
+	16:    "DUPLICATE_TASK_NAME",
+	17:    "SKIPPED",
+	18:    "TOO_MANY_TASKS",
+	19:    "INVALID_PAYLOAD",
+	20:    "INVALID_RETRY_PARAMETERS",
+	21:    "INVALID_QUEUE_MODE",
+	22:    "ACL_LOOKUP_ERROR",
+	23:    "TRANSACTIONAL_REQUEST_TOO_LARGE",
+	24:    "INCORRECT_CREATOR_NAME",
+	25:    "TASK_LEASE_EXPIRED",
+	26:    "QUEUE_PAUSED",
+	27:    "INVALID_TAG",
+	10000: "DATASTORE_ERROR",
+}
+var TaskQueueServiceError_ErrorCode_value = map[string]int32{
+	"OK":                              0,
+	"UNKNOWN_QUEUE":                   1,
+	"TRANSIENT_ERROR":                 2,
+	"INTERNAL_ERROR":                  3,
+	"TASK_TOO_LARGE":                  4,
+	"INVALID_TASK_NAME":               5,
+	"INVALID_QUEUE_NAME":              6,
+	"INVALID_URL":                     7,
+	"INVALID_QUEUE_RATE":              8,
+	"PERMISSION_DENIED":               9,
+	"TASK_ALREADY_EXISTS":             10,
+	"TOMBSTONED_TASK":                 11,
+	"INVALID_ETA":                     12,
+	"INVALID_REQUEST":                 13,
+	"UNKNOWN_TASK":                    14,
+	"TOMBSTONED_QUEUE":                15,
+	"DUPLICATE_TASK_NAME":             16,
+	"SKIPPED":                         17,
+	"TOO_MANY_TASKS":                  18,
+	"INVALID_PAYLOAD":                 19,
+	"INVALID_RETRY_PARAMETERS":        20,
+	"INVALID_QUEUE_MODE":              21,
+	"ACL_LOOKUP_ERROR":                22,
+	"TRANSACTIONAL_REQUEST_TOO_LARGE": 23,
+	"INCORRECT_CREATOR_NAME":          24,
+	"TASK_LEASE_EXPIRED":              25,
+	"QUEUE_PAUSED":                    26,
+	"INVALID_TAG":                     27,
+	"DATASTORE_ERROR":                 10000,
+}
+
+func (x TaskQueueServiceError_ErrorCode) Enum() *TaskQueueServiceError_ErrorCode {
+	p := new(TaskQueueServiceError_ErrorCode)
+	*p = x
+	return p
+}
+func (x TaskQueueServiceError_ErrorCode) String() string {
+	return proto.EnumName(TaskQueueServiceError_ErrorCode_name, int32(x))
+}
+func (x *TaskQueueServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(TaskQueueServiceError_ErrorCode_value, data, "TaskQueueServiceError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = TaskQueueServiceError_ErrorCode(value)
+	return nil
+}
+func (TaskQueueServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{0, 0}
+}
+
+type TaskQueueMode_Mode int32
+
+const (
+	TaskQueueMode_PUSH TaskQueueMode_Mode = 0
+	TaskQueueMode_PULL TaskQueueMode_Mode = 1
+)
+
+var TaskQueueMode_Mode_name = map[int32]string{
+	0: "PUSH",
+	1: "PULL",
+}
+var TaskQueueMode_Mode_value = map[string]int32{
+	"PUSH": 0,
+	"PULL": 1,
+}
+
+func (x TaskQueueMode_Mode) Enum() *TaskQueueMode_Mode {
+	p := new(TaskQueueMode_Mode)
+	*p = x
+	return p
+}
+func (x TaskQueueMode_Mode) String() string {
+	return proto.EnumName(TaskQueueMode_Mode_name, int32(x))
+}
+func (x *TaskQueueMode_Mode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(TaskQueueMode_Mode_value, data, "TaskQueueMode_Mode")
+	if err != nil {
+		return err
+	}
+	*x = TaskQueueMode_Mode(value)
+	return nil
+}
+func (TaskQueueMode_Mode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{5, 0}
+}
+
+type TaskQueueAddRequest_RequestMethod int32
+
+const (
+	TaskQueueAddRequest_GET    TaskQueueAddRequest_RequestMethod = 1
+	TaskQueueAddRequest_POST   TaskQueueAddRequest_RequestMethod = 2
+	TaskQueueAddRequest_HEAD   TaskQueueAddRequest_RequestMethod = 3
+	TaskQueueAddRequest_PUT    TaskQueueAddRequest_RequestMethod = 4
+	TaskQueueAddRequest_DELETE TaskQueueAddRequest_RequestMethod = 5
+)
+
+var TaskQueueAddRequest_RequestMethod_name = map[int32]string{
+	1: "GET",
+	2: "POST",
+	3: "HEAD",
+	4: "PUT",
+	5: "DELETE",
+}
+var TaskQueueAddRequest_RequestMethod_value = map[string]int32{
+	"GET":    1,
+	"POST":   2,
+	"HEAD":   3,
+	"PUT":    4,
+	"DELETE": 5,
+}
+
+func (x TaskQueueAddRequest_RequestMethod) Enum() *TaskQueueAddRequest_RequestMethod {
+	p := new(TaskQueueAddRequest_RequestMethod)
+	*p = x
+	return p
+}
+func (x TaskQueueAddRequest_RequestMethod) String() string {
+	return proto.EnumName(TaskQueueAddRequest_RequestMethod_name, int32(x))
+}
+func (x *TaskQueueAddRequest_RequestMethod) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(TaskQueueAddRequest_RequestMethod_value, data, "TaskQueueAddRequest_RequestMethod")
+	if err != nil {
+		return err
+	}
+	*x = TaskQueueAddRequest_RequestMethod(value)
+	return nil
+}
+func (TaskQueueAddRequest_RequestMethod) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{6, 0}
+}
+
+type TaskQueueQueryTasksResponse_Task_RequestMethod int32
+
+const (
+	TaskQueueQueryTasksResponse_Task_GET    TaskQueueQueryTasksResponse_Task_RequestMethod = 1
+	TaskQueueQueryTasksResponse_Task_POST   TaskQueueQueryTasksResponse_Task_RequestMethod = 2
+	TaskQueueQueryTasksResponse_Task_HEAD   TaskQueueQueryTasksResponse_Task_RequestMethod = 3
+	TaskQueueQueryTasksResponse_Task_PUT    TaskQueueQueryTasksResponse_Task_RequestMethod = 4
+	TaskQueueQueryTasksResponse_Task_DELETE TaskQueueQueryTasksResponse_Task_RequestMethod = 5
+)
+
+var TaskQueueQueryTasksResponse_Task_RequestMethod_name = map[int32]string{
+	1: "GET",
+	2: "POST",
+	3: "HEAD",
+	4: "PUT",
+	5: "DELETE",
+}
+var TaskQueueQueryTasksResponse_Task_RequestMethod_value = map[string]int32{
+	"GET":    1,
+	"POST":   2,
+	"HEAD":   3,
+	"PUT":    4,
+	"DELETE": 5,
+}
+
+func (x TaskQueueQueryTasksResponse_Task_RequestMethod) Enum() *TaskQueueQueryTasksResponse_Task_RequestMethod {
+	p := new(TaskQueueQueryTasksResponse_Task_RequestMethod)
+	*p = x
+	return p
+}
+func (x TaskQueueQueryTasksResponse_Task_RequestMethod) String() string {
+	return proto.EnumName(TaskQueueQueryTasksResponse_Task_RequestMethod_name, int32(x))
+}
+func (x *TaskQueueQueryTasksResponse_Task_RequestMethod) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(TaskQueueQueryTasksResponse_Task_RequestMethod_value, data, "TaskQueueQueryTasksResponse_Task_RequestMethod")
+	if err != nil {
+		return err
+	}
+	*x = TaskQueueQueryTasksResponse_Task_RequestMethod(value)
+	return nil
+}
+func (TaskQueueQueryTasksResponse_Task_RequestMethod) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{30, 0, 0}
+}
+
+type TaskQueueServiceError struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueServiceError) Reset()         { *m = TaskQueueServiceError{} }
+func (m *TaskQueueServiceError) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueServiceError) ProtoMessage()    {}
+func (*TaskQueueServiceError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{0}
+}
+func (m *TaskQueueServiceError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueServiceError.Unmarshal(m, b)
+}
+func (m *TaskQueueServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueServiceError.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueServiceError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueServiceError.Merge(dst, src)
+}
+func (m *TaskQueueServiceError) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueServiceError.Size(m)
+}
+func (m *TaskQueueServiceError) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueServiceError proto.InternalMessageInfo
+
+type TaskPayload struct {
+	XXX_NoUnkeyedLiteral         struct{} `json:"-"`
+	proto.XXX_InternalExtensions `protobuf_messageset:"1" json:"-"`
+	XXX_unrecognized             []byte `json:"-"`
+	XXX_sizecache                int32  `json:"-"`
+}
+
+func (m *TaskPayload) Reset()         { *m = TaskPayload{} }
+func (m *TaskPayload) String() string { return proto.CompactTextString(m) }
+func (*TaskPayload) ProtoMessage()    {}
+func (*TaskPayload) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{1}
+}
+
+func (m *TaskPayload) MarshalJSON() ([]byte, error) {
+	return proto.MarshalMessageSetJSON(&m.XXX_InternalExtensions)
+}
+func (m *TaskPayload) UnmarshalJSON(buf []byte) error {
+	return proto.UnmarshalMessageSetJSON(buf, &m.XXX_InternalExtensions)
+}
+
+var extRange_TaskPayload = []proto.ExtensionRange{
+	{Start: 10, End: 2147483646},
+}
+
+func (*TaskPayload) ExtensionRangeArray() []proto.ExtensionRange {
+	return extRange_TaskPayload
+}
+func (m *TaskPayload) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskPayload.Unmarshal(m, b)
+}
+func (m *TaskPayload) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskPayload.Marshal(b, m, deterministic)
+}
+func (dst *TaskPayload) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskPayload.Merge(dst, src)
+}
+func (m *TaskPayload) XXX_Size() int {
+	return xxx_messageInfo_TaskPayload.Size(m)
+}
+func (m *TaskPayload) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskPayload.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskPayload proto.InternalMessageInfo
+
+type TaskQueueRetryParameters struct {
+	RetryLimit           *int32   `protobuf:"varint,1,opt,name=retry_limit,json=retryLimit" json:"retry_limit,omitempty"`
+	AgeLimitSec          *int64   `protobuf:"varint,2,opt,name=age_limit_sec,json=ageLimitSec" json:"age_limit_sec,omitempty"`
+	MinBackoffSec        *float64 `protobuf:"fixed64,3,opt,name=min_backoff_sec,json=minBackoffSec,def=0.1" json:"min_backoff_sec,omitempty"`
+	MaxBackoffSec        *float64 `protobuf:"fixed64,4,opt,name=max_backoff_sec,json=maxBackoffSec,def=3600" json:"max_backoff_sec,omitempty"`
+	MaxDoublings         *int32   `protobuf:"varint,5,opt,name=max_doublings,json=maxDoublings,def=16" json:"max_doublings,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueRetryParameters) Reset()         { *m = TaskQueueRetryParameters{} }
+func (m *TaskQueueRetryParameters) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueRetryParameters) ProtoMessage()    {}
+func (*TaskQueueRetryParameters) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{2}
+}
+func (m *TaskQueueRetryParameters) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueRetryParameters.Unmarshal(m, b)
+}
+func (m *TaskQueueRetryParameters) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueRetryParameters.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueRetryParameters) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueRetryParameters.Merge(dst, src)
+}
+func (m *TaskQueueRetryParameters) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueRetryParameters.Size(m)
+}
+func (m *TaskQueueRetryParameters) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueRetryParameters.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueRetryParameters proto.InternalMessageInfo
+
+const Default_TaskQueueRetryParameters_MinBackoffSec float64 = 0.1
+const Default_TaskQueueRetryParameters_MaxBackoffSec float64 = 3600
+const Default_TaskQueueRetryParameters_MaxDoublings int32 = 16
+
+func (m *TaskQueueRetryParameters) GetRetryLimit() int32 {
+	if m != nil && m.RetryLimit != nil {
+		return *m.RetryLimit
+	}
+	return 0
+}
+
+func (m *TaskQueueRetryParameters) GetAgeLimitSec() int64 {
+	if m != nil && m.AgeLimitSec != nil {
+		return *m.AgeLimitSec
+	}
+	return 0
+}
+
+func (m *TaskQueueRetryParameters) GetMinBackoffSec() float64 {
+	if m != nil && m.MinBackoffSec != nil {
+		return *m.MinBackoffSec
+	}
+	return Default_TaskQueueRetryParameters_MinBackoffSec
+}
+
+func (m *TaskQueueRetryParameters) GetMaxBackoffSec() float64 {
+	if m != nil && m.MaxBackoffSec != nil {
+		return *m.MaxBackoffSec
+	}
+	return Default_TaskQueueRetryParameters_MaxBackoffSec
+}
+
+func (m *TaskQueueRetryParameters) GetMaxDoublings() int32 {
+	if m != nil && m.MaxDoublings != nil {
+		return *m.MaxDoublings
+	}
+	return Default_TaskQueueRetryParameters_MaxDoublings
+}
+
+type TaskQueueAcl struct {
+	UserEmail            [][]byte `protobuf:"bytes,1,rep,name=user_email,json=userEmail" json:"user_email,omitempty"`
+	WriterEmail          [][]byte `protobuf:"bytes,2,rep,name=writer_email,json=writerEmail" json:"writer_email,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueAcl) Reset()         { *m = TaskQueueAcl{} }
+func (m *TaskQueueAcl) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueAcl) ProtoMessage()    {}
+func (*TaskQueueAcl) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{3}
+}
+func (m *TaskQueueAcl) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueAcl.Unmarshal(m, b)
+}
+func (m *TaskQueueAcl) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueAcl.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueAcl) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueAcl.Merge(dst, src)
+}
+func (m *TaskQueueAcl) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueAcl.Size(m)
+}
+func (m *TaskQueueAcl) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueAcl.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueAcl proto.InternalMessageInfo
+
+func (m *TaskQueueAcl) GetUserEmail() [][]byte {
+	if m != nil {
+		return m.UserEmail
+	}
+	return nil
+}
+
+func (m *TaskQueueAcl) GetWriterEmail() [][]byte {
+	if m != nil {
+		return m.WriterEmail
+	}
+	return nil
+}
+
+type TaskQueueHttpHeader struct {
+	Key                  []byte   `protobuf:"bytes,1,req,name=key" json:"key,omitempty"`
+	Value                []byte   `protobuf:"bytes,2,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueHttpHeader) Reset()         { *m = TaskQueueHttpHeader{} }
+func (m *TaskQueueHttpHeader) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueHttpHeader) ProtoMessage()    {}
+func (*TaskQueueHttpHeader) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{4}
+}
+func (m *TaskQueueHttpHeader) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueHttpHeader.Unmarshal(m, b)
+}
+func (m *TaskQueueHttpHeader) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueHttpHeader.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueHttpHeader) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueHttpHeader.Merge(dst, src)
+}
+func (m *TaskQueueHttpHeader) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueHttpHeader.Size(m)
+}
+func (m *TaskQueueHttpHeader) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueHttpHeader.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueHttpHeader proto.InternalMessageInfo
+
+func (m *TaskQueueHttpHeader) GetKey() []byte {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *TaskQueueHttpHeader) GetValue() []byte {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+type TaskQueueMode struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueMode) Reset()         { *m = TaskQueueMode{} }
+func (m *TaskQueueMode) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueMode) ProtoMessage()    {}
+func (*TaskQueueMode) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{5}
+}
+func (m *TaskQueueMode) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueMode.Unmarshal(m, b)
+}
+func (m *TaskQueueMode) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueMode.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueMode) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueMode.Merge(dst, src)
+}
+func (m *TaskQueueMode) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueMode.Size(m)
+}
+func (m *TaskQueueMode) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueMode.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueMode proto.InternalMessageInfo
+
+type TaskQueueAddRequest struct {
+	QueueName            []byte                             `protobuf:"bytes,1,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	TaskName             []byte                             `protobuf:"bytes,2,req,name=task_name,json=taskName" json:"task_name,omitempty"`
+	EtaUsec              *int64                             `protobuf:"varint,3,req,name=eta_usec,json=etaUsec" json:"eta_usec,omitempty"`
+	Method               *TaskQueueAddRequest_RequestMethod `protobuf:"varint,5,opt,name=method,enum=appengine.TaskQueueAddRequest_RequestMethod,def=2" json:"method,omitempty"`
+	Url                  []byte                             `protobuf:"bytes,4,opt,name=url" json:"url,omitempty"`
+	Header               []*TaskQueueAddRequest_Header      `protobuf:"group,6,rep,name=Header,json=header" json:"header,omitempty"`
+	Body                 []byte                             `protobuf:"bytes,9,opt,name=body" json:"body,omitempty"`
+	Transaction          *datastore.Transaction             `protobuf:"bytes,10,opt,name=transaction" json:"transaction,omitempty"`
+	AppId                []byte                             `protobuf:"bytes,11,opt,name=app_id,json=appId" json:"app_id,omitempty"`
+	Crontimetable        *TaskQueueAddRequest_CronTimetable `protobuf:"group,12,opt,name=CronTimetable,json=crontimetable" json:"crontimetable,omitempty"`
+	Description          []byte                             `protobuf:"bytes,15,opt,name=description" json:"description,omitempty"`
+	Payload              *TaskPayload                       `protobuf:"bytes,16,opt,name=payload" json:"payload,omitempty"`
+	RetryParameters      *TaskQueueRetryParameters          `protobuf:"bytes,17,opt,name=retry_parameters,json=retryParameters" json:"retry_parameters,omitempty"`
+	Mode                 *TaskQueueMode_Mode                `protobuf:"varint,18,opt,name=mode,enum=appengine.TaskQueueMode_Mode,def=0" json:"mode,omitempty"`
+	Tag                  []byte                             `protobuf:"bytes,19,opt,name=tag" json:"tag,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                           `json:"-"`
+	XXX_unrecognized     []byte                             `json:"-"`
+	XXX_sizecache        int32                              `json:"-"`
+}
+
+func (m *TaskQueueAddRequest) Reset()         { *m = TaskQueueAddRequest{} }
+func (m *TaskQueueAddRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueAddRequest) ProtoMessage()    {}
+func (*TaskQueueAddRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{6}
+}
+func (m *TaskQueueAddRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueAddRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueAddRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueAddRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueAddRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueAddRequest.Merge(dst, src)
+}
+func (m *TaskQueueAddRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueAddRequest.Size(m)
+}
+func (m *TaskQueueAddRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueAddRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueAddRequest proto.InternalMessageInfo
+
+const Default_TaskQueueAddRequest_Method TaskQueueAddRequest_RequestMethod = TaskQueueAddRequest_POST
+const Default_TaskQueueAddRequest_Mode TaskQueueMode_Mode = TaskQueueMode_PUSH
+
+func (m *TaskQueueAddRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetTaskName() []byte {
+	if m != nil {
+		return m.TaskName
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetEtaUsec() int64 {
+	if m != nil && m.EtaUsec != nil {
+		return *m.EtaUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueAddRequest) GetMethod() TaskQueueAddRequest_RequestMethod {
+	if m != nil && m.Method != nil {
+		return *m.Method
+	}
+	return Default_TaskQueueAddRequest_Method
+}
+
+func (m *TaskQueueAddRequest) GetUrl() []byte {
+	if m != nil {
+		return m.Url
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetHeader() []*TaskQueueAddRequest_Header {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetBody() []byte {
+	if m != nil {
+		return m.Body
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetTransaction() *datastore.Transaction {
+	if m != nil {
+		return m.Transaction
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetCrontimetable() *TaskQueueAddRequest_CronTimetable {
+	if m != nil {
+		return m.Crontimetable
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetDescription() []byte {
+	if m != nil {
+		return m.Description
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetPayload() *TaskPayload {
+	if m != nil {
+		return m.Payload
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetRetryParameters() *TaskQueueRetryParameters {
+	if m != nil {
+		return m.RetryParameters
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest) GetMode() TaskQueueMode_Mode {
+	if m != nil && m.Mode != nil {
+		return *m.Mode
+	}
+	return Default_TaskQueueAddRequest_Mode
+}
+
+func (m *TaskQueueAddRequest) GetTag() []byte {
+	if m != nil {
+		return m.Tag
+	}
+	return nil
+}
+
+type TaskQueueAddRequest_Header struct {
+	Key                  []byte   `protobuf:"bytes,7,req,name=key" json:"key,omitempty"`
+	Value                []byte   `protobuf:"bytes,8,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueAddRequest_Header) Reset()         { *m = TaskQueueAddRequest_Header{} }
+func (m *TaskQueueAddRequest_Header) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueAddRequest_Header) ProtoMessage()    {}
+func (*TaskQueueAddRequest_Header) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{6, 0}
+}
+func (m *TaskQueueAddRequest_Header) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueAddRequest_Header.Unmarshal(m, b)
+}
+func (m *TaskQueueAddRequest_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueAddRequest_Header.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueAddRequest_Header) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueAddRequest_Header.Merge(dst, src)
+}
+func (m *TaskQueueAddRequest_Header) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueAddRequest_Header.Size(m)
+}
+func (m *TaskQueueAddRequest_Header) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueAddRequest_Header.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueAddRequest_Header proto.InternalMessageInfo
+
+func (m *TaskQueueAddRequest_Header) GetKey() []byte {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest_Header) GetValue() []byte {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+type TaskQueueAddRequest_CronTimetable struct {
+	Schedule             []byte   `protobuf:"bytes,13,req,name=schedule" json:"schedule,omitempty"`
+	Timezone             []byte   `protobuf:"bytes,14,req,name=timezone" json:"timezone,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueAddRequest_CronTimetable) Reset()         { *m = TaskQueueAddRequest_CronTimetable{} }
+func (m *TaskQueueAddRequest_CronTimetable) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueAddRequest_CronTimetable) ProtoMessage()    {}
+func (*TaskQueueAddRequest_CronTimetable) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{6, 1}
+}
+func (m *TaskQueueAddRequest_CronTimetable) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueAddRequest_CronTimetable.Unmarshal(m, b)
+}
+func (m *TaskQueueAddRequest_CronTimetable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueAddRequest_CronTimetable.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueAddRequest_CronTimetable) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueAddRequest_CronTimetable.Merge(dst, src)
+}
+func (m *TaskQueueAddRequest_CronTimetable) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueAddRequest_CronTimetable.Size(m)
+}
+func (m *TaskQueueAddRequest_CronTimetable) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueAddRequest_CronTimetable.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueAddRequest_CronTimetable proto.InternalMessageInfo
+
+func (m *TaskQueueAddRequest_CronTimetable) GetSchedule() []byte {
+	if m != nil {
+		return m.Schedule
+	}
+	return nil
+}
+
+func (m *TaskQueueAddRequest_CronTimetable) GetTimezone() []byte {
+	if m != nil {
+		return m.Timezone
+	}
+	return nil
+}
+
+type TaskQueueAddResponse struct {
+	ChosenTaskName       []byte   `protobuf:"bytes,1,opt,name=chosen_task_name,json=chosenTaskName" json:"chosen_task_name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueAddResponse) Reset()         { *m = TaskQueueAddResponse{} }
+func (m *TaskQueueAddResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueAddResponse) ProtoMessage()    {}
+func (*TaskQueueAddResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{7}
+}
+func (m *TaskQueueAddResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueAddResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueAddResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueAddResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueAddResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueAddResponse.Merge(dst, src)
+}
+func (m *TaskQueueAddResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueAddResponse.Size(m)
+}
+func (m *TaskQueueAddResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueAddResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueAddResponse proto.InternalMessageInfo
+
+func (m *TaskQueueAddResponse) GetChosenTaskName() []byte {
+	if m != nil {
+		return m.ChosenTaskName
+	}
+	return nil
+}
+
+type TaskQueueBulkAddRequest struct {
+	AddRequest           []*TaskQueueAddRequest `protobuf:"bytes,1,rep,name=add_request,json=addRequest" json:"add_request,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}               `json:"-"`
+	XXX_unrecognized     []byte                 `json:"-"`
+	XXX_sizecache        int32                  `json:"-"`
+}
+
+func (m *TaskQueueBulkAddRequest) Reset()         { *m = TaskQueueBulkAddRequest{} }
+func (m *TaskQueueBulkAddRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueBulkAddRequest) ProtoMessage()    {}
+func (*TaskQueueBulkAddRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{8}
+}
+func (m *TaskQueueBulkAddRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueBulkAddRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueBulkAddRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueBulkAddRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueBulkAddRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueBulkAddRequest.Merge(dst, src)
+}
+func (m *TaskQueueBulkAddRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueBulkAddRequest.Size(m)
+}
+func (m *TaskQueueBulkAddRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueBulkAddRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueBulkAddRequest proto.InternalMessageInfo
+
+func (m *TaskQueueBulkAddRequest) GetAddRequest() []*TaskQueueAddRequest {
+	if m != nil {
+		return m.AddRequest
+	}
+	return nil
+}
+
+type TaskQueueBulkAddResponse struct {
+	Taskresult           []*TaskQueueBulkAddResponse_TaskResult `protobuf:"group,1,rep,name=TaskResult,json=taskresult" json:"taskresult,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                               `json:"-"`
+	XXX_unrecognized     []byte                                 `json:"-"`
+	XXX_sizecache        int32                                  `json:"-"`
+}
+
+func (m *TaskQueueBulkAddResponse) Reset()         { *m = TaskQueueBulkAddResponse{} }
+func (m *TaskQueueBulkAddResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueBulkAddResponse) ProtoMessage()    {}
+func (*TaskQueueBulkAddResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{9}
+}
+func (m *TaskQueueBulkAddResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueBulkAddResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueBulkAddResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueBulkAddResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueBulkAddResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueBulkAddResponse.Merge(dst, src)
+}
+func (m *TaskQueueBulkAddResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueBulkAddResponse.Size(m)
+}
+func (m *TaskQueueBulkAddResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueBulkAddResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueBulkAddResponse proto.InternalMessageInfo
+
+func (m *TaskQueueBulkAddResponse) GetTaskresult() []*TaskQueueBulkAddResponse_TaskResult {
+	if m != nil {
+		return m.Taskresult
+	}
+	return nil
+}
+
+type TaskQueueBulkAddResponse_TaskResult struct {
+	Result               *TaskQueueServiceError_ErrorCode `protobuf:"varint,2,req,name=result,enum=appengine.TaskQueueServiceError_ErrorCode" json:"result,omitempty"`
+	ChosenTaskName       []byte                           `protobuf:"bytes,3,opt,name=chosen_task_name,json=chosenTaskName" json:"chosen_task_name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                         `json:"-"`
+	XXX_unrecognized     []byte                           `json:"-"`
+	XXX_sizecache        int32                            `json:"-"`
+}
+
+func (m *TaskQueueBulkAddResponse_TaskResult) Reset()         { *m = TaskQueueBulkAddResponse_TaskResult{} }
+func (m *TaskQueueBulkAddResponse_TaskResult) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueBulkAddResponse_TaskResult) ProtoMessage()    {}
+func (*TaskQueueBulkAddResponse_TaskResult) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{9, 0}
+}
+func (m *TaskQueueBulkAddResponse_TaskResult) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueBulkAddResponse_TaskResult.Unmarshal(m, b)
+}
+func (m *TaskQueueBulkAddResponse_TaskResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueBulkAddResponse_TaskResult.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueBulkAddResponse_TaskResult) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueBulkAddResponse_TaskResult.Merge(dst, src)
+}
+func (m *TaskQueueBulkAddResponse_TaskResult) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueBulkAddResponse_TaskResult.Size(m)
+}
+func (m *TaskQueueBulkAddResponse_TaskResult) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueBulkAddResponse_TaskResult.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueBulkAddResponse_TaskResult proto.InternalMessageInfo
+
+func (m *TaskQueueBulkAddResponse_TaskResult) GetResult() TaskQueueServiceError_ErrorCode {
+	if m != nil && m.Result != nil {
+		return *m.Result
+	}
+	return TaskQueueServiceError_OK
+}
+
+func (m *TaskQueueBulkAddResponse_TaskResult) GetChosenTaskName() []byte {
+	if m != nil {
+		return m.ChosenTaskName
+	}
+	return nil
+}
+
+type TaskQueueDeleteRequest struct {
+	QueueName            []byte   `protobuf:"bytes,1,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	TaskName             [][]byte `protobuf:"bytes,2,rep,name=task_name,json=taskName" json:"task_name,omitempty"`
+	AppId                []byte   `protobuf:"bytes,3,opt,name=app_id,json=appId" json:"app_id,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueDeleteRequest) Reset()         { *m = TaskQueueDeleteRequest{} }
+func (m *TaskQueueDeleteRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueDeleteRequest) ProtoMessage()    {}
+func (*TaskQueueDeleteRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{10}
+}
+func (m *TaskQueueDeleteRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueDeleteRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueDeleteRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueDeleteRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueDeleteRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueDeleteRequest.Merge(dst, src)
+}
+func (m *TaskQueueDeleteRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueDeleteRequest.Size(m)
+}
+func (m *TaskQueueDeleteRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueDeleteRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueDeleteRequest proto.InternalMessageInfo
+
+func (m *TaskQueueDeleteRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueueDeleteRequest) GetTaskName() [][]byte {
+	if m != nil {
+		return m.TaskName
+	}
+	return nil
+}
+
+func (m *TaskQueueDeleteRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+type TaskQueueDeleteResponse struct {
+	Result               []TaskQueueServiceError_ErrorCode `protobuf:"varint,3,rep,name=result,enum=appengine.TaskQueueServiceError_ErrorCode" json:"result,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                          `json:"-"`
+	XXX_unrecognized     []byte                            `json:"-"`
+	XXX_sizecache        int32                             `json:"-"`
+}
+
+func (m *TaskQueueDeleteResponse) Reset()         { *m = TaskQueueDeleteResponse{} }
+func (m *TaskQueueDeleteResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueDeleteResponse) ProtoMessage()    {}
+func (*TaskQueueDeleteResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{11}
+}
+func (m *TaskQueueDeleteResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueDeleteResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueDeleteResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueDeleteResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueDeleteResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueDeleteResponse.Merge(dst, src)
+}
+func (m *TaskQueueDeleteResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueDeleteResponse.Size(m)
+}
+func (m *TaskQueueDeleteResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueDeleteResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueDeleteResponse proto.InternalMessageInfo
+
+func (m *TaskQueueDeleteResponse) GetResult() []TaskQueueServiceError_ErrorCode {
+	if m != nil {
+		return m.Result
+	}
+	return nil
+}
+
+type TaskQueueForceRunRequest struct {
+	AppId                []byte   `protobuf:"bytes,1,opt,name=app_id,json=appId" json:"app_id,omitempty"`
+	QueueName            []byte   `protobuf:"bytes,2,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	TaskName             []byte   `protobuf:"bytes,3,req,name=task_name,json=taskName" json:"task_name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueForceRunRequest) Reset()         { *m = TaskQueueForceRunRequest{} }
+func (m *TaskQueueForceRunRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueForceRunRequest) ProtoMessage()    {}
+func (*TaskQueueForceRunRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{12}
+}
+func (m *TaskQueueForceRunRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueForceRunRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueForceRunRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueForceRunRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueForceRunRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueForceRunRequest.Merge(dst, src)
+}
+func (m *TaskQueueForceRunRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueForceRunRequest.Size(m)
+}
+func (m *TaskQueueForceRunRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueForceRunRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueForceRunRequest proto.InternalMessageInfo
+
+func (m *TaskQueueForceRunRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueueForceRunRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueueForceRunRequest) GetTaskName() []byte {
+	if m != nil {
+		return m.TaskName
+	}
+	return nil
+}
+
+type TaskQueueForceRunResponse struct {
+	Result               *TaskQueueServiceError_ErrorCode `protobuf:"varint,3,req,name=result,enum=appengine.TaskQueueServiceError_ErrorCode" json:"result,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                         `json:"-"`
+	XXX_unrecognized     []byte                           `json:"-"`
+	XXX_sizecache        int32                            `json:"-"`
+}
+
+func (m *TaskQueueForceRunResponse) Reset()         { *m = TaskQueueForceRunResponse{} }
+func (m *TaskQueueForceRunResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueForceRunResponse) ProtoMessage()    {}
+func (*TaskQueueForceRunResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{13}
+}
+func (m *TaskQueueForceRunResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueForceRunResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueForceRunResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueForceRunResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueForceRunResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueForceRunResponse.Merge(dst, src)
+}
+func (m *TaskQueueForceRunResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueForceRunResponse.Size(m)
+}
+func (m *TaskQueueForceRunResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueForceRunResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueForceRunResponse proto.InternalMessageInfo
+
+func (m *TaskQueueForceRunResponse) GetResult() TaskQueueServiceError_ErrorCode {
+	if m != nil && m.Result != nil {
+		return *m.Result
+	}
+	return TaskQueueServiceError_OK
+}
+
+type TaskQueueUpdateQueueRequest struct {
+	AppId                 []byte                    `protobuf:"bytes,1,opt,name=app_id,json=appId" json:"app_id,omitempty"`
+	QueueName             []byte                    `protobuf:"bytes,2,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	BucketRefillPerSecond *float64                  `protobuf:"fixed64,3,req,name=bucket_refill_per_second,json=bucketRefillPerSecond" json:"bucket_refill_per_second,omitempty"`
+	BucketCapacity        *int32                    `protobuf:"varint,4,req,name=bucket_capacity,json=bucketCapacity" json:"bucket_capacity,omitempty"`
+	UserSpecifiedRate     *string                   `protobuf:"bytes,5,opt,name=user_specified_rate,json=userSpecifiedRate" json:"user_specified_rate,omitempty"`
+	RetryParameters       *TaskQueueRetryParameters `protobuf:"bytes,6,opt,name=retry_parameters,json=retryParameters" json:"retry_parameters,omitempty"`
+	MaxConcurrentRequests *int32                    `protobuf:"varint,7,opt,name=max_concurrent_requests,json=maxConcurrentRequests" json:"max_concurrent_requests,omitempty"`
+	Mode                  *TaskQueueMode_Mode       `protobuf:"varint,8,opt,name=mode,enum=appengine.TaskQueueMode_Mode,def=0" json:"mode,omitempty"`
+	Acl                   *TaskQueueAcl             `protobuf:"bytes,9,opt,name=acl" json:"acl,omitempty"`
+	HeaderOverride        []*TaskQueueHttpHeader    `protobuf:"bytes,10,rep,name=header_override,json=headerOverride" json:"header_override,omitempty"`
+	XXX_NoUnkeyedLiteral  struct{}                  `json:"-"`
+	XXX_unrecognized      []byte                    `json:"-"`
+	XXX_sizecache         int32                     `json:"-"`
+}
+
+func (m *TaskQueueUpdateQueueRequest) Reset()         { *m = TaskQueueUpdateQueueRequest{} }
+func (m *TaskQueueUpdateQueueRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueUpdateQueueRequest) ProtoMessage()    {}
+func (*TaskQueueUpdateQueueRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{14}
+}
+func (m *TaskQueueUpdateQueueRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueUpdateQueueRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueUpdateQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueUpdateQueueRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueUpdateQueueRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueUpdateQueueRequest.Merge(dst, src)
+}
+func (m *TaskQueueUpdateQueueRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueUpdateQueueRequest.Size(m)
+}
+func (m *TaskQueueUpdateQueueRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueUpdateQueueRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueUpdateQueueRequest proto.InternalMessageInfo
+
+const Default_TaskQueueUpdateQueueRequest_Mode TaskQueueMode_Mode = TaskQueueMode_PUSH
+
+func (m *TaskQueueUpdateQueueRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueueUpdateQueueRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueueUpdateQueueRequest) GetBucketRefillPerSecond() float64 {
+	if m != nil && m.BucketRefillPerSecond != nil {
+		return *m.BucketRefillPerSecond
+	}
+	return 0
+}
+
+func (m *TaskQueueUpdateQueueRequest) GetBucketCapacity() int32 {
+	if m != nil && m.BucketCapacity != nil {
+		return *m.BucketCapacity
+	}
+	return 0
+}
+
+func (m *TaskQueueUpdateQueueRequest) GetUserSpecifiedRate() string {
+	if m != nil && m.UserSpecifiedRate != nil {
+		return *m.UserSpecifiedRate
+	}
+	return ""
+}
+
+func (m *TaskQueueUpdateQueueRequest) GetRetryParameters() *TaskQueueRetryParameters {
+	if m != nil {
+		return m.RetryParameters
+	}
+	return nil
+}
+
+func (m *TaskQueueUpdateQueueRequest) GetMaxConcurrentRequests() int32 {
+	if m != nil && m.MaxConcurrentRequests != nil {
+		return *m.MaxConcurrentRequests
+	}
+	return 0
+}
+
+func (m *TaskQueueUpdateQueueRequest) GetMode() TaskQueueMode_Mode {
+	if m != nil && m.Mode != nil {
+		return *m.Mode
+	}
+	return Default_TaskQueueUpdateQueueRequest_Mode
+}
+
+func (m *TaskQueueUpdateQueueRequest) GetAcl() *TaskQueueAcl {
+	if m != nil {
+		return m.Acl
+	}
+	return nil
+}
+
+func (m *TaskQueueUpdateQueueRequest) GetHeaderOverride() []*TaskQueueHttpHeader {
+	if m != nil {
+		return m.HeaderOverride
+	}
+	return nil
+}
+
+type TaskQueueUpdateQueueResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueUpdateQueueResponse) Reset()         { *m = TaskQueueUpdateQueueResponse{} }
+func (m *TaskQueueUpdateQueueResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueUpdateQueueResponse) ProtoMessage()    {}
+func (*TaskQueueUpdateQueueResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{15}
+}
+func (m *TaskQueueUpdateQueueResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueUpdateQueueResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueUpdateQueueResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueUpdateQueueResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueUpdateQueueResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueUpdateQueueResponse.Merge(dst, src)
+}
+func (m *TaskQueueUpdateQueueResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueUpdateQueueResponse.Size(m)
+}
+func (m *TaskQueueUpdateQueueResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueUpdateQueueResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueUpdateQueueResponse proto.InternalMessageInfo
+
+type TaskQueueFetchQueuesRequest struct {
+	AppId                []byte   `protobuf:"bytes,1,opt,name=app_id,json=appId" json:"app_id,omitempty"`
+	MaxRows              *int32   `protobuf:"varint,2,req,name=max_rows,json=maxRows" json:"max_rows,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueFetchQueuesRequest) Reset()         { *m = TaskQueueFetchQueuesRequest{} }
+func (m *TaskQueueFetchQueuesRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueFetchQueuesRequest) ProtoMessage()    {}
+func (*TaskQueueFetchQueuesRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{16}
+}
+func (m *TaskQueueFetchQueuesRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueFetchQueuesRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueFetchQueuesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueFetchQueuesRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueFetchQueuesRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueFetchQueuesRequest.Merge(dst, src)
+}
+func (m *TaskQueueFetchQueuesRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueFetchQueuesRequest.Size(m)
+}
+func (m *TaskQueueFetchQueuesRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueFetchQueuesRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueFetchQueuesRequest proto.InternalMessageInfo
+
+func (m *TaskQueueFetchQueuesRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueueFetchQueuesRequest) GetMaxRows() int32 {
+	if m != nil && m.MaxRows != nil {
+		return *m.MaxRows
+	}
+	return 0
+}
+
+type TaskQueueFetchQueuesResponse struct {
+	Queue                []*TaskQueueFetchQueuesResponse_Queue `protobuf:"group,1,rep,name=Queue,json=queue" json:"queue,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                              `json:"-"`
+	XXX_unrecognized     []byte                                `json:"-"`
+	XXX_sizecache        int32                                 `json:"-"`
+}
+
+func (m *TaskQueueFetchQueuesResponse) Reset()         { *m = TaskQueueFetchQueuesResponse{} }
+func (m *TaskQueueFetchQueuesResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueFetchQueuesResponse) ProtoMessage()    {}
+func (*TaskQueueFetchQueuesResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{17}
+}
+func (m *TaskQueueFetchQueuesResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueFetchQueuesResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueFetchQueuesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueFetchQueuesResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueFetchQueuesResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueFetchQueuesResponse.Merge(dst, src)
+}
+func (m *TaskQueueFetchQueuesResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueFetchQueuesResponse.Size(m)
+}
+func (m *TaskQueueFetchQueuesResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueFetchQueuesResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueFetchQueuesResponse proto.InternalMessageInfo
+
+func (m *TaskQueueFetchQueuesResponse) GetQueue() []*TaskQueueFetchQueuesResponse_Queue {
+	if m != nil {
+		return m.Queue
+	}
+	return nil
+}
+
+type TaskQueueFetchQueuesResponse_Queue struct {
+	QueueName             []byte                    `protobuf:"bytes,2,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	BucketRefillPerSecond *float64                  `protobuf:"fixed64,3,req,name=bucket_refill_per_second,json=bucketRefillPerSecond" json:"bucket_refill_per_second,omitempty"`
+	BucketCapacity        *float64                  `protobuf:"fixed64,4,req,name=bucket_capacity,json=bucketCapacity" json:"bucket_capacity,omitempty"`
+	UserSpecifiedRate     *string                   `protobuf:"bytes,5,opt,name=user_specified_rate,json=userSpecifiedRate" json:"user_specified_rate,omitempty"`
+	Paused                *bool                     `protobuf:"varint,6,req,name=paused,def=0" json:"paused,omitempty"`
+	RetryParameters       *TaskQueueRetryParameters `protobuf:"bytes,7,opt,name=retry_parameters,json=retryParameters" json:"retry_parameters,omitempty"`
+	MaxConcurrentRequests *int32                    `protobuf:"varint,8,opt,name=max_concurrent_requests,json=maxConcurrentRequests" json:"max_concurrent_requests,omitempty"`
+	Mode                  *TaskQueueMode_Mode       `protobuf:"varint,9,opt,name=mode,enum=appengine.TaskQueueMode_Mode,def=0" json:"mode,omitempty"`
+	Acl                   *TaskQueueAcl             `protobuf:"bytes,10,opt,name=acl" json:"acl,omitempty"`
+	HeaderOverride        []*TaskQueueHttpHeader    `protobuf:"bytes,11,rep,name=header_override,json=headerOverride" json:"header_override,omitempty"`
+	CreatorName           *string                   `protobuf:"bytes,12,opt,name=creator_name,json=creatorName,def=apphosting" json:"creator_name,omitempty"`
+	XXX_NoUnkeyedLiteral  struct{}                  `json:"-"`
+	XXX_unrecognized      []byte                    `json:"-"`
+	XXX_sizecache         int32                     `json:"-"`
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) Reset()         { *m = TaskQueueFetchQueuesResponse_Queue{} }
+func (m *TaskQueueFetchQueuesResponse_Queue) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueFetchQueuesResponse_Queue) ProtoMessage()    {}
+func (*TaskQueueFetchQueuesResponse_Queue) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{17, 0}
+}
+func (m *TaskQueueFetchQueuesResponse_Queue) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueFetchQueuesResponse_Queue.Unmarshal(m, b)
+}
+func (m *TaskQueueFetchQueuesResponse_Queue) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueFetchQueuesResponse_Queue.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueFetchQueuesResponse_Queue) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueFetchQueuesResponse_Queue.Merge(dst, src)
+}
+func (m *TaskQueueFetchQueuesResponse_Queue) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueFetchQueuesResponse_Queue.Size(m)
+}
+func (m *TaskQueueFetchQueuesResponse_Queue) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueFetchQueuesResponse_Queue.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueFetchQueuesResponse_Queue proto.InternalMessageInfo
+
+const Default_TaskQueueFetchQueuesResponse_Queue_Paused bool = false
+const Default_TaskQueueFetchQueuesResponse_Queue_Mode TaskQueueMode_Mode = TaskQueueMode_PUSH
+const Default_TaskQueueFetchQueuesResponse_Queue_CreatorName string = "apphosting"
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetBucketRefillPerSecond() float64 {
+	if m != nil && m.BucketRefillPerSecond != nil {
+		return *m.BucketRefillPerSecond
+	}
+	return 0
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetBucketCapacity() float64 {
+	if m != nil && m.BucketCapacity != nil {
+		return *m.BucketCapacity
+	}
+	return 0
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetUserSpecifiedRate() string {
+	if m != nil && m.UserSpecifiedRate != nil {
+		return *m.UserSpecifiedRate
+	}
+	return ""
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetPaused() bool {
+	if m != nil && m.Paused != nil {
+		return *m.Paused
+	}
+	return Default_TaskQueueFetchQueuesResponse_Queue_Paused
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetRetryParameters() *TaskQueueRetryParameters {
+	if m != nil {
+		return m.RetryParameters
+	}
+	return nil
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetMaxConcurrentRequests() int32 {
+	if m != nil && m.MaxConcurrentRequests != nil {
+		return *m.MaxConcurrentRequests
+	}
+	return 0
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetMode() TaskQueueMode_Mode {
+	if m != nil && m.Mode != nil {
+		return *m.Mode
+	}
+	return Default_TaskQueueFetchQueuesResponse_Queue_Mode
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetAcl() *TaskQueueAcl {
+	if m != nil {
+		return m.Acl
+	}
+	return nil
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetHeaderOverride() []*TaskQueueHttpHeader {
+	if m != nil {
+		return m.HeaderOverride
+	}
+	return nil
+}
+
+func (m *TaskQueueFetchQueuesResponse_Queue) GetCreatorName() string {
+	if m != nil && m.CreatorName != nil {
+		return *m.CreatorName
+	}
+	return Default_TaskQueueFetchQueuesResponse_Queue_CreatorName
+}
+
+type TaskQueueFetchQueueStatsRequest struct {
+	AppId                []byte   `protobuf:"bytes,1,opt,name=app_id,json=appId" json:"app_id,omitempty"`
+	QueueName            [][]byte `protobuf:"bytes,2,rep,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	MaxNumTasks          *int32   `protobuf:"varint,3,opt,name=max_num_tasks,json=maxNumTasks,def=0" json:"max_num_tasks,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueFetchQueueStatsRequest) Reset()         { *m = TaskQueueFetchQueueStatsRequest{} }
+func (m *TaskQueueFetchQueueStatsRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueFetchQueueStatsRequest) ProtoMessage()    {}
+func (*TaskQueueFetchQueueStatsRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{18}
+}
+func (m *TaskQueueFetchQueueStatsRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueFetchQueueStatsRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueFetchQueueStatsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueFetchQueueStatsRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueFetchQueueStatsRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueFetchQueueStatsRequest.Merge(dst, src)
+}
+func (m *TaskQueueFetchQueueStatsRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueFetchQueueStatsRequest.Size(m)
+}
+func (m *TaskQueueFetchQueueStatsRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueFetchQueueStatsRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueFetchQueueStatsRequest proto.InternalMessageInfo
+
+const Default_TaskQueueFetchQueueStatsRequest_MaxNumTasks int32 = 0
+
+func (m *TaskQueueFetchQueueStatsRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueueFetchQueueStatsRequest) GetQueueName() [][]byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueueFetchQueueStatsRequest) GetMaxNumTasks() int32 {
+	if m != nil && m.MaxNumTasks != nil {
+		return *m.MaxNumTasks
+	}
+	return Default_TaskQueueFetchQueueStatsRequest_MaxNumTasks
+}
+
+type TaskQueueScannerQueueInfo struct {
+	ExecutedLastMinute      *int64   `protobuf:"varint,1,req,name=executed_last_minute,json=executedLastMinute" json:"executed_last_minute,omitempty"`
+	ExecutedLastHour        *int64   `protobuf:"varint,2,req,name=executed_last_hour,json=executedLastHour" json:"executed_last_hour,omitempty"`
+	SamplingDurationSeconds *float64 `protobuf:"fixed64,3,req,name=sampling_duration_seconds,json=samplingDurationSeconds" json:"sampling_duration_seconds,omitempty"`
+	RequestsInFlight        *int32   `protobuf:"varint,4,opt,name=requests_in_flight,json=requestsInFlight" json:"requests_in_flight,omitempty"`
+	EnforcedRate            *float64 `protobuf:"fixed64,5,opt,name=enforced_rate,json=enforcedRate" json:"enforced_rate,omitempty"`
+	XXX_NoUnkeyedLiteral    struct{} `json:"-"`
+	XXX_unrecognized        []byte   `json:"-"`
+	XXX_sizecache           int32    `json:"-"`
+}
+
+func (m *TaskQueueScannerQueueInfo) Reset()         { *m = TaskQueueScannerQueueInfo{} }
+func (m *TaskQueueScannerQueueInfo) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueScannerQueueInfo) ProtoMessage()    {}
+func (*TaskQueueScannerQueueInfo) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{19}
+}
+func (m *TaskQueueScannerQueueInfo) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueScannerQueueInfo.Unmarshal(m, b)
+}
+func (m *TaskQueueScannerQueueInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueScannerQueueInfo.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueScannerQueueInfo) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueScannerQueueInfo.Merge(dst, src)
+}
+func (m *TaskQueueScannerQueueInfo) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueScannerQueueInfo.Size(m)
+}
+func (m *TaskQueueScannerQueueInfo) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueScannerQueueInfo.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueScannerQueueInfo proto.InternalMessageInfo
+
+func (m *TaskQueueScannerQueueInfo) GetExecutedLastMinute() int64 {
+	if m != nil && m.ExecutedLastMinute != nil {
+		return *m.ExecutedLastMinute
+	}
+	return 0
+}
+
+func (m *TaskQueueScannerQueueInfo) GetExecutedLastHour() int64 {
+	if m != nil && m.ExecutedLastHour != nil {
+		return *m.ExecutedLastHour
+	}
+	return 0
+}
+
+func (m *TaskQueueScannerQueueInfo) GetSamplingDurationSeconds() float64 {
+	if m != nil && m.SamplingDurationSeconds != nil {
+		return *m.SamplingDurationSeconds
+	}
+	return 0
+}
+
+func (m *TaskQueueScannerQueueInfo) GetRequestsInFlight() int32 {
+	if m != nil && m.RequestsInFlight != nil {
+		return *m.RequestsInFlight
+	}
+	return 0
+}
+
+func (m *TaskQueueScannerQueueInfo) GetEnforcedRate() float64 {
+	if m != nil && m.EnforcedRate != nil {
+		return *m.EnforcedRate
+	}
+	return 0
+}
+
+type TaskQueueFetchQueueStatsResponse struct {
+	Queuestats           []*TaskQueueFetchQueueStatsResponse_QueueStats `protobuf:"group,1,rep,name=QueueStats,json=queuestats" json:"queuestats,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                                       `json:"-"`
+	XXX_unrecognized     []byte                                         `json:"-"`
+	XXX_sizecache        int32                                          `json:"-"`
+}
+
+func (m *TaskQueueFetchQueueStatsResponse) Reset()         { *m = TaskQueueFetchQueueStatsResponse{} }
+func (m *TaskQueueFetchQueueStatsResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueFetchQueueStatsResponse) ProtoMessage()    {}
+func (*TaskQueueFetchQueueStatsResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{20}
+}
+func (m *TaskQueueFetchQueueStatsResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueFetchQueueStatsResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueFetchQueueStatsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueFetchQueueStatsResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueFetchQueueStatsResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueFetchQueueStatsResponse.Merge(dst, src)
+}
+func (m *TaskQueueFetchQueueStatsResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueFetchQueueStatsResponse.Size(m)
+}
+func (m *TaskQueueFetchQueueStatsResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueFetchQueueStatsResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueFetchQueueStatsResponse proto.InternalMessageInfo
+
+func (m *TaskQueueFetchQueueStatsResponse) GetQueuestats() []*TaskQueueFetchQueueStatsResponse_QueueStats {
+	if m != nil {
+		return m.Queuestats
+	}
+	return nil
+}
+
+type TaskQueueFetchQueueStatsResponse_QueueStats struct {
+	NumTasks             *int32                     `protobuf:"varint,2,req,name=num_tasks,json=numTasks" json:"num_tasks,omitempty"`
+	OldestEtaUsec        *int64                     `protobuf:"varint,3,req,name=oldest_eta_usec,json=oldestEtaUsec" json:"oldest_eta_usec,omitempty"`
+	ScannerInfo          *TaskQueueScannerQueueInfo `protobuf:"bytes,4,opt,name=scanner_info,json=scannerInfo" json:"scanner_info,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                   `json:"-"`
+	XXX_unrecognized     []byte                     `json:"-"`
+	XXX_sizecache        int32                      `json:"-"`
+}
+
+func (m *TaskQueueFetchQueueStatsResponse_QueueStats) Reset() {
+	*m = TaskQueueFetchQueueStatsResponse_QueueStats{}
+}
+func (m *TaskQueueFetchQueueStatsResponse_QueueStats) String() string {
+	return proto.CompactTextString(m)
+}
+func (*TaskQueueFetchQueueStatsResponse_QueueStats) ProtoMessage() {}
+func (*TaskQueueFetchQueueStatsResponse_QueueStats) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{20, 0}
+}
+func (m *TaskQueueFetchQueueStatsResponse_QueueStats) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueFetchQueueStatsResponse_QueueStats.Unmarshal(m, b)
+}
+func (m *TaskQueueFetchQueueStatsResponse_QueueStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueFetchQueueStatsResponse_QueueStats.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueFetchQueueStatsResponse_QueueStats) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueFetchQueueStatsResponse_QueueStats.Merge(dst, src)
+}
+func (m *TaskQueueFetchQueueStatsResponse_QueueStats) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueFetchQueueStatsResponse_QueueStats.Size(m)
+}
+func (m *TaskQueueFetchQueueStatsResponse_QueueStats) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueFetchQueueStatsResponse_QueueStats.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueFetchQueueStatsResponse_QueueStats proto.InternalMessageInfo
+
+func (m *TaskQueueFetchQueueStatsResponse_QueueStats) GetNumTasks() int32 {
+	if m != nil && m.NumTasks != nil {
+		return *m.NumTasks
+	}
+	return 0
+}
+
+func (m *TaskQueueFetchQueueStatsResponse_QueueStats) GetOldestEtaUsec() int64 {
+	if m != nil && m.OldestEtaUsec != nil {
+		return *m.OldestEtaUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueFetchQueueStatsResponse_QueueStats) GetScannerInfo() *TaskQueueScannerQueueInfo {
+	if m != nil {
+		return m.ScannerInfo
+	}
+	return nil
+}
+
+type TaskQueuePauseQueueRequest struct {
+	AppId                []byte   `protobuf:"bytes,1,req,name=app_id,json=appId" json:"app_id,omitempty"`
+	QueueName            []byte   `protobuf:"bytes,2,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	Pause                *bool    `protobuf:"varint,3,req,name=pause" json:"pause,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueuePauseQueueRequest) Reset()         { *m = TaskQueuePauseQueueRequest{} }
+func (m *TaskQueuePauseQueueRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueuePauseQueueRequest) ProtoMessage()    {}
+func (*TaskQueuePauseQueueRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{21}
+}
+func (m *TaskQueuePauseQueueRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueuePauseQueueRequest.Unmarshal(m, b)
+}
+func (m *TaskQueuePauseQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueuePauseQueueRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueuePauseQueueRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueuePauseQueueRequest.Merge(dst, src)
+}
+func (m *TaskQueuePauseQueueRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueuePauseQueueRequest.Size(m)
+}
+func (m *TaskQueuePauseQueueRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueuePauseQueueRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueuePauseQueueRequest proto.InternalMessageInfo
+
+func (m *TaskQueuePauseQueueRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueuePauseQueueRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueuePauseQueueRequest) GetPause() bool {
+	if m != nil && m.Pause != nil {
+		return *m.Pause
+	}
+	return false
+}
+
+type TaskQueuePauseQueueResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueuePauseQueueResponse) Reset()         { *m = TaskQueuePauseQueueResponse{} }
+func (m *TaskQueuePauseQueueResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueuePauseQueueResponse) ProtoMessage()    {}
+func (*TaskQueuePauseQueueResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{22}
+}
+func (m *TaskQueuePauseQueueResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueuePauseQueueResponse.Unmarshal(m, b)
+}
+func (m *TaskQueuePauseQueueResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueuePauseQueueResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueuePauseQueueResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueuePauseQueueResponse.Merge(dst, src)
+}
+func (m *TaskQueuePauseQueueResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueuePauseQueueResponse.Size(m)
+}
+func (m *TaskQueuePauseQueueResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueuePauseQueueResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueuePauseQueueResponse proto.InternalMessageInfo
+
+type TaskQueuePurgeQueueRequest struct {
+	AppId                []byte   `protobuf:"bytes,1,opt,name=app_id,json=appId" json:"app_id,omitempty"`
+	QueueName            []byte   `protobuf:"bytes,2,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueuePurgeQueueRequest) Reset()         { *m = TaskQueuePurgeQueueRequest{} }
+func (m *TaskQueuePurgeQueueRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueuePurgeQueueRequest) ProtoMessage()    {}
+func (*TaskQueuePurgeQueueRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{23}
+}
+func (m *TaskQueuePurgeQueueRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueuePurgeQueueRequest.Unmarshal(m, b)
+}
+func (m *TaskQueuePurgeQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueuePurgeQueueRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueuePurgeQueueRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueuePurgeQueueRequest.Merge(dst, src)
+}
+func (m *TaskQueuePurgeQueueRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueuePurgeQueueRequest.Size(m)
+}
+func (m *TaskQueuePurgeQueueRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueuePurgeQueueRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueuePurgeQueueRequest proto.InternalMessageInfo
+
+func (m *TaskQueuePurgeQueueRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueuePurgeQueueRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+type TaskQueuePurgeQueueResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueuePurgeQueueResponse) Reset()         { *m = TaskQueuePurgeQueueResponse{} }
+func (m *TaskQueuePurgeQueueResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueuePurgeQueueResponse) ProtoMessage()    {}
+func (*TaskQueuePurgeQueueResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{24}
+}
+func (m *TaskQueuePurgeQueueResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueuePurgeQueueResponse.Unmarshal(m, b)
+}
+func (m *TaskQueuePurgeQueueResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueuePurgeQueueResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueuePurgeQueueResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueuePurgeQueueResponse.Merge(dst, src)
+}
+func (m *TaskQueuePurgeQueueResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueuePurgeQueueResponse.Size(m)
+}
+func (m *TaskQueuePurgeQueueResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueuePurgeQueueResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueuePurgeQueueResponse proto.InternalMessageInfo
+
+type TaskQueueDeleteQueueRequest struct {
+	AppId                []byte   `protobuf:"bytes,1,req,name=app_id,json=appId" json:"app_id,omitempty"`
+	QueueName            []byte   `protobuf:"bytes,2,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueDeleteQueueRequest) Reset()         { *m = TaskQueueDeleteQueueRequest{} }
+func (m *TaskQueueDeleteQueueRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueDeleteQueueRequest) ProtoMessage()    {}
+func (*TaskQueueDeleteQueueRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{25}
+}
+func (m *TaskQueueDeleteQueueRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueDeleteQueueRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueDeleteQueueRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueDeleteQueueRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueDeleteQueueRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueDeleteQueueRequest.Merge(dst, src)
+}
+func (m *TaskQueueDeleteQueueRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueDeleteQueueRequest.Size(m)
+}
+func (m *TaskQueueDeleteQueueRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueDeleteQueueRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueDeleteQueueRequest proto.InternalMessageInfo
+
+func (m *TaskQueueDeleteQueueRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueueDeleteQueueRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+type TaskQueueDeleteQueueResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueDeleteQueueResponse) Reset()         { *m = TaskQueueDeleteQueueResponse{} }
+func (m *TaskQueueDeleteQueueResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueDeleteQueueResponse) ProtoMessage()    {}
+func (*TaskQueueDeleteQueueResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{26}
+}
+func (m *TaskQueueDeleteQueueResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueDeleteQueueResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueDeleteQueueResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueDeleteQueueResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueDeleteQueueResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueDeleteQueueResponse.Merge(dst, src)
+}
+func (m *TaskQueueDeleteQueueResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueDeleteQueueResponse.Size(m)
+}
+func (m *TaskQueueDeleteQueueResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueDeleteQueueResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueDeleteQueueResponse proto.InternalMessageInfo
+
+type TaskQueueDeleteGroupRequest struct {
+	AppId                []byte   `protobuf:"bytes,1,req,name=app_id,json=appId" json:"app_id,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueDeleteGroupRequest) Reset()         { *m = TaskQueueDeleteGroupRequest{} }
+func (m *TaskQueueDeleteGroupRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueDeleteGroupRequest) ProtoMessage()    {}
+func (*TaskQueueDeleteGroupRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{27}
+}
+func (m *TaskQueueDeleteGroupRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueDeleteGroupRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueDeleteGroupRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueDeleteGroupRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueDeleteGroupRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueDeleteGroupRequest.Merge(dst, src)
+}
+func (m *TaskQueueDeleteGroupRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueDeleteGroupRequest.Size(m)
+}
+func (m *TaskQueueDeleteGroupRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueDeleteGroupRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueDeleteGroupRequest proto.InternalMessageInfo
+
+func (m *TaskQueueDeleteGroupRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+type TaskQueueDeleteGroupResponse struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueDeleteGroupResponse) Reset()         { *m = TaskQueueDeleteGroupResponse{} }
+func (m *TaskQueueDeleteGroupResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueDeleteGroupResponse) ProtoMessage()    {}
+func (*TaskQueueDeleteGroupResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{28}
+}
+func (m *TaskQueueDeleteGroupResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueDeleteGroupResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueDeleteGroupResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueDeleteGroupResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueDeleteGroupResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueDeleteGroupResponse.Merge(dst, src)
+}
+func (m *TaskQueueDeleteGroupResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueDeleteGroupResponse.Size(m)
+}
+func (m *TaskQueueDeleteGroupResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueDeleteGroupResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueDeleteGroupResponse proto.InternalMessageInfo
+
+type TaskQueueQueryTasksRequest struct {
+	AppId                []byte   `protobuf:"bytes,1,opt,name=app_id,json=appId" json:"app_id,omitempty"`
+	QueueName            []byte   `protobuf:"bytes,2,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	StartTaskName        []byte   `protobuf:"bytes,3,opt,name=start_task_name,json=startTaskName" json:"start_task_name,omitempty"`
+	StartEtaUsec         *int64   `protobuf:"varint,4,opt,name=start_eta_usec,json=startEtaUsec" json:"start_eta_usec,omitempty"`
+	StartTag             []byte   `protobuf:"bytes,6,opt,name=start_tag,json=startTag" json:"start_tag,omitempty"`
+	MaxRows              *int32   `protobuf:"varint,5,opt,name=max_rows,json=maxRows,def=1" json:"max_rows,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueQueryTasksRequest) Reset()         { *m = TaskQueueQueryTasksRequest{} }
+func (m *TaskQueueQueryTasksRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueQueryTasksRequest) ProtoMessage()    {}
+func (*TaskQueueQueryTasksRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{29}
+}
+func (m *TaskQueueQueryTasksRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueQueryTasksRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueQueryTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueQueryTasksRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueQueryTasksRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueQueryTasksRequest.Merge(dst, src)
+}
+func (m *TaskQueueQueryTasksRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueQueryTasksRequest.Size(m)
+}
+func (m *TaskQueueQueryTasksRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueQueryTasksRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueQueryTasksRequest proto.InternalMessageInfo
+
+const Default_TaskQueueQueryTasksRequest_MaxRows int32 = 1
+
+func (m *TaskQueueQueryTasksRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksRequest) GetStartTaskName() []byte {
+	if m != nil {
+		return m.StartTaskName
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksRequest) GetStartEtaUsec() int64 {
+	if m != nil && m.StartEtaUsec != nil {
+		return *m.StartEtaUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryTasksRequest) GetStartTag() []byte {
+	if m != nil {
+		return m.StartTag
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksRequest) GetMaxRows() int32 {
+	if m != nil && m.MaxRows != nil {
+		return *m.MaxRows
+	}
+	return Default_TaskQueueQueryTasksRequest_MaxRows
+}
+
+type TaskQueueQueryTasksResponse struct {
+	Task                 []*TaskQueueQueryTasksResponse_Task `protobuf:"group,1,rep,name=Task,json=task" json:"task,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                            `json:"-"`
+	XXX_unrecognized     []byte                              `json:"-"`
+	XXX_sizecache        int32                               `json:"-"`
+}
+
+func (m *TaskQueueQueryTasksResponse) Reset()         { *m = TaskQueueQueryTasksResponse{} }
+func (m *TaskQueueQueryTasksResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueQueryTasksResponse) ProtoMessage()    {}
+func (*TaskQueueQueryTasksResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{30}
+}
+func (m *TaskQueueQueryTasksResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueQueryTasksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueQueryTasksResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueQueryTasksResponse.Merge(dst, src)
+}
+func (m *TaskQueueQueryTasksResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse.Size(m)
+}
+func (m *TaskQueueQueryTasksResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueQueryTasksResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueQueryTasksResponse proto.InternalMessageInfo
+
+func (m *TaskQueueQueryTasksResponse) GetTask() []*TaskQueueQueryTasksResponse_Task {
+	if m != nil {
+		return m.Task
+	}
+	return nil
+}
+
+type TaskQueueQueryTasksResponse_Task struct {
+	TaskName             []byte                                          `protobuf:"bytes,2,req,name=task_name,json=taskName" json:"task_name,omitempty"`
+	EtaUsec              *int64                                          `protobuf:"varint,3,req,name=eta_usec,json=etaUsec" json:"eta_usec,omitempty"`
+	Url                  []byte                                          `protobuf:"bytes,4,opt,name=url" json:"url,omitempty"`
+	Method               *TaskQueueQueryTasksResponse_Task_RequestMethod `protobuf:"varint,5,opt,name=method,enum=appengine.TaskQueueQueryTasksResponse_Task_RequestMethod" json:"method,omitempty"`
+	RetryCount           *int32                                          `protobuf:"varint,6,opt,name=retry_count,json=retryCount,def=0" json:"retry_count,omitempty"`
+	Header               []*TaskQueueQueryTasksResponse_Task_Header      `protobuf:"group,7,rep,name=Header,json=header" json:"header,omitempty"`
+	BodySize             *int32                                          `protobuf:"varint,10,opt,name=body_size,json=bodySize" json:"body_size,omitempty"`
+	Body                 []byte                                          `protobuf:"bytes,11,opt,name=body" json:"body,omitempty"`
+	CreationTimeUsec     *int64                                          `protobuf:"varint,12,req,name=creation_time_usec,json=creationTimeUsec" json:"creation_time_usec,omitempty"`
+	Crontimetable        *TaskQueueQueryTasksResponse_Task_CronTimetable `protobuf:"group,13,opt,name=CronTimetable,json=crontimetable" json:"crontimetable,omitempty"`
+	Runlog               *TaskQueueQueryTasksResponse_Task_RunLog        `protobuf:"group,16,opt,name=RunLog,json=runlog" json:"runlog,omitempty"`
+	Description          []byte                                          `protobuf:"bytes,21,opt,name=description" json:"description,omitempty"`
+	Payload              *TaskPayload                                    `protobuf:"bytes,22,opt,name=payload" json:"payload,omitempty"`
+	RetryParameters      *TaskQueueRetryParameters                       `protobuf:"bytes,23,opt,name=retry_parameters,json=retryParameters" json:"retry_parameters,omitempty"`
+	FirstTryUsec         *int64                                          `protobuf:"varint,24,opt,name=first_try_usec,json=firstTryUsec" json:"first_try_usec,omitempty"`
+	Tag                  []byte                                          `protobuf:"bytes,25,opt,name=tag" json:"tag,omitempty"`
+	ExecutionCount       *int32                                          `protobuf:"varint,26,opt,name=execution_count,json=executionCount,def=0" json:"execution_count,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                                        `json:"-"`
+	XXX_unrecognized     []byte                                          `json:"-"`
+	XXX_sizecache        int32                                           `json:"-"`
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) Reset()         { *m = TaskQueueQueryTasksResponse_Task{} }
+func (m *TaskQueueQueryTasksResponse_Task) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueQueryTasksResponse_Task) ProtoMessage()    {}
+func (*TaskQueueQueryTasksResponse_Task) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{30, 0}
+}
+func (m *TaskQueueQueryTasksResponse_Task) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task.Unmarshal(m, b)
+}
+func (m *TaskQueueQueryTasksResponse_Task) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueQueryTasksResponse_Task) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueQueryTasksResponse_Task.Merge(dst, src)
+}
+func (m *TaskQueueQueryTasksResponse_Task) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task.Size(m)
+}
+func (m *TaskQueueQueryTasksResponse_Task) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueQueryTasksResponse_Task.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueQueryTasksResponse_Task proto.InternalMessageInfo
+
+const Default_TaskQueueQueryTasksResponse_Task_RetryCount int32 = 0
+const Default_TaskQueueQueryTasksResponse_Task_ExecutionCount int32 = 0
+
+func (m *TaskQueueQueryTasksResponse_Task) GetTaskName() []byte {
+	if m != nil {
+		return m.TaskName
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetEtaUsec() int64 {
+	if m != nil && m.EtaUsec != nil {
+		return *m.EtaUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetUrl() []byte {
+	if m != nil {
+		return m.Url
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetMethod() TaskQueueQueryTasksResponse_Task_RequestMethod {
+	if m != nil && m.Method != nil {
+		return *m.Method
+	}
+	return TaskQueueQueryTasksResponse_Task_GET
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetRetryCount() int32 {
+	if m != nil && m.RetryCount != nil {
+		return *m.RetryCount
+	}
+	return Default_TaskQueueQueryTasksResponse_Task_RetryCount
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetHeader() []*TaskQueueQueryTasksResponse_Task_Header {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetBodySize() int32 {
+	if m != nil && m.BodySize != nil {
+		return *m.BodySize
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetBody() []byte {
+	if m != nil {
+		return m.Body
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetCreationTimeUsec() int64 {
+	if m != nil && m.CreationTimeUsec != nil {
+		return *m.CreationTimeUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetCrontimetable() *TaskQueueQueryTasksResponse_Task_CronTimetable {
+	if m != nil {
+		return m.Crontimetable
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetRunlog() *TaskQueueQueryTasksResponse_Task_RunLog {
+	if m != nil {
+		return m.Runlog
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetDescription() []byte {
+	if m != nil {
+		return m.Description
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetPayload() *TaskPayload {
+	if m != nil {
+		return m.Payload
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetRetryParameters() *TaskQueueRetryParameters {
+	if m != nil {
+		return m.RetryParameters
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetFirstTryUsec() int64 {
+	if m != nil && m.FirstTryUsec != nil {
+		return *m.FirstTryUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetTag() []byte {
+	if m != nil {
+		return m.Tag
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task) GetExecutionCount() int32 {
+	if m != nil && m.ExecutionCount != nil {
+		return *m.ExecutionCount
+	}
+	return Default_TaskQueueQueryTasksResponse_Task_ExecutionCount
+}
+
+type TaskQueueQueryTasksResponse_Task_Header struct {
+	Key                  []byte   `protobuf:"bytes,8,req,name=key" json:"key,omitempty"`
+	Value                []byte   `protobuf:"bytes,9,req,name=value" json:"value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueQueryTasksResponse_Task_Header) Reset() {
+	*m = TaskQueueQueryTasksResponse_Task_Header{}
+}
+func (m *TaskQueueQueryTasksResponse_Task_Header) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueQueryTasksResponse_Task_Header) ProtoMessage()    {}
+func (*TaskQueueQueryTasksResponse_Task_Header) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{30, 0, 0}
+}
+func (m *TaskQueueQueryTasksResponse_Task_Header) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task_Header.Unmarshal(m, b)
+}
+func (m *TaskQueueQueryTasksResponse_Task_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task_Header.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueQueryTasksResponse_Task_Header) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueQueryTasksResponse_Task_Header.Merge(dst, src)
+}
+func (m *TaskQueueQueryTasksResponse_Task_Header) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task_Header.Size(m)
+}
+func (m *TaskQueueQueryTasksResponse_Task_Header) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueQueryTasksResponse_Task_Header.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueQueryTasksResponse_Task_Header proto.InternalMessageInfo
+
+func (m *TaskQueueQueryTasksResponse_Task_Header) GetKey() []byte {
+	if m != nil {
+		return m.Key
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task_Header) GetValue() []byte {
+	if m != nil {
+		return m.Value
+	}
+	return nil
+}
+
+type TaskQueueQueryTasksResponse_Task_CronTimetable struct {
+	Schedule             []byte   `protobuf:"bytes,14,req,name=schedule" json:"schedule,omitempty"`
+	Timezone             []byte   `protobuf:"bytes,15,req,name=timezone" json:"timezone,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) Reset() {
+	*m = TaskQueueQueryTasksResponse_Task_CronTimetable{}
+}
+func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) String() string {
+	return proto.CompactTextString(m)
+}
+func (*TaskQueueQueryTasksResponse_Task_CronTimetable) ProtoMessage() {}
+func (*TaskQueueQueryTasksResponse_Task_CronTimetable) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{30, 0, 1}
+}
+func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task_CronTimetable.Unmarshal(m, b)
+}
+func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task_CronTimetable.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueQueryTasksResponse_Task_CronTimetable) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueQueryTasksResponse_Task_CronTimetable.Merge(dst, src)
+}
+func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task_CronTimetable.Size(m)
+}
+func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueQueryTasksResponse_Task_CronTimetable.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueQueryTasksResponse_Task_CronTimetable proto.InternalMessageInfo
+
+func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) GetSchedule() []byte {
+	if m != nil {
+		return m.Schedule
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryTasksResponse_Task_CronTimetable) GetTimezone() []byte {
+	if m != nil {
+		return m.Timezone
+	}
+	return nil
+}
+
+type TaskQueueQueryTasksResponse_Task_RunLog struct {
+	DispatchedUsec       *int64   `protobuf:"varint,17,req,name=dispatched_usec,json=dispatchedUsec" json:"dispatched_usec,omitempty"`
+	LagUsec              *int64   `protobuf:"varint,18,req,name=lag_usec,json=lagUsec" json:"lag_usec,omitempty"`
+	ElapsedUsec          *int64   `protobuf:"varint,19,req,name=elapsed_usec,json=elapsedUsec" json:"elapsed_usec,omitempty"`
+	ResponseCode         *int64   `protobuf:"varint,20,opt,name=response_code,json=responseCode" json:"response_code,omitempty"`
+	RetryReason          *string  `protobuf:"bytes,27,opt,name=retry_reason,json=retryReason" json:"retry_reason,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) Reset() {
+	*m = TaskQueueQueryTasksResponse_Task_RunLog{}
+}
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueQueryTasksResponse_Task_RunLog) ProtoMessage()    {}
+func (*TaskQueueQueryTasksResponse_Task_RunLog) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{30, 0, 2}
+}
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task_RunLog.Unmarshal(m, b)
+}
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task_RunLog.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueQueryTasksResponse_Task_RunLog) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueQueryTasksResponse_Task_RunLog.Merge(dst, src)
+}
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueQueryTasksResponse_Task_RunLog.Size(m)
+}
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueQueryTasksResponse_Task_RunLog.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueQueryTasksResponse_Task_RunLog proto.InternalMessageInfo
+
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) GetDispatchedUsec() int64 {
+	if m != nil && m.DispatchedUsec != nil {
+		return *m.DispatchedUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) GetLagUsec() int64 {
+	if m != nil && m.LagUsec != nil {
+		return *m.LagUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) GetElapsedUsec() int64 {
+	if m != nil && m.ElapsedUsec != nil {
+		return *m.ElapsedUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) GetResponseCode() int64 {
+	if m != nil && m.ResponseCode != nil {
+		return *m.ResponseCode
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryTasksResponse_Task_RunLog) GetRetryReason() string {
+	if m != nil && m.RetryReason != nil {
+		return *m.RetryReason
+	}
+	return ""
+}
+
+type TaskQueueFetchTaskRequest struct {
+	AppId                []byte   `protobuf:"bytes,1,opt,name=app_id,json=appId" json:"app_id,omitempty"`
+	QueueName            []byte   `protobuf:"bytes,2,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	TaskName             []byte   `protobuf:"bytes,3,req,name=task_name,json=taskName" json:"task_name,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueFetchTaskRequest) Reset()         { *m = TaskQueueFetchTaskRequest{} }
+func (m *TaskQueueFetchTaskRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueFetchTaskRequest) ProtoMessage()    {}
+func (*TaskQueueFetchTaskRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{31}
+}
+func (m *TaskQueueFetchTaskRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueFetchTaskRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueFetchTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueFetchTaskRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueFetchTaskRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueFetchTaskRequest.Merge(dst, src)
+}
+func (m *TaskQueueFetchTaskRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueFetchTaskRequest.Size(m)
+}
+func (m *TaskQueueFetchTaskRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueFetchTaskRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueFetchTaskRequest proto.InternalMessageInfo
+
+func (m *TaskQueueFetchTaskRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueueFetchTaskRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueueFetchTaskRequest) GetTaskName() []byte {
+	if m != nil {
+		return m.TaskName
+	}
+	return nil
+}
+
+type TaskQueueFetchTaskResponse struct {
+	Task                 *TaskQueueQueryTasksResponse `protobuf:"bytes,1,req,name=task" json:"task,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                     `json:"-"`
+	XXX_unrecognized     []byte                       `json:"-"`
+	XXX_sizecache        int32                        `json:"-"`
+}
+
+func (m *TaskQueueFetchTaskResponse) Reset()         { *m = TaskQueueFetchTaskResponse{} }
+func (m *TaskQueueFetchTaskResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueFetchTaskResponse) ProtoMessage()    {}
+func (*TaskQueueFetchTaskResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{32}
+}
+func (m *TaskQueueFetchTaskResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueFetchTaskResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueFetchTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueFetchTaskResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueFetchTaskResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueFetchTaskResponse.Merge(dst, src)
+}
+func (m *TaskQueueFetchTaskResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueFetchTaskResponse.Size(m)
+}
+func (m *TaskQueueFetchTaskResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueFetchTaskResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueFetchTaskResponse proto.InternalMessageInfo
+
+func (m *TaskQueueFetchTaskResponse) GetTask() *TaskQueueQueryTasksResponse {
+	if m != nil {
+		return m.Task
+	}
+	return nil
+}
+
+type TaskQueueUpdateStorageLimitRequest struct {
+	AppId                []byte   `protobuf:"bytes,1,req,name=app_id,json=appId" json:"app_id,omitempty"`
+	Limit                *int64   `protobuf:"varint,2,req,name=limit" json:"limit,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueUpdateStorageLimitRequest) Reset()         { *m = TaskQueueUpdateStorageLimitRequest{} }
+func (m *TaskQueueUpdateStorageLimitRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueUpdateStorageLimitRequest) ProtoMessage()    {}
+func (*TaskQueueUpdateStorageLimitRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{33}
+}
+func (m *TaskQueueUpdateStorageLimitRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueUpdateStorageLimitRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueUpdateStorageLimitRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueUpdateStorageLimitRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueUpdateStorageLimitRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueUpdateStorageLimitRequest.Merge(dst, src)
+}
+func (m *TaskQueueUpdateStorageLimitRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueUpdateStorageLimitRequest.Size(m)
+}
+func (m *TaskQueueUpdateStorageLimitRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueUpdateStorageLimitRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueUpdateStorageLimitRequest proto.InternalMessageInfo
+
+func (m *TaskQueueUpdateStorageLimitRequest) GetAppId() []byte {
+	if m != nil {
+		return m.AppId
+	}
+	return nil
+}
+
+func (m *TaskQueueUpdateStorageLimitRequest) GetLimit() int64 {
+	if m != nil && m.Limit != nil {
+		return *m.Limit
+	}
+	return 0
+}
+
+type TaskQueueUpdateStorageLimitResponse struct {
+	NewLimit             *int64   `protobuf:"varint,1,req,name=new_limit,json=newLimit" json:"new_limit,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueUpdateStorageLimitResponse) Reset()         { *m = TaskQueueUpdateStorageLimitResponse{} }
+func (m *TaskQueueUpdateStorageLimitResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueUpdateStorageLimitResponse) ProtoMessage()    {}
+func (*TaskQueueUpdateStorageLimitResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{34}
+}
+func (m *TaskQueueUpdateStorageLimitResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueUpdateStorageLimitResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueUpdateStorageLimitResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueUpdateStorageLimitResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueUpdateStorageLimitResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueUpdateStorageLimitResponse.Merge(dst, src)
+}
+func (m *TaskQueueUpdateStorageLimitResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueUpdateStorageLimitResponse.Size(m)
+}
+func (m *TaskQueueUpdateStorageLimitResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueUpdateStorageLimitResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueUpdateStorageLimitResponse proto.InternalMessageInfo
+
+func (m *TaskQueueUpdateStorageLimitResponse) GetNewLimit() int64 {
+	if m != nil && m.NewLimit != nil {
+		return *m.NewLimit
+	}
+	return 0
+}
+
+type TaskQueueQueryAndOwnTasksRequest struct {
+	QueueName            []byte   `protobuf:"bytes,1,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	LeaseSeconds         *float64 `protobuf:"fixed64,2,req,name=lease_seconds,json=leaseSeconds" json:"lease_seconds,omitempty"`
+	MaxTasks             *int64   `protobuf:"varint,3,req,name=max_tasks,json=maxTasks" json:"max_tasks,omitempty"`
+	GroupByTag           *bool    `protobuf:"varint,4,opt,name=group_by_tag,json=groupByTag,def=0" json:"group_by_tag,omitempty"`
+	Tag                  []byte   `protobuf:"bytes,5,opt,name=tag" json:"tag,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueQueryAndOwnTasksRequest) Reset()         { *m = TaskQueueQueryAndOwnTasksRequest{} }
+func (m *TaskQueueQueryAndOwnTasksRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueQueryAndOwnTasksRequest) ProtoMessage()    {}
+func (*TaskQueueQueryAndOwnTasksRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{35}
+}
+func (m *TaskQueueQueryAndOwnTasksRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueQueryAndOwnTasksRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueQueryAndOwnTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueQueryAndOwnTasksRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueQueryAndOwnTasksRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueQueryAndOwnTasksRequest.Merge(dst, src)
+}
+func (m *TaskQueueQueryAndOwnTasksRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueQueryAndOwnTasksRequest.Size(m)
+}
+func (m *TaskQueueQueryAndOwnTasksRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueQueryAndOwnTasksRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueQueryAndOwnTasksRequest proto.InternalMessageInfo
+
+const Default_TaskQueueQueryAndOwnTasksRequest_GroupByTag bool = false
+
+func (m *TaskQueueQueryAndOwnTasksRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryAndOwnTasksRequest) GetLeaseSeconds() float64 {
+	if m != nil && m.LeaseSeconds != nil {
+		return *m.LeaseSeconds
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryAndOwnTasksRequest) GetMaxTasks() int64 {
+	if m != nil && m.MaxTasks != nil {
+		return *m.MaxTasks
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryAndOwnTasksRequest) GetGroupByTag() bool {
+	if m != nil && m.GroupByTag != nil {
+		return *m.GroupByTag
+	}
+	return Default_TaskQueueQueryAndOwnTasksRequest_GroupByTag
+}
+
+func (m *TaskQueueQueryAndOwnTasksRequest) GetTag() []byte {
+	if m != nil {
+		return m.Tag
+	}
+	return nil
+}
+
+type TaskQueueQueryAndOwnTasksResponse struct {
+	Task                 []*TaskQueueQueryAndOwnTasksResponse_Task `protobuf:"group,1,rep,name=Task,json=task" json:"task,omitempty"`
+	XXX_NoUnkeyedLiteral struct{}                                  `json:"-"`
+	XXX_unrecognized     []byte                                    `json:"-"`
+	XXX_sizecache        int32                                     `json:"-"`
+}
+
+func (m *TaskQueueQueryAndOwnTasksResponse) Reset()         { *m = TaskQueueQueryAndOwnTasksResponse{} }
+func (m *TaskQueueQueryAndOwnTasksResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueQueryAndOwnTasksResponse) ProtoMessage()    {}
+func (*TaskQueueQueryAndOwnTasksResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{36}
+}
+func (m *TaskQueueQueryAndOwnTasksResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueQueryAndOwnTasksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueQueryAndOwnTasksResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse.Merge(dst, src)
+}
+func (m *TaskQueueQueryAndOwnTasksResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse.Size(m)
+}
+func (m *TaskQueueQueryAndOwnTasksResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse proto.InternalMessageInfo
+
+func (m *TaskQueueQueryAndOwnTasksResponse) GetTask() []*TaskQueueQueryAndOwnTasksResponse_Task {
+	if m != nil {
+		return m.Task
+	}
+	return nil
+}
+
+type TaskQueueQueryAndOwnTasksResponse_Task struct {
+	TaskName             []byte   `protobuf:"bytes,2,req,name=task_name,json=taskName" json:"task_name,omitempty"`
+	EtaUsec              *int64   `protobuf:"varint,3,req,name=eta_usec,json=etaUsec" json:"eta_usec,omitempty"`
+	RetryCount           *int32   `protobuf:"varint,4,opt,name=retry_count,json=retryCount,def=0" json:"retry_count,omitempty"`
+	Body                 []byte   `protobuf:"bytes,5,opt,name=body" json:"body,omitempty"`
+	Tag                  []byte   `protobuf:"bytes,6,opt,name=tag" json:"tag,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) Reset() {
+	*m = TaskQueueQueryAndOwnTasksResponse_Task{}
+}
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueQueryAndOwnTasksResponse_Task) ProtoMessage()    {}
+func (*TaskQueueQueryAndOwnTasksResponse_Task) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{36, 0}
+}
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse_Task.Unmarshal(m, b)
+}
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse_Task.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueQueryAndOwnTasksResponse_Task) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse_Task.Merge(dst, src)
+}
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse_Task.Size(m)
+}
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse_Task.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueQueryAndOwnTasksResponse_Task proto.InternalMessageInfo
+
+const Default_TaskQueueQueryAndOwnTasksResponse_Task_RetryCount int32 = 0
+
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) GetTaskName() []byte {
+	if m != nil {
+		return m.TaskName
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) GetEtaUsec() int64 {
+	if m != nil && m.EtaUsec != nil {
+		return *m.EtaUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) GetRetryCount() int32 {
+	if m != nil && m.RetryCount != nil {
+		return *m.RetryCount
+	}
+	return Default_TaskQueueQueryAndOwnTasksResponse_Task_RetryCount
+}
+
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) GetBody() []byte {
+	if m != nil {
+		return m.Body
+	}
+	return nil
+}
+
+func (m *TaskQueueQueryAndOwnTasksResponse_Task) GetTag() []byte {
+	if m != nil {
+		return m.Tag
+	}
+	return nil
+}
+
+type TaskQueueModifyTaskLeaseRequest struct {
+	QueueName            []byte   `protobuf:"bytes,1,req,name=queue_name,json=queueName" json:"queue_name,omitempty"`
+	TaskName             []byte   `protobuf:"bytes,2,req,name=task_name,json=taskName" json:"task_name,omitempty"`
+	EtaUsec              *int64   `protobuf:"varint,3,req,name=eta_usec,json=etaUsec" json:"eta_usec,omitempty"`
+	LeaseSeconds         *float64 `protobuf:"fixed64,4,req,name=lease_seconds,json=leaseSeconds" json:"lease_seconds,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueModifyTaskLeaseRequest) Reset()         { *m = TaskQueueModifyTaskLeaseRequest{} }
+func (m *TaskQueueModifyTaskLeaseRequest) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueModifyTaskLeaseRequest) ProtoMessage()    {}
+func (*TaskQueueModifyTaskLeaseRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{37}
+}
+func (m *TaskQueueModifyTaskLeaseRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueModifyTaskLeaseRequest.Unmarshal(m, b)
+}
+func (m *TaskQueueModifyTaskLeaseRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueModifyTaskLeaseRequest.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueModifyTaskLeaseRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueModifyTaskLeaseRequest.Merge(dst, src)
+}
+func (m *TaskQueueModifyTaskLeaseRequest) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueModifyTaskLeaseRequest.Size(m)
+}
+func (m *TaskQueueModifyTaskLeaseRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueModifyTaskLeaseRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueModifyTaskLeaseRequest proto.InternalMessageInfo
+
+func (m *TaskQueueModifyTaskLeaseRequest) GetQueueName() []byte {
+	if m != nil {
+		return m.QueueName
+	}
+	return nil
+}
+
+func (m *TaskQueueModifyTaskLeaseRequest) GetTaskName() []byte {
+	if m != nil {
+		return m.TaskName
+	}
+	return nil
+}
+
+func (m *TaskQueueModifyTaskLeaseRequest) GetEtaUsec() int64 {
+	if m != nil && m.EtaUsec != nil {
+		return *m.EtaUsec
+	}
+	return 0
+}
+
+func (m *TaskQueueModifyTaskLeaseRequest) GetLeaseSeconds() float64 {
+	if m != nil && m.LeaseSeconds != nil {
+		return *m.LeaseSeconds
+	}
+	return 0
+}
+
+type TaskQueueModifyTaskLeaseResponse struct {
+	UpdatedEtaUsec       *int64   `protobuf:"varint,1,req,name=updated_eta_usec,json=updatedEtaUsec" json:"updated_eta_usec,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *TaskQueueModifyTaskLeaseResponse) Reset()         { *m = TaskQueueModifyTaskLeaseResponse{} }
+func (m *TaskQueueModifyTaskLeaseResponse) String() string { return proto.CompactTextString(m) }
+func (*TaskQueueModifyTaskLeaseResponse) ProtoMessage()    {}
+func (*TaskQueueModifyTaskLeaseResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_taskqueue_service_05300f6f4e69f490, []int{38}
+}
+func (m *TaskQueueModifyTaskLeaseResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_TaskQueueModifyTaskLeaseResponse.Unmarshal(m, b)
+}
+func (m *TaskQueueModifyTaskLeaseResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_TaskQueueModifyTaskLeaseResponse.Marshal(b, m, deterministic)
+}
+func (dst *TaskQueueModifyTaskLeaseResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_TaskQueueModifyTaskLeaseResponse.Merge(dst, src)
+}
+func (m *TaskQueueModifyTaskLeaseResponse) XXX_Size() int {
+	return xxx_messageInfo_TaskQueueModifyTaskLeaseResponse.Size(m)
+}
+func (m *TaskQueueModifyTaskLeaseResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_TaskQueueModifyTaskLeaseResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_TaskQueueModifyTaskLeaseResponse proto.InternalMessageInfo
+
+func (m *TaskQueueModifyTaskLeaseResponse) GetUpdatedEtaUsec() int64 {
+	if m != nil && m.UpdatedEtaUsec != nil {
+		return *m.UpdatedEtaUsec
+	}
+	return 0
+}
+
+func init() {
+	proto.RegisterType((*TaskQueueServiceError)(nil), "appengine.TaskQueueServiceError")
+	proto.RegisterType((*TaskPayload)(nil), "appengine.TaskPayload")
+	proto.RegisterType((*TaskQueueRetryParameters)(nil), "appengine.TaskQueueRetryParameters")
+	proto.RegisterType((*TaskQueueAcl)(nil), "appengine.TaskQueueAcl")
+	proto.RegisterType((*TaskQueueHttpHeader)(nil), "appengine.TaskQueueHttpHeader")
+	proto.RegisterType((*TaskQueueMode)(nil), "appengine.TaskQueueMode")
+	proto.RegisterType((*TaskQueueAddRequest)(nil), "appengine.TaskQueueAddRequest")
+	proto.RegisterType((*TaskQueueAddRequest_Header)(nil), "appengine.TaskQueueAddRequest.Header")
+	proto.RegisterType((*TaskQueueAddRequest_CronTimetable)(nil), "appengine.TaskQueueAddRequest.CronTimetable")
+	proto.RegisterType((*TaskQueueAddResponse)(nil), "appengine.TaskQueueAddResponse")
+	proto.RegisterType((*TaskQueueBulkAddRequest)(nil), "appengine.TaskQueueBulkAddRequest")
+	proto.RegisterType((*TaskQueueBulkAddResponse)(nil), "appengine.TaskQueueBulkAddResponse")
+	proto.RegisterType((*TaskQueueBulkAddResponse_TaskResult)(nil), "appengine.TaskQueueBulkAddResponse.TaskResult")
+	proto.RegisterType((*TaskQueueDeleteRequest)(nil), "appengine.TaskQueueDeleteRequest")
+	proto.RegisterType((*TaskQueueDeleteResponse)(nil), "appengine.TaskQueueDeleteResponse")
+	proto.RegisterType((*TaskQueueForceRunRequest)(nil), "appengine.TaskQueueForceRunRequest")
+	proto.RegisterType((*TaskQueueForceRunResponse)(nil), "appengine.TaskQueueForceRunResponse")
+	proto.RegisterType((*TaskQueueUpdateQueueRequest)(nil), "appengine.TaskQueueUpdateQueueRequest")
+	proto.RegisterType((*TaskQueueUpdateQueueResponse)(nil), "appengine.TaskQueueUpdateQueueResponse")
+	proto.RegisterType((*TaskQueueFetchQueuesRequest)(nil), "appengine.TaskQueueFetchQueuesRequest")
+	proto.RegisterType((*TaskQueueFetchQueuesResponse)(nil), "appengine.TaskQueueFetchQueuesResponse")
+	proto.RegisterType((*TaskQueueFetchQueuesResponse_Queue)(nil), "appengine.TaskQueueFetchQueuesResponse.Queue")
+	proto.RegisterType((*TaskQueueFetchQueueStatsRequest)(nil), "appengine.TaskQueueFetchQueueStatsRequest")
+	proto.RegisterType((*TaskQueueScannerQueueInfo)(nil), "appengine.TaskQueueScannerQueueInfo")
+	proto.RegisterType((*TaskQueueFetchQueueStatsResponse)(nil), "appengine.TaskQueueFetchQueueStatsResponse")
+	proto.RegisterType((*TaskQueueFetchQueueStatsResponse_QueueStats)(nil), "appengine.TaskQueueFetchQueueStatsResponse.QueueStats")
+	proto.RegisterType((*TaskQueuePauseQueueRequest)(nil), "appengine.TaskQueuePauseQueueRequest")
+	proto.RegisterType((*TaskQueuePauseQueueResponse)(nil), "appengine.TaskQueuePauseQueueResponse")
+	proto.RegisterType((*TaskQueuePurgeQueueRequest)(nil), "appengine.TaskQueuePurgeQueueRequest")
+	proto.RegisterType((*TaskQueuePurgeQueueResponse)(nil), "appengine.TaskQueuePurgeQueueResponse")
+	proto.RegisterType((*TaskQueueDeleteQueueRequest)(nil), "appengine.TaskQueueDeleteQueueRequest")
+	proto.RegisterType((*TaskQueueDeleteQueueResponse)(nil), "appengine.TaskQueueDeleteQueueResponse")
+	proto.RegisterType((*TaskQueueDeleteGroupRequest)(nil), "appengine.TaskQueueDeleteGroupRequest")
+	proto.RegisterType((*TaskQueueDeleteGroupResponse)(nil), "appengine.TaskQueueDeleteGroupResponse")
+	proto.RegisterType((*TaskQueueQueryTasksRequest)(nil), "appengine.TaskQueueQueryTasksRequest")
+	proto.RegisterType((*TaskQueueQueryTasksResponse)(nil), "appengine.TaskQueueQueryTasksResponse")
+	proto.RegisterType((*TaskQueueQueryTasksResponse_Task)(nil), "appengine.TaskQueueQueryTasksResponse.Task")
+	proto.RegisterType((*TaskQueueQueryTasksResponse_Task_Header)(nil), "appengine.TaskQueueQueryTasksResponse.Task.Header")
+	proto.RegisterType((*TaskQueueQueryTasksResponse_Task_CronTimetable)(nil), "appengine.TaskQueueQueryTasksResponse.Task.CronTimetable")
+	proto.RegisterType((*TaskQueueQueryTasksResponse_Task_RunLog)(nil), "appengine.TaskQueueQueryTasksResponse.Task.RunLog")
+	proto.RegisterType((*TaskQueueFetchTaskRequest)(nil), "appengine.TaskQueueFetchTaskRequest")
+	proto.RegisterType((*TaskQueueFetchTaskResponse)(nil), "appengine.TaskQueueFetchTaskResponse")
+	proto.RegisterType((*TaskQueueUpdateStorageLimitRequest)(nil), "appengine.TaskQueueUpdateStorageLimitRequest")
+	proto.RegisterType((*TaskQueueUpdateStorageLimitResponse)(nil), "appengine.TaskQueueUpdateStorageLimitResponse")
+	proto.RegisterType((*TaskQueueQueryAndOwnTasksRequest)(nil), "appengine.TaskQueueQueryAndOwnTasksRequest")
+	proto.RegisterType((*TaskQueueQueryAndOwnTasksResponse)(nil), "appengine.TaskQueueQueryAndOwnTasksResponse")
+	proto.RegisterType((*TaskQueueQueryAndOwnTasksResponse_Task)(nil), "appengine.TaskQueueQueryAndOwnTasksResponse.Task")
+	proto.RegisterType((*TaskQueueModifyTaskLeaseRequest)(nil), "appengine.TaskQueueModifyTaskLeaseRequest")
+	proto.RegisterType((*TaskQueueModifyTaskLeaseResponse)(nil), "appengine.TaskQueueModifyTaskLeaseResponse")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/taskqueue/taskqueue_service.proto", fileDescriptor_taskqueue_service_05300f6f4e69f490)
+}
+
+var fileDescriptor_taskqueue_service_05300f6f4e69f490 = []byte{
+	// 2747 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x39, 0x4d, 0x73, 0xdb, 0xd6,
+	0xb5, 0x01, 0xbf, 0x44, 0x1e, 0x7e, 0xc1, 0xd7, 0xb2, 0x44, 0x51, 0x71, 0x22, 0xc3, 0xf9, 0xd0,
+	0x4b, 0xfc, 0x14, 0x59, 0x79, 0xe3, 0xbc, 0xa7, 0x99, 0x4c, 0x1e, 0x24, 0xc2, 0x32, 0x63, 0x8a,
+	0xa4, 0x2f, 0xa1, 0x34, 0xce, 0x4c, 0x07, 0x73, 0x45, 0x5c, 0x51, 0x18, 0x81, 0x00, 0x83, 0x0f,
+	0x5b, 0xf2, 0xa2, 0xab, 0xae, 0x3a, 0x5d, 0x74, 0xd3, 0xe9, 0x4c, 0x66, 0xba, 0xea, 0xf4, 0x37,
+	0x74, 0xd7, 0xfe, 0x90, 0x2e, 0x3b, 0xd3, 0x3f, 0xd0, 0x55, 0xa7, 0x0b, 0x77, 0xee, 0xbd, 0x00,
+	0x08, 0x4a, 0xb4, 0x6c, 0x4b, 0x49, 0x37, 0x12, 0x70, 0xce, 0xb9, 0xe7, 0xdc, 0xf3, 0x7d, 0x70,
+	0x08, 0x0f, 0x47, 0xae, 0x3b, 0xb2, 0xe9, 0xc6, 0xc8, 0xb5, 0x89, 0x33, 0xda, 0x70, 0xbd, 0xd1,
+	0x67, 0x64, 0x32, 0xa1, 0xce, 0xc8, 0x72, 0xe8, 0x67, 0x96, 0x13, 0x50, 0xcf, 0x21, 0xf6, 0x67,
+	0x01, 0xf1, 0x4f, 0xbe, 0x0f, 0x69, 0x48, 0xa7, 0x4f, 0x86, 0x4f, 0xbd, 0x67, 0xd6, 0x90, 0x6e,
+	0x4c, 0x3c, 0x37, 0x70, 0x51, 0x29, 0x39, 0xd5, 0x54, 0xdf, 0x88, 0xa5, 0x49, 0x02, 0xe2, 0x07,
+	0xae, 0x47, 0xa7, 0x4f, 0xc6, 0xb3, 0xcf, 0x05, 0x37, 0xe5, 0xb7, 0x79, 0xb8, 0xa5, 0x13, 0xff,
+	0xe4, 0x09, 0x93, 0x34, 0x10, 0x82, 0x34, 0xcf, 0x73, 0x3d, 0xe5, 0x5f, 0x39, 0x28, 0xf1, 0xa7,
+	0x5d, 0xd7, 0xa4, 0xa8, 0x00, 0x99, 0xde, 0x63, 0xf9, 0x1d, 0x74, 0x03, 0xaa, 0x07, 0xdd, 0xc7,
+	0xdd, 0xde, 0xcf, 0xba, 0xc6, 0x93, 0x03, 0xed, 0x40, 0x93, 0x25, 0x74, 0x13, 0xea, 0x3a, 0x56,
+	0xbb, 0x83, 0xb6, 0xd6, 0xd5, 0x0d, 0x0d, 0xe3, 0x1e, 0x96, 0x33, 0x08, 0x41, 0xad, 0xdd, 0xd5,
+	0x35, 0xdc, 0x55, 0x3b, 0x11, 0x2c, 0xcb, 0x60, 0xba, 0x3a, 0x78, 0x6c, 0xe8, 0xbd, 0x9e, 0xd1,
+	0x51, 0xf1, 0x9e, 0x26, 0xe7, 0xd0, 0x2d, 0xb8, 0xd1, 0xee, 0x7e, 0xa3, 0x76, 0xda, 0x2d, 0x83,
+	0xe3, 0xba, 0xea, 0xbe, 0x26, 0xe7, 0xd1, 0x12, 0xa0, 0x18, 0xcc, 0xc5, 0x08, 0x78, 0x01, 0xd5,
+	0xa1, 0x1c, 0xc3, 0x0f, 0x70, 0x47, 0x5e, 0xb8, 0x48, 0x88, 0x55, 0x5d, 0x93, 0x8b, 0x8c, 0x6f,
+	0x5f, 0xc3, 0xfb, 0xed, 0xc1, 0xa0, 0xdd, 0xeb, 0x1a, 0x2d, 0xad, 0xdb, 0xd6, 0x5a, 0x72, 0x09,
+	0x2d, 0xc3, 0x4d, 0x2e, 0x46, 0xed, 0x60, 0x4d, 0x6d, 0x3d, 0x35, 0xb4, 0x6f, 0xdb, 0x03, 0x7d,
+	0x20, 0x03, 0x57, 0xa2, 0xb7, 0xbf, 0x33, 0xd0, 0x7b, 0x5d, 0x4d, 0x5c, 0x45, 0x2e, 0xa7, 0xa5,
+	0x69, 0xba, 0x2a, 0x57, 0x18, 0x55, 0x0c, 0xc0, 0xda, 0x93, 0x03, 0x6d, 0xa0, 0xcb, 0x55, 0x24,
+	0x43, 0x25, 0x36, 0x09, 0x3f, 0x57, 0x43, 0x8b, 0x20, 0xa7, 0x98, 0x09, 0x3b, 0xd5, 0x99, 0xec,
+	0xd6, 0x41, 0xbf, 0xd3, 0xde, 0x55, 0x75, 0x2d, 0xa5, 0xac, 0x8c, 0xca, 0xb0, 0x30, 0x78, 0xdc,
+	0xee, 0xf7, 0xb5, 0x96, 0x7c, 0x83, 0x1b, 0xa9, 0xd7, 0x33, 0xf6, 0xd5, 0xee, 0x53, 0x4e, 0x34,
+	0x90, 0x51, 0x5a, 0x6c, 0x5f, 0x7d, 0xda, 0xe9, 0xa9, 0x2d, 0xf9, 0x26, 0x7a, 0x17, 0x1a, 0xd3,
+	0xbb, 0xe8, 0xf8, 0xa9, 0xd1, 0x57, 0xb1, 0xba, 0xaf, 0xe9, 0x1a, 0x1e, 0xc8, 0x8b, 0x17, 0xed,
+	0xb2, 0xdf, 0x6b, 0x69, 0xf2, 0x2d, 0x76, 0x35, 0x75, 0xb7, 0x63, 0x74, 0x7a, 0xbd, 0xc7, 0x07,
+	0xfd, 0xc8, 0x33, 0x4b, 0xe8, 0x2e, 0xbc, 0xcf, 0x5d, 0xa8, 0xee, 0xea, 0xed, 0x1e, 0x73, 0x59,
+	0xa4, 0x5d, 0xca, 0x55, 0xcb, 0xa8, 0x09, 0x4b, 0xed, 0xee, 0x6e, 0x0f, 0x63, 0x6d, 0x57, 0x37,
+	0x76, 0xb1, 0xa6, 0xea, 0x3d, 0x2c, 0x54, 0x68, 0x30, 0x71, 0x5c, 0xa3, 0x8e, 0xa6, 0x0e, 0x34,
+	0x43, 0xfb, 0xb6, 0xdf, 0xc6, 0x5a, 0x4b, 0x5e, 0x61, 0xb6, 0x11, 0xe2, 0xfb, 0xea, 0xc1, 0x40,
+	0x6b, 0xc9, 0xcd, 0xb4, 0x4d, 0x75, 0x75, 0x4f, 0x5e, 0x45, 0x8b, 0x50, 0x6f, 0xa9, 0xba, 0x3a,
+	0xd0, 0x7b, 0x58, 0x8b, 0x2e, 0xf4, 0x9b, 0xae, 0xb2, 0x0a, 0x65, 0x16, 0x96, 0x7d, 0x72, 0x66,
+	0xbb, 0xc4, 0xfc, 0xa4, 0x58, 0x04, 0xf9, 0xe5, 0xcb, 0x97, 0x2f, 0x17, 0xb6, 0x33, 0x45, 0x49,
+	0xf9, 0x9b, 0x04, 0x8d, 0x24, 0x68, 0x31, 0x0d, 0xbc, 0xb3, 0x3e, 0xf1, 0xc8, 0x98, 0x06, 0xd4,
+	0xf3, 0xd1, 0xfb, 0x50, 0xf6, 0x18, 0xc8, 0xb0, 0xad, 0xb1, 0x15, 0x34, 0xa4, 0x35, 0x69, 0x3d,
+	0x8f, 0x81, 0x83, 0x3a, 0x0c, 0x82, 0x14, 0xa8, 0x92, 0x11, 0x15, 0x68, 0xc3, 0xa7, 0xc3, 0x46,
+	0x66, 0x4d, 0x5a, 0xcf, 0xe2, 0x32, 0x19, 0x51, 0x4e, 0x30, 0xa0, 0x43, 0xf4, 0x29, 0xd4, 0xc7,
+	0x96, 0x63, 0x1c, 0x92, 0xe1, 0x89, 0x7b, 0x74, 0xc4, 0xa9, 0xb2, 0x6b, 0xd2, 0xba, 0xb4, 0x9d,
+	0xdd, 0xdc, 0xb8, 0x8f, 0xab, 0x63, 0xcb, 0xd9, 0x11, 0x28, 0x46, 0x7c, 0x0f, 0xea, 0x63, 0x72,
+	0x3a, 0x43, 0x9c, 0xe3, 0xc4, 0xb9, 0xcf, 0x1f, 0x6c, 0x6e, 0xe2, 0xea, 0x98, 0x9c, 0xa6, 0xa8,
+	0x3f, 0x06, 0x06, 0x30, 0x4c, 0x37, 0x3c, 0xb4, 0x2d, 0x67, 0xe4, 0x37, 0xf2, 0xec, 0x86, 0xdb,
+	0x99, 0xfb, 0x0f, 0x70, 0x65, 0x4c, 0x4e, 0x5b, 0x31, 0x5c, 0xe9, 0x43, 0x25, 0x51, 0x52, 0x1d,
+	0xda, 0xe8, 0x36, 0x40, 0xe8, 0x53, 0xcf, 0xa0, 0x63, 0x62, 0xd9, 0x0d, 0x69, 0x2d, 0xbb, 0x5e,
+	0xc1, 0x25, 0x06, 0xd1, 0x18, 0x00, 0xdd, 0x81, 0xca, 0x73, 0xcf, 0x0a, 0x12, 0x82, 0x0c, 0x27,
+	0x28, 0x0b, 0x18, 0x27, 0x51, 0xbe, 0x84, 0x9b, 0x09, 0xc7, 0x47, 0x41, 0x30, 0x79, 0x44, 0x89,
+	0x49, 0x3d, 0x24, 0x43, 0xf6, 0x84, 0x9e, 0x35, 0xa4, 0xb5, 0xcc, 0x7a, 0x05, 0xb3, 0x47, 0xb4,
+	0x08, 0xf9, 0x67, 0xc4, 0x0e, 0x69, 0x23, 0xc3, 0x61, 0xe2, 0x45, 0xf9, 0x14, 0xaa, 0xc9, 0xf1,
+	0x7d, 0xd7, 0xa4, 0x4a, 0x13, 0x72, 0xec, 0x3f, 0x2a, 0x42, 0xae, 0x7f, 0x30, 0x78, 0x24, 0xbf,
+	0x23, 0x9e, 0x3a, 0x1d, 0x59, 0x52, 0xfe, 0x51, 0x48, 0x09, 0x53, 0x4d, 0x13, 0xd3, 0xef, 0x43,
+	0xea, 0x07, 0x4c, 0x0b, 0x51, 0xd5, 0x1c, 0x32, 0xa6, 0x91, 0xcc, 0x12, 0x87, 0x74, 0xc9, 0x98,
+	0xa2, 0x55, 0x28, 0xb1, 0xc2, 0x27, 0xb0, 0x42, 0x7a, 0x91, 0x01, 0x38, 0x72, 0x05, 0x8a, 0x34,
+	0x20, 0x46, 0x28, 0xdc, 0x91, 0x59, 0xcf, 0xe2, 0x05, 0x1a, 0x90, 0x03, 0x9f, 0x0e, 0xd1, 0xd7,
+	0x50, 0x18, 0xd3, 0xe0, 0xd8, 0x35, 0xb9, 0x39, 0x6b, 0x5b, 0xf7, 0x36, 0x92, 0x4a, 0xb8, 0x31,
+	0xe7, 0x1a, 0x1b, 0xd1, 0xff, 0x7d, 0x7e, 0x66, 0x3b, 0xd7, 0xef, 0x0d, 0x74, 0x1c, 0x71, 0x60,
+	0xf6, 0x08, 0x3d, 0x9b, 0xfb, 0xb0, 0x82, 0xd9, 0x23, 0xfa, 0x12, 0x0a, 0xc7, 0xdc, 0x56, 0x8d,
+	0xc2, 0x5a, 0x76, 0x1d, 0xb6, 0x3e, 0x7c, 0x0d, 0x77, 0x61, 0x58, 0x1c, 0x1d, 0x42, 0x4b, 0x90,
+	0x3b, 0x74, 0xcd, 0xb3, 0x46, 0x89, 0x71, 0xdc, 0xc9, 0x14, 0x25, 0xcc, 0xdf, 0xd1, 0xff, 0x42,
+	0x39, 0xf0, 0x88, 0xe3, 0x93, 0x61, 0x60, 0xb9, 0x4e, 0x03, 0xd6, 0xa4, 0xf5, 0xf2, 0xd6, 0x52,
+	0x9a, 0xf7, 0x14, 0x8b, 0xd3, 0xa4, 0xe8, 0x16, 0x14, 0xc8, 0x64, 0x62, 0x58, 0x66, 0xa3, 0xcc,
+	0x6f, 0x99, 0x27, 0x93, 0x49, 0xdb, 0x44, 0x18, 0xaa, 0x43, 0xcf, 0x75, 0x02, 0x6b, 0x4c, 0x03,
+	0x72, 0x68, 0xd3, 0x46, 0x65, 0x4d, 0x5a, 0x87, 0xd7, 0x1a, 0x63, 0xd7, 0x73, 0x1d, 0x3d, 0x3e,
+	0x83, 0x67, 0x59, 0xa0, 0x35, 0x28, 0x9b, 0xd4, 0x1f, 0x7a, 0xd6, 0x84, 0x5f, 0xb2, 0xce, 0xe5,
+	0xa5, 0x41, 0x68, 0x13, 0x16, 0x26, 0x22, 0x4f, 0x1b, 0xf2, 0x45, 0x15, 0xa6, 0x59, 0x8c, 0x63,
+	0x32, 0xd4, 0x05, 0x59, 0xe4, 0xe8, 0x24, 0xc9, 0xdb, 0xc6, 0x0d, 0x7e, 0xf4, 0xee, 0xbc, 0xab,
+	0x9e, 0x4b, 0x71, 0x5c, 0xf7, 0xce, 0xe5, 0xfc, 0x17, 0x90, 0x1b, 0xbb, 0x26, 0x6d, 0x20, 0xee,
+	0xfb, 0xdb, 0xf3, 0x78, 0xb0, 0x40, 0xdd, 0x60, 0x7f, 0xb6, 0x79, 0xac, 0x62, 0x7e, 0x80, 0xb9,
+	0x3a, 0x20, 0xa3, 0xc6, 0x4d, 0xe1, 0xea, 0x80, 0x8c, 0x9a, 0x9b, 0x50, 0x98, 0x4d, 0x8b, 0x85,
+	0x39, 0x69, 0x51, 0x4c, 0xa5, 0x45, 0x73, 0x0f, 0xaa, 0x33, 0x06, 0x44, 0x4d, 0x28, 0xfa, 0xc3,
+	0x63, 0x6a, 0x86, 0x36, 0x6d, 0x54, 0x45, 0x08, 0xc7, 0xef, 0x0c, 0xc7, 0x4c, 0xfb, 0xc2, 0x75,
+	0x68, 0xa3, 0x16, 0x85, 0x77, 0xf4, 0xae, 0xa8, 0x50, 0x9d, 0x09, 0x4b, 0xb4, 0x00, 0xd9, 0x3d,
+	0x4d, 0x97, 0x25, 0x9e, 0x56, 0xbd, 0x81, 0x2e, 0x67, 0xd8, 0xd3, 0x23, 0x4d, 0x6d, 0xc9, 0x59,
+	0x86, 0xec, 0x1f, 0xe8, 0x72, 0x0e, 0x01, 0x14, 0x5a, 0x5a, 0x47, 0xd3, 0x35, 0x39, 0xaf, 0xfc,
+	0x3f, 0x2c, 0xce, 0x3a, 0xd8, 0x9f, 0xb8, 0x8e, 0x4f, 0xd1, 0x3a, 0xc8, 0xc3, 0x63, 0xd7, 0xa7,
+	0x8e, 0x31, 0xcd, 0x2e, 0x89, 0x2b, 0x5d, 0x13, 0x70, 0x3d, 0xca, 0x31, 0xe5, 0x3b, 0x58, 0x4e,
+	0x38, 0xec, 0x84, 0xf6, 0x49, 0x2a, 0x75, 0xbf, 0x82, 0x32, 0x31, 0x4d, 0xc3, 0x13, 0xaf, 0xbc,
+	0x02, 0x95, 0xb7, 0xde, 0xbb, 0x3c, 0xb6, 0x30, 0x90, 0xe4, 0x59, 0xf9, 0x7b, 0xba, 0x6e, 0x27,
+	0xcc, 0xa3, 0x2b, 0x76, 0x01, 0xd8, 0xdd, 0x3c, 0xea, 0x87, 0xb6, 0x60, 0x0e, 0x5b, 0x1b, 0xf3,
+	0x98, 0x9f, 0x3b, 0xc8, 0x11, 0x98, 0x9f, 0xc2, 0x29, 0x0e, 0xcd, 0x17, 0x00, 0x53, 0x0c, 0xda,
+	0x81, 0x42, 0xc4, 0x99, 0x15, 0x95, 0xda, 0xd6, 0x27, 0xf3, 0x38, 0xa7, 0xe7, 0x9f, 0x8d, 0x64,
+	0xf6, 0xc1, 0xd1, 0xc9, 0xb9, 0x46, 0xcc, 0xce, 0x35, 0xe2, 0x09, 0x2c, 0x25, 0x4c, 0x5b, 0xd4,
+	0xa6, 0x01, 0xbd, 0x5a, 0xf9, 0xcb, 0xce, 0x94, 0xbf, 0x69, 0xd2, 0x67, 0x53, 0x49, 0xaf, 0xfc,
+	0x3c, 0xe5, 0xb1, 0x58, 0x58, 0x64, 0xd3, 0xa9, 0xd6, 0xd9, 0xb5, 0xec, 0xd5, 0xb4, 0x56, 0xc6,
+	0x29, 0x9f, 0x3d, 0x74, 0xbd, 0x21, 0xc5, 0xa1, 0x13, 0x6b, 0x33, 0xbd, 0x91, 0x94, 0x2e, 0x43,
+	0xb3, 0x4a, 0x66, 0x2e, 0x55, 0x32, 0x3b, 0x5b, 0xe3, 0x15, 0x03, 0x56, 0xe6, 0x88, 0x9b, 0xa3,
+	0xcf, 0x15, 0xbd, 0xa8, 0xfc, 0x90, 0x83, 0xd5, 0x84, 0xf6, 0x60, 0x62, 0x92, 0x80, 0x46, 0x45,
+	0xe6, 0x3a, 0x3a, 0x7d, 0x01, 0x8d, 0xc3, 0x70, 0x78, 0x42, 0x03, 0xc3, 0xa3, 0x47, 0x96, 0x6d,
+	0x1b, 0x13, 0xea, 0xb1, 0x49, 0xc0, 0x75, 0x4c, 0x7e, 0x57, 0x09, 0xdf, 0x12, 0x78, 0xcc, 0xd1,
+	0x7d, 0xea, 0x0d, 0x38, 0x12, 0x7d, 0x0c, 0xf5, 0xe8, 0xe0, 0x90, 0x4c, 0xc8, 0xd0, 0x0a, 0xce,
+	0x1a, 0xb9, 0xb5, 0xcc, 0x7a, 0x1e, 0xd7, 0x04, 0x78, 0x37, 0x82, 0xa2, 0x0d, 0xb8, 0xc9, 0xdb,
+	0xbf, 0x3f, 0xa1, 0x43, 0xeb, 0xc8, 0xa2, 0xa6, 0xe1, 0x91, 0x80, 0xf2, 0x76, 0x57, 0xc2, 0x37,
+	0x18, 0x6a, 0x10, 0x63, 0x30, 0x09, 0xe8, 0xdc, 0x1a, 0x5b, 0xb8, 0x46, 0x8d, 0x7d, 0x00, 0xcb,
+	0x6c, 0x6e, 0x19, 0xba, 0xce, 0x30, 0xf4, 0x3c, 0xea, 0x04, 0x71, 0x21, 0xf0, 0x1b, 0x0b, 0x7c,
+	0xc6, 0xba, 0x35, 0x26, 0xa7, 0xbb, 0x09, 0x36, 0x32, 0xe7, 0xb4, 0x36, 0x17, 0xdf, 0xb6, 0x36,
+	0xff, 0x17, 0x64, 0xc9, 0xd0, 0xe6, 0x4d, 0xb3, 0xbc, 0xb5, 0x3c, 0xb7, 0xcc, 0x0c, 0x6d, 0xcc,
+	0x68, 0xd0, 0x1e, 0xd4, 0x45, 0xab, 0x35, 0xdc, 0x67, 0xd4, 0xf3, 0x2c, 0x93, 0x36, 0xe0, 0xd5,
+	0xd5, 0x69, 0x3a, 0xfa, 0xe0, 0x9a, 0x38, 0xd6, 0x8b, 0x4e, 0x29, 0xef, 0xc1, 0xbb, 0xf3, 0x63,
+	0x43, 0x04, 0xa0, 0xd2, 0x4b, 0xc5, 0xce, 0x43, 0x1a, 0x0c, 0x8f, 0xf9, 0x93, 0xff, 0x9a, 0xd8,
+	0x59, 0x81, 0x22, 0x33, 0x9d, 0xe7, 0x3e, 0xf7, 0x79, 0xe4, 0xe4, 0xf1, 0xc2, 0x98, 0x9c, 0x62,
+	0xf7, 0xb9, 0xaf, 0xfc, 0x31, 0x9f, 0x92, 0x38, 0xc3, 0x31, 0x0a, 0xf9, 0x5d, 0xc8, 0xf3, 0x28,
+	0x8b, 0x2a, 0xe2, 0x7f, 0xcf, 0x53, 0x68, 0xce, 0xb9, 0x0d, 0x71, 0x6f, 0x71, 0xb6, 0xf9, 0x97,
+	0x1c, 0xe4, 0x39, 0xe0, 0x3f, 0x1d, 0xc6, 0xd2, 0xb5, 0xc3, 0xf8, 0x36, 0x14, 0x26, 0x24, 0xf4,
+	0xa9, 0xd9, 0x28, 0xac, 0x65, 0xd6, 0x8b, 0xdb, 0xf9, 0x23, 0x62, 0xfb, 0x14, 0x47, 0xc0, 0xb9,
+	0x51, 0xbe, 0xf0, 0xd3, 0x44, 0x79, 0xf1, 0x4d, 0xa2, 0xbc, 0x74, 0xc5, 0x28, 0x87, 0xab, 0x45,
+	0x79, 0xf9, 0x2a, 0x51, 0x8e, 0xee, 0x43, 0x65, 0xe8, 0x51, 0x12, 0xb8, 0x9e, 0x08, 0x03, 0x36,
+	0x25, 0x96, 0xb6, 0x81, 0x4c, 0x26, 0xc7, 0xae, 0x1f, 0x58, 0xce, 0x88, 0xcf, 0xa8, 0xe5, 0x88,
+	0x86, 0x97, 0xe5, 0x5f, 0xc0, 0xfb, 0x73, 0xc2, 0x6d, 0x10, 0x90, 0xc0, 0x7f, 0xcb, 0xc2, 0x99,
+	0x9d, 0x8d, 0xb8, 0x0f, 0xc5, 0xe7, 0x90, 0x13, 0x8e, 0x79, 0x57, 0xf5, 0x79, 0x6f, 0xcb, 0x6f,
+	0x4b, 0x9b, 0xb8, 0x3c, 0x26, 0xa7, 0xdd, 0x70, 0xcc, 0xc4, 0xfa, 0xca, 0xaf, 0x32, 0xa9, 0xbe,
+	0x30, 0x18, 0x12, 0xc7, 0xa1, 0x1e, 0x7f, 0x6e, 0x3b, 0x47, 0x2e, 0xda, 0x84, 0x45, 0x7a, 0x4a,
+	0x87, 0x61, 0x40, 0x4d, 0xc3, 0x26, 0x7e, 0x60, 0x8c, 0x2d, 0x27, 0x0c, 0x44, 0x7f, 0xcd, 0x62,
+	0x14, 0xe3, 0x3a, 0xc4, 0x0f, 0xf6, 0x39, 0x06, 0xdd, 0x03, 0x34, 0x7b, 0xe2, 0xd8, 0x0d, 0x3d,
+	0x9e, 0x0f, 0x59, 0x2c, 0xa7, 0xe9, 0x1f, 0xb9, 0xa1, 0x87, 0xb6, 0x61, 0xc5, 0x27, 0xe3, 0x09,
+	0xfb, 0x2e, 0x33, 0xcc, 0xd0, 0x23, 0x6c, 0xec, 0x8d, 0xd2, 0xc2, 0x8f, 0xf2, 0x62, 0x39, 0x26,
+	0x68, 0x45, 0x78, 0x91, 0x18, 0x3e, 0x93, 0x14, 0x87, 0x90, 0x61, 0x39, 0xc6, 0x91, 0x6d, 0x8d,
+	0x8e, 0x03, 0xfe, 0x71, 0x91, 0xc7, 0x72, 0x8c, 0x69, 0x3b, 0x0f, 0x39, 0x1c, 0xdd, 0x85, 0x2a,
+	0x75, 0x8e, 0x58, 0xdf, 0x4b, 0x25, 0x86, 0x84, 0x2b, 0x31, 0x90, 0xe5, 0x84, 0xf2, 0xbb, 0x0c,
+	0xac, 0xbd, 0xda, 0x1b, 0x51, 0xe1, 0xf8, 0x26, 0xb2, 0xbb, 0xcf, 0xa0, 0x51, 0xf5, 0x78, 0x70,
+	0x79, 0xf5, 0x98, 0x61, 0xb0, 0x91, 0x02, 0xa5, 0x38, 0x35, 0x7f, 0x90, 0x00, 0xa6, 0x28, 0xd6,
+	0xcc, 0xa7, 0xbe, 0x13, 0xc5, 0xad, 0xe8, 0x44, 0x5e, 0x43, 0x1f, 0x41, 0xdd, 0xb5, 0x4d, 0xea,
+	0x07, 0xc6, 0xb9, 0xef, 0xb6, 0xaa, 0x00, 0x6b, 0xd1, 0xd7, 0xdb, 0x1e, 0x54, 0x7c, 0xe1, 0x53,
+	0xc3, 0x72, 0x8e, 0x5c, 0x6e, 0x9d, 0xf2, 0xd6, 0x07, 0x73, 0xbb, 0xfb, 0x39, 0xdf, 0xe3, 0x72,
+	0x74, 0x92, 0xbd, 0x28, 0xc7, 0xd0, 0x4c, 0x28, 0xfb, 0xac, 0x42, 0xbc, 0xb2, 0xb5, 0x67, 0xde,
+	0xb8, 0xb5, 0x2f, 0x42, 0x9e, 0x17, 0x1b, 0x7e, 0xf5, 0x22, 0x16, 0x2f, 0xca, 0xed, 0x54, 0x27,
+	0x48, 0x4b, 0x8a, 0x1a, 0x05, 0x4e, 0x5f, 0x24, 0xf4, 0x46, 0x3f, 0xc2, 0x8c, 0x31, 0x2b, 0x32,
+	0xc5, 0x33, 0x12, 0x39, 0x48, 0xa1, 0xc5, 0x1c, 0x78, 0x7d, 0xe5, 0x67, 0x1a, 0xe2, 0x0c, 0xd3,
+	0x48, 0xe8, 0xff, 0x5c, 0x10, 0xba, 0xe7, 0xb9, 0xe1, 0xe4, 0x72, 0xa1, 0x73, 0xb8, 0x46, 0xa7,
+	0x22, 0xae, 0x7f, 0x95, 0x52, 0xe6, 0x7b, 0x12, 0x52, 0xef, 0x8c, 0xc7, 0xd3, 0xf5, 0x46, 0xb4,
+	0x8f, 0xa0, 0xee, 0x07, 0xc4, 0x0b, 0x2e, 0x4c, 0xef, 0x55, 0x0e, 0x8e, 0x87, 0x77, 0xf4, 0x01,
+	0xd4, 0x04, 0x5d, 0x12, 0xb3, 0x39, 0xbe, 0x20, 0xaa, 0x70, 0x68, 0x1c, 0xb2, 0xab, 0x50, 0x8a,
+	0xb9, 0x8d, 0xf8, 0x5c, 0xc5, 0xbe, 0xf2, 0x04, 0x9f, 0x11, 0x7a, 0x37, 0xd5, 0xf0, 0xc5, 0x7a,
+	0x47, 0xba, 0x3f, 0xed, 0xf9, 0xbf, 0x84, 0x94, 0xd1, 0xd2, 0xda, 0x45, 0x99, 0xfb, 0x15, 0xe4,
+	0xd8, 0x15, 0xa3, 0x9c, 0xfd, 0x74, 0x5e, 0x16, 0x5c, 0x3c, 0x25, 0x3e, 0x83, 0xf8, 0xc1, 0xe6,
+	0x1f, 0x4a, 0x90, 0x63, 0xaf, 0x57, 0xde, 0xa6, 0x5c, 0xdc, 0x80, 0x3c, 0x39, 0xb7, 0x5f, 0xf9,
+	0xbf, 0xb7, 0xb8, 0xd5, 0xec, 0xb2, 0x25, 0x59, 0xb3, 0x28, 0xf1, 0xa2, 0x6e, 0xe8, 0x86, 0x4e,
+	0xc0, 0x6d, 0xc8, 0xeb, 0xbe, 0xd8, 0xd5, 0xed, 0x32, 0x20, 0xfa, 0x3a, 0x59, 0xbc, 0x2c, 0x70,
+	0x63, 0x6c, 0xbd, 0x8d, 0xd8, 0x73, 0x5b, 0x98, 0x55, 0x28, 0x1d, 0xba, 0xe6, 0x99, 0xe1, 0x5b,
+	0x2f, 0x28, 0xef, 0xb7, 0x79, 0x5c, 0x64, 0x80, 0x81, 0xf5, 0x82, 0x26, 0x2b, 0x9a, 0xf2, 0xb9,
+	0x15, 0xcd, 0x3d, 0x40, 0xbc, 0x0d, 0xb2, 0x82, 0xcf, 0x3e, 0xd4, 0x85, 0xb9, 0x2a, 0xa2, 0x4f,
+	0xc4, 0x18, 0xf6, 0xe9, 0xcf, 0xed, 0x66, 0x9c, 0xdf, 0xbf, 0x54, 0xf9, 0xfe, 0xe5, 0xad, 0x8c,
+	0x75, 0xe9, 0x32, 0xe6, 0x6b, 0x28, 0x78, 0xa1, 0x63, 0xbb, 0x23, 0xbe, 0x69, 0x79, 0x4b, 0x7b,
+	0xe0, 0xd0, 0xe9, 0xb8, 0x23, 0x1c, 0x71, 0x38, 0xbf, 0xd8, 0xb9, 0x75, 0xe9, 0x62, 0x67, 0xe9,
+	0xea, 0x8b, 0x9d, 0xe5, 0x6b, 0x8c, 0x63, 0x1f, 0x40, 0xed, 0xc8, 0xf2, 0xfc, 0xc0, 0x60, 0x3c,
+	0xb9, 0xe9, 0x1b, 0x22, 0x17, 0x39, 0x54, 0xf7, 0xce, 0xe2, 0x70, 0x65, 0x59, 0xb8, 0x92, 0x6c,
+	0x71, 0xd0, 0x27, 0x50, 0x17, 0x4d, 0x9c, 0xf9, 0x4d, 0xc4, 0x57, 0x33, 0x8e, 0xaf, 0x5a, 0x82,
+	0xe1, 0x31, 0x76, 0x71, 0xe3, 0x53, 0x9c, 0xb3, 0xf1, 0x29, 0xbd, 0xf1, 0xc6, 0xa7, 0x76, 0xc9,
+	0xc6, 0xa7, 0x3e, 0xbb, 0xf1, 0x69, 0xfe, 0x49, 0x82, 0x82, 0xf0, 0x0a, 0x1b, 0xa0, 0x4d, 0xcb,
+	0x9f, 0x90, 0x80, 0x9d, 0x13, 0xaa, 0xde, 0xe0, 0x51, 0x56, 0x9b, 0x82, 0xb9, 0xb2, 0x2b, 0x50,
+	0xb4, 0xc9, 0x48, 0x50, 0x20, 0x91, 0xb6, 0x36, 0x19, 0x71, 0xd4, 0x1d, 0xa8, 0x50, 0x9b, 0x4c,
+	0xfc, 0x98, 0xc1, 0x4d, 0x8e, 0x2e, 0x47, 0x30, 0x4e, 0x72, 0x17, 0xaa, 0x5e, 0x14, 0x14, 0xc6,
+	0x90, 0x0d, 0xac, 0x8b, 0xc2, 0x9e, 0x31, 0x90, 0xff, 0xd8, 0x73, 0x07, 0x2a, 0xc2, 0x8b, 0x1e,
+	0x25, 0xbe, 0xeb, 0x34, 0x56, 0xf9, 0x70, 0x2e, 0xb2, 0x15, 0x73, 0xd0, 0x8f, 0xb1, 0xab, 0x72,
+	0xd2, 0x5f, 0xfa, 0x6c, 0x06, 0x11, 0xeb, 0x9a, 0x9f, 0x6c, 0xb3, 0xf0, 0x6d, 0xaa, 0xa7, 0xa4,
+	0xe4, 0x45, 0x45, 0x77, 0x3b, 0x29, 0xba, 0x99, 0xf5, 0xf2, 0xd6, 0x47, 0x6f, 0x96, 0x57, 0xa2,
+	0xde, 0x2a, 0x4f, 0x40, 0x39, 0xf7, 0xd5, 0x38, 0x08, 0x5c, 0x2f, 0xfe, 0x3d, 0xe1, 0x35, 0x0d,
+	0x78, 0x11, 0xf2, 0xe2, 0x97, 0x0a, 0x31, 0x7c, 0x8a, 0x17, 0x65, 0x07, 0xee, 0x5e, 0xca, 0x32,
+	0xba, 0x35, 0x9b, 0xbe, 0xe8, 0xf3, 0xe4, 0xa7, 0x0e, 0xc6, 0xa0, 0xe8, 0xd0, 0xe7, 0x9c, 0x48,
+	0xf9, 0xb3, 0x94, 0x1a, 0x13, 0xf9, 0xe5, 0x55, 0xc7, 0xec, 0x3d, 0x77, 0x66, 0x7a, 0xe9, 0x6b,
+	0x16, 0x52, 0x77, 0xa1, 0x6a, 0x53, 0xe2, 0xd3, 0x64, 0xda, 0xcd, 0xf0, 0x69, 0xb7, 0xc2, 0x81,
+	0xf1, 0x88, 0xbb, 0x0a, 0x25, 0xd6, 0xee, 0xe2, 0xf9, 0x9d, 0xdf, 0x62, 0x4c, 0x4e, 0xc5, 0x0c,
+	0xf8, 0x31, 0x54, 0x46, 0xac, 0xb9, 0x1b, 0x87, 0x67, 0xbc, 0x57, 0xb2, 0xa6, 0x92, 0x7c, 0xc6,
+	0x01, 0x47, 0xed, 0x9c, 0xb1, 0xa6, 0x19, 0x65, 0x71, 0x3e, 0xc9, 0x62, 0xe5, 0x9f, 0x12, 0xdc,
+	0xb9, 0x44, 0x81, 0xc8, 0x06, 0xda, 0x4c, 0xbb, 0xbc, 0xff, 0x4a, 0xcf, 0xcd, 0x39, 0x9b, 0x6e,
+	0x9a, 0xbf, 0x96, 0xae, 0xd9, 0x34, 0xcf, 0xf5, 0xb3, 0xdc, 0xbc, 0x7e, 0x16, 0xb7, 0x99, 0xfc,
+	0xb9, 0x36, 0x13, 0xe9, 0x5e, 0x98, 0xea, 0xfe, 0x7b, 0x29, 0xf5, 0xc5, 0xb5, 0xef, 0x9a, 0xd6,
+	0x11, 0x0f, 0xbd, 0x0e, 0xb3, 0xfb, 0x4f, 0xfc, 0x5b, 0xca, 0x05, 0x9f, 0xe7, 0x2e, 0xfa, 0x5c,
+	0xe9, 0xa4, 0x62, 0xeb, 0xc2, 0xf5, 0xa6, 0x5b, 0xe7, 0x90, 0xc7, 0xae, 0x39, 0x9d, 0xa5, 0x44,
+	0x90, 0xd6, 0x22, 0x78, 0x34, 0x4d, 0xed, 0x94, 0xbf, 0x2b, 0x25, 0xbf, 0x77, 0xff, 0x3b, 0x00,
+	0x00, 0xff, 0xff, 0x67, 0xac, 0x35, 0x53, 0x2a, 0x1f, 0x00, 0x00,
+}
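
The generated accessors above follow the standard proto2 pattern: each getter is nil-safe and, when the optional field is unset, falls back to the field's default constant (for example `Default_TaskQueueQueryTasksResponse_Task_RetryCount`). A minimal sketch of that behaviour follows; it is a hypothetical example file that would have to live inside the appengine module, since the generated package sits under `internal/` and is not importable from elsewhere.

```go
package taskqueue_test // hypothetical; must live inside the appengine module

import (
	"fmt"

	"github.com/golang/protobuf/proto"
	pb "google.golang.org/appengine/v2/internal/taskqueue"
)

func Example_nilSafeGetters() {
	// A nil message still answers with the declared proto2 defaults,
	// so callers never need to nil-check optional fields themselves.
	var missing *pb.TaskQueueQueryTasksResponse_Task
	fmt.Println(missing.GetRetryCount(), missing.GetMethod())

	task := &pb.TaskQueueQueryTasksResponse_Task{
		TaskName:   []byte("t1"),
		EtaUsec:    proto.Int64(1700000000000000),
		RetryCount: proto.Int32(3),
	}
	fmt.Println(task.GetRetryCount(), task.GetEtaUsec())
	// Output:
	// 0 GET
	// 3 1700000000000000
}
```
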
diff --git a/v2/internal/taskqueue/taskqueue_service.proto b/v2/internal/taskqueue/taskqueue_service.proto
new file mode 100644
index 0000000..1b65d96
--- /dev/null
+++ b/v2/internal/taskqueue/taskqueue_service.proto
@@ -0,0 +1,342 @@
+syntax = "proto2";
+option go_package = "taskqueue";
+
+import "google.golang.org/appengine/v2/internal/datastore/datastore_v3.proto";
+
+package appengine;
+
+message TaskQueueServiceError {
+  enum ErrorCode {
+    OK = 0;
+    UNKNOWN_QUEUE = 1;
+    TRANSIENT_ERROR = 2;
+    INTERNAL_ERROR = 3;
+    TASK_TOO_LARGE = 4;
+    INVALID_TASK_NAME = 5;
+    INVALID_QUEUE_NAME = 6;
+    INVALID_URL = 7;
+    INVALID_QUEUE_RATE = 8;
+    PERMISSION_DENIED = 9;
+    TASK_ALREADY_EXISTS = 10;
+    TOMBSTONED_TASK = 11;
+    INVALID_ETA = 12;
+    INVALID_REQUEST = 13;
+    UNKNOWN_TASK = 14;
+    TOMBSTONED_QUEUE = 15;
+    DUPLICATE_TASK_NAME = 16;
+    SKIPPED = 17;
+    TOO_MANY_TASKS = 18;
+    INVALID_PAYLOAD = 19;
+    INVALID_RETRY_PARAMETERS = 20;
+    INVALID_QUEUE_MODE = 21;
+    ACL_LOOKUP_ERROR = 22;
+    TRANSACTIONAL_REQUEST_TOO_LARGE = 23;
+    INCORRECT_CREATOR_NAME = 24;
+    TASK_LEASE_EXPIRED = 25;
+    QUEUE_PAUSED = 26;
+    INVALID_TAG = 27;
+
+    // Reserved range for the Datastore error codes.
+    // Original Datastore error code is shifted by DATASTORE_ERROR offset.
+    DATASTORE_ERROR = 10000;
+  }
+}
+
+message TaskPayload {
+  extensions 10 to max;
+  option message_set_wire_format = true;
+}
+
+message TaskQueueRetryParameters {
+  optional int32 retry_limit = 1;
+  optional int64 age_limit_sec = 2;
+
+  optional double min_backoff_sec = 3 [default = 0.1];
+  optional double max_backoff_sec = 4 [default = 3600];
+  optional int32 max_doublings = 5 [default = 16];
+}
+
+message TaskQueueAcl {
+  repeated bytes user_email = 1;
+  repeated bytes writer_email = 2;
+}
+
+message TaskQueueHttpHeader {
+  required bytes key = 1;
+  required bytes value = 2;
+}
+
+message TaskQueueMode {
+  enum Mode {
+    PUSH = 0;
+    PULL = 1;
+  }
+}
+
+message TaskQueueAddRequest {
+  required bytes queue_name = 1;
+  required bytes task_name = 2;
+  required int64 eta_usec = 3;
+
+  enum RequestMethod {
+    GET = 1;
+    POST = 2;
+    HEAD = 3;
+    PUT = 4;
+    DELETE = 5;
+  }
+  optional RequestMethod method = 5 [default=POST];
+
+  optional bytes url = 4;
+
+  repeated group Header = 6 {
+    required bytes key = 7;
+    required bytes value = 8;
+  }
+
+  optional bytes body = 9 [ctype=CORD];
+  optional Transaction transaction = 10;
+  optional bytes app_id = 11;
+
+  optional group CronTimetable = 12 {
+    required bytes schedule = 13;
+    required bytes timezone = 14;
+  }
+
+  optional bytes description = 15;
+  optional TaskPayload payload = 16;
+  optional TaskQueueRetryParameters retry_parameters = 17;
+  optional TaskQueueMode.Mode mode = 18 [default=PUSH];
+  optional bytes tag = 19;
+}
+
+message TaskQueueAddResponse {
+  optional bytes chosen_task_name = 1;
+}
+
+message TaskQueueBulkAddRequest {
+  repeated TaskQueueAddRequest add_request = 1;
+}
+
+message TaskQueueBulkAddResponse {
+  repeated group TaskResult = 1 {
+    required TaskQueueServiceError.ErrorCode result = 2;
+    optional bytes chosen_task_name = 3;
+  }
+}
+
+message TaskQueueDeleteRequest {
+  required bytes queue_name = 1;
+  repeated bytes task_name = 2;
+  optional bytes app_id = 3;
+}
+
+message TaskQueueDeleteResponse {
+  repeated TaskQueueServiceError.ErrorCode result = 3;
+}
+
+message TaskQueueForceRunRequest {
+  optional bytes app_id = 1;
+  required bytes queue_name = 2;
+  required bytes task_name = 3;
+}
+
+message TaskQueueForceRunResponse {
+  required TaskQueueServiceError.ErrorCode result = 3;
+}
+
+message TaskQueueUpdateQueueRequest {
+  optional bytes app_id = 1;
+  required bytes queue_name = 2;
+  required double bucket_refill_per_second = 3;
+  required int32 bucket_capacity = 4;
+  optional string user_specified_rate = 5;
+  optional TaskQueueRetryParameters retry_parameters = 6;
+  optional int32 max_concurrent_requests = 7;
+  optional TaskQueueMode.Mode mode = 8 [default = PUSH];
+  optional TaskQueueAcl acl = 9;
+  repeated TaskQueueHttpHeader header_override = 10;
+}
+
+message TaskQueueUpdateQueueResponse {
+}
+
+message TaskQueueFetchQueuesRequest {
+  optional bytes app_id = 1;
+  required int32 max_rows = 2;
+}
+
+message TaskQueueFetchQueuesResponse {
+  repeated group Queue = 1 {
+    required bytes queue_name = 2;
+    required double bucket_refill_per_second = 3;
+    required double bucket_capacity = 4;
+    optional string user_specified_rate = 5;
+    required bool paused = 6 [default=false];
+    optional TaskQueueRetryParameters retry_parameters = 7;
+    optional int32 max_concurrent_requests = 8;
+    optional TaskQueueMode.Mode mode = 9 [default = PUSH];
+    optional TaskQueueAcl acl = 10;
+    repeated TaskQueueHttpHeader header_override = 11;
+    optional string creator_name = 12 [ctype=CORD, default="apphosting"];
+  }
+}
+
+message TaskQueueFetchQueueStatsRequest {
+  optional bytes app_id = 1;
+  repeated bytes queue_name = 2;
+  optional int32 max_num_tasks = 3 [default = 0];
+}
+
+message TaskQueueScannerQueueInfo {
+  required int64 executed_last_minute = 1;
+  required int64 executed_last_hour = 2;
+  required double sampling_duration_seconds = 3;
+  optional int32 requests_in_flight = 4;
+  optional double enforced_rate = 5;
+}
+
+message TaskQueueFetchQueueStatsResponse {
+  repeated group QueueStats = 1 {
+    required int32 num_tasks = 2;
+    required int64 oldest_eta_usec = 3;
+    optional TaskQueueScannerQueueInfo scanner_info = 4;
+  }
+}
+message TaskQueuePauseQueueRequest {
+  required bytes app_id = 1;
+  required bytes queue_name = 2;
+  required bool pause = 3;
+}
+
+message TaskQueuePauseQueueResponse {
+}
+
+message TaskQueuePurgeQueueRequest {
+  optional bytes app_id = 1;
+  required bytes queue_name = 2;
+}
+
+message TaskQueuePurgeQueueResponse {
+}
+
+message TaskQueueDeleteQueueRequest {
+  required bytes app_id = 1;
+  required bytes queue_name = 2;
+}
+
+message TaskQueueDeleteQueueResponse {
+}
+
+message TaskQueueDeleteGroupRequest {
+  required bytes app_id = 1;
+}
+
+message TaskQueueDeleteGroupResponse {
+}
+
+message TaskQueueQueryTasksRequest {
+  optional bytes app_id = 1;
+  required bytes queue_name = 2;
+
+  optional bytes start_task_name = 3;
+  optional int64 start_eta_usec = 4;
+  optional bytes start_tag = 6;
+  optional int32 max_rows = 5 [default = 1];
+}
+
+message TaskQueueQueryTasksResponse {
+  repeated group Task = 1 {
+    required bytes task_name = 2;
+    required int64 eta_usec = 3;
+    optional bytes url = 4;
+
+    enum RequestMethod {
+      GET = 1;
+      POST = 2;
+      HEAD = 3;
+      PUT = 4;
+      DELETE = 5;
+    }
+    optional RequestMethod method = 5;
+
+    optional int32 retry_count = 6 [default=0];
+
+    repeated group Header = 7 {
+      required bytes key = 8;
+      required bytes value = 9;
+    }
+
+    optional int32 body_size = 10;
+    optional bytes body = 11 [ctype=CORD];
+    required int64 creation_time_usec = 12;
+
+    optional group CronTimetable = 13 {
+      required bytes schedule = 14;
+      required bytes timezone = 15;
+    }
+
+    optional group RunLog = 16 {
+      required int64 dispatched_usec = 17;
+      required int64 lag_usec = 18;
+      required int64 elapsed_usec = 19;
+      optional int64 response_code = 20;
+      optional string retry_reason = 27;
+    }
+
+    optional bytes description = 21;
+    optional TaskPayload payload = 22;
+    optional TaskQueueRetryParameters retry_parameters = 23;
+    optional int64 first_try_usec = 24;
+    optional bytes tag = 25;
+    optional int32 execution_count = 26 [default=0];
+  }
+}
+
+message TaskQueueFetchTaskRequest {
+  optional bytes app_id = 1;
+  required bytes queue_name = 2;
+  required bytes task_name = 3;
+}
+
+message TaskQueueFetchTaskResponse {
+  required TaskQueueQueryTasksResponse task = 1;
+}
+
+message TaskQueueUpdateStorageLimitRequest {
+  required bytes app_id = 1;
+  required int64 limit = 2;
+}
+
+message TaskQueueUpdateStorageLimitResponse {
+  required int64 new_limit = 1;
+}
+
+message TaskQueueQueryAndOwnTasksRequest {
+  required bytes queue_name = 1;
+  required double lease_seconds = 2;
+  required int64 max_tasks = 3;
+  optional bool group_by_tag = 4 [default=false];
+  optional bytes tag = 5;
+}
+
+message TaskQueueQueryAndOwnTasksResponse {
+  repeated group Task = 1 {
+    required bytes task_name = 2;
+    required int64 eta_usec = 3;
+    optional int32 retry_count = 4 [default=0];
+    optional bytes body = 5 [ctype=CORD];
+    optional bytes tag = 6;
+  }
+}
+
+message TaskQueueModifyTaskLeaseRequest {
+  required bytes queue_name = 1;
+  required bytes task_name = 2;
+  required int64 eta_usec = 3;
+  required double lease_seconds = 4;
+}
+
+message TaskQueueModifyTaskLeaseResponse {
+  required int64 updated_eta_usec = 1;
+}
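
One detail worth noting in the error enum above: the comment on `DATASTORE_ERROR = 10000` says that datastore failures are reported with their original code shifted by that offset. The sketch below shows how a caller could undo the shift. It assumes the usual protoc-gen-go constant names for this file and, again, an import path that only works from inside the module.

```go
package main

import (
	"fmt"

	pb "google.golang.org/appengine/v2/internal/taskqueue"
)

// datastoreCode recovers the original datastore error code from a taskqueue
// error code that falls in the reserved DATASTORE_ERROR range.
func datastoreCode(code int32) (int32, bool) {
	offset := int32(pb.TaskQueueServiceError_DATASTORE_ERROR) // 10000
	if code < offset {
		return 0, false // a plain taskqueue error, not a shifted datastore code
	}
	return code - offset, true
}

func main() {
	if ds, ok := datastoreCode(10003); ok {
		fmt.Println("underlying datastore error code:", ds) // prints 3
	}
}
```
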
diff --git a/v2/internal/transaction.go b/v2/internal/transaction.go
new file mode 100644
index 0000000..5fc5cfd
--- /dev/null
+++ b/v2/internal/transaction.go
@@ -0,0 +1,115 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package internal
+
+// This file implements hooks for applying datastore transactions.
+
+import (
+	netcontext "context"
+	"errors"
+	"reflect"
+
+	"github.com/golang/protobuf/proto"
+
+	basepb "google.golang.org/appengine/v2/internal/base"
+	pb "google.golang.org/appengine/v2/internal/datastore"
+)
+
+var transactionSetters = make(map[reflect.Type]reflect.Value)
+
+// RegisterTransactionSetter registers a function that sets transaction information
+// in a protocol buffer message. f should be a function with two arguments,
+// the first being a protocol buffer type, and the second being *datastore.Transaction.
+func RegisterTransactionSetter(f interface{}) {
+	v := reflect.ValueOf(f)
+	transactionSetters[v.Type().In(0)] = v
+}
+
+// applyTransaction applies the transaction t to message pb
+// by using the relevant setter passed to RegisterTransactionSetter.
+func applyTransaction(pb proto.Message, t *pb.Transaction) {
+	v := reflect.ValueOf(pb)
+	if f, ok := transactionSetters[v.Type()]; ok {
+		f.Call([]reflect.Value{v, reflect.ValueOf(t)})
+	}
+}
+
+var transactionKey = "used for *Transaction"
+
+func transactionFromContext(ctx netcontext.Context) *transaction {
+	t, _ := ctx.Value(&transactionKey).(*transaction)
+	return t
+}
+
+func withTransaction(ctx netcontext.Context, t *transaction) netcontext.Context {
+	return netcontext.WithValue(ctx, &transactionKey, t)
+}
+
+type transaction struct {
+	transaction pb.Transaction
+	finished    bool
+}
+
+var ErrConcurrentTransaction = errors.New("internal: concurrent transaction")
+
+func RunTransactionOnce(c netcontext.Context, f func(netcontext.Context) error, xg bool, readOnly bool, previousTransaction *pb.Transaction) (*pb.Transaction, error) {
+	if transactionFromContext(c) != nil {
+		return nil, errors.New("nested transactions are not supported")
+	}
+
+	// Begin the transaction.
+	t := &transaction{}
+	req := &pb.BeginTransactionRequest{
+		App: proto.String(FullyQualifiedAppID(c)),
+	}
+	if xg {
+		req.AllowMultipleEg = proto.Bool(true)
+	}
+	if previousTransaction != nil {
+		req.PreviousTransaction = previousTransaction
+	}
+	if readOnly {
+		req.Mode = pb.BeginTransactionRequest_READ_ONLY.Enum()
+	} else {
+		req.Mode = pb.BeginTransactionRequest_READ_WRITE.Enum()
+	}
+	if err := Call(c, "datastore_v3", "BeginTransaction", req, &t.transaction); err != nil {
+		return nil, err
+	}
+
+	// Call f, rolling back the transaction if f returns a non-nil error or panics.
+	// The panic is not recovered.
+	defer func() {
+		if t.finished {
+			return
+		}
+		t.finished = true
+		// Ignore the error return value, since we are already returning a non-nil
+		// error (or we're panicking).
+		Call(c, "datastore_v3", "Rollback", &t.transaction, &basepb.VoidProto{})
+	}()
+	if err := f(withTransaction(c, t)); err != nil {
+		return &t.transaction, err
+	}
+	t.finished = true
+
+	// Commit the transaction.
+	res := &pb.CommitResponse{}
+	err := Call(c, "datastore_v3", "Commit", &t.transaction, res)
+	if ae, ok := err.(*APIError); ok {
+		/* TODO: restore this conditional
+		if appengine.IsDevAppServer() {
+		*/
+		// The Python Dev AppServer raises an ApplicationError with error code 2 (which is
+		// Error.CONCURRENT_TRANSACTION) and message "Concurrency exception.".
+		if ae.Code == int32(pb.Error_BAD_REQUEST) && ae.Detail == "ApplicationError: 2 Concurrency exception." {
+			return &t.transaction, ErrConcurrentTransaction
+		}
+		if ae.Code == int32(pb.Error_CONCURRENT_TRANSACTION) {
+			return &t.transaction, ErrConcurrentTransaction
+		}
+	}
+	return &t.transaction, err
+}
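
RunTransactionOnce relies on the setters registered through RegisterTransactionSetter to stamp the open transaction onto every datastore RPC issued inside the callback. A minimal sketch of how a service package registers such setters, modelled on what the datastore package does in its init (field names are illustrative and not verified against the generated protos):

```go
// Sketch of wiring request types into RegisterTransactionSetter; each setter
// copies the live transaction into an outgoing RPC so reads and writes made
// inside RunTransactionOnce's callback join the transaction automatically.
package datastore

import (
	"google.golang.org/appengine/v2/internal"
	pb "google.golang.org/appengine/v2/internal/datastore"
)

func init() {
	internal.RegisterTransactionSetter(func(x *pb.Query, t *pb.Transaction) {
		x.Transaction = t
	})
	internal.RegisterTransactionSetter(func(x *pb.GetRequest, t *pb.Transaction) {
		x.Transaction = t
	})
	internal.RegisterTransactionSetter(func(x *pb.PutRequest, t *pb.Transaction) {
		x.Transaction = t
	})
}
```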
diff --git a/v2/internal/urlfetch/urlfetch_service.pb.go b/v2/internal/urlfetch/urlfetch_service.pb.go
new file mode 100644
index 0000000..034f016
--- /dev/null
+++ b/v2/internal/urlfetch/urlfetch_service.pb.go
@@ -0,0 +1,527 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
+
+package urlfetch
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type URLFetchServiceError_ErrorCode int32
+
+const (
+	URLFetchServiceError_OK                       URLFetchServiceError_ErrorCode = 0
+	URLFetchServiceError_INVALID_URL              URLFetchServiceError_ErrorCode = 1
+	URLFetchServiceError_FETCH_ERROR              URLFetchServiceError_ErrorCode = 2
+	URLFetchServiceError_UNSPECIFIED_ERROR        URLFetchServiceError_ErrorCode = 3
+	URLFetchServiceError_RESPONSE_TOO_LARGE       URLFetchServiceError_ErrorCode = 4
+	URLFetchServiceError_DEADLINE_EXCEEDED        URLFetchServiceError_ErrorCode = 5
+	URLFetchServiceError_SSL_CERTIFICATE_ERROR    URLFetchServiceError_ErrorCode = 6
+	URLFetchServiceError_DNS_ERROR                URLFetchServiceError_ErrorCode = 7
+	URLFetchServiceError_CLOSED                   URLFetchServiceError_ErrorCode = 8
+	URLFetchServiceError_INTERNAL_TRANSIENT_ERROR URLFetchServiceError_ErrorCode = 9
+	URLFetchServiceError_TOO_MANY_REDIRECTS       URLFetchServiceError_ErrorCode = 10
+	URLFetchServiceError_MALFORMED_REPLY          URLFetchServiceError_ErrorCode = 11
+	URLFetchServiceError_CONNECTION_ERROR         URLFetchServiceError_ErrorCode = 12
+)
+
+var URLFetchServiceError_ErrorCode_name = map[int32]string{
+	0:  "OK",
+	1:  "INVALID_URL",
+	2:  "FETCH_ERROR",
+	3:  "UNSPECIFIED_ERROR",
+	4:  "RESPONSE_TOO_LARGE",
+	5:  "DEADLINE_EXCEEDED",
+	6:  "SSL_CERTIFICATE_ERROR",
+	7:  "DNS_ERROR",
+	8:  "CLOSED",
+	9:  "INTERNAL_TRANSIENT_ERROR",
+	10: "TOO_MANY_REDIRECTS",
+	11: "MALFORMED_REPLY",
+	12: "CONNECTION_ERROR",
+}
+var URLFetchServiceError_ErrorCode_value = map[string]int32{
+	"OK":                       0,
+	"INVALID_URL":              1,
+	"FETCH_ERROR":              2,
+	"UNSPECIFIED_ERROR":        3,
+	"RESPONSE_TOO_LARGE":       4,
+	"DEADLINE_EXCEEDED":        5,
+	"SSL_CERTIFICATE_ERROR":    6,
+	"DNS_ERROR":                7,
+	"CLOSED":                   8,
+	"INTERNAL_TRANSIENT_ERROR": 9,
+	"TOO_MANY_REDIRECTS":       10,
+	"MALFORMED_REPLY":          11,
+	"CONNECTION_ERROR":         12,
+}
+
+func (x URLFetchServiceError_ErrorCode) Enum() *URLFetchServiceError_ErrorCode {
+	p := new(URLFetchServiceError_ErrorCode)
+	*p = x
+	return p
+}
+func (x URLFetchServiceError_ErrorCode) String() string {
+	return proto.EnumName(URLFetchServiceError_ErrorCode_name, int32(x))
+}
+func (x *URLFetchServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(URLFetchServiceError_ErrorCode_value, data, "URLFetchServiceError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = URLFetchServiceError_ErrorCode(value)
+	return nil
+}
+func (URLFetchServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{0, 0}
+}
+
+type URLFetchRequest_RequestMethod int32
+
+const (
+	URLFetchRequest_GET    URLFetchRequest_RequestMethod = 1
+	URLFetchRequest_POST   URLFetchRequest_RequestMethod = 2
+	URLFetchRequest_HEAD   URLFetchRequest_RequestMethod = 3
+	URLFetchRequest_PUT    URLFetchRequest_RequestMethod = 4
+	URLFetchRequest_DELETE URLFetchRequest_RequestMethod = 5
+	URLFetchRequest_PATCH  URLFetchRequest_RequestMethod = 6
+)
+
+var URLFetchRequest_RequestMethod_name = map[int32]string{
+	1: "GET",
+	2: "POST",
+	3: "HEAD",
+	4: "PUT",
+	5: "DELETE",
+	6: "PATCH",
+}
+var URLFetchRequest_RequestMethod_value = map[string]int32{
+	"GET":    1,
+	"POST":   2,
+	"HEAD":   3,
+	"PUT":    4,
+	"DELETE": 5,
+	"PATCH":  6,
+}
+
+func (x URLFetchRequest_RequestMethod) Enum() *URLFetchRequest_RequestMethod {
+	p := new(URLFetchRequest_RequestMethod)
+	*p = x
+	return p
+}
+func (x URLFetchRequest_RequestMethod) String() string {
+	return proto.EnumName(URLFetchRequest_RequestMethod_name, int32(x))
+}
+func (x *URLFetchRequest_RequestMethod) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(URLFetchRequest_RequestMethod_value, data, "URLFetchRequest_RequestMethod")
+	if err != nil {
+		return err
+	}
+	*x = URLFetchRequest_RequestMethod(value)
+	return nil
+}
+func (URLFetchRequest_RequestMethod) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1, 0}
+}
+
+type URLFetchServiceError struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *URLFetchServiceError) Reset()         { *m = URLFetchServiceError{} }
+func (m *URLFetchServiceError) String() string { return proto.CompactTextString(m) }
+func (*URLFetchServiceError) ProtoMessage()    {}
+func (*URLFetchServiceError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{0}
+}
+func (m *URLFetchServiceError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_URLFetchServiceError.Unmarshal(m, b)
+}
+func (m *URLFetchServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_URLFetchServiceError.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchServiceError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_URLFetchServiceError.Merge(dst, src)
+}
+func (m *URLFetchServiceError) XXX_Size() int {
+	return xxx_messageInfo_URLFetchServiceError.Size(m)
+}
+func (m *URLFetchServiceError) XXX_DiscardUnknown() {
+	xxx_messageInfo_URLFetchServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchServiceError proto.InternalMessageInfo
+
+type URLFetchRequest struct {
+	Method                        *URLFetchRequest_RequestMethod `protobuf:"varint,1,req,name=Method,enum=appengine.URLFetchRequest_RequestMethod" json:"Method,omitempty"`
+	Url                           *string                        `protobuf:"bytes,2,req,name=Url" json:"Url,omitempty"`
+	Header                        []*URLFetchRequest_Header      `protobuf:"group,3,rep,name=Header,json=header" json:"header,omitempty"`
+	Payload                       []byte                         `protobuf:"bytes,6,opt,name=Payload" json:"Payload,omitempty"`
+	FollowRedirects               *bool                          `protobuf:"varint,7,opt,name=FollowRedirects,def=1" json:"FollowRedirects,omitempty"`
+	Deadline                      *float64                       `protobuf:"fixed64,8,opt,name=Deadline" json:"Deadline,omitempty"`
+	MustValidateServerCertificate *bool                          `protobuf:"varint,9,opt,name=MustValidateServerCertificate,def=1" json:"MustValidateServerCertificate,omitempty"`
+	XXX_NoUnkeyedLiteral          struct{}                       `json:"-"`
+	XXX_unrecognized              []byte                         `json:"-"`
+	XXX_sizecache                 int32                          `json:"-"`
+}
+
+func (m *URLFetchRequest) Reset()         { *m = URLFetchRequest{} }
+func (m *URLFetchRequest) String() string { return proto.CompactTextString(m) }
+func (*URLFetchRequest) ProtoMessage()    {}
+func (*URLFetchRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1}
+}
+func (m *URLFetchRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_URLFetchRequest.Unmarshal(m, b)
+}
+func (m *URLFetchRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_URLFetchRequest.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_URLFetchRequest.Merge(dst, src)
+}
+func (m *URLFetchRequest) XXX_Size() int {
+	return xxx_messageInfo_URLFetchRequest.Size(m)
+}
+func (m *URLFetchRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_URLFetchRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchRequest proto.InternalMessageInfo
+
+const Default_URLFetchRequest_FollowRedirects bool = true
+const Default_URLFetchRequest_MustValidateServerCertificate bool = true
+
+func (m *URLFetchRequest) GetMethod() URLFetchRequest_RequestMethod {
+	if m != nil && m.Method != nil {
+		return *m.Method
+	}
+	return URLFetchRequest_GET
+}
+
+func (m *URLFetchRequest) GetUrl() string {
+	if m != nil && m.Url != nil {
+		return *m.Url
+	}
+	return ""
+}
+
+func (m *URLFetchRequest) GetHeader() []*URLFetchRequest_Header {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *URLFetchRequest) GetPayload() []byte {
+	if m != nil {
+		return m.Payload
+	}
+	return nil
+}
+
+func (m *URLFetchRequest) GetFollowRedirects() bool {
+	if m != nil && m.FollowRedirects != nil {
+		return *m.FollowRedirects
+	}
+	return Default_URLFetchRequest_FollowRedirects
+}
+
+func (m *URLFetchRequest) GetDeadline() float64 {
+	if m != nil && m.Deadline != nil {
+		return *m.Deadline
+	}
+	return 0
+}
+
+func (m *URLFetchRequest) GetMustValidateServerCertificate() bool {
+	if m != nil && m.MustValidateServerCertificate != nil {
+		return *m.MustValidateServerCertificate
+	}
+	return Default_URLFetchRequest_MustValidateServerCertificate
+}
+
+type URLFetchRequest_Header struct {
+	Key                  *string  `protobuf:"bytes,4,req,name=Key" json:"Key,omitempty"`
+	Value                *string  `protobuf:"bytes,5,req,name=Value" json:"Value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *URLFetchRequest_Header) Reset()         { *m = URLFetchRequest_Header{} }
+func (m *URLFetchRequest_Header) String() string { return proto.CompactTextString(m) }
+func (*URLFetchRequest_Header) ProtoMessage()    {}
+func (*URLFetchRequest_Header) Descriptor() ([]byte, []int) {
+	return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1, 0}
+}
+func (m *URLFetchRequest_Header) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_URLFetchRequest_Header.Unmarshal(m, b)
+}
+func (m *URLFetchRequest_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_URLFetchRequest_Header.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchRequest_Header) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_URLFetchRequest_Header.Merge(dst, src)
+}
+func (m *URLFetchRequest_Header) XXX_Size() int {
+	return xxx_messageInfo_URLFetchRequest_Header.Size(m)
+}
+func (m *URLFetchRequest_Header) XXX_DiscardUnknown() {
+	xxx_messageInfo_URLFetchRequest_Header.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchRequest_Header proto.InternalMessageInfo
+
+func (m *URLFetchRequest_Header) GetKey() string {
+	if m != nil && m.Key != nil {
+		return *m.Key
+	}
+	return ""
+}
+
+func (m *URLFetchRequest_Header) GetValue() string {
+	if m != nil && m.Value != nil {
+		return *m.Value
+	}
+	return ""
+}
+
+type URLFetchResponse struct {
+	Content               []byte                     `protobuf:"bytes,1,opt,name=Content" json:"Content,omitempty"`
+	StatusCode            *int32                     `protobuf:"varint,2,req,name=StatusCode" json:"StatusCode,omitempty"`
+	Header                []*URLFetchResponse_Header `protobuf:"group,3,rep,name=Header,json=header" json:"header,omitempty"`
+	ContentWasTruncated   *bool                      `protobuf:"varint,6,opt,name=ContentWasTruncated,def=0" json:"ContentWasTruncated,omitempty"`
+	ExternalBytesSent     *int64                     `protobuf:"varint,7,opt,name=ExternalBytesSent" json:"ExternalBytesSent,omitempty"`
+	ExternalBytesReceived *int64                     `protobuf:"varint,8,opt,name=ExternalBytesReceived" json:"ExternalBytesReceived,omitempty"`
+	FinalUrl              *string                    `protobuf:"bytes,9,opt,name=FinalUrl" json:"FinalUrl,omitempty"`
+	ApiCpuMilliseconds    *int64                     `protobuf:"varint,10,opt,name=ApiCpuMilliseconds,def=0" json:"ApiCpuMilliseconds,omitempty"`
+	ApiBytesSent          *int64                     `protobuf:"varint,11,opt,name=ApiBytesSent,def=0" json:"ApiBytesSent,omitempty"`
+	ApiBytesReceived      *int64                     `protobuf:"varint,12,opt,name=ApiBytesReceived,def=0" json:"ApiBytesReceived,omitempty"`
+	XXX_NoUnkeyedLiteral  struct{}                   `json:"-"`
+	XXX_unrecognized      []byte                     `json:"-"`
+	XXX_sizecache         int32                      `json:"-"`
+}
+
+func (m *URLFetchResponse) Reset()         { *m = URLFetchResponse{} }
+func (m *URLFetchResponse) String() string { return proto.CompactTextString(m) }
+func (*URLFetchResponse) ProtoMessage()    {}
+func (*URLFetchResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{2}
+}
+func (m *URLFetchResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_URLFetchResponse.Unmarshal(m, b)
+}
+func (m *URLFetchResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_URLFetchResponse.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_URLFetchResponse.Merge(dst, src)
+}
+func (m *URLFetchResponse) XXX_Size() int {
+	return xxx_messageInfo_URLFetchResponse.Size(m)
+}
+func (m *URLFetchResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_URLFetchResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchResponse proto.InternalMessageInfo
+
+const Default_URLFetchResponse_ContentWasTruncated bool = false
+const Default_URLFetchResponse_ApiCpuMilliseconds int64 = 0
+const Default_URLFetchResponse_ApiBytesSent int64 = 0
+const Default_URLFetchResponse_ApiBytesReceived int64 = 0
+
+func (m *URLFetchResponse) GetContent() []byte {
+	if m != nil {
+		return m.Content
+	}
+	return nil
+}
+
+func (m *URLFetchResponse) GetStatusCode() int32 {
+	if m != nil && m.StatusCode != nil {
+		return *m.StatusCode
+	}
+	return 0
+}
+
+func (m *URLFetchResponse) GetHeader() []*URLFetchResponse_Header {
+	if m != nil {
+		return m.Header
+	}
+	return nil
+}
+
+func (m *URLFetchResponse) GetContentWasTruncated() bool {
+	if m != nil && m.ContentWasTruncated != nil {
+		return *m.ContentWasTruncated
+	}
+	return Default_URLFetchResponse_ContentWasTruncated
+}
+
+func (m *URLFetchResponse) GetExternalBytesSent() int64 {
+	if m != nil && m.ExternalBytesSent != nil {
+		return *m.ExternalBytesSent
+	}
+	return 0
+}
+
+func (m *URLFetchResponse) GetExternalBytesReceived() int64 {
+	if m != nil && m.ExternalBytesReceived != nil {
+		return *m.ExternalBytesReceived
+	}
+	return 0
+}
+
+func (m *URLFetchResponse) GetFinalUrl() string {
+	if m != nil && m.FinalUrl != nil {
+		return *m.FinalUrl
+	}
+	return ""
+}
+
+func (m *URLFetchResponse) GetApiCpuMilliseconds() int64 {
+	if m != nil && m.ApiCpuMilliseconds != nil {
+		return *m.ApiCpuMilliseconds
+	}
+	return Default_URLFetchResponse_ApiCpuMilliseconds
+}
+
+func (m *URLFetchResponse) GetApiBytesSent() int64 {
+	if m != nil && m.ApiBytesSent != nil {
+		return *m.ApiBytesSent
+	}
+	return Default_URLFetchResponse_ApiBytesSent
+}
+
+func (m *URLFetchResponse) GetApiBytesReceived() int64 {
+	if m != nil && m.ApiBytesReceived != nil {
+		return *m.ApiBytesReceived
+	}
+	return Default_URLFetchResponse_ApiBytesReceived
+}
+
+type URLFetchResponse_Header struct {
+	Key                  *string  `protobuf:"bytes,4,req,name=Key" json:"Key,omitempty"`
+	Value                *string  `protobuf:"bytes,5,req,name=Value" json:"Value,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *URLFetchResponse_Header) Reset()         { *m = URLFetchResponse_Header{} }
+func (m *URLFetchResponse_Header) String() string { return proto.CompactTextString(m) }
+func (*URLFetchResponse_Header) ProtoMessage()    {}
+func (*URLFetchResponse_Header) Descriptor() ([]byte, []int) {
+	return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{2, 0}
+}
+func (m *URLFetchResponse_Header) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_URLFetchResponse_Header.Unmarshal(m, b)
+}
+func (m *URLFetchResponse_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_URLFetchResponse_Header.Marshal(b, m, deterministic)
+}
+func (dst *URLFetchResponse_Header) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_URLFetchResponse_Header.Merge(dst, src)
+}
+func (m *URLFetchResponse_Header) XXX_Size() int {
+	return xxx_messageInfo_URLFetchResponse_Header.Size(m)
+}
+func (m *URLFetchResponse_Header) XXX_DiscardUnknown() {
+	xxx_messageInfo_URLFetchResponse_Header.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_URLFetchResponse_Header proto.InternalMessageInfo
+
+func (m *URLFetchResponse_Header) GetKey() string {
+	if m != nil && m.Key != nil {
+		return *m.Key
+	}
+	return ""
+}
+
+func (m *URLFetchResponse_Header) GetValue() string {
+	if m != nil && m.Value != nil {
+		return *m.Value
+	}
+	return ""
+}
+
+func init() {
+	proto.RegisterType((*URLFetchServiceError)(nil), "appengine.URLFetchServiceError")
+	proto.RegisterType((*URLFetchRequest)(nil), "appengine.URLFetchRequest")
+	proto.RegisterType((*URLFetchRequest_Header)(nil), "appengine.URLFetchRequest.Header")
+	proto.RegisterType((*URLFetchResponse)(nil), "appengine.URLFetchResponse")
+	proto.RegisterType((*URLFetchResponse_Header)(nil), "appengine.URLFetchResponse.Header")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/urlfetch/urlfetch_service.proto", fileDescriptor_urlfetch_service_b245a7065f33bced)
+}
+
+var fileDescriptor_urlfetch_service_b245a7065f33bced = []byte{
+	// 770 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x6e, 0xe3, 0x54,
+	0x10, 0xc6, 0x76, 0x7e, 0xa7, 0x5d, 0x7a, 0x76, 0xb6, 0x45, 0x66, 0xb5, 0xa0, 0x10, 0x09, 0x29,
+	0x17, 0x90, 0x2e, 0x2b, 0x24, 0x44, 0xaf, 0x70, 0xed, 0x93, 0xad, 0xa9, 0x63, 0x47, 0xc7, 0x4e,
+	0x61, 0xb9, 0xb1, 0xac, 0x78, 0x9a, 0x5a, 0xb2, 0xec, 0x60, 0x9f, 0x2c, 0xf4, 0x35, 0x78, 0x0d,
+	0xde, 0x87, 0xa7, 0xe1, 0x02, 0x9d, 0xc4, 0xc9, 0x6e, 0xbb, 0xd1, 0x4a, 0x5c, 0x65, 0xe6, 0x9b,
+	0xef, 0xcc, 0x99, 0x7c, 0xdf, 0xf8, 0x80, 0xb3, 0x2c, 0xcb, 0x65, 0x4e, 0xe3, 0x65, 0x99, 0x27,
+	0xc5, 0x72, 0x5c, 0x56, 0xcb, 0xf3, 0x64, 0xb5, 0xa2, 0x62, 0x99, 0x15, 0x74, 0x9e, 0x15, 0x92,
+	0xaa, 0x22, 0xc9, 0xcf, 0xd7, 0x55, 0x7e, 0x4b, 0x72, 0x71, 0xb7, 0x0f, 0xe2, 0x9a, 0xaa, 0xb7,
+	0xd9, 0x82, 0xc6, 0xab, 0xaa, 0x94, 0x25, 0xf6, 0xf7, 0x67, 0x86, 0x7f, 0xeb, 0x70, 0x3a, 0x17,
+	0xde, 0x44, 0xb1, 0xc2, 0x2d, 0x89, 0x57, 0x55, 0x59, 0x0d, 0xff, 0xd2, 0xa1, 0xbf, 0x89, 0xec,
+	0x32, 0x25, 0xec, 0x80, 0x1e, 0x5c, 0xb3, 0x4f, 0xf0, 0x04, 0x8e, 0x5c, 0xff, 0xc6, 0xf2, 0x5c,
+	0x27, 0x9e, 0x0b, 0x8f, 0x69, 0x0a, 0x98, 0xf0, 0xc8, 0xbe, 0x8a, 0xb9, 0x10, 0x81, 0x60, 0x3a,
+	0x9e, 0xc1, 0xd3, 0xb9, 0x1f, 0xce, 0xb8, 0xed, 0x4e, 0x5c, 0xee, 0x34, 0xb0, 0x81, 0x9f, 0x01,
+	0x0a, 0x1e, 0xce, 0x02, 0x3f, 0xe4, 0x71, 0x14, 0x04, 0xb1, 0x67, 0x89, 0xd7, 0x9c, 0xb5, 0x14,
+	0xdd, 0xe1, 0x96, 0xe3, 0xb9, 0x3e, 0x8f, 0xf9, 0xaf, 0x36, 0xe7, 0x0e, 0x77, 0x58, 0x1b, 0x3f,
+	0x87, 0xb3, 0x30, 0xf4, 0x62, 0x9b, 0x8b, 0xc8, 0x9d, 0xb8, 0xb6, 0x15, 0xf1, 0xa6, 0x53, 0x07,
+	0x9f, 0x40, 0xdf, 0xf1, 0xc3, 0x26, 0xed, 0x22, 0x40, 0xc7, 0xf6, 0x82, 0x90, 0x3b, 0xac, 0x87,
+	0x2f, 0xc0, 0x74, 0xfd, 0x88, 0x0b, 0xdf, 0xf2, 0xe2, 0x48, 0x58, 0x7e, 0xe8, 0x72, 0x3f, 0x6a,
+	0x98, 0x7d, 0x35, 0x82, 0xba, 0x79, 0x6a, 0xf9, 0x6f, 0x62, 0xc1, 0x1d, 0x57, 0x70, 0x3b, 0x0a,
+	0x19, 0xe0, 0x33, 0x38, 0x99, 0x5a, 0xde, 0x24, 0x10, 0x53, 0xee, 0xc4, 0x82, 0xcf, 0xbc, 0x37,
+	0xec, 0x08, 0x4f, 0x81, 0xd9, 0x81, 0xef, 0x73, 0x3b, 0x72, 0x03, 0xbf, 0x69, 0x71, 0x3c, 0xfc,
+	0xc7, 0x80, 0x93, 0x9d, 0x5a, 0x82, 0x7e, 0x5f, 0x53, 0x2d, 0xf1, 0x27, 0xe8, 0x4c, 0x49, 0xde,
+	0x95, 0xa9, 0xa9, 0x0d, 0xf4, 0xd1, 0xa7, 0xaf, 0x46, 0xe3, 0xbd, 0xba, 0xe3, 0x47, 0xdc, 0x71,
+	0xf3, 0xbb, 0xe5, 0x8b, 0xe6, 0x1c, 0x32, 0x30, 0xe6, 0x55, 0x6e, 0xea, 0x03, 0x7d, 0xd4, 0x17,
+	0x2a, 0xc4, 0x1f, 0xa1, 0x73, 0x47, 0x49, 0x4a, 0x95, 0x69, 0x0c, 0x8c, 0x11, 0xbc, 0xfa, 0xea,
+	0x23, 0x3d, 0xaf, 0x36, 0x44, 0xd1, 0x1c, 0xc0, 0x17, 0xd0, 0x9d, 0x25, 0xf7, 0x79, 0x99, 0xa4,
+	0x66, 0x67, 0xa0, 0x8d, 0x8e, 0x2f, 0xf5, 0x9e, 0x26, 0x76, 0x10, 0x8e, 0xe1, 0x64, 0x52, 0xe6,
+	0x79, 0xf9, 0x87, 0xa0, 0x34, 0xab, 0x68, 0x21, 0x6b, 0xb3, 0x3b, 0xd0, 0x46, 0xbd, 0x8b, 0x96,
+	0xac, 0xd6, 0x24, 0x1e, 0x17, 0xf1, 0x39, 0xf4, 0x1c, 0x4a, 0xd2, 0x3c, 0x2b, 0xc8, 0xec, 0x0d,
+	0xb4, 0x91, 0x26, 0xf6, 0x39, 0xfe, 0x0c, 0x5f, 0x4c, 0xd7, 0xb5, 0xbc, 0x49, 0xf2, 0x2c, 0x4d,
+	0x24, 0xa9, 0xed, 0xa1, 0xca, 0xa6, 0x4a, 0x66, 0xb7, 0xd9, 0x22, 0x91, 0x64, 0xf6, 0xdf, 0xeb,
+	0xfc, 0x71, 0xea, 0xf3, 0x97, 0xd0, 0xd9, 0xfe, 0x0f, 0x25, 0xc6, 0x35, 0xdd, 0x9b, 0xad, 0xad,
+	0x18, 0xd7, 0x74, 0x8f, 0xa7, 0xd0, 0xbe, 0x49, 0xf2, 0x35, 0x99, 0xed, 0x0d, 0xb6, 0x4d, 0x86,
+	0x1e, 0x3c, 0x79, 0xa0, 0x26, 0x76, 0xc1, 0x78, 0xcd, 0x23, 0xa6, 0x61, 0x0f, 0x5a, 0xb3, 0x20,
+	0x8c, 0x98, 0xae, 0xa2, 0x2b, 0x6e, 0x39, 0xcc, 0x50, 0xc5, 0xd9, 0x3c, 0x62, 0x2d, 0xb5, 0x2e,
+	0x0e, 0xf7, 0x78, 0xc4, 0x59, 0x1b, 0xfb, 0xd0, 0x9e, 0x59, 0x91, 0x7d, 0xc5, 0x3a, 0xc3, 0x7f,
+	0x0d, 0x60, 0xef, 0x84, 0xad, 0x57, 0x65, 0x51, 0x13, 0x9a, 0xd0, 0xb5, 0xcb, 0x42, 0x52, 0x21,
+	0x4d, 0x4d, 0x49, 0x29, 0x76, 0x29, 0x7e, 0x09, 0x10, 0xca, 0x44, 0xae, 0x6b, 0xf5, 0x71, 0x6c,
+	0x8c, 0x6b, 0x8b, 0xf7, 0x10, 0xbc, 0x78, 0xe4, 0xdf, 0xf0, 0xa0, 0x7f, 0xdb, 0x6b, 0x1e, 0x1b,
+	0xf8, 0x03, 0x3c, 0x6b, 0xae, 0xf9, 0x25, 0xa9, 0xa3, 0x6a, 0x5d, 0x28, 0x81, 0xb6, 0x66, 0xf6,
+	0x2e, 0xda, 0xb7, 0x49, 0x5e, 0x93, 0x38, 0xc4, 0xc0, 0x6f, 0xe0, 0x29, 0xff, 0x73, 0xfb, 0x02,
+	0x5c, 0xde, 0x4b, 0xaa, 0x43, 0x35, 0xb8, 0x72, 0xd7, 0x10, 0x1f, 0x16, 0xf0, 0x7b, 0x38, 0x7b,
+	0x00, 0x0a, 0x5a, 0x50, 0xf6, 0x96, 0xd2, 0x8d, 0xcd, 0x86, 0x38, 0x5c, 0x54, 0xfb, 0x30, 0xc9,
+	0x8a, 0x24, 0x57, 0xfb, 0xaa, 0xec, 0xed, 0x8b, 0x7d, 0x8e, 0xdf, 0x01, 0x5a, 0xab, 0xcc, 0x5e,
+	0xad, 0xa7, 0x59, 0x9e, 0x67, 0x35, 0x2d, 0xca, 0x22, 0xad, 0x4d, 0x50, 0xed, 0x2e, 0xb4, 0x97,
+	0xe2, 0x40, 0x11, 0xbf, 0x86, 0x63, 0x6b, 0x95, 0xbd, 0x9b, 0xf6, 0x68, 0x47, 0x7e, 0x00, 0xe3,
+	0xb7, 0xc0, 0x76, 0xf9, 0x7e, 0xcc, 0xe3, 0x1d, 0xf5, 0x83, 0xd2, 0xff, 0x5f, 0xa6, 0x4b, 0xf8,
+	0xad, 0xb7, 0x7b, 0x2a, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x1d, 0x9f, 0x6d, 0x24, 0x63, 0x05,
+	0x00, 0x00,
+}
diff --git a/v2/internal/urlfetch/urlfetch_service.proto b/v2/internal/urlfetch/urlfetch_service.proto
new file mode 100644
index 0000000..f695edf
--- /dev/null
+++ b/v2/internal/urlfetch/urlfetch_service.proto
@@ -0,0 +1,64 @@
+syntax = "proto2";
+option go_package = "urlfetch";
+
+package appengine;
+
+message URLFetchServiceError {
+  enum ErrorCode {
+    OK = 0;
+    INVALID_URL = 1;
+    FETCH_ERROR = 2;
+    UNSPECIFIED_ERROR = 3;
+    RESPONSE_TOO_LARGE = 4;
+    DEADLINE_EXCEEDED = 5;
+    SSL_CERTIFICATE_ERROR = 6;
+    DNS_ERROR = 7;
+    CLOSED = 8;
+    INTERNAL_TRANSIENT_ERROR = 9;
+    TOO_MANY_REDIRECTS = 10;
+    MALFORMED_REPLY = 11;
+    CONNECTION_ERROR = 12;
+  }
+}
+
+message URLFetchRequest {
+  enum RequestMethod {
+    GET = 1;
+    POST = 2;
+    HEAD = 3;
+    PUT = 4;
+    DELETE = 5;
+    PATCH = 6;
+  }
+  required RequestMethod Method = 1;
+  required string Url = 2;
+  repeated group Header = 3 {
+    required string Key = 4;
+    required string Value = 5;
+  }
+  optional bytes Payload = 6 [ctype=CORD];
+
+  optional bool FollowRedirects = 7 [default=true];
+
+  optional double Deadline = 8;
+
+  optional bool MustValidateServerCertificate = 9 [default=true];
+}
+
+message URLFetchResponse {
+  optional bytes Content = 1;
+  required int32 StatusCode = 2;
+  repeated group Header = 3 {
+    required string Key = 4;
+    required string Value = 5;
+  }
+  optional bool ContentWasTruncated = 6 [default=false];
+  optional int64 ExternalBytesSent = 7;
+  optional int64 ExternalBytesReceived = 8;
+
+  optional string FinalUrl = 9;
+
+  optional int64 ApiCpuMilliseconds = 10 [default=0];
+  optional int64 ApiBytesSent = 11 [default=0];
+  optional int64 ApiBytesReceived = 12 [default=0];
+}
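
These URLFetchRequest/URLFetchResponse messages are what the urlfetch package translates ordinary net/http calls into. A minimal sketch of that usage, assuming the v2 urlfetch package keeps the classic Client(ctx) *http.Client helper:

```go
// Minimal sketch: issuing an outbound HTTP request through the urlfetch service.
package app

import (
	"fmt"
	"io"
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/urlfetch"
)

func handler(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	client := urlfetch.Client(ctx) // outgoing requests become URLFetchRequest RPCs
	resp, err := client.Get("https://example.com/")
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Fprintf(w, "status %d, %d bytes\n", resp.StatusCode, len(body))
}
```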
diff --git a/v2/internal/user/user_service.pb.go b/v2/internal/user/user_service.pb.go
new file mode 100644
index 0000000..77b399b
--- /dev/null
+++ b/v2/internal/user/user_service.pb.go
@@ -0,0 +1,531 @@
+// Code generated by protoc-gen-go. DO NOT EDIT.
+// source: google.golang.org/appengine/internal/user/user_service.proto
+
+package user
+
+import proto "github.com/golang/protobuf/proto"
+import fmt "fmt"
+import math "math"
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ = proto.Marshal
+var _ = fmt.Errorf
+var _ = math.Inf
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the proto package it is being compiled against.
+// A compilation error at this line likely means your copy of the
+// proto package needs to be updated.
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
+
+type UserServiceError_ErrorCode int32
+
+const (
+	UserServiceError_OK                    UserServiceError_ErrorCode = 0
+	UserServiceError_REDIRECT_URL_TOO_LONG UserServiceError_ErrorCode = 1
+	UserServiceError_NOT_ALLOWED           UserServiceError_ErrorCode = 2
+	UserServiceError_OAUTH_INVALID_TOKEN   UserServiceError_ErrorCode = 3
+	UserServiceError_OAUTH_INVALID_REQUEST UserServiceError_ErrorCode = 4
+	UserServiceError_OAUTH_ERROR           UserServiceError_ErrorCode = 5
+)
+
+var UserServiceError_ErrorCode_name = map[int32]string{
+	0: "OK",
+	1: "REDIRECT_URL_TOO_LONG",
+	2: "NOT_ALLOWED",
+	3: "OAUTH_INVALID_TOKEN",
+	4: "OAUTH_INVALID_REQUEST",
+	5: "OAUTH_ERROR",
+}
+var UserServiceError_ErrorCode_value = map[string]int32{
+	"OK":                    0,
+	"REDIRECT_URL_TOO_LONG": 1,
+	"NOT_ALLOWED":           2,
+	"OAUTH_INVALID_TOKEN":   3,
+	"OAUTH_INVALID_REQUEST": 4,
+	"OAUTH_ERROR":           5,
+}
+
+func (x UserServiceError_ErrorCode) Enum() *UserServiceError_ErrorCode {
+	p := new(UserServiceError_ErrorCode)
+	*p = x
+	return p
+}
+func (x UserServiceError_ErrorCode) String() string {
+	return proto.EnumName(UserServiceError_ErrorCode_name, int32(x))
+}
+func (x *UserServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
+	value, err := proto.UnmarshalJSONEnum(UserServiceError_ErrorCode_value, data, "UserServiceError_ErrorCode")
+	if err != nil {
+		return err
+	}
+	*x = UserServiceError_ErrorCode(value)
+	return nil
+}
+func (UserServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
+	return fileDescriptor_user_service_faa685423dd20b0a, []int{0, 0}
+}
+
+type UserServiceError struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *UserServiceError) Reset()         { *m = UserServiceError{} }
+func (m *UserServiceError) String() string { return proto.CompactTextString(m) }
+func (*UserServiceError) ProtoMessage()    {}
+func (*UserServiceError) Descriptor() ([]byte, []int) {
+	return fileDescriptor_user_service_faa685423dd20b0a, []int{0}
+}
+func (m *UserServiceError) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_UserServiceError.Unmarshal(m, b)
+}
+func (m *UserServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_UserServiceError.Marshal(b, m, deterministic)
+}
+func (dst *UserServiceError) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_UserServiceError.Merge(dst, src)
+}
+func (m *UserServiceError) XXX_Size() int {
+	return xxx_messageInfo_UserServiceError.Size(m)
+}
+func (m *UserServiceError) XXX_DiscardUnknown() {
+	xxx_messageInfo_UserServiceError.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_UserServiceError proto.InternalMessageInfo
+
+type CreateLoginURLRequest struct {
+	DestinationUrl       *string  `protobuf:"bytes,1,req,name=destination_url,json=destinationUrl" json:"destination_url,omitempty"`
+	AuthDomain           *string  `protobuf:"bytes,2,opt,name=auth_domain,json=authDomain" json:"auth_domain,omitempty"`
+	FederatedIdentity    *string  `protobuf:"bytes,3,opt,name=federated_identity,json=federatedIdentity,def=" json:"federated_identity,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CreateLoginURLRequest) Reset()         { *m = CreateLoginURLRequest{} }
+func (m *CreateLoginURLRequest) String() string { return proto.CompactTextString(m) }
+func (*CreateLoginURLRequest) ProtoMessage()    {}
+func (*CreateLoginURLRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_user_service_faa685423dd20b0a, []int{1}
+}
+func (m *CreateLoginURLRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CreateLoginURLRequest.Unmarshal(m, b)
+}
+func (m *CreateLoginURLRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CreateLoginURLRequest.Marshal(b, m, deterministic)
+}
+func (dst *CreateLoginURLRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CreateLoginURLRequest.Merge(dst, src)
+}
+func (m *CreateLoginURLRequest) XXX_Size() int {
+	return xxx_messageInfo_CreateLoginURLRequest.Size(m)
+}
+func (m *CreateLoginURLRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_CreateLoginURLRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CreateLoginURLRequest proto.InternalMessageInfo
+
+func (m *CreateLoginURLRequest) GetDestinationUrl() string {
+	if m != nil && m.DestinationUrl != nil {
+		return *m.DestinationUrl
+	}
+	return ""
+}
+
+func (m *CreateLoginURLRequest) GetAuthDomain() string {
+	if m != nil && m.AuthDomain != nil {
+		return *m.AuthDomain
+	}
+	return ""
+}
+
+func (m *CreateLoginURLRequest) GetFederatedIdentity() string {
+	if m != nil && m.FederatedIdentity != nil {
+		return *m.FederatedIdentity
+	}
+	return ""
+}
+
+type CreateLoginURLResponse struct {
+	LoginUrl             *string  `protobuf:"bytes,1,req,name=login_url,json=loginUrl" json:"login_url,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CreateLoginURLResponse) Reset()         { *m = CreateLoginURLResponse{} }
+func (m *CreateLoginURLResponse) String() string { return proto.CompactTextString(m) }
+func (*CreateLoginURLResponse) ProtoMessage()    {}
+func (*CreateLoginURLResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_user_service_faa685423dd20b0a, []int{2}
+}
+func (m *CreateLoginURLResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CreateLoginURLResponse.Unmarshal(m, b)
+}
+func (m *CreateLoginURLResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CreateLoginURLResponse.Marshal(b, m, deterministic)
+}
+func (dst *CreateLoginURLResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CreateLoginURLResponse.Merge(dst, src)
+}
+func (m *CreateLoginURLResponse) XXX_Size() int {
+	return xxx_messageInfo_CreateLoginURLResponse.Size(m)
+}
+func (m *CreateLoginURLResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_CreateLoginURLResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CreateLoginURLResponse proto.InternalMessageInfo
+
+func (m *CreateLoginURLResponse) GetLoginUrl() string {
+	if m != nil && m.LoginUrl != nil {
+		return *m.LoginUrl
+	}
+	return ""
+}
+
+type CreateLogoutURLRequest struct {
+	DestinationUrl       *string  `protobuf:"bytes,1,req,name=destination_url,json=destinationUrl" json:"destination_url,omitempty"`
+	AuthDomain           *string  `protobuf:"bytes,2,opt,name=auth_domain,json=authDomain" json:"auth_domain,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CreateLogoutURLRequest) Reset()         { *m = CreateLogoutURLRequest{} }
+func (m *CreateLogoutURLRequest) String() string { return proto.CompactTextString(m) }
+func (*CreateLogoutURLRequest) ProtoMessage()    {}
+func (*CreateLogoutURLRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_user_service_faa685423dd20b0a, []int{3}
+}
+func (m *CreateLogoutURLRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CreateLogoutURLRequest.Unmarshal(m, b)
+}
+func (m *CreateLogoutURLRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CreateLogoutURLRequest.Marshal(b, m, deterministic)
+}
+func (dst *CreateLogoutURLRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CreateLogoutURLRequest.Merge(dst, src)
+}
+func (m *CreateLogoutURLRequest) XXX_Size() int {
+	return xxx_messageInfo_CreateLogoutURLRequest.Size(m)
+}
+func (m *CreateLogoutURLRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_CreateLogoutURLRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CreateLogoutURLRequest proto.InternalMessageInfo
+
+func (m *CreateLogoutURLRequest) GetDestinationUrl() string {
+	if m != nil && m.DestinationUrl != nil {
+		return *m.DestinationUrl
+	}
+	return ""
+}
+
+func (m *CreateLogoutURLRequest) GetAuthDomain() string {
+	if m != nil && m.AuthDomain != nil {
+		return *m.AuthDomain
+	}
+	return ""
+}
+
+type CreateLogoutURLResponse struct {
+	LogoutUrl            *string  `protobuf:"bytes,1,req,name=logout_url,json=logoutUrl" json:"logout_url,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CreateLogoutURLResponse) Reset()         { *m = CreateLogoutURLResponse{} }
+func (m *CreateLogoutURLResponse) String() string { return proto.CompactTextString(m) }
+func (*CreateLogoutURLResponse) ProtoMessage()    {}
+func (*CreateLogoutURLResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_user_service_faa685423dd20b0a, []int{4}
+}
+func (m *CreateLogoutURLResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CreateLogoutURLResponse.Unmarshal(m, b)
+}
+func (m *CreateLogoutURLResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CreateLogoutURLResponse.Marshal(b, m, deterministic)
+}
+func (dst *CreateLogoutURLResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CreateLogoutURLResponse.Merge(dst, src)
+}
+func (m *CreateLogoutURLResponse) XXX_Size() int {
+	return xxx_messageInfo_CreateLogoutURLResponse.Size(m)
+}
+func (m *CreateLogoutURLResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_CreateLogoutURLResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CreateLogoutURLResponse proto.InternalMessageInfo
+
+func (m *CreateLogoutURLResponse) GetLogoutUrl() string {
+	if m != nil && m.LogoutUrl != nil {
+		return *m.LogoutUrl
+	}
+	return ""
+}
+
+type GetOAuthUserRequest struct {
+	Scope                *string  `protobuf:"bytes,1,opt,name=scope" json:"scope,omitempty"`
+	Scopes               []string `protobuf:"bytes,2,rep,name=scopes" json:"scopes,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetOAuthUserRequest) Reset()         { *m = GetOAuthUserRequest{} }
+func (m *GetOAuthUserRequest) String() string { return proto.CompactTextString(m) }
+func (*GetOAuthUserRequest) ProtoMessage()    {}
+func (*GetOAuthUserRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_user_service_faa685423dd20b0a, []int{5}
+}
+func (m *GetOAuthUserRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetOAuthUserRequest.Unmarshal(m, b)
+}
+func (m *GetOAuthUserRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetOAuthUserRequest.Marshal(b, m, deterministic)
+}
+func (dst *GetOAuthUserRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetOAuthUserRequest.Merge(dst, src)
+}
+func (m *GetOAuthUserRequest) XXX_Size() int {
+	return xxx_messageInfo_GetOAuthUserRequest.Size(m)
+}
+func (m *GetOAuthUserRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetOAuthUserRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetOAuthUserRequest proto.InternalMessageInfo
+
+func (m *GetOAuthUserRequest) GetScope() string {
+	if m != nil && m.Scope != nil {
+		return *m.Scope
+	}
+	return ""
+}
+
+func (m *GetOAuthUserRequest) GetScopes() []string {
+	if m != nil {
+		return m.Scopes
+	}
+	return nil
+}
+
+type GetOAuthUserResponse struct {
+	Email                *string  `protobuf:"bytes,1,req,name=email" json:"email,omitempty"`
+	UserId               *string  `protobuf:"bytes,2,req,name=user_id,json=userId" json:"user_id,omitempty"`
+	AuthDomain           *string  `protobuf:"bytes,3,req,name=auth_domain,json=authDomain" json:"auth_domain,omitempty"`
+	UserOrganization     *string  `protobuf:"bytes,4,opt,name=user_organization,json=userOrganization,def=" json:"user_organization,omitempty"`
+	IsAdmin              *bool    `protobuf:"varint,5,opt,name=is_admin,json=isAdmin,def=0" json:"is_admin,omitempty"`
+	ClientId             *string  `protobuf:"bytes,6,opt,name=client_id,json=clientId,def=" json:"client_id,omitempty"`
+	Scopes               []string `protobuf:"bytes,7,rep,name=scopes" json:"scopes,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *GetOAuthUserResponse) Reset()         { *m = GetOAuthUserResponse{} }
+func (m *GetOAuthUserResponse) String() string { return proto.CompactTextString(m) }
+func (*GetOAuthUserResponse) ProtoMessage()    {}
+func (*GetOAuthUserResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_user_service_faa685423dd20b0a, []int{6}
+}
+func (m *GetOAuthUserResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_GetOAuthUserResponse.Unmarshal(m, b)
+}
+func (m *GetOAuthUserResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_GetOAuthUserResponse.Marshal(b, m, deterministic)
+}
+func (dst *GetOAuthUserResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_GetOAuthUserResponse.Merge(dst, src)
+}
+func (m *GetOAuthUserResponse) XXX_Size() int {
+	return xxx_messageInfo_GetOAuthUserResponse.Size(m)
+}
+func (m *GetOAuthUserResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_GetOAuthUserResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_GetOAuthUserResponse proto.InternalMessageInfo
+
+const Default_GetOAuthUserResponse_IsAdmin bool = false
+
+func (m *GetOAuthUserResponse) GetEmail() string {
+	if m != nil && m.Email != nil {
+		return *m.Email
+	}
+	return ""
+}
+
+func (m *GetOAuthUserResponse) GetUserId() string {
+	if m != nil && m.UserId != nil {
+		return *m.UserId
+	}
+	return ""
+}
+
+func (m *GetOAuthUserResponse) GetAuthDomain() string {
+	if m != nil && m.AuthDomain != nil {
+		return *m.AuthDomain
+	}
+	return ""
+}
+
+func (m *GetOAuthUserResponse) GetUserOrganization() string {
+	if m != nil && m.UserOrganization != nil {
+		return *m.UserOrganization
+	}
+	return ""
+}
+
+func (m *GetOAuthUserResponse) GetIsAdmin() bool {
+	if m != nil && m.IsAdmin != nil {
+		return *m.IsAdmin
+	}
+	return Default_GetOAuthUserResponse_IsAdmin
+}
+
+func (m *GetOAuthUserResponse) GetClientId() string {
+	if m != nil && m.ClientId != nil {
+		return *m.ClientId
+	}
+	return ""
+}
+
+func (m *GetOAuthUserResponse) GetScopes() []string {
+	if m != nil {
+		return m.Scopes
+	}
+	return nil
+}
+
+type CheckOAuthSignatureRequest struct {
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CheckOAuthSignatureRequest) Reset()         { *m = CheckOAuthSignatureRequest{} }
+func (m *CheckOAuthSignatureRequest) String() string { return proto.CompactTextString(m) }
+func (*CheckOAuthSignatureRequest) ProtoMessage()    {}
+func (*CheckOAuthSignatureRequest) Descriptor() ([]byte, []int) {
+	return fileDescriptor_user_service_faa685423dd20b0a, []int{7}
+}
+func (m *CheckOAuthSignatureRequest) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CheckOAuthSignatureRequest.Unmarshal(m, b)
+}
+func (m *CheckOAuthSignatureRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CheckOAuthSignatureRequest.Marshal(b, m, deterministic)
+}
+func (dst *CheckOAuthSignatureRequest) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CheckOAuthSignatureRequest.Merge(dst, src)
+}
+func (m *CheckOAuthSignatureRequest) XXX_Size() int {
+	return xxx_messageInfo_CheckOAuthSignatureRequest.Size(m)
+}
+func (m *CheckOAuthSignatureRequest) XXX_DiscardUnknown() {
+	xxx_messageInfo_CheckOAuthSignatureRequest.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CheckOAuthSignatureRequest proto.InternalMessageInfo
+
+type CheckOAuthSignatureResponse struct {
+	OauthConsumerKey     *string  `protobuf:"bytes,1,req,name=oauth_consumer_key,json=oauthConsumerKey" json:"oauth_consumer_key,omitempty"`
+	XXX_NoUnkeyedLiteral struct{} `json:"-"`
+	XXX_unrecognized     []byte   `json:"-"`
+	XXX_sizecache        int32    `json:"-"`
+}
+
+func (m *CheckOAuthSignatureResponse) Reset()         { *m = CheckOAuthSignatureResponse{} }
+func (m *CheckOAuthSignatureResponse) String() string { return proto.CompactTextString(m) }
+func (*CheckOAuthSignatureResponse) ProtoMessage()    {}
+func (*CheckOAuthSignatureResponse) Descriptor() ([]byte, []int) {
+	return fileDescriptor_user_service_faa685423dd20b0a, []int{8}
+}
+func (m *CheckOAuthSignatureResponse) XXX_Unmarshal(b []byte) error {
+	return xxx_messageInfo_CheckOAuthSignatureResponse.Unmarshal(m, b)
+}
+func (m *CheckOAuthSignatureResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
+	return xxx_messageInfo_CheckOAuthSignatureResponse.Marshal(b, m, deterministic)
+}
+func (dst *CheckOAuthSignatureResponse) XXX_Merge(src proto.Message) {
+	xxx_messageInfo_CheckOAuthSignatureResponse.Merge(dst, src)
+}
+func (m *CheckOAuthSignatureResponse) XXX_Size() int {
+	return xxx_messageInfo_CheckOAuthSignatureResponse.Size(m)
+}
+func (m *CheckOAuthSignatureResponse) XXX_DiscardUnknown() {
+	xxx_messageInfo_CheckOAuthSignatureResponse.DiscardUnknown(m)
+}
+
+var xxx_messageInfo_CheckOAuthSignatureResponse proto.InternalMessageInfo
+
+func (m *CheckOAuthSignatureResponse) GetOauthConsumerKey() string {
+	if m != nil && m.OauthConsumerKey != nil {
+		return *m.OauthConsumerKey
+	}
+	return ""
+}
+
+func init() {
+	proto.RegisterType((*UserServiceError)(nil), "appengine.UserServiceError")
+	proto.RegisterType((*CreateLoginURLRequest)(nil), "appengine.CreateLoginURLRequest")
+	proto.RegisterType((*CreateLoginURLResponse)(nil), "appengine.CreateLoginURLResponse")
+	proto.RegisterType((*CreateLogoutURLRequest)(nil), "appengine.CreateLogoutURLRequest")
+	proto.RegisterType((*CreateLogoutURLResponse)(nil), "appengine.CreateLogoutURLResponse")
+	proto.RegisterType((*GetOAuthUserRequest)(nil), "appengine.GetOAuthUserRequest")
+	proto.RegisterType((*GetOAuthUserResponse)(nil), "appengine.GetOAuthUserResponse")
+	proto.RegisterType((*CheckOAuthSignatureRequest)(nil), "appengine.CheckOAuthSignatureRequest")
+	proto.RegisterType((*CheckOAuthSignatureResponse)(nil), "appengine.CheckOAuthSignatureResponse")
+}
+
+func init() {
+	proto.RegisterFile("google.golang.org/appengine/v2/internal/user/user_service.proto", fileDescriptor_user_service_faa685423dd20b0a)
+}
+
+var fileDescriptor_user_service_faa685423dd20b0a = []byte{
+	// 573 bytes of a gzipped FileDescriptorProto
+	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x52, 0x4d, 0x6f, 0xdb, 0x38,
+	0x10, 0x8d, 0xec, 0xd8, 0xb1, 0x26, 0xc0, 0x46, 0x61, 0xbe, 0xb4, 0x9b, 0x0d, 0xd6, 0xd0, 0x65,
+	0x7d, 0x68, 0xe3, 0x53, 0x81, 0x22, 0xe8, 0xc5, 0xb5, 0x85, 0xd4, 0xb0, 0x60, 0xa1, 0x8c, 0xd5,
+	0x02, 0xbd, 0x08, 0xac, 0x35, 0x51, 0x88, 0xc8, 0xa4, 0x4b, 0x52, 0x05, 0xd2, 0x73, 0x7f, 0x41,
+	0x6f, 0xfd, 0x93, 0xfd, 0x0d, 0x85, 0x68, 0x25, 0x50, 0xd2, 0x5e, 0x7b, 0x11, 0x34, 0xef, 0x0d,
+	0xdf, 0xbc, 0x37, 0x24, 0xbc, 0xca, 0xa5, 0xcc, 0x0b, 0x3c, 0xcf, 0x65, 0xc1, 0x44, 0x7e, 0x2e,
+	0x55, 0x3e, 0x64, 0xeb, 0x35, 0x8a, 0x9c, 0x0b, 0x1c, 0x72, 0x61, 0x50, 0x09, 0x56, 0x0c, 0x4b,
+	0x8d, 0xca, 0x7e, 0x52, 0x8d, 0xea, 0x33, 0x5f, 0xe2, 0xf9, 0x5a, 0x49, 0x23, 0x89, 0xfb, 0xd0,
+	0x1b, 0x7c, 0x77, 0xc0, 0x4b, 0x34, 0xaa, 0xab, 0x4d, 0x43, 0xa8, 0x94, 0x54, 0xc1, 0x57, 0x07,
+	0x5c, 0xfb, 0x37, 0x96, 0x19, 0x92, 0x2e, 0xb4, 0xe2, 0x99, 0xb7, 0x45, 0xfe, 0x86, 0x23, 0x1a,
+	0x4e, 0xa6, 0x34, 0x1c, 0x2f, 0xd2, 0x84, 0x46, 0xe9, 0x22, 0x8e, 0xd3, 0x28, 0x9e, 0x5f, 0x7a,
+	0x0e, 0xd9, 0x83, 0xdd, 0x79, 0xbc, 0x48, 0x47, 0x51, 0x14, 0xbf, 0x0f, 0x27, 0x5e, 0x8b, 0x9c,
+	0xc0, 0x41, 0x3c, 0x4a, 0x16, 0x6f, 0xd2, 0xe9, 0xfc, 0xdd, 0x28, 0x9a, 0x4e, 0xd2, 0x45, 0x3c,
+	0x0b, 0xe7, 0x5e, 0xbb, 0x12, 0x79, 0x4c, 0xd0, 0xf0, 0x6d, 0x12, 0x5e, 0x2d, 0xbc, 0xed, 0x4a,
+	0x64, 0x43, 0x85, 0x94, 0xc6, 0xd4, 0xeb, 0x04, 0xdf, 0x1c, 0x38, 0x1a, 0x2b, 0x64, 0x06, 0x23,
+	0x99, 0x73, 0x91, 0xd0, 0x88, 0xe2, 0xa7, 0x12, 0xb5, 0x21, 0xff, 0xc3, 0x5e, 0x86, 0xda, 0x70,
+	0xc1, 0x0c, 0x97, 0x22, 0x2d, 0x55, 0xe1, 0x3b, 0xfd, 0xd6, 0xc0, 0xa5, 0x7f, 0x35, 0xe0, 0x44,
+	0x15, 0xe4, 0x3f, 0xd8, 0x65, 0xa5, 0xb9, 0x49, 0x33, 0xb9, 0x62, 0x5c, 0xf8, 0xad, 0xbe, 0x33,
+	0x70, 0x29, 0x54, 0xd0, 0xc4, 0x22, 0x64, 0x08, 0xe4, 0x1a, 0x33, 0x54, 0xcc, 0x60, 0x96, 0xf2,
+	0x0c, 0x85, 0xe1, 0xe6, 0xce, 0x6f, 0x57, 0x7d, 0x17, 0x5b, 0x74, 0xff, 0x81, 0x9b, 0xd6, 0x54,
+	0xf0, 0x02, 0x8e, 0x9f, 0x7a, 0xd2, 0x6b, 0x29, 0x34, 0x92, 0x53, 0x70, 0x8b, 0x0a, 0x6b, 0xd8,
+	0xe9, 0x59, 0x20, 0x51, 0x45, 0xf0, 0xb1, 0x71, 0x4c, 0x96, 0xe6, 0x4f, 0x64, 0x09, 0x5e, 0xc2,
+	0xc9, 0x2f, 0x33, 0x6a, 0x6f, 0x67, 0x00, 0x85, 0x05, 0x1b, 0xfa, 0xee, 0x06, 0xa9, 0xdc, 0x8d,
+	0xe1, 0xe0, 0x12, 0x4d, 0x3c, 0x2a, 0xcd, 0x4d, 0xf5, 0x18, 0xee, 0xad, 0x1d, 0x42, 0x47, 0x2f,
+	0xe5, 0x1a, 0x7d, 0xc7, 0xce, 0xda, 0x14, 0xe4, 0x18, 0xba, 0xf6, 0x47, 0xfb, 0xad, 0x7e, 0x7b,
+	0xe0, 0xd2, 0xba, 0x0a, 0x7e, 0x38, 0x70, 0xf8, 0x58, 0xa5, 0x1e, 0x7e, 0x08, 0x1d, 0x5c, 0x31,
+	0x7e, 0x3f, 0x77, 0x53, 0x90, 0x13, 0xd8, 0xb1, 0x4f, 0x93, 0x67, 0x7e, 0xcb, 0xe2, 0xdd, 0xaa,
+	0x9c, 0x66, 0x4f, 0x73, 0xb6, 0x2d, 0xd9, 0xbc, 0xb3, 0xe7, 0xb0, 0x6f, 0x4f, 0x4a, 0x95, 0x33,
+	0xc1, 0xbf, 0xd8, 0x05, 0xf9, 0xdb, 0xf5, 0x95, 0x79, 0x15, 0x15, 0x37, 0x18, 0xd2, 0x87, 0x1e,
+	0xd7, 0x29, 0xcb, 0x56, 0x5c, 0xf8, 0x9d, 0xbe, 0x33, 0xe8, 0x5d, 0x74, 0xae, 0x59, 0xa1, 0x91,
+	0xee, 0x70, 0x3d, 0xaa, 0x50, 0x72, 0x06, 0xee, 0xb2, 0xe0, 0x28, 0x4c, 0x65, 0xa6, 0x5b, 0x0b,
+	0xf5, 0x36, 0xd0, 0x34, 0x6b, 0x04, 0xde, 0x79, 0x14, 0xf8, 0x5f, 0xf8, 0x67, 0x7c, 0x83, 0xcb,
+	0x5b, 0x9b, 0xf8, 0x8a, 0xe7, 0x82, 0x99, 0x52, 0x61, 0xbd, 0xbc, 0x60, 0x06, 0xa7, 0xbf, 0x65,
+	0xeb, 0xa5, 0x3c, 0x03, 0x22, 0x6d, 0xcc, 0xa5, 0x14, 0xba, 0x5c, 0xa1, 0x4a, 0x6f, 0xf1, 0xae,
+	0xde, 0x90, 0x67, 0x99, 0x71, 0x4d, 0xcc, 0xf0, 0xee, 0x75, 0xf7, 0xc3, 0x76, 0x95, 0xeb, 0x67,
+	0x00, 0x00, 0x00, 0xff, 0xff, 0x58, 0x04, 0x53, 0xcc, 0xf8, 0x03, 0x00, 0x00,
+}
diff --git a/v2/internal/user/user_service.proto b/v2/internal/user/user_service.proto
new file mode 100644
index 0000000..f3e9693
--- /dev/null
+++ b/v2/internal/user/user_service.proto
@@ -0,0 +1,58 @@
+syntax = "proto2";
+option go_package = "user";
+
+package appengine;
+
+message UserServiceError {
+  enum ErrorCode {
+    OK = 0;
+    REDIRECT_URL_TOO_LONG = 1;
+    NOT_ALLOWED = 2;
+    OAUTH_INVALID_TOKEN = 3;
+    OAUTH_INVALID_REQUEST = 4;
+    OAUTH_ERROR = 5;
+  }
+}
+
+message CreateLoginURLRequest {
+  required string destination_url = 1;
+  optional string auth_domain = 2;
+  optional string federated_identity = 3 [default = ""];
+}
+
+message CreateLoginURLResponse {
+  required string login_url = 1;
+}
+
+message CreateLogoutURLRequest {
+  required string destination_url = 1;
+  optional string auth_domain = 2;
+}
+
+message CreateLogoutURLResponse {
+  required string logout_url = 1;
+}
+
+message GetOAuthUserRequest {
+  optional string scope = 1;
+
+  repeated string scopes = 2;
+}
+
+message GetOAuthUserResponse {
+  required string email = 1;
+  required string user_id = 2;
+  required string auth_domain = 3;
+  optional string user_organization = 4 [default = ""];
+  optional bool is_admin = 5 [default = false];
+  optional string client_id = 6 [default = ""];
+
+  repeated string scopes = 7;
+}
+
+message CheckOAuthSignatureRequest {
+}
+
+message CheckOAuthSignatureResponse {
+  required string oauth_consumer_key = 1;
+}
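
The CreateLoginURL/CreateLogoutURL and GetOAuthUser messages above back the public user package. A short sketch of the corresponding application-level calls, assuming the v2 user package keeps the classic Current/LoginURL/LogoutURL helpers:

```go
// Sketch of the user-facing API that drives CreateLoginURLRequest and
// CreateLogoutURLRequest under the hood.
package app

import (
	"fmt"
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/user"
)

func welcome(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)
	u := user.Current(ctx)
	if u == nil {
		// Not signed in: ask the service for a login URL back to this page.
		url, err := user.LoginURL(ctx, r.URL.String())
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		http.Redirect(w, r, url, http.StatusFound)
		return
	}
	// Signed in: a logout URL gives the user a sign-out link.
	url, _ := user.LogoutURL(ctx, "/")
	fmt.Fprintf(w, "Hello, %s! (sign out: %s)\n", u.Email, url)
}
```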
diff --git a/v2/log/api.go b/v2/log/api.go
new file mode 100644
index 0000000..cecd712
--- /dev/null
+++ b/v2/log/api.go
@@ -0,0 +1,40 @@
+// Copyright 2015 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package log
+
+// This file implements the logging API.
+
+import (
+	"context"
+
+	"google.golang.org/appengine/v2/internal"
+)
+
+// Debugf formats its arguments according to the format, analogous to fmt.Printf,
+// and records the text as a log message at Debug level. The message will be associated
+// with the request linked with the provided context.
+func Debugf(ctx context.Context, format string, args ...interface{}) {
+	internal.Logf(ctx, 0, format, args...)
+}
+
+// Infof is like Debugf, but at Info level.
+func Infof(ctx context.Context, format string, args ...interface{}) {
+	internal.Logf(ctx, 1, format, args...)
+}
+
+// Warningf is like Debugf, but at Warning level.
+func Warningf(ctx context.Context, format string, args ...interface{}) {
+	internal.Logf(ctx, 2, format, args...)
+}
+
+// Errorf is like Debugf, but at Error level.
+func Errorf(ctx context.Context, format string, args ...interface{}) {
+	internal.Logf(ctx, 3, format, args...)
+}
+
+// Criticalf is like Debugf, but at Critical level.
+func Criticalf(ctx context.Context, format string, args ...interface{}) {
+	internal.Logf(ctx, 4, format, args...)
+}
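
The integer passed to internal.Logf is the severity level (0 = Debug through 4 = Critical), matching the exported function names above. Typical request-scoped usage looks like this (a minimal sketch; the handler and route are illustrative):

```go
// Typical use of the log package inside a request handler.
package app

import (
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/log"
)

func hello(w http.ResponseWriter, r *http.Request) {
	// The context ties each log line to the request that produced it.
	ctx := appengine.NewContext(r)
	log.Infof(ctx, "serving %s for %s", r.URL.Path, r.RemoteAddr)
	if r.Method != http.MethodGet {
		log.Warningf(ctx, "unexpected method %q", r.Method)
		http.Error(w, "method not allowed", http.StatusMethodNotAllowed)
		return
	}
	w.Write([]byte("hello"))
}
```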
diff --git a/v2/mail/mail.go b/v2/mail/mail.go
new file mode 100644
index 0000000..7b979f4
--- /dev/null
+++ b/v2/mail/mail.go
@@ -0,0 +1,124 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+/*
+Package mail provides the means of sending email from an
+App Engine application.
+
+Example:
+
+	msg := &mail.Message{
+		Sender:  "romeo@montague.com",
+		To:      []string{"Juliet <juliet@capulet.org>"},
+		Subject: "See you tonight",
+		Body:    "Don't forget our plans. Hark, 'til later.",
+	}
+	if err := mail.Send(c, msg); err != nil {
+		log.Errorf(c, "Alas, my user, the email failed to sendeth: %v", err)
+	}
+*/
+package mail // import "google.golang.org/appengine/v2/mail"
+
+import (
+	"context"
+	"net/mail"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal"
+	bpb "google.golang.org/appengine/v2/internal/base"
+	pb "google.golang.org/appengine/v2/internal/mail"
+)
+
+// A Message represents an email message.
+// Addresses may be of any form permitted by RFC 822.
+type Message struct {
+	// Sender must be set, and must be either an application admin
+	// or the currently signed-in user.
+	Sender  string
+	ReplyTo string // may be empty
+
+	// At least one of these slices must have a non-zero length,
+	// except when calling SendToAdmins.
+	To, Cc, Bcc []string
+
+	Subject string
+
+	// At least one of Body or HTMLBody must be non-empty.
+	Body     string
+	HTMLBody string
+
+	Attachments []Attachment
+
+	// Extra mail headers.
+	// See https://cloud.google.com/appengine/docs/standard/go/mail/
+	// for permissible headers.
+	Headers mail.Header
+}
+
+// An Attachment represents an email attachment.
+type Attachment struct {
+	// Name must be set to a valid file name.
+	Name      string
+	Data      []byte
+	ContentID string
+}
+
+// Send sends an email message.
+func Send(c context.Context, msg *Message) error {
+	return send(c, "Send", msg)
+}
+
+// SendToAdmins sends an email message to the application's administrators.
+func SendToAdmins(c context.Context, msg *Message) error {
+	return send(c, "SendToAdmins", msg)
+}
+
+func send(c context.Context, method string, msg *Message) error {
+	req := &pb.MailMessage{
+		Sender:  &msg.Sender,
+		To:      msg.To,
+		Cc:      msg.Cc,
+		Bcc:     msg.Bcc,
+		Subject: &msg.Subject,
+	}
+	if msg.ReplyTo != "" {
+		req.ReplyTo = &msg.ReplyTo
+	}
+	if msg.Body != "" {
+		req.TextBody = &msg.Body
+	}
+	if msg.HTMLBody != "" {
+		req.HtmlBody = &msg.HTMLBody
+	}
+	if len(msg.Attachments) > 0 {
+		req.Attachment = make([]*pb.MailAttachment, len(msg.Attachments))
+		for i, att := range msg.Attachments {
+			req.Attachment[i] = &pb.MailAttachment{
+				FileName: proto.String(att.Name),
+				Data:     att.Data,
+			}
+			if att.ContentID != "" {
+				req.Attachment[i].ContentID = proto.String(att.ContentID)
+			}
+		}
+	}
+	for key, vs := range msg.Headers {
+		for _, v := range vs {
+			req.Header = append(req.Header, &pb.MailHeader{
+				Name:  proto.String(key),
+				Value: proto.String(v),
+			})
+		}
+	}
+	res := &bpb.VoidProto{}
+	if err := internal.Call(c, "mail", method, req, res); err != nil {
+		return err
+	}
+	return nil
+}
+
+func init() {
+	internal.RegisterErrorCodeMap("mail", pb.MailServiceError_ErrorCode_name)
+}
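
Aside (illustrative, not part of the upstream diff): a minimal sketch of `mail.Send` using the `Attachments` and `Headers` fields defined above; the addresses, header, and helper name are made up:

```go
package report

import (
	"context"
	netmail "net/mail"

	"google.golang.org/appengine/v2/mail"
)

// sendReport emails a CSV attachment with one extra header.
func sendReport(ctx context.Context, csv []byte) error {
	msg := &mail.Message{
		// Sender must be an application admin or the signed-in user.
		Sender:  "reports@example.appspotmail.com",
		To:      []string{"ops@example.com"},
		Subject: "Daily report",
		Body:    "Report attached.",
		Attachments: []mail.Attachment{
			{Name: "report.csv", Data: csv},
		},
		Headers: netmail.Header{
			"References": []string{"<daily-report@example.com>"},
		},
	}
	return mail.Send(ctx, msg)
}
```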
diff --git a/v2/mail/mail_test.go b/v2/mail/mail_test.go
new file mode 100644
index 0000000..1f1a4f5
--- /dev/null
+++ b/v2/mail/mail_test.go
@@ -0,0 +1,65 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package mail
+
+import (
+	"testing"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal/aetesting"
+	basepb "google.golang.org/appengine/v2/internal/base"
+	pb "google.golang.org/appengine/v2/internal/mail"
+)
+
+func TestMessageConstruction(t *testing.T) {
+	var got *pb.MailMessage
+	c := aetesting.FakeSingleContext(t, "mail", "Send", func(in *pb.MailMessage, out *basepb.VoidProto) error {
+		got = in
+		return nil
+	})
+
+	msg := &Message{
+		Sender: "dsymonds@example.com",
+		To:     []string{"nigeltao@example.com"},
+		Body:   "Hey, lunch time?",
+		Attachments: []Attachment{
+			// Regression test for a prod bug. The address of a range variable was used when
+			// constructing the outgoing proto, so multiple attachments used the same name.
+			{
+				Name:      "att1.txt",
+				Data:      []byte("data1"),
+				ContentID: "<att1>",
+			},
+			{
+				Name: "att2.txt",
+				Data: []byte("data2"),
+			},
+		},
+	}
+	if err := Send(c, msg); err != nil {
+		t.Fatalf("Send: %v", err)
+	}
+	want := &pb.MailMessage{
+		Sender:   proto.String("dsymonds@example.com"),
+		To:       []string{"nigeltao@example.com"},
+		Subject:  proto.String(""),
+		TextBody: proto.String("Hey, lunch time?"),
+		Attachment: []*pb.MailAttachment{
+			{
+				FileName:  proto.String("att1.txt"),
+				Data:      []byte("data1"),
+				ContentID: proto.String("<att1>"),
+			},
+			{
+				FileName: proto.String("att2.txt"),
+				Data:     []byte("data2"),
+			},
+		},
+	}
+	if !proto.Equal(got, want) {
+		t.Errorf("Bad proto for %+v\n got %v\nwant %v", msg, got, want)
+	}
+}
diff --git a/v2/memcache/memcache.go b/v2/memcache/memcache.go
new file mode 100644
index 0000000..577f606
--- /dev/null
+++ b/v2/memcache/memcache.go
@@ -0,0 +1,526 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package memcache provides a client for App Engine's distributed in-memory
+// key-value store for small chunks of arbitrary data.
+//
+// The fundamental operations get and set items, keyed by a string.
+//
+//	item0, err := memcache.Get(c, "key")
+//	if err != nil && err != memcache.ErrCacheMiss {
+//		return err
+//	}
+//	if err == nil {
+//		fmt.Fprintf(w, "memcache hit: Key=%q Val=[% x]\n", item0.Key, item0.Value)
+//	} else {
+//		fmt.Fprintf(w, "memcache miss\n")
+//	}
+//
+// and
+//
+//	item1 := &memcache.Item{
+//		Key:   "foo",
+//		Value: []byte("bar"),
+//	}
+//	if err := memcache.Set(c, item1); err != nil {
+//		return err
+//	}
+package memcache // import "google.golang.org/appengine/v2/memcache"
+
+import (
+	"bytes"
+	"context"
+	"encoding/gob"
+	"encoding/json"
+	"errors"
+	"time"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/memcache"
+)
+
+var (
+	// ErrCacheMiss means that an operation failed
+	// because the item wasn't present.
+	ErrCacheMiss = errors.New("memcache: cache miss")
+	// ErrCASConflict means that a CompareAndSwap call failed due to the
+	// cached value being modified between the Get and the CompareAndSwap.
+	// If the cached value was simply evicted rather than replaced,
+	// ErrNotStored will be returned instead.
+	ErrCASConflict = errors.New("memcache: compare-and-swap conflict")
+	// ErrNoStats means that no statistics were available.
+	ErrNoStats = errors.New("memcache: no statistics available")
+	// ErrNotStored means that a conditional write operation (i.e. Add or
+	// CompareAndSwap) failed because the condition was not satisfied.
+	ErrNotStored = errors.New("memcache: item not stored")
+	// ErrServerError means that a server error occurred.
+	ErrServerError = errors.New("memcache: server error")
+)
+
+// Item is the unit of memcache gets and sets.
+type Item struct {
+	// Key is the Item's key (250 bytes maximum).
+	Key string
+	// Value is the Item's value.
+	Value []byte
+	// Object is the Item's value for use with a Codec.
+	Object interface{}
+	// Flags are server-opaque flags whose semantics are entirely up to the
+	// App Engine app.
+	Flags uint32
+	// Expiration is the maximum duration that the item will stay
+	// in the cache.
+	// The zero value means the Item has no expiration time.
+	// Subsecond precision is ignored.
+	// This is not set when getting items.
+	Expiration time.Duration
+	// casID is a client-opaque value used for compare-and-swap operations.
+	// Zero means that compare-and-swap is not used.
+	casID uint64
+}
+
+const (
+	secondsIn30Years = 60 * 60 * 24 * 365 * 30 // from memcache server code
+	thirtyYears      = time.Duration(secondsIn30Years) * time.Second
+)
+
+// protoToItem converts a protocol buffer item to a Go struct.
+func protoToItem(p *pb.MemcacheGetResponse_Item) *Item {
+	return &Item{
+		Key:   string(p.Key),
+		Value: p.Value,
+		Flags: p.GetFlags(),
+		casID: p.GetCasId(),
+	}
+}
+
+// If err is an appengine.MultiError, return its first element. Otherwise, return err.
+func singleError(err error) error {
+	if me, ok := err.(appengine.MultiError); ok {
+		return me[0]
+	}
+	return err
+}
+
+// Get gets the item for the given key. ErrCacheMiss is returned for a memcache
+// cache miss. The key must be at most 250 bytes in length.
+func Get(c context.Context, key string) (*Item, error) {
+	m, err := GetMulti(c, []string{key})
+	if err != nil {
+		return nil, err
+	}
+	if _, ok := m[key]; !ok {
+		return nil, ErrCacheMiss
+	}
+	return m[key], nil
+}
+
+// GetMulti is a batch version of Get. The returned map from keys to items may
+// have fewer elements than the input slice, due to memcache cache misses.
+// Each key must be at most 250 bytes in length.
+func GetMulti(c context.Context, key []string) (map[string]*Item, error) {
+	if len(key) == 0 {
+		return nil, nil
+	}
+	keyAsBytes := make([][]byte, len(key))
+	for i, k := range key {
+		keyAsBytes[i] = []byte(k)
+	}
+	req := &pb.MemcacheGetRequest{
+		Key:    keyAsBytes,
+		ForCas: proto.Bool(true),
+	}
+	res := &pb.MemcacheGetResponse{}
+	if err := internal.Call(c, "memcache", "Get", req, res); err != nil {
+		return nil, err
+	}
+	m := make(map[string]*Item, len(res.Item))
+	for _, p := range res.Item {
+		t := protoToItem(p)
+		m[t.Key] = t
+	}
+	return m, nil
+}
+
+// Delete deletes the item for the given key.
+// ErrCacheMiss is returned if the specified item can not be found.
+// The key must be at most 250 bytes in length.
+func Delete(c context.Context, key string) error {
+	return singleError(DeleteMulti(c, []string{key}))
+}
+
+// DeleteMulti is a batch version of Delete.
+// If any keys cannot be found, an appengine.MultiError is returned.
+// Each key must be at most 250 bytes in length.
+func DeleteMulti(c context.Context, key []string) error {
+	if len(key) == 0 {
+		return nil
+	}
+	req := &pb.MemcacheDeleteRequest{
+		Item: make([]*pb.MemcacheDeleteRequest_Item, len(key)),
+	}
+	for i, k := range key {
+		req.Item[i] = &pb.MemcacheDeleteRequest_Item{Key: []byte(k)}
+	}
+	res := &pb.MemcacheDeleteResponse{}
+	if err := internal.Call(c, "memcache", "Delete", req, res); err != nil {
+		return err
+	}
+	if len(res.DeleteStatus) != len(key) {
+		return ErrServerError
+	}
+	me, any := make(appengine.MultiError, len(key)), false
+	for i, s := range res.DeleteStatus {
+		switch s {
+		case pb.MemcacheDeleteResponse_DELETED:
+			// OK
+		case pb.MemcacheDeleteResponse_NOT_FOUND:
+			me[i] = ErrCacheMiss
+			any = true
+		default:
+			me[i] = ErrServerError
+			any = true
+		}
+	}
+	if any {
+		return me
+	}
+	return nil
+}
+
+// Increment atomically increments the decimal value in the given key
+// by delta and returns the new value. The value must fit in a uint64.
+// Overflow wraps around, and underflow is capped to zero. The
+// provided delta may be negative. If the key doesn't exist in
+// memcache, the provided initial value is used to atomically
+// populate it before the delta is applied.
+// The key must be at most 250 bytes in length.
+func Increment(c context.Context, key string, delta int64, initialValue uint64) (newValue uint64, err error) {
+	return incr(c, key, delta, &initialValue)
+}
+
+// IncrementExisting works like Increment but assumes that the key
+// already exists in memcache and doesn't take an initial value.
+// IncrementExisting can save work if calculating the initial value is
+// expensive.
+// An error is returned if the specified item can not be found.
+func IncrementExisting(c context.Context, key string, delta int64) (newValue uint64, err error) {
+	return incr(c, key, delta, nil)
+}
+
+func incr(c context.Context, key string, delta int64, initialValue *uint64) (newValue uint64, err error) {
+	req := &pb.MemcacheIncrementRequest{
+		Key:          []byte(key),
+		InitialValue: initialValue,
+	}
+	if delta >= 0 {
+		req.Delta = proto.Uint64(uint64(delta))
+	} else {
+		req.Delta = proto.Uint64(uint64(-delta))
+		req.Direction = pb.MemcacheIncrementRequest_DECREMENT.Enum()
+	}
+	res := &pb.MemcacheIncrementResponse{}
+	err = internal.Call(c, "memcache", "Increment", req, res)
+	if err != nil {
+		return
+	}
+	if res.NewValue == nil {
+		return 0, ErrCacheMiss
+	}
+	return *res.NewValue, nil
+}
+
+// set sets the given items using the given conflict resolution policy.
+// appengine.MultiError may be returned.
+func set(c context.Context, item []*Item, value [][]byte, policy pb.MemcacheSetRequest_SetPolicy) error {
+	if len(item) == 0 {
+		return nil
+	}
+	req := &pb.MemcacheSetRequest{
+		Item: make([]*pb.MemcacheSetRequest_Item, len(item)),
+	}
+	for i, t := range item {
+		p := &pb.MemcacheSetRequest_Item{
+			Key: []byte(t.Key),
+		}
+		if value == nil {
+			p.Value = t.Value
+		} else {
+			p.Value = value[i]
+		}
+		if t.Flags != 0 {
+			p.Flags = proto.Uint32(t.Flags)
+		}
+		if t.Expiration != 0 {
+			// In the .proto file, MemcacheSetRequest_Item uses a fixed32 (i.e. unsigned)
+			// for expiration time, while MemcacheGetRequest_Item uses int32 (i.e. signed).
+			// Throughout this .go file, we use int32.
+			// Also, in the proto, the expiration value is either a duration (in seconds)
+			// or an absolute Unix timestamp (in seconds), depending on whether the
+			// value is less than or greater than or equal to 30 years, respectively.
+			if t.Expiration < time.Second {
+				// Because an Expiration of 0 means no expiration, we take
+				// care here to translate an item with an expiration
+				// Duration between 0-1 seconds as immediately expiring
+				// (saying it expired a few seconds ago), rather than
+				// rounding it down to 0 and making it live forever.
+				p.ExpirationTime = proto.Uint32(uint32(time.Now().Unix()) - 5)
+			} else if t.Expiration >= thirtyYears {
+				p.ExpirationTime = proto.Uint32(uint32(time.Now().Unix()) + uint32(t.Expiration/time.Second))
+			} else {
+				p.ExpirationTime = proto.Uint32(uint32(t.Expiration / time.Second))
+			}
+		}
+		if t.casID != 0 {
+			p.CasId = proto.Uint64(t.casID)
+			p.ForCas = proto.Bool(true)
+		}
+		p.SetPolicy = policy.Enum()
+		req.Item[i] = p
+	}
+	res := &pb.MemcacheSetResponse{}
+	if err := internal.Call(c, "memcache", "Set", req, res); err != nil {
+		return err
+	}
+	if len(res.SetStatus) != len(item) {
+		return ErrServerError
+	}
+	me, any := make(appengine.MultiError, len(item)), false
+	for i, st := range res.SetStatus {
+		var err error
+		switch st {
+		case pb.MemcacheSetResponse_STORED:
+			// OK
+		case pb.MemcacheSetResponse_NOT_STORED:
+			err = ErrNotStored
+		case pb.MemcacheSetResponse_EXISTS:
+			err = ErrCASConflict
+		default:
+			err = ErrServerError
+		}
+		if err != nil {
+			me[i] = err
+			any = true
+		}
+	}
+	if any {
+		return me
+	}
+	return nil
+}
+
+// Set writes the given item, unconditionally.
+func Set(c context.Context, item *Item) error {
+	return singleError(set(c, []*Item{item}, nil, pb.MemcacheSetRequest_SET))
+}
+
+// SetMulti is a batch version of Set.
+// appengine.MultiError may be returned.
+func SetMulti(c context.Context, item []*Item) error {
+	return set(c, item, nil, pb.MemcacheSetRequest_SET)
+}
+
+// Add writes the given item, if no value already exists for its key.
+// ErrNotStored is returned if that condition is not met.
+func Add(c context.Context, item *Item) error {
+	return singleError(set(c, []*Item{item}, nil, pb.MemcacheSetRequest_ADD))
+}
+
+// AddMulti is a batch version of Add.
+// appengine.MultiError may be returned.
+func AddMulti(c context.Context, item []*Item) error {
+	return set(c, item, nil, pb.MemcacheSetRequest_ADD)
+}
+
+// CompareAndSwap writes the given item that was previously returned by Get,
+// if the value was neither modified nor evicted between the Get and the
+// CompareAndSwap calls. The item's Key should not change between calls but
+// all other item fields may differ.
+// ErrCASConflict is returned if the value was modified in between the calls.
+// ErrNotStored is returned if the value was evicted in between the calls.
+func CompareAndSwap(c context.Context, item *Item) error {
+	return singleError(set(c, []*Item{item}, nil, pb.MemcacheSetRequest_CAS))
+}
+
+// CompareAndSwapMulti is a batch version of CompareAndSwap.
+// appengine.MultiError may be returned.
+func CompareAndSwapMulti(c context.Context, item []*Item) error {
+	return set(c, item, nil, pb.MemcacheSetRequest_CAS)
+}
+
+// Codec represents a symmetric pair of functions that implement a codec.
+// Items stored into or retrieved from memcache using a Codec have their
+// values marshaled or unmarshaled.
+//
+// All the methods provided for Codec behave analogously to the package-level
+// function with the same name.
+type Codec struct {
+	Marshal   func(interface{}) ([]byte, error)
+	Unmarshal func([]byte, interface{}) error
+}
+
+// Get gets the item for the given key and decodes the obtained value into v.
+// ErrCacheMiss is returned for a memcache cache miss.
+// The key must be at most 250 bytes in length.
+func (cd Codec) Get(c context.Context, key string, v interface{}) (*Item, error) {
+	i, err := Get(c, key)
+	if err != nil {
+		return nil, err
+	}
+	if err := cd.Unmarshal(i.Value, v); err != nil {
+		return nil, err
+	}
+	return i, nil
+}
+
+func (cd Codec) set(c context.Context, items []*Item, policy pb.MemcacheSetRequest_SetPolicy) error {
+	var vs [][]byte
+	var me appengine.MultiError
+	for i, item := range items {
+		v, err := cd.Marshal(item.Object)
+		if err != nil {
+			if me == nil {
+				me = make(appengine.MultiError, len(items))
+			}
+			me[i] = err
+			continue
+		}
+		if me == nil {
+			vs = append(vs, v)
+		}
+	}
+	if me != nil {
+		return me
+	}
+
+	return set(c, items, vs, policy)
+}
+
+// Set writes the given item, unconditionally.
+func (cd Codec) Set(c context.Context, item *Item) error {
+	return singleError(cd.set(c, []*Item{item}, pb.MemcacheSetRequest_SET))
+}
+
+// SetMulti is a batch version of Set.
+// appengine.MultiError may be returned.
+func (cd Codec) SetMulti(c context.Context, items []*Item) error {
+	return cd.set(c, items, pb.MemcacheSetRequest_SET)
+}
+
+// Add writes the given item, if no value already exists for its key.
+// ErrNotStored is returned if that condition is not met.
+func (cd Codec) Add(c context.Context, item *Item) error {
+	return singleError(cd.set(c, []*Item{item}, pb.MemcacheSetRequest_ADD))
+}
+
+// AddMulti is a batch version of Add.
+// appengine.MultiError may be returned.
+func (cd Codec) AddMulti(c context.Context, items []*Item) error {
+	return cd.set(c, items, pb.MemcacheSetRequest_ADD)
+}
+
+// CompareAndSwap writes the given item that was previously returned by Get,
+// if the value was neither modified nor evicted between the Get and the
+// CompareAndSwap calls. The item's Key should not change between calls but
+// all other item fields may differ.
+// ErrCASConflict is returned if the value was modified in between the calls.
+// ErrNotStored is returned if the value was evicted in between the calls.
+func (cd Codec) CompareAndSwap(c context.Context, item *Item) error {
+	return singleError(cd.set(c, []*Item{item}, pb.MemcacheSetRequest_CAS))
+}
+
+// CompareAndSwapMulti is a batch version of CompareAndSwap.
+// appengine.MultiError may be returned.
+func (cd Codec) CompareAndSwapMulti(c context.Context, items []*Item) error {
+	return cd.set(c, items, pb.MemcacheSetRequest_CAS)
+}
+
+var (
+	// Gob is a Codec that uses the gob package.
+	Gob = Codec{gobMarshal, gobUnmarshal}
+	// JSON is a Codec that uses the json package.
+	JSON = Codec{json.Marshal, json.Unmarshal}
+)
+
+func gobMarshal(v interface{}) ([]byte, error) {
+	var buf bytes.Buffer
+	if err := gob.NewEncoder(&buf).Encode(v); err != nil {
+		return nil, err
+	}
+	return buf.Bytes(), nil
+}
+
+func gobUnmarshal(data []byte, v interface{}) error {
+	return gob.NewDecoder(bytes.NewBuffer(data)).Decode(v)
+}
+
+// Statistics represents a set of statistics about the memcache cache.
+// This may include items that have expired but have not yet been removed from the cache.
+type Statistics struct {
+	Hits     uint64 // Counter of cache hits
+	Misses   uint64 // Counter of cache misses
+	ByteHits uint64 // Counter of bytes transferred for gets
+
+	Items uint64 // Items currently in the cache
+	Bytes uint64 // Size of all items currently in the cache
+
+	Oldest int64 // Age of access of the oldest item, in seconds
+}
+
+// Stats retrieves the current memcache statistics.
+func Stats(c context.Context) (*Statistics, error) {
+	req := &pb.MemcacheStatsRequest{}
+	res := &pb.MemcacheStatsResponse{}
+	if err := internal.Call(c, "memcache", "Stats", req, res); err != nil {
+		return nil, err
+	}
+	if res.Stats == nil {
+		return nil, ErrNoStats
+	}
+	return &Statistics{
+		Hits:     *res.Stats.Hits,
+		Misses:   *res.Stats.Misses,
+		ByteHits: *res.Stats.ByteHits,
+		Items:    *res.Stats.Items,
+		Bytes:    *res.Stats.Bytes,
+		Oldest:   int64(*res.Stats.OldestItemAge),
+	}, nil
+}
+
+// Flush flushes all items from memcache.
+func Flush(c context.Context) error {
+	req := &pb.MemcacheFlushRequest{}
+	res := &pb.MemcacheFlushResponse{}
+	return internal.Call(c, "memcache", "FlushAll", req, res)
+}
+
+func namespaceMod(m proto.Message, namespace string) {
+	switch m := m.(type) {
+	case *pb.MemcacheDeleteRequest:
+		if m.NameSpace == nil {
+			m.NameSpace = &namespace
+		}
+	case *pb.MemcacheGetRequest:
+		if m.NameSpace == nil {
+			m.NameSpace = &namespace
+		}
+	case *pb.MemcacheIncrementRequest:
+		if m.NameSpace == nil {
+			m.NameSpace = &namespace
+		}
+	case *pb.MemcacheSetRequest:
+		if m.NameSpace == nil {
+			m.NameSpace = &namespace
+		}
+		// MemcacheFlushRequest, MemcacheStatsRequest do not apply namespace.
+	}
+}
+
+func init() {
+	internal.RegisterErrorCodeMap("memcache", pb.MemcacheServiceError_ErrorCode_name)
+	internal.NamespaceMods["memcache"] = namespaceMod
+}
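
Aside (illustrative, not part of the upstream diff): a minimal read-through cache sketch using the `JSON` Codec and `ErrCacheMiss` defined above; the `Profile` type, key, and expiration are made up:

```go
package cachedemo

import (
	"context"
	"time"

	"google.golang.org/appengine/v2/memcache"
)

type Profile struct {
	Name  string
	Score int
}

// cachedProfile returns the cached profile if present, otherwise loads it
// and stores the fresh value with a ten-minute expiration.
func cachedProfile(ctx context.Context, key string, load func() (*Profile, error)) (*Profile, error) {
	var p Profile
	if _, err := memcache.JSON.Get(ctx, key, &p); err == nil {
		return &p, nil // cache hit
	} else if err != memcache.ErrCacheMiss {
		return nil, err // a real memcache error, not just a miss
	}
	fresh, err := load()
	if err != nil {
		return nil, err
	}
	// Best effort: a failed Set should not fail the request.
	_ = memcache.JSON.Set(ctx, &memcache.Item{
		Key:        key,
		Object:     fresh,
		Expiration: 10 * time.Minute,
	})
	return fresh, nil
}
```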
diff --git a/v2/memcache/memcache_test.go b/v2/memcache/memcache_test.go
new file mode 100644
index 0000000..59de0cd
--- /dev/null
+++ b/v2/memcache/memcache_test.go
@@ -0,0 +1,263 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package memcache
+
+import (
+	"fmt"
+	"testing"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal/aetesting"
+	pb "google.golang.org/appengine/v2/internal/memcache"
+)
+
+var errRPC = fmt.Errorf("RPC error")
+
+func TestGetRequest(t *testing.T) {
+	serviceCalled := false
+	apiKey := "lyric"
+
+	c := aetesting.FakeSingleContext(t, "memcache", "Get", func(req *pb.MemcacheGetRequest, _ *pb.MemcacheGetResponse) error {
+		// Test request.
+		if n := len(req.Key); n != 1 {
+			t.Errorf("got %d want 1", n)
+			return nil
+		}
+		if k := string(req.Key[0]); k != apiKey {
+			t.Errorf("got %q want %q", k, apiKey)
+		}
+
+		serviceCalled = true
+		return nil
+	})
+
+	// Test the "forward" path from the API call parameters to the
+	// protobuf request object. (The "backward" path from the
+	// protobuf response object to the API call response,
+	// including the error response, is handled in the next few
+	// tests).
+	Get(c, apiKey)
+	if !serviceCalled {
+		t.Error("Service was not called as expected")
+	}
+}
+
+func TestGetResponseHit(t *testing.T) {
+	key := "lyric"
+	value := "Where the buffalo roam"
+
+	c := aetesting.FakeSingleContext(t, "memcache", "Get", func(_ *pb.MemcacheGetRequest, res *pb.MemcacheGetResponse) error {
+		res.Item = []*pb.MemcacheGetResponse_Item{
+			{Key: []byte(key), Value: []byte(value)},
+		}
+		return nil
+	})
+	apiItem, err := Get(c, key)
+	if apiItem == nil || apiItem.Key != key || string(apiItem.Value) != value {
+		t.Errorf("got %q, %q want {%q,%q}, nil", apiItem, err, key, value)
+	}
+}
+
+func TestGetResponseMiss(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "memcache", "Get", func(_ *pb.MemcacheGetRequest, res *pb.MemcacheGetResponse) error {
+		// don't fill in any of the response
+		return nil
+	})
+	_, err := Get(c, "something")
+	if err != ErrCacheMiss {
+		t.Errorf("got %v want ErrCacheMiss", err)
+	}
+}
+
+func TestGetResponseRPCError(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "memcache", "Get", func(_ *pb.MemcacheGetRequest, res *pb.MemcacheGetResponse) error {
+		return errRPC
+	})
+
+	if _, err := Get(c, "something"); err != errRPC {
+		t.Errorf("got %v want errRPC", err)
+	}
+}
+
+func TestAddRequest(t *testing.T) {
+	var apiItem = &Item{
+		Key:   "lyric",
+		Value: []byte("Oh, give me a home"),
+	}
+
+	serviceCalled := false
+
+	c := aetesting.FakeSingleContext(t, "memcache", "Set", func(req *pb.MemcacheSetRequest, _ *pb.MemcacheSetResponse) error {
+		// Test request.
+		pbItem := req.Item[0]
+		if k := string(pbItem.Key); k != apiItem.Key {
+			t.Errorf("got %q want %q", k, apiItem.Key)
+		}
+		if v := string(apiItem.Value); v != string(pbItem.Value) {
+			t.Errorf("got %q want %q", v, string(pbItem.Value))
+		}
+		if p := *pbItem.SetPolicy; p != pb.MemcacheSetRequest_ADD {
+			t.Errorf("got %v want %v", p, pb.MemcacheSetRequest_ADD)
+		}
+
+		serviceCalled = true
+		return nil
+	})
+
+	Add(c, apiItem)
+	if !serviceCalled {
+		t.Error("Service was not called as expected")
+	}
+}
+
+func TestAddResponseStored(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "memcache", "Set", func(_ *pb.MemcacheSetRequest, res *pb.MemcacheSetResponse) error {
+		res.SetStatus = []pb.MemcacheSetResponse_SetStatusCode{pb.MemcacheSetResponse_STORED}
+		return nil
+	})
+
+	if err := Add(c, &Item{}); err != nil {
+		t.Errorf("got %v want nil", err)
+	}
+}
+
+func TestAddResponseNotStored(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "memcache", "Set", func(_ *pb.MemcacheSetRequest, res *pb.MemcacheSetResponse) error {
+		res.SetStatus = []pb.MemcacheSetResponse_SetStatusCode{pb.MemcacheSetResponse_NOT_STORED}
+		return nil
+	})
+
+	if err := Add(c, &Item{}); err != ErrNotStored {
+		t.Errorf("got %v want ErrNotStored", err)
+	}
+}
+
+func TestAddResponseError(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "memcache", "Set", func(_ *pb.MemcacheSetRequest, res *pb.MemcacheSetResponse) error {
+		res.SetStatus = []pb.MemcacheSetResponse_SetStatusCode{pb.MemcacheSetResponse_ERROR}
+		return nil
+	})
+
+	if err := Add(c, &Item{}); err != ErrServerError {
+		t.Errorf("got %v want ErrServerError", err)
+	}
+}
+
+func TestAddResponseRPCError(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "memcache", "Set", func(_ *pb.MemcacheSetRequest, res *pb.MemcacheSetResponse) error {
+		return errRPC
+	})
+
+	if err := Add(c, &Item{}); err != errRPC {
+		t.Errorf("got %v want errRPC", err)
+	}
+}
+
+func TestSetRequest(t *testing.T) {
+	var apiItem = &Item{
+		Key:   "lyric",
+		Value: []byte("Where the buffalo roam"),
+	}
+
+	serviceCalled := false
+
+	c := aetesting.FakeSingleContext(t, "memcache", "Set", func(req *pb.MemcacheSetRequest, _ *pb.MemcacheSetResponse) error {
+		// Test request.
+		if n := len(req.Item); n != 1 {
+			t.Errorf("got %d want 1", n)
+			return nil
+		}
+		pbItem := req.Item[0]
+		if k := string(pbItem.Key); k != apiItem.Key {
+			t.Errorf("got %q want %q", k, apiItem.Key)
+		}
+		if v := string(pbItem.Value); v != string(apiItem.Value) {
+			t.Errorf("got %q want %q", v, string(apiItem.Value))
+		}
+		if p := *pbItem.SetPolicy; p != pb.MemcacheSetRequest_SET {
+			t.Errorf("got %v want %v", p, pb.MemcacheSetRequest_SET)
+		}
+
+		serviceCalled = true
+		return nil
+	})
+
+	Set(c, apiItem)
+	if !serviceCalled {
+		t.Error("Service was not called as expected")
+	}
+}
+
+func TestSetResponse(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "memcache", "Set", func(_ *pb.MemcacheSetRequest, res *pb.MemcacheSetResponse) error {
+		res.SetStatus = []pb.MemcacheSetResponse_SetStatusCode{pb.MemcacheSetResponse_STORED}
+		return nil
+	})
+
+	if err := Set(c, &Item{}); err != nil {
+		t.Errorf("got %v want nil", err)
+	}
+}
+
+func TestSetResponseError(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "memcache", "Set", func(_ *pb.MemcacheSetRequest, res *pb.MemcacheSetResponse) error {
+		res.SetStatus = []pb.MemcacheSetResponse_SetStatusCode{pb.MemcacheSetResponse_ERROR}
+		return nil
+	})
+
+	if err := Set(c, &Item{}); err != ErrServerError {
+		t.Errorf("got %v want ErrServerError", err)
+	}
+}
+
+func TestNamespaceResetting(t *testing.T) {
+	namec := make(chan *string, 1)
+	c0 := aetesting.FakeSingleContext(t, "memcache", "Get", func(req *pb.MemcacheGetRequest, res *pb.MemcacheGetResponse) error {
+		namec <- req.NameSpace
+		return errRPC
+	})
+
+	// Check that wrapping c0 in a namespace twice works correctly.
+	c1, err := appengine.Namespace(c0, "A")
+	if err != nil {
+		t.Fatalf("appengine.Namespace: %v", err)
+	}
+	c2, err := appengine.Namespace(c1, "") // should act as the original context
+	if err != nil {
+		t.Fatalf("appengine.Namespace: %v", err)
+	}
+
+	Get(c0, "key")
+	if ns := <-namec; ns != nil {
+		t.Errorf(`Get with c0: ns = %q, want nil`, *ns)
+	}
+
+	Get(c1, "key")
+	if ns := <-namec; ns == nil {
+		t.Error(`Get with c1: ns = nil, want "A"`)
+	} else if *ns != "A" {
+		t.Errorf(`Get with c1: ns = %q, want "A"`, *ns)
+	}
+
+	Get(c2, "key")
+	if ns := <-namec; ns != nil {
+		t.Errorf(`Get with c2: ns = %q, want nil`, *ns)
+	}
+}
+
+func TestGetMultiEmpty(t *testing.T) {
+	serviceCalled := false
+	c := aetesting.FakeSingleContext(t, "memcache", "Get", func(req *pb.MemcacheGetRequest, _ *pb.MemcacheGetResponse) error {
+		serviceCalled = true
+		return nil
+	})
+
+	// Test that the Memcache service is not called when
+	// GetMulti is passed an empty slice of keys.
+	GetMulti(c, []string{})
+	if serviceCalled {
+		t.Error("Service was called but should not have been")
+	}
+}
diff --git a/v2/module/module.go b/v2/module/module.go
new file mode 100644
index 0000000..5acadec
--- /dev/null
+++ b/v2/module/module.go
@@ -0,0 +1,115 @@
+// Copyright 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+/*
+Package module provides functions for interacting with modules.
+
+The appengine package contains functions that report the identity of the app,
+including the module name.
+*/
+package module // import "google.golang.org/appengine/v2/module"
+
+import (
+	"context"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/modules"
+)
+
+// List returns the names of modules belonging to this application.
+func List(c context.Context) ([]string, error) {
+	req := &pb.GetModulesRequest{}
+	res := &pb.GetModulesResponse{}
+	err := internal.Call(c, "modules", "GetModules", req, res)
+	return res.Module, err
+}
+
+// NumInstances returns the number of instances of the given module/version.
+// If either argument is the empty string, it means the default. This only works
+// if you are using manual_scaling in your app's config file.
+func NumInstances(c context.Context, module, version string) (int, error) {
+	req := &pb.GetNumInstancesRequest{}
+	if module != "" {
+		req.Module = &module
+	}
+	if version != "" {
+		req.Version = &version
+	}
+	res := &pb.GetNumInstancesResponse{}
+
+	if err := internal.Call(c, "modules", "GetNumInstances", req, res); err != nil {
+		return 0, err
+	}
+	return int(*res.Instances), nil
+}
+
+// SetNumInstances sets the number of instances of the given module.version to the
+// specified value. If either module or version are the empty string it means the
+// default.
+func SetNumInstances(c context.Context, module, version string, instances int) error {
+	req := &pb.SetNumInstancesRequest{}
+	if module != "" {
+		req.Module = &module
+	}
+	if version != "" {
+		req.Version = &version
+	}
+	req.Instances = proto.Int64(int64(instances))
+	res := &pb.SetNumInstancesResponse{}
+	return internal.Call(c, "modules", "SetNumInstances", req, res)
+}
+
+// Versions returns the names of the versions that belong to the specified module.
+// If module is the empty string, it means the default module.
+func Versions(c context.Context, module string) ([]string, error) {
+	req := &pb.GetVersionsRequest{}
+	if module != "" {
+		req.Module = &module
+	}
+	res := &pb.GetVersionsResponse{}
+	err := internal.Call(c, "modules", "GetVersions", req, res)
+	return res.GetVersion(), err
+}
+
+// DefaultVersion returns the default version of the specified module.
+// If module is the empty string, it means the default module.
+func DefaultVersion(c context.Context, module string) (string, error) {
+	req := &pb.GetDefaultVersionRequest{}
+	if module != "" {
+		req.Module = &module
+	}
+	res := &pb.GetDefaultVersionResponse{}
+	err := internal.Call(c, "modules", "GetDefaultVersion", req, res)
+	return res.GetVersion(), err
+}
+
+// Start starts the specified version of the specified module.
+// If either module or version are the empty string, it means the default.
+func Start(c context.Context, module, version string) error {
+	req := &pb.StartModuleRequest{}
+	if module != "" {
+		req.Module = &module
+	}
+	if version != "" {
+		req.Version = &version
+	}
+	res := &pb.StartModuleResponse{}
+	return internal.Call(c, "modules", "StartModule", req, res)
+}
+
+// Stop stops the specified version of the specified module.
+// If either module or version are the empty string, it means the default.
+func Stop(c context.Context, module, version string) error {
+	req := &pb.StopModuleRequest{}
+	if module != "" {
+		req.Module = &module
+	}
+	if version != "" {
+		req.Version = &version
+	}
+	res := &pb.StopModuleResponse{}
+	return internal.Call(c, "modules", "StopModule", req, res)
+}
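
Aside (illustrative, not part of the upstream diff): a minimal sketch combining `NumInstances` and `SetNumInstances` from above; per the comments it assumes the target module is configured with manual_scaling, and the helper name is made up:

```go
package ops

import (
	"context"

	"google.golang.org/appengine/v2/module"
)

// ensureInstances scales a manually-scaled module/version up to at least min instances.
func ensureInstances(ctx context.Context, mod, version string, min int) error {
	n, err := module.NumInstances(ctx, mod, version)
	if err != nil {
		return err
	}
	if n >= min {
		return nil // already at or above the target
	}
	return module.SetNumInstances(ctx, mod, version, min)
}
```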
diff --git a/v2/module/module_test.go b/v2/module/module_test.go
new file mode 100644
index 0000000..37cc7d0
--- /dev/null
+++ b/v2/module/module_test.go
@@ -0,0 +1,124 @@
+// Copyright 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package module
+
+import (
+	"reflect"
+	"testing"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal/aetesting"
+	pb "google.golang.org/appengine/v2/internal/modules"
+)
+
+const version = "test-version"
+const module = "test-module"
+const instances = 3
+
+func TestList(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "modules", "GetModules", func(req *pb.GetModulesRequest, res *pb.GetModulesResponse) error {
+		res.Module = []string{"default", "mod1"}
+		return nil
+	})
+	got, err := List(c)
+	if err != nil {
+		t.Fatalf("List: %v", err)
+	}
+	want := []string{"default", "mod1"}
+	if !reflect.DeepEqual(got, want) {
+		t.Errorf("List = %v, want %v", got, want)
+	}
+}
+
+func TestSetNumInstances(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "modules", "SetNumInstances", func(req *pb.SetNumInstancesRequest, res *pb.SetNumInstancesResponse) error {
+		if *req.Module != module {
+			t.Errorf("Module = %v, want %v", req.Module, module)
+		}
+		if *req.Version != version {
+			t.Errorf("Version = %v, want %v", req.Version, version)
+		}
+		if *req.Instances != instances {
+			t.Errorf("Instances = %v, want %d", req.Instances, instances)
+		}
+		return nil
+	})
+	err := SetNumInstances(c, module, version, instances)
+	if err != nil {
+		t.Fatalf("SetNumInstances: %v", err)
+	}
+}
+
+func TestVersions(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "modules", "GetVersions", func(req *pb.GetVersionsRequest, res *pb.GetVersionsResponse) error {
+		if *req.Module != module {
+			t.Errorf("Module = %v, want %v", req.Module, module)
+		}
+		res.Version = []string{"v1", "v2", "v3"}
+		return nil
+	})
+	got, err := Versions(c, module)
+	if err != nil {
+		t.Fatalf("Versions: %v", err)
+	}
+	want := []string{"v1", "v2", "v3"}
+	if !reflect.DeepEqual(got, want) {
+		t.Errorf("Versions = %v, want %v", got, want)
+	}
+}
+
+func TestDefaultVersion(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "modules", "GetDefaultVersion", func(req *pb.GetDefaultVersionRequest, res *pb.GetDefaultVersionResponse) error {
+		if *req.Module != module {
+			t.Errorf("Module = %v, want %v", req.Module, module)
+		}
+		res.Version = proto.String(version)
+		return nil
+	})
+	got, err := DefaultVersion(c, module)
+	if err != nil {
+		t.Fatalf("DefaultVersion: %v", err)
+	}
+	if got != version {
+		t.Errorf("Version = %v, want %v", got, version)
+	}
+}
+
+func TestStart(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "modules", "StartModule", func(req *pb.StartModuleRequest, res *pb.StartModuleResponse) error {
+		if *req.Module != module {
+			t.Errorf("Module = %v, want %v", req.Module, module)
+		}
+		if *req.Version != version {
+			t.Errorf("Version = %v, want %v", req.Version, version)
+		}
+		return nil
+	})
+
+	err := Start(c, module, version)
+	if err != nil {
+		t.Fatalf("Start: %v", err)
+	}
+}
+
+func TestStop(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "modules", "StopModule", func(req *pb.StopModuleRequest, res *pb.StopModuleResponse) error {
+		version := "test-version"
+		module := "test-module"
+		if *req.Module != module {
+			t.Errorf("Module = %v, want %v", req.Module, module)
+		}
+		if *req.Version != version {
+			t.Errorf("Version = %v, want %v", req.Version, version)
+		}
+		return nil
+	})
+
+	err := Stop(c, module, version)
+	if err != nil {
+		t.Fatalf("Stop: %v", err)
+	}
+}
diff --git a/v2/namespace.go b/v2/namespace.go
new file mode 100644
index 0000000..7d7bf28
--- /dev/null
+++ b/v2/namespace.go
@@ -0,0 +1,24 @@
+// Copyright 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package appengine
+
+import (
+	"context"
+	"fmt"
+	"regexp"
+
+	"google.golang.org/appengine/v2/internal"
+)
+
+// Namespace returns a replacement context that operates within the given namespace.
+func Namespace(c context.Context, namespace string) (context.Context, error) {
+	if !validNamespace.MatchString(namespace) {
+		return nil, fmt.Errorf("appengine: namespace %q does not match /%s/", namespace, validNamespace)
+	}
+	return internal.NamespacedContext(c, namespace), nil
+}
+
+// validNamespace matches valid namespace names.
+var validNamespace = regexp.MustCompile(`^[0-9A-Za-z._-]{0,100}$`)
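
Aside (illustrative, not part of the upstream diff): a minimal sketch of `appengine.Namespace` scoping a memcache lookup to a per-tenant namespace; the tenant argument and cache key are made up:

```go
package tenants

import (
	"context"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/memcache"
)

// tenantSettings reads the "settings" cache entry inside the tenant's namespace.
func tenantSettings(ctx context.Context, tenant string) (*memcache.Item, error) {
	nsCtx, err := appengine.Namespace(ctx, tenant)
	if err != nil {
		return nil, err // tenant did not match the validNamespace pattern
	}
	return memcache.Get(nsCtx, "settings")
}
```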
diff --git a/v2/namespace_test.go b/v2/namespace_test.go
new file mode 100644
index 0000000..e19fa4f
--- /dev/null
+++ b/v2/namespace_test.go
@@ -0,0 +1,38 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package appengine
+
+import (
+	"context"
+	"testing"
+)
+
+func TestNamespaceValidity(t *testing.T) {
+	testCases := []struct {
+		namespace string
+		ok        bool
+	}{
+		// data from Python's namespace_manager_test.py
+		{"", true},
+		{"__a.namespace.123__", true},
+		{"-_A....NAMESPACE-_", true},
+		{"-", true},
+		{".", true},
+		{".-", true},
+
+		{"?", false},
+		{"+", false},
+		{"!", false},
+		{" ", false},
+	}
+	for _, tc := range testCases {
+		_, err := Namespace(context.Background(), tc.namespace)
+		if err == nil && !tc.ok {
+			t.Errorf("Namespace %q should be rejected, but wasn't", tc.namespace)
+		} else if err != nil && tc.ok {
+			t.Errorf("Namespace %q should be accepted, but wasn't", tc.namespace)
+		}
+	}
+}
diff --git a/v2/runtime/runtime.go b/v2/runtime/runtime.go
new file mode 100644
index 0000000..5923be6
--- /dev/null
+++ b/v2/runtime/runtime.go
@@ -0,0 +1,147 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+/*
+Package runtime exposes information about the resource usage of the application.
+It also provides a way to run code in a new background context of a module.
+
+This package does not work on App Engine "flexible environment".
+*/
+package runtime // import "google.golang.org/appengine/v2/runtime"
+
+import (
+	"context"
+	"net/http"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/system"
+)
+
+// Statistics represents the system's statistics.
+type Statistics struct {
+	// CPU records the CPU consumed by this instance, in megacycles.
+	CPU struct {
+		Total   float64
+		Rate1M  float64 // consumption rate over one minute
+		Rate10M float64 // consumption rate over ten minutes
+	}
+	// RAM records the memory used by the instance, in megabytes.
+	RAM struct {
+		Current    float64
+		Average1M  float64 // average usage over one minute
+		Average10M float64 // average usage over ten minutes
+	}
+}
+
+func Stats(c context.Context) (*Statistics, error) {
+	req := &pb.GetSystemStatsRequest{}
+	res := &pb.GetSystemStatsResponse{}
+	if err := internal.Call(c, "system", "GetSystemStats", req, res); err != nil {
+		return nil, err
+	}
+	s := &Statistics{}
+	if res.Cpu != nil {
+		s.CPU.Total = res.Cpu.GetTotal()
+		s.CPU.Rate1M = res.Cpu.GetRate1M()
+		s.CPU.Rate10M = res.Cpu.GetRate10M()
+	}
+	if res.Memory != nil {
+		s.RAM.Current = res.Memory.GetCurrent()
+		s.RAM.Average1M = res.Memory.GetAverage1M()
+		s.RAM.Average10M = res.Memory.GetAverage10M()
+	}
+	return s, nil
+}
+
+/*
+RunInBackground makes an API call that triggers an /_ah/background request.
+
+There are two independent code paths that need to make contact:
+the RunInBackground code, and the /_ah/background handler. The matchmaker
+loop arranges for the two paths to meet. The RunInBackground code passes
+a send to the matchmaker, the /_ah/background passes a recv to the matchmaker,
+and the matchmaker hooks them up.
+*/
+
+func init() {
+	http.HandleFunc("/_ah/background", handleBackground)
+
+	sc := make(chan send)
+	rc := make(chan recv)
+	sendc, recvc = sc, rc
+	go matchmaker(sc, rc)
+}
+
+var (
+	sendc chan<- send // RunInBackground sends to this
+	recvc chan<- recv // handleBackground sends to this
+)
+
+type send struct {
+	id string
+	f  func(context.Context)
+}
+
+type recv struct {
+	id string
+	ch chan<- func(context.Context)
+}
+
+func matchmaker(sendc <-chan send, recvc <-chan recv) {
+	// When one side of the match arrives before the other
+	// it is inserted in the corresponding map.
+	waitSend := make(map[string]send)
+	waitRecv := make(map[string]recv)
+
+	for {
+		select {
+		case s := <-sendc:
+			if r, ok := waitRecv[s.id]; ok {
+				// meet!
+				delete(waitRecv, s.id)
+				r.ch <- s.f
+			} else {
+				// waiting for r
+				waitSend[s.id] = s
+			}
+		case r := <-recvc:
+			if s, ok := waitSend[r.id]; ok {
+				// meet!
+				delete(waitSend, r.id)
+				r.ch <- s.f
+			} else {
+				// waiting for s
+				waitRecv[r.id] = r
+			}
+		}
+	}
+}
+
+var newContext = appengine.NewContext // for testing
+
+func handleBackground(w http.ResponseWriter, req *http.Request) {
+	id := req.Header.Get("X-AppEngine-BackgroundRequest")
+
+	ch := make(chan func(context.Context))
+	recvc <- recv{id, ch}
+	(<-ch)(newContext(req))
+}
+
+// RunInBackground runs f in a background goroutine in this process.
+// f is provided a context that may outlast the context provided to RunInBackground.
+// This is only valid to invoke from a service set to basic or manual scaling.
+func RunInBackground(c context.Context, f func(c context.Context)) error {
+	req := &pb.StartBackgroundRequestRequest{}
+	res := &pb.StartBackgroundRequestResponse{}
+	if err := internal.Call(c, "system", "StartBackgroundRequest", req, res); err != nil {
+		return err
+	}
+	sendc <- send{res.GetRequestId(), f}
+	return nil
+}
+
+func init() {
+	internal.RegisterErrorCodeMap("system", pb.SystemServiceError_ErrorCode_name)
+}
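
Aside (illustrative, not part of the upstream diff): a minimal sketch combining `RunInBackground` and `Stats` from above; per the package comments it only applies to basic- or manual-scaling services, and the logging interval is arbitrary:

```go
package worker

import (
	"context"
	"time"

	"google.golang.org/appengine/v2/log"
	"google.golang.org/appengine/v2/runtime"
)

// startStatsLogger launches a background context that periodically logs
// the instance statistics exposed by runtime.Stats.
func startStatsLogger(ctx context.Context) error {
	return runtime.RunInBackground(ctx, func(bg context.Context) {
		for {
			s, err := runtime.Stats(bg)
			if err != nil {
				log.Warningf(bg, "runtime.Stats: %v", err)
			} else {
				log.Infof(bg, "CPU total %.0f Mcycles, RAM %.1f MB", s.CPU.Total, s.RAM.Current)
			}
			time.Sleep(time.Minute)
		}
	})
}
```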
diff --git a/v2/runtime/runtime_test.go b/v2/runtime/runtime_test.go
new file mode 100644
index 0000000..0916442
--- /dev/null
+++ b/v2/runtime/runtime_test.go
@@ -0,0 +1,101 @@
+// Copyright 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package runtime
+
+import (
+	"context"
+	"fmt"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+	"time"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal/aetesting"
+	pb "google.golang.org/appengine/v2/internal/system"
+)
+
+func TestRunInBackgroundSendFirst(t *testing.T) { testRunInBackground(t, true) }
+func TestRunInBackgroundRecvFirst(t *testing.T) { testRunInBackground(t, false) }
+
+func testRunInBackground(t *testing.T, sendFirst bool) {
+	srv := httptest.NewServer(nil)
+	defer srv.Close()
+
+	const id = "f00bar"
+	sendWait, recvWait := make(chan bool), make(chan bool)
+	sbr := make(chan bool) // strobed when system.StartBackgroundRequest has started
+
+	calls := 0
+	c := aetesting.FakeSingleContext(t, "system", "StartBackgroundRequest", func(req *pb.StartBackgroundRequestRequest, res *pb.StartBackgroundRequestResponse) error {
+		calls++
+		if calls > 1 {
+			t.Errorf("Too many calls to system.StartBackgroundRequest")
+		}
+		sbr <- true
+		res.RequestId = proto.String(id)
+		<-sendWait
+		return nil
+	})
+
+	var c2 context.Context // a fake
+	newContext = func(*http.Request) context.Context {
+		return c2
+	}
+
+	var fRun int
+	f := func(c3 context.Context) {
+		fRun++
+		if c3 != c2 {
+			t.Errorf("f got a different context than expected")
+		}
+	}
+
+	ribErrc := make(chan error)
+	go func() {
+		ribErrc <- RunInBackground(c, f)
+	}()
+
+	brErrc := make(chan error)
+	go func() {
+		<-sbr
+		req, err := http.NewRequest("GET", srv.URL+"/_ah/background", nil)
+		if err != nil {
+			brErrc <- fmt.Errorf("http.NewRequest: %v", err)
+			return
+		}
+		req.Header.Set("X-AppEngine-BackgroundRequest", id)
+		client := &http.Client{
+			Transport: &http.Transport{
+				Proxy: http.ProxyFromEnvironment,
+			},
+		}
+
+		<-recvWait
+		_, err = client.Do(req)
+		brErrc <- err
+	}()
+
+	// Send and receive are both waiting at this point.
+	waits := [2]chan bool{sendWait, recvWait}
+	if !sendFirst {
+		waits[0], waits[1] = waits[1], waits[0]
+	}
+	waits[0] <- true
+	time.Sleep(100 * time.Millisecond)
+	waits[1] <- true
+
+	if err := <-ribErrc; err != nil {
+		t.Fatalf("RunInBackground: %v", err)
+	}
+	if err := <-brErrc; err != nil {
+		t.Fatalf("background request: %v", err)
+	}
+
+	if fRun != 1 {
+		t.Errorf("Got %d runs of f, want 1", fRun)
+	}
+}
diff --git a/v2/taskqueue/taskqueue.go b/v2/taskqueue/taskqueue.go
new file mode 100644
index 0000000..436e8f5
--- /dev/null
+++ b/v2/taskqueue/taskqueue.go
@@ -0,0 +1,541 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+/*
+Package taskqueue provides a client for App Engine's taskqueue service.
+Using this service, applications may perform work outside a user's request.
+
+A Task may be constructed manually; alternatively, since the most common
+taskqueue operation is to add a single POST task, NewPOSTTask makes it easy.
+
+	t := taskqueue.NewPOSTTask("/worker", url.Values{
+		"key": {key},
+	})
+	taskqueue.Add(c, t, "") // add t to the default queue
+*/
+package taskqueue // import "google.golang.org/appengine/v2/taskqueue"
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"net/http"
+	"net/url"
+	"strconv"
+	"time"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal"
+	dspb "google.golang.org/appengine/v2/internal/datastore"
+	pb "google.golang.org/appengine/v2/internal/taskqueue"
+)
+
+var (
+	// ErrTaskAlreadyAdded is the error returned by Add and AddMulti when a task has already been added with a particular name.
+	ErrTaskAlreadyAdded = errors.New("taskqueue: task has already been added")
+)
+
+// RetryOptions let you control whether to retry a task and the backoff intervals between tries.
+type RetryOptions struct {
+	// Number of tries/leases after which the task fails permanently and is deleted.
+	// If AgeLimit is also set, both limits must be exceeded for the task to fail permanently.
+	RetryLimit int32
+
+	// Maximum time allowed since the task's first try before the task fails permanently and is deleted (only for push tasks).
+	// If RetryLimit is also set, both limits must be exceeded for the task to fail permanently.
+	AgeLimit time.Duration
+
+	// Minimum time between successive tries (only for push tasks).
+	MinBackoff time.Duration
+
+	// Maximum time between successive tries (only for push tasks).
+	MaxBackoff time.Duration
+
+	// Maximum number of times to double the interval between successive tries before the intervals increase linearly (only for push tasks).
+	MaxDoublings int32
+
+	// If MaxDoublings is zero, set ApplyZeroMaxDoublings to true to override the default non-zero value.
+	// Otherwise a zero MaxDoublings is ignored and the default is used.
+	ApplyZeroMaxDoublings bool
+}
+
+// toRetryParameter converts RetryOptions to pb.TaskQueueRetryParameters.
+func (opt *RetryOptions) toRetryParameters() *pb.TaskQueueRetryParameters {
+	params := &pb.TaskQueueRetryParameters{}
+	if opt.RetryLimit > 0 {
+		params.RetryLimit = proto.Int32(opt.RetryLimit)
+	}
+	if opt.AgeLimit > 0 {
+		params.AgeLimitSec = proto.Int64(int64(opt.AgeLimit.Seconds()))
+	}
+	if opt.MinBackoff > 0 {
+		params.MinBackoffSec = proto.Float64(opt.MinBackoff.Seconds())
+	}
+	if opt.MaxBackoff > 0 {
+		params.MaxBackoffSec = proto.Float64(opt.MaxBackoff.Seconds())
+	}
+	if opt.MaxDoublings > 0 || (opt.MaxDoublings == 0 && opt.ApplyZeroMaxDoublings) {
+		params.MaxDoublings = proto.Int32(opt.MaxDoublings)
+	}
+	return params
+}
+
+// A Task represents a task to be executed.
+type Task struct {
+	// Path is the worker URL for the task.
+	// If unset, it will default to /_ah/queue/<queue_name>.
+	Path string
+
+	// Payload is the data for the task.
+	// This will be delivered as the HTTP request body.
+	// It is only used when Method is POST, PUT or PULL.
+	// url.Values' Encode method may be used to generate this for POST requests.
+	Payload []byte
+
+	// Additional HTTP headers to pass at the task's execution time.
+	// To schedule the task to be run with an alternate app version
+	// or backend, set the "Host" header.
+	Header http.Header
+
+	// Method is the HTTP method for the task ("GET", "POST", etc.),
+	// or "PULL" if this task is destined for a pull-based queue.
+	// If empty, this defaults to "POST".
+	Method string
+
+	// A name for the task.
+	// If empty, a name will be chosen.
+	Name string
+
+	// Delay specifies the duration the task queue service must wait
+	// before executing the task.
+	// Either Delay or ETA may be set, but not both.
+	Delay time.Duration
+
+	// ETA specifies the earliest time a task may be executed (push queues)
+	// or leased (pull queues).
+	// Either Delay or ETA may be set, but not both.
+	ETA time.Time
+
+	// The number of times the task has been dispatched or leased.
+	RetryCount int32
+
+	// Tag for the task. Only used when Method is PULL.
+	Tag string
+
+	// Retry options for this task. May be nil.
+	RetryOptions *RetryOptions
+}
+
+func (t *Task) method() string {
+	if t.Method == "" {
+		return "POST"
+	}
+	return t.Method
+}
+
+// NewPOSTTask creates a Task that will POST to a path with the given form data.
+func NewPOSTTask(path string, params url.Values) *Task {
+	h := make(http.Header)
+	h.Set("Content-Type", "application/x-www-form-urlencoded")
+	return &Task{
+		Path:    path,
+		Payload: []byte(params.Encode()),
+		Header:  h,
+		Method:  "POST",
+	}
+}
+
+// RequestHeaders are the special HTTP request headers available to push task
+// HTTP request handlers. These headers are set internally by App Engine.
+// See https://cloud.google.com/appengine/docs/standard/go/taskqueue/push/creating-handlers#reading_request_headers
+// for a description of the fields.
+type RequestHeaders struct {
+	QueueName          string
+	TaskName           string
+	TaskRetryCount     int64
+	TaskExecutionCount int64
+	TaskETA            time.Time
+
+	TaskPreviousResponse int
+	TaskRetryReason      string
+	FailFast             bool
+}
+
+// ParseRequestHeaders parses the special HTTP request headers available to push
+// task request handlers. This function silently ignores values of the wrong
+// format.
+func ParseRequestHeaders(h http.Header) *RequestHeaders {
+	ret := &RequestHeaders{
+		QueueName: h.Get("X-AppEngine-QueueName"),
+		TaskName:  h.Get("X-AppEngine-TaskName"),
+	}
+
+	ret.TaskRetryCount, _ = strconv.ParseInt(h.Get("X-AppEngine-TaskRetryCount"), 10, 64)
+	ret.TaskExecutionCount, _ = strconv.ParseInt(h.Get("X-AppEngine-TaskExecutionCount"), 10, 64)
+
+	etaSecs, _ := strconv.ParseInt(h.Get("X-AppEngine-TaskETA"), 10, 64)
+	if etaSecs != 0 {
+		ret.TaskETA = time.Unix(etaSecs, 0)
+	}
+
+	ret.TaskPreviousResponse, _ = strconv.Atoi(h.Get("X-AppEngine-TaskPreviousResponse"))
+	ret.TaskRetryReason = h.Get("X-AppEngine-TaskRetryReason")
+	if h.Get("X-AppEngine-FailFast") != "" {
+		ret.FailFast = true
+	}
+
+	return ret
+}
+
+var (
+	currentNamespace = http.CanonicalHeaderKey("X-AppEngine-Current-Namespace")
+	defaultNamespace = http.CanonicalHeaderKey("X-AppEngine-Default-Namespace")
+)
+
+func getDefaultNamespace(ctx context.Context) string {
+	return internal.IncomingHeaders(ctx).Get(defaultNamespace)
+}
+
+func newAddReq(c context.Context, task *Task, queueName string) (*pb.TaskQueueAddRequest, error) {
+	if queueName == "" {
+		queueName = "default"
+	}
+	path := task.Path
+	if path == "" {
+		path = "/_ah/queue/" + queueName
+	}
+	eta := task.ETA
+	if eta.IsZero() {
+		eta = time.Now().Add(task.Delay)
+	} else if task.Delay != 0 {
+		panic("taskqueue: both Delay and ETA are set")
+	}
+	req := &pb.TaskQueueAddRequest{
+		QueueName: []byte(queueName),
+		TaskName:  []byte(task.Name),
+		EtaUsec:   proto.Int64(eta.UnixNano() / 1e3),
+	}
+	method := task.method()
+	if method == "PULL" {
+		// Pull-based task
+		req.Body = task.Payload
+		req.Mode = pb.TaskQueueMode_PULL.Enum()
+		if task.Tag != "" {
+			req.Tag = []byte(task.Tag)
+		}
+	} else {
+		// HTTP-based task
+		if v, ok := pb.TaskQueueAddRequest_RequestMethod_value[method]; ok {
+			req.Method = pb.TaskQueueAddRequest_RequestMethod(v).Enum()
+		} else {
+			return nil, fmt.Errorf("taskqueue: bad method %q", method)
+		}
+		req.Url = []byte(path)
+		for k, vs := range task.Header {
+			for _, v := range vs {
+				req.Header = append(req.Header, &pb.TaskQueueAddRequest_Header{
+					Key:   []byte(k),
+					Value: []byte(v),
+				})
+			}
+		}
+		if method == "POST" || method == "PUT" {
+			req.Body = task.Payload
+		}
+
+		// Namespace headers.
+		if _, ok := task.Header[currentNamespace]; !ok {
+			// Fetch the current namespace of this request.
+			ns := internal.NamespaceFromContext(c)
+			req.Header = append(req.Header, &pb.TaskQueueAddRequest_Header{
+				Key:   []byte(currentNamespace),
+				Value: []byte(ns),
+			})
+		}
+		if _, ok := task.Header[defaultNamespace]; !ok {
+			// Fetch the X-AppEngine-Default-Namespace header of this request.
+			if ns := getDefaultNamespace(c); ns != "" {
+				req.Header = append(req.Header, &pb.TaskQueueAddRequest_Header{
+					Key:   []byte(defaultNamespace),
+					Value: []byte(ns),
+				})
+			}
+		}
+	}
+
+	if task.RetryOptions != nil {
+		req.RetryParameters = task.RetryOptions.toRetryParameters()
+	}
+
+	return req, nil
+}
+
+var alreadyAddedErrors = map[pb.TaskQueueServiceError_ErrorCode]bool{
+	pb.TaskQueueServiceError_TASK_ALREADY_EXISTS: true,
+	pb.TaskQueueServiceError_TOMBSTONED_TASK:     true,
+}
+
+// Add adds the task to a named queue.
+// An empty queue name means that the default queue will be used.
+// Add returns an equivalent Task with defaults filled in, including setting
+// the task's Name field to the chosen name if the original was empty.
+func Add(c context.Context, task *Task, queueName string) (*Task, error) {
+	req, err := newAddReq(c, task, queueName)
+	if err != nil {
+		return nil, err
+	}
+	res := &pb.TaskQueueAddResponse{}
+	if err := internal.Call(c, "taskqueue", "Add", req, res); err != nil {
+		apiErr, ok := err.(*internal.APIError)
+		if ok && alreadyAddedErrors[pb.TaskQueueServiceError_ErrorCode(apiErr.Code)] {
+			return nil, ErrTaskAlreadyAdded
+		}
+		return nil, err
+	}
+	resultTask := *task
+	resultTask.Method = task.method()
+	if task.Name == "" {
+		resultTask.Name = string(res.ChosenTaskName)
+	}
+	return &resultTask, nil
+}
+
+// AddMulti adds multiple tasks to a named queue.
+// An empty queue name means that the default queue will be used.
+// AddMulti returns a slice of equivalent tasks with defaults filled in, including setting
+// each task's Name field to the chosen name if the original was empty.
+// If a given task is badly formed or could not be added, an appengine.MultiError is returned.
+func AddMulti(c context.Context, tasks []*Task, queueName string) ([]*Task, error) {
+	req := &pb.TaskQueueBulkAddRequest{
+		AddRequest: make([]*pb.TaskQueueAddRequest, len(tasks)),
+	}
+	me, any := make(appengine.MultiError, len(tasks)), false
+	for i, t := range tasks {
+		req.AddRequest[i], me[i] = newAddReq(c, t, queueName)
+		any = any || me[i] != nil
+	}
+	if any {
+		return nil, me
+	}
+	res := &pb.TaskQueueBulkAddResponse{}
+	if err := internal.Call(c, "taskqueue", "BulkAdd", req, res); err != nil {
+		return nil, err
+	}
+	if len(res.Taskresult) != len(tasks) {
+		return nil, errors.New("taskqueue: server error")
+	}
+	tasksOut := make([]*Task, len(tasks))
+	for i, tr := range res.Taskresult {
+		tasksOut[i] = new(Task)
+		*tasksOut[i] = *tasks[i]
+		tasksOut[i].Method = tasksOut[i].method()
+		if tasksOut[i].Name == "" {
+			tasksOut[i].Name = string(tr.ChosenTaskName)
+		}
+		if *tr.Result != pb.TaskQueueServiceError_OK {
+			if alreadyAddedErrors[*tr.Result] {
+				me[i] = ErrTaskAlreadyAdded
+			} else {
+				me[i] = &internal.APIError{
+					Service: "taskqueue",
+					Code:    int32(*tr.Result),
+				}
+			}
+			any = true
+		}
+	}
+	if any {
+		return tasksOut, me
+	}
+	return tasksOut, nil
+}
+
+// Delete deletes a task from a named queue.
+func Delete(c context.Context, task *Task, queueName string) error {
+	err := DeleteMulti(c, []*Task{task}, queueName)
+	if me, ok := err.(appengine.MultiError); ok {
+		return me[0]
+	}
+	return err
+}
+
+// DeleteMulti deletes multiple tasks from a named queue.
+// If a given task could not be deleted, an appengine.MultiError is returned.
+// Each task is deleted independently; one may fail to delete while the others
+// are successfully deleted.
+func DeleteMulti(c context.Context, tasks []*Task, queueName string) error {
+	taskNames := make([][]byte, len(tasks))
+	for i, t := range tasks {
+		taskNames[i] = []byte(t.Name)
+	}
+	if queueName == "" {
+		queueName = "default"
+	}
+	req := &pb.TaskQueueDeleteRequest{
+		QueueName: []byte(queueName),
+		TaskName:  taskNames,
+	}
+	res := &pb.TaskQueueDeleteResponse{}
+	if err := internal.Call(c, "taskqueue", "Delete", req, res); err != nil {
+		return err
+	}
+	if a, b := len(req.TaskName), len(res.Result); a != b {
+		return fmt.Errorf("taskqueue: internal error: requested deletion of %d tasks, got %d results", a, b)
+	}
+	me, any := make(appengine.MultiError, len(res.Result)), false
+	for i, ec := range res.Result {
+		if ec != pb.TaskQueueServiceError_OK {
+			me[i] = &internal.APIError{
+				Service: "taskqueue",
+				Code:    int32(ec),
+			}
+			any = true
+		}
+	}
+	if any {
+		return me
+	}
+	return nil
+}
+
+func lease(c context.Context, maxTasks int, queueName string, leaseTime int, groupByTag bool, tag []byte) ([]*Task, error) {
+	if queueName == "" {
+		queueName = "default"
+	}
+	req := &pb.TaskQueueQueryAndOwnTasksRequest{
+		QueueName:    []byte(queueName),
+		LeaseSeconds: proto.Float64(float64(leaseTime)),
+		MaxTasks:     proto.Int64(int64(maxTasks)),
+		GroupByTag:   proto.Bool(groupByTag),
+		Tag:          tag,
+	}
+	res := &pb.TaskQueueQueryAndOwnTasksResponse{}
+	if err := internal.Call(c, "taskqueue", "QueryAndOwnTasks", req, res); err != nil {
+		return nil, err
+	}
+	tasks := make([]*Task, len(res.Task))
+	for i, t := range res.Task {
+		tasks[i] = &Task{
+			Payload:    t.Body,
+			Name:       string(t.TaskName),
+			Method:     "PULL",
+			ETA:        time.Unix(0, *t.EtaUsec*1e3),
+			RetryCount: *t.RetryCount,
+			Tag:        string(t.Tag),
+		}
+	}
+	return tasks, nil
+}
+
+// Lease leases tasks from a queue.
+// leaseTime is in seconds.
+// The number of tasks fetched will be at most maxTasks.
+func Lease(c context.Context, maxTasks int, queueName string, leaseTime int) ([]*Task, error) {
+	return lease(c, maxTasks, queueName, leaseTime, false, nil)
+}
+
+// LeaseByTag leases tasks from a queue, grouped by tag.
+// If tag is empty, then the returned tasks are grouped by the tag of the task with earliest ETA.
+// leaseTime is in seconds.
+// The number of tasks fetched will be at most maxTasks.
+func LeaseByTag(c context.Context, maxTasks int, queueName string, leaseTime int, tag string) ([]*Task, error) {
+	return lease(c, maxTasks, queueName, leaseTime, true, []byte(tag))
+}
+
+// Purge removes all tasks from a queue.
+func Purge(c context.Context, queueName string) error {
+	if queueName == "" {
+		queueName = "default"
+	}
+	req := &pb.TaskQueuePurgeQueueRequest{
+		QueueName: []byte(queueName),
+	}
+	res := &pb.TaskQueuePurgeQueueResponse{}
+	return internal.Call(c, "taskqueue", "PurgeQueue", req, res)
+}
+
+// ModifyLease modifies the lease of a task.
+// Used to request more processing time, or to abandon processing.
+// leaseTime is in seconds and must not be negative.
+func ModifyLease(c context.Context, task *Task, queueName string, leaseTime int) error {
+	if queueName == "" {
+		queueName = "default"
+	}
+	req := &pb.TaskQueueModifyTaskLeaseRequest{
+		QueueName:    []byte(queueName),
+		TaskName:     []byte(task.Name),
+		EtaUsec:      proto.Int64(task.ETA.UnixNano() / 1e3), // Used to verify ownership.
+		LeaseSeconds: proto.Float64(float64(leaseTime)),
+	}
+	res := &pb.TaskQueueModifyTaskLeaseResponse{}
+	if err := internal.Call(c, "taskqueue", "ModifyTaskLease", req, res); err != nil {
+		return err
+	}
+	task.ETA = time.Unix(0, *res.UpdatedEtaUsec*1e3)
+	return nil
+}
+
+// QueueStatistics represents statistics about a single task queue.
+type QueueStatistics struct {
+	Tasks     int       // may be an approximation
+	OldestETA time.Time // zero if there are no pending tasks
+
+	Executed1Minute int     // tasks executed in the last minute
+	InFlight        int     // tasks executing now
+	EnforcedRate    float64 // requests per second
+}
+
+// QueueStats retrieves statistics about queues.
+func QueueStats(c context.Context, queueNames []string) ([]QueueStatistics, error) {
+	req := &pb.TaskQueueFetchQueueStatsRequest{
+		QueueName: make([][]byte, len(queueNames)),
+	}
+	for i, q := range queueNames {
+		if q == "" {
+			q = "default"
+		}
+		req.QueueName[i] = []byte(q)
+	}
+	res := &pb.TaskQueueFetchQueueStatsResponse{}
+	if err := internal.Call(c, "taskqueue", "FetchQueueStats", req, res); err != nil {
+		return nil, err
+	}
+	qs := make([]QueueStatistics, len(res.Queuestats))
+	for i, qsg := range res.Queuestats {
+		qs[i] = QueueStatistics{
+			Tasks: int(*qsg.NumTasks),
+		}
+		if eta := *qsg.OldestEtaUsec; eta > -1 {
+			qs[i].OldestETA = time.Unix(0, eta*1e3)
+		}
+		if si := qsg.ScannerInfo; si != nil {
+			qs[i].Executed1Minute = int(*si.ExecutedLastMinute)
+			qs[i].InFlight = int(si.GetRequestsInFlight())
+			qs[i].EnforcedRate = si.GetEnforcedRate()
+		}
+	}
+	return qs, nil
+}
+
+func setTransaction(x *pb.TaskQueueAddRequest, t *dspb.Transaction) {
+	x.Transaction = t
+}
+
+func init() {
+	internal.RegisterErrorCodeMap("taskqueue", pb.TaskQueueServiceError_ErrorCode_name)
+
+	// Datastore error codes are shifted by DATASTORE_ERROR when presented through taskqueue.
+	dsCode := int32(pb.TaskQueueServiceError_DATASTORE_ERROR) + int32(dspb.Error_TIMEOUT)
+	internal.RegisterTimeoutErrorCode("taskqueue", dsCode)
+
+	// Transaction registration.
+	internal.RegisterTransactionSetter(setTransaction)
+	internal.RegisterTransactionSetter(func(x *pb.TaskQueueBulkAddRequest, t *dspb.Transaction) {
+		for _, req := range x.AddRequest {
+			setTransaction(req, t)
+		}
+	})
+}
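
The push-queue API added above is normally driven from a request handler. As a minimal sketch (the "/worker" path, "job-42" task name, and "/enqueue" route are illustrative, not part of the package), a handler can enqueue a named task and treat a duplicate submission as success by checking ErrTaskAlreadyAdded:

```go
// Hypothetical handler built on the taskqueue API shown in the diff above.
package main

import (
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/taskqueue"
)

func enqueue(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)

	// An empty queue name selects the default push queue; a named task
	// added twice reports ErrTaskAlreadyAdded instead of duplicating work.
	t := &taskqueue.Task{Path: "/worker", Name: "job-42"}
	if _, err := taskqueue.Add(ctx, t, ""); err != nil && err != taskqueue.ErrTaskAlreadyAdded {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	w.WriteHeader(http.StatusNoContent)
}

func main() {
	http.HandleFunc("/enqueue", enqueue)
	appengine.Main()
}
```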
diff --git a/v2/taskqueue/taskqueue_test.go b/v2/taskqueue/taskqueue_test.go
new file mode 100644
index 0000000..bc376ed
--- /dev/null
+++ b/v2/taskqueue/taskqueue_test.go
@@ -0,0 +1,173 @@
+// Copyright 2014 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package taskqueue
+
+import (
+	"errors"
+	"fmt"
+	"net/http"
+	"reflect"
+	"testing"
+	"time"
+
+	"google.golang.org/appengine/v2"
+	"google.golang.org/appengine/v2/internal"
+	"google.golang.org/appengine/v2/internal/aetesting"
+	pb "google.golang.org/appengine/v2/internal/taskqueue"
+)
+
+func TestAddErrors(t *testing.T) {
+	var tests = []struct {
+		err, want error
+		sameErr   bool // if true, should return err exactly
+	}{
+		{
+			err: &internal.APIError{
+				Service: "taskqueue",
+				Code:    int32(pb.TaskQueueServiceError_TASK_ALREADY_EXISTS),
+			},
+			want: ErrTaskAlreadyAdded,
+		},
+		{
+			err: &internal.APIError{
+				Service: "taskqueue",
+				Code:    int32(pb.TaskQueueServiceError_TOMBSTONED_TASK),
+			},
+			want: ErrTaskAlreadyAdded,
+		},
+		{
+			err: &internal.APIError{
+				Service: "taskqueue",
+				Code:    int32(pb.TaskQueueServiceError_UNKNOWN_QUEUE),
+			},
+			want:    errors.New("not used"),
+			sameErr: true,
+		},
+	}
+	for _, tc := range tests {
+		c := aetesting.FakeSingleContext(t, "taskqueue", "Add", func(req *pb.TaskQueueAddRequest, res *pb.TaskQueueAddResponse) error {
+			// don't fill in any of the response
+			return tc.err
+		})
+		task := &Task{Path: "/worker", Method: "PULL"}
+		_, err := Add(c, task, "a-queue")
+		want := tc.want
+		if tc.sameErr {
+			want = tc.err
+		}
+		if err != want {
+			t.Errorf("Add with tc.err = %v, got %#v, want = %#v", tc.err, err, want)
+		}
+	}
+}
+
+func TestAddMulti(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "taskqueue", "BulkAdd", func(req *pb.TaskQueueBulkAddRequest, res *pb.TaskQueueBulkAddResponse) error {
+		res.Taskresult = []*pb.TaskQueueBulkAddResponse_TaskResult{
+			{
+				Result: pb.TaskQueueServiceError_OK.Enum(),
+			},
+			{
+				Result: pb.TaskQueueServiceError_TASK_ALREADY_EXISTS.Enum(),
+			},
+			{
+				Result: pb.TaskQueueServiceError_TOMBSTONED_TASK.Enum(),
+			},
+			{
+				Result: pb.TaskQueueServiceError_INTERNAL_ERROR.Enum(),
+			},
+		}
+		return nil
+	})
+	tasks := []*Task{
+		{Path: "/worker", Method: "PULL"},
+		{Path: "/worker", Method: "PULL"},
+		{Path: "/worker", Method: "PULL"},
+		{Path: "/worker", Method: "PULL"},
+	}
+	r, err := AddMulti(c, tasks, "a-queue")
+	if len(r) != len(tasks) {
+		t.Fatalf("AddMulti returned %d tasks, want %d", len(r), len(tasks))
+	}
+	want := appengine.MultiError{
+		nil,
+		ErrTaskAlreadyAdded,
+		ErrTaskAlreadyAdded,
+		&internal.APIError{
+			Service: "taskqueue",
+			Code:    int32(pb.TaskQueueServiceError_INTERNAL_ERROR),
+		},
+	}
+	if !reflect.DeepEqual(err, want) {
+		t.Errorf("AddMulti got %v, wanted %v", err, want)
+	}
+}
+
+func TestAddWithEmptyPath(t *testing.T) {
+	c := aetesting.FakeSingleContext(t, "taskqueue", "Add", func(req *pb.TaskQueueAddRequest, res *pb.TaskQueueAddResponse) error {
+		if got, want := string(req.Url), "/_ah/queue/a-queue"; got != want {
+			return fmt.Errorf("req.Url = %q; want %q", got, want)
+		}
+		return nil
+	})
+	if _, err := Add(c, &Task{}, "a-queue"); err != nil {
+		t.Fatalf("Add: %v", err)
+	}
+}
+
+func TestParseRequestHeaders(t *testing.T) {
+	tests := []struct {
+		Header http.Header
+		Want   RequestHeaders
+	}{
+		{
+			Header: map[string][]string{
+				"X-Appengine-Queuename":            []string{"foo"},
+				"X-Appengine-Taskname":             []string{"bar"},
+				"X-Appengine-Taskretrycount":       []string{"4294967297"}, // 2^32 + 1
+				"X-Appengine-Taskexecutioncount":   []string{"4294967298"}, // 2^32 + 2
+				"X-Appengine-Tasketa":              []string{"1500000000"},
+				"X-Appengine-Taskpreviousresponse": []string{"404"},
+				"X-Appengine-Taskretryreason":      []string{"baz"},
+				"X-Appengine-Failfast":             []string{"yes"},
+			},
+			Want: RequestHeaders{
+				QueueName:            "foo",
+				TaskName:             "bar",
+				TaskRetryCount:       4294967297,
+				TaskExecutionCount:   4294967298,
+				TaskETA:              time.Date(2017, time.July, 14, 2, 40, 0, 0, time.UTC),
+				TaskPreviousResponse: 404,
+				TaskRetryReason:      "baz",
+				FailFast:             true,
+			},
+		},
+		{
+			Header: map[string][]string{},
+			Want: RequestHeaders{
+				QueueName:            "",
+				TaskName:             "",
+				TaskRetryCount:       0,
+				TaskExecutionCount:   0,
+				TaskETA:              time.Time{},
+				TaskPreviousResponse: 0,
+				TaskRetryReason:      "",
+				FailFast:             false,
+			},
+		},
+	}
+
+	for idx, test := range tests {
+		got := *ParseRequestHeaders(test.Header)
+		if got.TaskETA.UnixNano() != test.Want.TaskETA.UnixNano() {
+			t.Errorf("%d. ParseRequestHeaders got TaskETA %v, wanted %v", idx, got.TaskETA, test.Want.TaskETA)
+		}
+		got.TaskETA = time.Time{}
+		test.Want.TaskETA = time.Time{}
+		if !reflect.DeepEqual(got, test.Want) {
+			t.Errorf("%d. ParseRequestHeaders got %v, wanted %v", idx, got, test.Want)
+		}
+	}
+}
diff --git a/v2/timeout.go b/v2/timeout.go
new file mode 100644
index 0000000..fcf3ad0
--- /dev/null
+++ b/v2/timeout.go
@@ -0,0 +1,20 @@
+// Copyright 2013 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package appengine
+
+import "context"
+
+// IsTimeoutError reports whether err is a timeout error.
+func IsTimeoutError(err error) bool {
+	if err == context.DeadlineExceeded {
+		return true
+	}
+	if t, ok := err.(interface {
+		IsTimeout() bool
+	}); ok {
+		return t.IsTimeout()
+	}
+	return false
+}
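
IsTimeoutError treats both context.DeadlineExceeded and API errors that expose an `IsTimeout() bool` method as timeouts. A self-contained sketch of the first case:

```go
// Demonstration only: an already-expired context yields
// context.DeadlineExceeded, which IsTimeoutError reports as a timeout.
package main

import (
	"context"
	"fmt"
	"time"

	"google.golang.org/appengine/v2"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond)
	defer cancel()
	<-ctx.Done()

	// API errors implementing IsTimeout() bool are reported the same way.
	fmt.Println(appengine.IsTimeoutError(ctx.Err())) // true
}
```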
diff --git a/v2/urlfetch/urlfetch.go b/v2/urlfetch/urlfetch.go
new file mode 100644
index 0000000..a4a39ed
--- /dev/null
+++ b/v2/urlfetch/urlfetch.go
@@ -0,0 +1,210 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package urlfetch provides an http.RoundTripper implementation
+// for fetching URLs via App Engine's urlfetch service.
+package urlfetch // import "google.golang.org/appengine/v2/urlfetch"
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"io"
+	"io/ioutil"
+	"net/http"
+	"net/url"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/urlfetch"
+)
+
+// Transport is an implementation of http.RoundTripper for
+// App Engine. Users should generally create an http.Client using
+// this transport and use the Client rather than using this transport
+// directly.
+type Transport struct {
+	Context context.Context
+
+	// Controls whether the application checks the validity of SSL certificates
+	// over HTTPS connections. A value of false (the default) instructs the
+	// application to send a request to the server only if the certificate is
+	// valid and signed by a trusted certificate authority (CA), and also
+	// includes a hostname that matches the certificate. A value of true
+	// instructs the application to perform no certificate validation.
+	AllowInvalidServerCertificate bool
+}
+
+// Verify statically that *Transport implements http.RoundTripper.
+var _ http.RoundTripper = (*Transport)(nil)
+
+// Client returns an *http.Client using a default urlfetch Transport. This
+// client will have the default deadline of 5 seconds, and will check the
+// validity of SSL certificates.
+//
+// Any deadline of the provided context will be used for requests through this client;
+// if the context does not have a deadline then a 5 second default is used.
+func Client(ctx context.Context) *http.Client {
+	return &http.Client{
+		Transport: &Transport{
+			Context: ctx,
+		},
+	}
+}
+
+type bodyReader struct {
+	content   []byte
+	truncated bool
+	closed    bool
+}
+
+// ErrTruncatedBody is the error returned after the final Read() from a
+// response's Body if the body has been truncated by App Engine's proxy.
+var ErrTruncatedBody = errors.New("urlfetch: truncated body")
+
+func statusCodeToText(code int) string {
+	if t := http.StatusText(code); t != "" {
+		return t
+	}
+	return strconv.Itoa(code)
+}
+
+func (br *bodyReader) Read(p []byte) (n int, err error) {
+	if br.closed {
+		if br.truncated {
+			return 0, ErrTruncatedBody
+		}
+		return 0, io.EOF
+	}
+	n = copy(p, br.content)
+	if n > 0 {
+		br.content = br.content[n:]
+		return
+	}
+	if br.truncated {
+		br.closed = true
+		return 0, ErrTruncatedBody
+	}
+	return 0, io.EOF
+}
+
+func (br *bodyReader) Close() error {
+	br.closed = true
+	br.content = nil
+	return nil
+}
+
+// A map of the URL Fetch-accepted methods that take a request body.
+var methodAcceptsRequestBody = map[string]bool{
+	"POST":  true,
+	"PUT":   true,
+	"PATCH": true,
+}
+
+// urlString returns a valid string given a URL. This function is necessary because
+// the String method of URL doesn't correctly handle URLs with non-empty Opaque values.
+// See http://code.google.com/p/go/issues/detail?id=4860.
+func urlString(u *url.URL) string {
+	if u.Opaque == "" || strings.HasPrefix(u.Opaque, "//") {
+		return u.String()
+	}
+	aux := *u
+	aux.Opaque = "//" + aux.Host + aux.Opaque
+	return aux.String()
+}
+
+// RoundTrip issues a single HTTP request and returns its response. Per the
+// http.RoundTripper interface, RoundTrip only returns an error if there
+// was an unsupported request or the URL Fetch proxy fails.
+// Note that HTTP response codes such as 5xx, 403, 404, etc are not
+// errors as far as the transport is concerned and will be returned
+// with err set to nil.
+func (t *Transport) RoundTrip(req *http.Request) (res *http.Response, err error) {
+	methNum, ok := pb.URLFetchRequest_RequestMethod_value[req.Method]
+	if !ok {
+		return nil, fmt.Errorf("urlfetch: unsupported HTTP method %q", req.Method)
+	}
+
+	method := pb.URLFetchRequest_RequestMethod(methNum)
+
+	freq := &pb.URLFetchRequest{
+		Method:                        &method,
+		Url:                           proto.String(urlString(req.URL)),
+		FollowRedirects:               proto.Bool(false), // http.Client's responsibility
+		MustValidateServerCertificate: proto.Bool(!t.AllowInvalidServerCertificate),
+	}
+	if deadline, ok := t.Context.Deadline(); ok {
+		freq.Deadline = proto.Float64(deadline.Sub(time.Now()).Seconds())
+	}
+
+	for k, vals := range req.Header {
+		for _, val := range vals {
+			freq.Header = append(freq.Header, &pb.URLFetchRequest_Header{
+				Key:   proto.String(k),
+				Value: proto.String(val),
+			})
+		}
+	}
+	if methodAcceptsRequestBody[req.Method] && req.Body != nil {
+		// Avoid a []byte copy if req.Body has a Bytes method.
+		switch b := req.Body.(type) {
+		case interface {
+			Bytes() []byte
+		}:
+			freq.Payload = b.Bytes()
+		default:
+			freq.Payload, err = ioutil.ReadAll(req.Body)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+
+	fres := &pb.URLFetchResponse{}
+	if err := internal.Call(t.Context, "urlfetch", "Fetch", freq, fres); err != nil {
+		return nil, err
+	}
+
+	res = &http.Response{}
+	res.StatusCode = int(*fres.StatusCode)
+	res.Status = fmt.Sprintf("%d %s", res.StatusCode, statusCodeToText(res.StatusCode))
+	res.Header = make(http.Header)
+	res.Request = req
+
+	// Faked:
+	res.ProtoMajor = 1
+	res.ProtoMinor = 1
+	res.Proto = "HTTP/1.1"
+	res.Close = true
+
+	for _, h := range fres.Header {
+		hkey := http.CanonicalHeaderKey(*h.Key)
+		hval := *h.Value
+		if hkey == "Content-Length" {
+			// Will get filled in below for all but HEAD requests.
+			if req.Method == "HEAD" {
+				res.ContentLength, _ = strconv.ParseInt(hval, 10, 64)
+			}
+			continue
+		}
+		res.Header.Add(hkey, hval)
+	}
+
+	if req.Method != "HEAD" {
+		res.ContentLength = int64(len(fres.Content))
+	}
+
+	truncated := fres.GetContentWasTruncated()
+	res.Body = &bodyReader{content: fres.Content, truncated: truncated}
+	return
+}
+
+func init() {
+	internal.RegisterErrorCodeMap("urlfetch", pb.URLFetchServiceError_ErrorCode_name)
+	internal.RegisterTimeoutErrorCode("urlfetch", int32(pb.URLFetchServiceError_DEADLINE_EXCEEDED))
+}
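
A short usage sketch for the transport above: build the client from a request context and issue an ordinary HTTP call through it. The route and target URL are placeholders chosen for the example.

```go
// Illustrative handler using urlfetch.Client from the diff above.
package main

import (
	"fmt"
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/urlfetch"
)

func fetch(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)

	// The context's deadline (or the 5 second default) bounds the fetch.
	client := urlfetch.Client(ctx)
	resp, err := client.Get("https://www.example.com/")
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadGateway)
		return
	}
	defer resp.Body.Close()
	fmt.Fprintf(w, "upstream status: %s\n", resp.Status)
}

func main() {
	http.HandleFunc("/fetch", fetch)
	appengine.Main()
}
```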
diff --git a/v2/user/oauth.go b/v2/user/oauth.go
new file mode 100644
index 0000000..36a55ff
--- /dev/null
+++ b/v2/user/oauth.go
@@ -0,0 +1,52 @@
+// Copyright 2012 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package user
+
+import (
+	"context"
+
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/user"
+)
+
+// CurrentOAuth returns the user associated with the OAuth consumer making this
+// request. If the OAuth consumer did not make a valid OAuth request, or if
+// scopes is non-empty and the current user does not have at least one of the
+// scopes, this method will return an error.
+func CurrentOAuth(c context.Context, scopes ...string) (*User, error) {
+	req := &pb.GetOAuthUserRequest{}
+	if len(scopes) != 1 || scopes[0] != "" {
+		// The signature for this function used to be CurrentOAuth(Context, string).
+		// Ignore the singular "" scope to preserve existing behavior.
+		req.Scopes = scopes
+	}
+
+	res := &pb.GetOAuthUserResponse{}
+
+	err := internal.Call(c, "user", "GetOAuthUser", req, res)
+	if err != nil {
+		return nil, err
+	}
+	return &User{
+		Email:      *res.Email,
+		AuthDomain: *res.AuthDomain,
+		Admin:      res.GetIsAdmin(),
+		ID:         *res.UserId,
+		ClientID:   res.GetClientId(),
+	}, nil
+}
+
+// OAuthConsumerKey returns the OAuth consumer key provided with the current
+// request. This method will return an error if the OAuth request was invalid.
+func OAuthConsumerKey(c context.Context) (string, error) {
+	req := &pb.CheckOAuthSignatureRequest{}
+	res := &pb.CheckOAuthSignatureResponse{}
+
+	err := internal.Call(c, "user", "CheckOAuthSignature", req, res)
+	if err != nil {
+		return "", err
+	}
+	return *res.OauthConsumerKey, err
+}
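
A hedged sketch of guarding an endpoint with CurrentOAuth; the scope URL and "/api" route are placeholders for the example, not values mandated by the package.

```go
// Illustrative only: reject requests without valid OAuth credentials.
package main

import (
	"fmt"
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/user"
)

func apiHandler(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)

	// Fails unless the request carries valid OAuth credentials with the scope.
	u, err := user.CurrentOAuth(ctx, "https://www.googleapis.com/auth/userinfo.email")
	if err != nil {
		http.Error(w, "valid OAuth credentials required", http.StatusUnauthorized)
		return
	}
	fmt.Fprintf(w, "hello, %s\n", u.Email)
}

func main() {
	http.HandleFunc("/api", apiHandler)
	appengine.Main()
}
```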
diff --git a/v2/user/user.go b/v2/user/user.go
new file mode 100644
index 0000000..5220b03
--- /dev/null
+++ b/v2/user/user.go
@@ -0,0 +1,109 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+// Package user provides a client for App Engine's user authentication service.
+package user // import "google.golang.org/appengine/v2/user"
+
+import (
+	"context"
+	"strings"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal"
+	pb "google.golang.org/appengine/v2/internal/user"
+)
+
+// User represents a user of the application.
+type User struct {
+	Email      string
+	AuthDomain string
+	Admin      bool
+
+	// ID is the unique permanent ID of the user.
+	// It is populated if the Email is associated
+	// with a Google account, or empty otherwise.
+	ID string
+
+	// ClientID is the ID of the pre-registered client so its identity can be verified.
+	// See https://developers.google.com/console/help/#generatingoauth2 for more information.
+	ClientID string
+
+	FederatedIdentity string
+	FederatedProvider string
+}
+
+// String returns a displayable name for the user.
+func (u *User) String() string {
+	if u.AuthDomain != "" && strings.HasSuffix(u.Email, "@"+u.AuthDomain) {
+		return u.Email[:len(u.Email)-len("@"+u.AuthDomain)]
+	}
+	if u.FederatedIdentity != "" {
+		return u.FederatedIdentity
+	}
+	return u.Email
+}
+
+// LoginURL returns a URL that, when visited, prompts the user to sign in,
+// then redirects the user to the URL specified by dest.
+func LoginURL(c context.Context, dest string) (string, error) {
+	return LoginURLFederated(c, dest, "")
+}
+
+// LoginURLFederated is like LoginURL but accepts a user's OpenID identifier.
+func LoginURLFederated(c context.Context, dest, identity string) (string, error) {
+	req := &pb.CreateLoginURLRequest{
+		DestinationUrl: proto.String(dest),
+	}
+	if identity != "" {
+		req.FederatedIdentity = proto.String(identity)
+	}
+	res := &pb.CreateLoginURLResponse{}
+	if err := internal.Call(c, "user", "CreateLoginURL", req, res); err != nil {
+		return "", err
+	}
+	return *res.LoginUrl, nil
+}
+
+// LogoutURL returns a URL that, when visited, signs the user out,
+// then redirects the user to the URL specified by dest.
+func LogoutURL(c context.Context, dest string) (string, error) {
+	req := &pb.CreateLogoutURLRequest{
+		DestinationUrl: proto.String(dest),
+	}
+	res := &pb.CreateLogoutURLResponse{}
+	if err := internal.Call(c, "user", "CreateLogoutURL", req, res); err != nil {
+		return "", err
+	}
+	return *res.LogoutUrl, nil
+}
+
+func init() {
+	internal.RegisterErrorCodeMap("user", pb.UserServiceError_ErrorCode_name)
+}
+
+// Current returns the currently logged-in user,
+// or nil if the user is not signed in.
+func Current(c context.Context) *User {
+	h := internal.IncomingHeaders(c)
+	u := &User{
+		Email:             h.Get("X-AppEngine-User-Email"),
+		AuthDomain:        h.Get("X-AppEngine-Auth-Domain"),
+		ID:                h.Get("X-AppEngine-User-Id"),
+		Admin:             h.Get("X-AppEngine-User-Is-Admin") == "1",
+		FederatedIdentity: h.Get("X-AppEngine-Federated-Identity"),
+		FederatedProvider: h.Get("X-AppEngine-Federated-Provider"),
+	}
+	if u.Email == "" && u.FederatedIdentity == "" {
+		return nil
+	}
+	return u
+}
+
+// IsAdmin returns true if the current user is signed in and
+// is currently registered as an administrator of the application.
+func IsAdmin(c context.Context) bool {
+	h := internal.IncomingHeaders(c)
+	return h.Get("X-AppEngine-User-Is-Admin") == "1"
+}
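
A minimal sketch of the cookie-based user API above: redirect anonymous visitors to the login page and greet signed-in users. The "/" route is illustrative.

```go
// Illustrative handler using user.Current and user.LoginURL from the diff above.
package main

import (
	"fmt"
	"net/http"

	"google.golang.org/appengine/v2"
	"google.golang.org/appengine/v2/user"
)

func welcome(w http.ResponseWriter, r *http.Request) {
	ctx := appengine.NewContext(r)

	u := user.Current(ctx)
	if u == nil {
		// Not signed in: send the visitor to the login page, then back here.
		url, err := user.LoginURL(ctx, r.URL.String())
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		http.Redirect(w, r, url, http.StatusFound)
		return
	}
	fmt.Fprintf(w, "welcome, %s (admin: %v)\n", u, u.Admin)
}

func main() {
	http.HandleFunc("/", welcome)
	appengine.Main()
}
```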
diff --git a/v2/user/user_test.go b/v2/user/user_test.go
new file mode 100644
index 0000000..dbef0cc
--- /dev/null
+++ b/v2/user/user_test.go
@@ -0,0 +1,97 @@
+// Copyright 2011 Google Inc. All rights reserved.
+// Use of this source code is governed by the Apache 2.0
+// license that can be found in the LICENSE file.
+
+package user
+
+import (
+	"fmt"
+	"net/http"
+	"testing"
+
+	"github.com/golang/protobuf/proto"
+
+	"google.golang.org/appengine/v2/internal"
+	"google.golang.org/appengine/v2/internal/aetesting"
+	pb "google.golang.org/appengine/v2/internal/user"
+)
+
+func baseReq() *http.Request {
+	return &http.Request{
+		Header: http.Header{},
+	}
+}
+
+type basicUserTest struct {
+	nickname, email, authDomain, admin string
+	// expectations
+	isNil, isAdmin bool
+	displayName    string
+}
+
+var basicUserTests = []basicUserTest{
+	{"", "", "", "0", true, false, ""},
+	{"ken", "ken@example.com", "example.com", "0", false, false, "ken"},
+	{"ken", "ken@example.com", "auth_domain.com", "1", false, true, "ken@example.com"},
+}
+
+func TestBasicUserAPI(t *testing.T) {
+	for i, tc := range basicUserTests {
+		req := baseReq()
+		req.Header.Set("X-AppEngine-User-Nickname", tc.nickname)
+		req.Header.Set("X-AppEngine-User-Email", tc.email)
+		req.Header.Set("X-AppEngine-Auth-Domain", tc.authDomain)
+		req.Header.Set("X-AppEngine-User-Is-Admin", tc.admin)
+
+		c := internal.ContextForTesting(req)
+
+		if ga := IsAdmin(c); ga != tc.isAdmin {
+			t.Errorf("test %d: expected IsAdmin(c) = %v, got %v", i, tc.isAdmin, ga)
+		}
+
+		u := Current(c)
+		if tc.isNil {
+			if u != nil {
+				t.Errorf("test %d: expected u == nil, got %+v", i, u)
+			}
+			continue
+		}
+		if u == nil {
+			t.Errorf("test %d: expected u != nil, got nil", i)
+			continue
+		}
+		if u.Email != tc.email {
+			t.Errorf("test %d: expected u.Email = %q, got %q", i, tc.email, u.Email)
+		}
+		if gs := u.String(); gs != tc.displayName {
+			t.Errorf("test %d: expected u.String() = %q, got %q", i, tc.displayName, gs)
+		}
+		if u.Admin != tc.isAdmin {
+			t.Errorf("test %d: expected u.Admin = %v, got %v", i, tc.isAdmin, u.Admin)
+		}
+	}
+}
+
+func TestLoginURL(t *testing.T) {
+	expectedQuery := &pb.CreateLoginURLRequest{
+		DestinationUrl: proto.String("/destination"),
+	}
+	const expectedDest = "/redir/dest"
+	c := aetesting.FakeSingleContext(t, "user", "CreateLoginURL", func(req *pb.CreateLoginURLRequest, res *pb.CreateLoginURLResponse) error {
+		if !proto.Equal(req, expectedQuery) {
+			return fmt.Errorf("got %v, want %v", req, expectedQuery)
+		}
+		res.LoginUrl = proto.String(expectedDest)
+		return nil
+	})
+
+	url, err := LoginURL(c, "/destination")
+	if err != nil {
+		t.Fatalf("LoginURL failed: %v", err)
+	}
+	if url != expectedDest {
+		t.Errorf("got %v, want %v", url, expectedDest)
+	}
+}
+
+// TODO(dsymonds): Add test for LogoutURL.
diff --git a/xmpp/xmpp.go b/xmpp/xmpp.go
index 3a561fd..437dc4c 100644
--- a/xmpp/xmpp.go
+++ b/xmpp/xmpp.go
@@ -7,6 +7,7 @@ Package xmpp provides the means to send and receive instant messages
 to and from users of XMPP-compatible services.
 
 To send a message,
+
 	m := &xmpp.Message{
 		To:   []string{"kaylee@example.com"},
 		Body: `Hi! How's the carrot?`,
@@ -14,6 +15,7 @@ To send a message,
 	err := m.Send(c)
 
 To receive messages,
+
 	func init() {
 		xmpp.Handle(handleChat)
 	}
