New Upstream Release - golang-github-oschwald-maxminddb-golang

Ready changes

Summary

Merged new upstream version: 1.10.0 (was: 1.8.0).

Resulting package

Built on 2022-12-14T13:55 (took 3m31s)

The resulting binary package can be installed (if you have the apt repository enabled) by running:

apt install -t fresh-releases golang-github-oschwald-maxminddb-golang-dev
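
Once installed, the library is importable as github.com/oschwald/maxminddb-golang. The sketch below is a minimal lookup modeled on the ExampleReader_Lookup_struct test visible in the diff below; the database path and IP address are placeholders, not part of this upload:

package main

import (
	"fmt"
	"log"
	"net"

	"github.com/oschwald/maxminddb-golang"
)

func main() {
	// Placeholder path; any MaxMind DB file available locally works.
	db, err := maxminddb.Open("/usr/share/GeoIP/GeoLite2-City.mmdb")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Decode the country record for an address into a tagged struct.
	var record struct {
		Country struct {
			ISOCode string `maxminddb:"iso_code"`
		} `maxminddb:"country"`
	}
	if err := db.Lookup(net.ParseIP("81.2.69.142"), &record); err != nil {
		log.Fatal(err)
	}
	fmt.Println(record.Country.ISOCode)
}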

Lintian Result

Diff

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..fcdfddc
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,8 @@
+version: 2
+updates:
+- package-ecosystem: gomod
+  directory: "/"
+  schedule:
+    interval: daily
+    time: "13:00"
+  open-pull-requests-limit: 10
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 278af78..047ecfb 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -2,6 +2,8 @@ name: "Code scanning - action"
 
 on:
   push:
+    branches-ignore:
+      - 'dependabot/**'
   pull_request:
   schedule:
     - cron: '0 13 * * 4'
@@ -23,7 +25,7 @@ jobs:
     # the head of the pull request instead of the merge commit.
     - run: git checkout HEAD^2
       if: ${{ github.event_name == 'pull_request' }}
-      
+
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
       uses: github/codeql-action/init@v1
diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml
index 4b3ba82..618c5ee 100644
--- a/.github/workflows/go.yml
+++ b/.github/workflows/go.yml
@@ -8,7 +8,7 @@ jobs:
     name: Build
     strategy:
       matrix:
-        go-version: [1.13.x, 1.14.x, 1.15.x]
+        go-version: [1.18.x, 1.19.x]
         platform: [ubuntu-latest, macos-latest, windows-latest]
     runs-on: ${{ matrix.platform }}
     steps:
diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml
index b96e8e4..d863672 100644
--- a/.github/workflows/golangci-lint.yml
+++ b/.github/workflows/golangci-lint.yml
@@ -9,6 +9,6 @@ jobs:
     steps:
       - uses: actions/checkout@v2
       - name: golangci-lint
-        uses: golangci/golangci-lint-action@v1
+        uses: golangci/golangci-lint-action@v2
         with:
-          version: v1.30
+          version: latest
diff --git a/.golangci.toml b/.golangci.toml
index f8b0079..b4f7e6a 100644
--- a/.golangci.toml
+++ b/.golangci.toml
@@ -1,56 +1,472 @@
 [run]
   deadline = "10m"
+
   tests = true
 
 [linters]
   disable-all = true
   enable = [
+    "asciicheck",
+    "bidichk",
     "bodyclose",
+    "containedctx",
+    "contextcheck",
     "deadcode",
     "depguard",
+    "durationcheck",
     "errcheck",
+    "errchkjson",
+    "errname",
+    "errorlint",
     "exportloopref",
+    "forbidigo",
+    #"forcetypeassert",
     "goconst",
     "gocyclo",
     "gocritic",
+    "godot",
     "gofumpt",
-    "golint",
+    "gomodguard",
     "gosec",
     "gosimple",
     "govet",
+    "grouper",
     "ineffassign",
-    "maligned",
+    "lll",
+    "makezero",
+    "maintidx",
     "misspell",
     "nakedret",
+    "nilerr",
     "noctx",
     "nolintlint",
+    "nosprintfhostport",
+    "predeclared",
+    "revive",
+    "rowserrcheck",
     "sqlclosecheck",
     "staticcheck",
     "structcheck",
     "stylecheck",
+    "tenv",
+    "tparallel",
     "typecheck",
     "unconvert",
     "unparam",
     "unused",
     "varcheck",
     "vetshadow",
+    "wastedassign",
+  ]
+
+# Please note that we only use depguard for stdlib as gomodguard only
+# supports modules currently. See https://github.com/ryancurrah/gomodguard/issues/12
+[linters-settings.depguard]
+  list-type = "blacklist"
+  include-go-root = true
+  packages = [
+    # ioutil is deprecated. The functions have been moved elsewhere:
+    # https://golang.org/doc/go1.16#ioutil
+    "io/ioutil",
   ]
 
 [linters-settings.errcheck]
+    # Don't allow setting of error to the blank identifier. If there is a legtimate
+    # reason, there should be a nolint with an explanation.
+    check-blank = true
+
+    exclude-functions = [
+        # If we are rolling back a transaction, we are often already in an error
+        # state.
+        '(*database/sql.Tx).Rollback',
+
+        # It is reasonable to ignore errors if Cleanup fails in most cases.
+        '(*github.com/google/renameio/v2.PendingFile).Cleanup',
+
+        # We often don't care if removing a file failed (e.g., it doesn't exist)
+        'os.Remove',
+        'os.RemoveAll',
+    ]
+
+    # Ignoring Close so that we don't have to have a bunch of
+    # `defer func() { _ = r.Close() }()` constructs when we
+    # don't actually care about the error.
     ignore = "Close,fmt:.*"
 
+[linters-settings.errorlint]
+    errorf = true
+    asserts = true
+    comparison = true
+
+[linters-settings.exhaustive]
+    default-signifies-exhaustive = true
+
+[linters-settings.forbidigo]
+    # Forbid the following identifiers
+    forbid = [
+        "^minFraud*",
+        "^maxMind*",
+    ]
+
+[linters-settings.gocritic]
+    enabled-checks = [
+        "appendAssign",
+        "appendCombine",
+        "argOrder",
+        "assignOp",
+        "badCall",
+        "badCond",
+        "badLock",
+        "badRegexp",
+        "badSorting",
+        "boolExprSimplify",
+        "builtinShadow",
+        "builtinShadowDecl",
+        "captLocal",
+        "caseOrder",
+        "codegenComment",
+        "commentedOutCode",
+        "commentedOutImport",
+        "commentFormatting",
+        "defaultCaseOrder",
+        # Revive's defer rule already captures this. This caught no extra cases.
+        # "deferInLoop",
+        "deferUnlambda",
+        "deprecatedComment",
+        "docStub",
+        "dupArg",
+        "dupBranchBody",
+        "dupCase",
+        "dupImport",
+        "dupSubExpr",
+        "dynamicFmtString",
+        "elseif",
+        "emptyDecl",
+        "emptyFallthrough",
+        "emptyStringTest",
+        "equalFold",
+        "evalOrder",
+        "exitAfterDefer",
+        "exposedSyncMutex",
+        "externalErrorReassign",
+        # Given that all of our code runs on Linux and the / separate should
+        # work fine, this seems less important.
+        # "filepathJoin",
+        "flagDeref",
+        "flagName",
+        "hexLiteral",
+        "ifElseChain",
+        "importShadow",
+        "indexAlloc",
+        "initClause",
+        "ioutilDeprecated",
+        "mapKey",
+        "methodExprCall",
+        "nestingReduce",
+        "newDeref",
+        "nilValReturn",
+        "octalLiteral",
+        "offBy1",
+        "paramTypeCombine",
+        "preferDecodeRune",
+        "preferFilepathJoin",
+        "preferFprint",
+        "preferStringWriter",
+        "preferWriteByte",
+        "ptrToRefParam",
+        "rangeExprCopy",
+        "rangeValCopy",
+        "redundantSprint",
+        "regexpMust",
+        "regexpPattern",
+        # This might be good, but I don't think we want to encourage
+        # significant changes to regexes as we port stuff from Perl.
+        # "regexpSimplify",
+        "ruleguard",
+        "singleCaseSwitch",
+        "sliceClear",
+        "sloppyLen",
+        # This seems like it might also be good, but a lot of existing code
+        # fails.
+        # "sloppyReassign",
+        "returnAfterHttpError",
+        "sloppyTypeAssert",
+        "sortSlice",
+        "sprintfQuotedString",
+        "sqlQuery",
+        "stringsCompare",
+        "stringXbytes",
+        "switchTrue",
+        "syncMapLoadAndDelete",
+        "timeExprSimplify",
+        "todoCommentWithoutDetail",
+        "tooManyResultsChecker",
+        "truncateCmp",
+        "typeAssertChain",
+        "typeDefFirst",
+        "typeSwitchVar",
+        "typeUnparen",
+        "underef",
+        "unlabelStmt",
+        "unlambda",
+        # I am not sure we would want this linter and a lot of existing
+        # code fails.
+        # "unnamedResult",
+        "unnecessaryBlock",
+        "unnecessaryDefer",
+        "unslice",
+        "valSwap",
+        "weakCond",
+        "wrapperFunc",
+        "yodaStyleExpr",
+        # This requires explanations for "nolint" directives. This would be
+        # nice for gosec ones, but I am not sure we want it generally unless
+        # we can get the false positive rate lower.
+        # "whyNoLint"
+    ]
+
 [linters-settings.gofumpt]
     extra-rules = true
+    lang-version = "1.18"
+
+[linters-settings.govet]
+    "enable-all" = true
+
+[linters-settings.lll]
+    line-length = 120
+    tab-width = 4
+
+[linters-settings.nolintlint]
+    allow-leading-space = false
+    allow-unused = false
+    allow-no-explanation = ["lll", "misspell"]
+    require-explanation = true
+    require-specific = true
+
+[linters-settings.revive]
+    ignore-generated-header = true
+    severity = "warning"
+
+    # This might be nice but it is so common that it is hard
+    # to enable.
+    # [[linters-settings.revive.rules]]
+    # name = "add-constant"
+
+    # [[linters-settings.revive.rules]]
+    # name = "argument-limit"
+
+    [[linters-settings.revive.rules]]
+    name = "atomic"
+
+    [[linters-settings.revive.rules]]
+    name = "bare-return"
+
+    [[linters-settings.revive.rules]]
+    name = "blank-imports"
+
+    [[linters-settings.revive.rules]]
+    name = "bool-literal-in-expr"
+
+    [[linters-settings.revive.rules]]
+    name = "call-to-gc"
+
+    # [[linters-settings.revive.rules]]
+    # name = "cognitive-complexity"
+
+    # Probably a good rule, but we have a lot of names that
+    # only have case differences.
+    # [[linters-settings.revive.rules]]
+    # name = "confusing-naming"
+
+    # [[linters-settings.revive.rules]]
+    # name = "confusing-results"
+
+    [[linters-settings.revive.rules]]
+    name = "constant-logical-expr"
+
+    [[linters-settings.revive.rules]]
+    name = "context-as-argument"
+
+    [[linters-settings.revive.rules]]
+    name = "context-keys-type"
+
+    # [[linters-settings.revive.rules]]
+    # name = "cyclomatic"
+
+    # [[linters-settings.revive.rules]]
+    # name = "deep-exit"
+
+    [[linters-settings.revive.rules]]
+    name = "defer"
+
+    [[linters-settings.revive.rules]]
+    name = "dot-imports"
+
+    [[linters-settings.revive.rules]]
+    name = "duplicated-imports"
+
+    [[linters-settings.revive.rules]]
+    name = "early-return"
+
+    [[linters-settings.revive.rules]]
+    name = "empty-block"
+
+    [[linters-settings.revive.rules]]
+    name = "empty-lines"
+
+    [[linters-settings.revive.rules]]
+    name = "errorf"
+
+    [[linters-settings.revive.rules]]
+    name = "error-naming"
+
+    [[linters-settings.revive.rules]]
+    name = "error-return"
+
+    [[linters-settings.revive.rules]]
+    name = "error-strings"
+
+    [[linters-settings.revive.rules]]
+    name = "exported"
 
-[issues]
-exclude-use-default = false
+    # [[linters-settings.revive.rules]]
+    # name = "file-header"
 
-  [[issues.exclude-rules]]
+    # We have a lot of flag parameters. This linter probably makes
+    # a good point, but we would need some cleanup or a lot of nolints.
+    # [[linters-settings.revive.rules]]
+    # name = "flag-parameter"
+
+    # [[linters-settings.revive.rules]]
+    # name = "function-result-limit"
+
+    [[linters-settings.revive.rules]]
+    name = "get-return"
+
+    [[linters-settings.revive.rules]]
+    name = "identical-branches"
+
+    [[linters-settings.revive.rules]]
+    name = "if-return"
+
+    [[linters-settings.revive.rules]]
+    name = "imports-blacklist"
+
+    [[linters-settings.revive.rules]]
+    name = "import-shadowing"
+
+    [[linters-settings.revive.rules]]
+    name = "increment-decrement"
+
+    [[linters-settings.revive.rules]]
+    name = "indent-error-flow"
+
+    # [[linters-settings.revive.rules]]
+    # name = "line-length-limit"
+
+    # [[linters-settings.revive.rules]]
+    # name = "max-public-structs"
+
+    [[linters-settings.revive.rules]]
+    name = "modifies-parameter"
+
+    [[linters-settings.revive.rules]]
+    name = "modifies-value-receiver"
+
+    # We frequently use nested structs, particularly in tests.
+    # [[linters-settings.revive.rules]]
+    # name = "nested-structs"
+
+    [[linters-settings.revive.rules]]
+    name = "optimize-operands-order"
+
+    [[linters-settings.revive.rules]]
+    name = "package-comments"
+
+    [[linters-settings.revive.rules]]
+    name = "range"
+
+    [[linters-settings.revive.rules]]
+    name = "range-val-address"
+
+    [[linters-settings.revive.rules]]
+    name = "range-val-in-closure"
+
+    [[linters-settings.revive.rules]]
+    name = "receiver-naming"
+
+    [[linters-settings.revive.rules]]
+    name = "redefines-builtin-id"
+
+    [[linters-settings.revive.rules]]
+    name = "string-of-int"
+
+    [[linters-settings.revive.rules]]
+    name = "struct-tag"
+
+    [[linters-settings.revive.rules]]
+    name = "superfluous-else"
+
+    [[linters-settings.revive.rules]]
+    name = "time-naming"
+
+    [[linters-settings.revive.rules]]
+    name = "unconditional-recursion"
+
+    [[linters-settings.revive.rules]]
+    name = "unexported-naming"
+
+    [[linters-settings.revive.rules]]
+    name = "unexported-return"
+
+    # This is covered elsewhere and we want to ignore some
+    # functions such as fmt.Fprintf.
+    # [[linters-settings.revive.rules]]
+    # name = "unhandled-error"
+
+    [[linters-settings.revive.rules]]
+    name = "unnecessary-stmt"
+
+    [[linters-settings.revive.rules]]
+    name = "unreachable-code"
+
+    [[linters-settings.revive.rules]]
+    name = "unused-parameter"
+
+    # We generally have unused receivers in tests for meeting the
+    # requirements of an interface.
+    # [[linters-settings.revive.rules]]
+    # name = "unused-receiver"
+
+    # This probably makes sense after we upgrade to 1.18
+    # [[linters-settings.revive.rules]]
+    # name = "use-any"
+
+    [[linters-settings.revive.rules]]
+    name = "useless-break"
+
+    [[linters-settings.revive.rules]]
+    name = "var-declaration"
+
+    [[linters-settings.revive.rules]]
+    name = "var-naming"
+
+    [[linters-settings.revive.rules]]
+    name = "waitgroup-by-value"
+
+[linters-settings.unparam]
+    check-exported = true
+
+[[issues.exclude-rules]]
   linters = [
-    "gosec"
+    "govet"
   ]
-
-  # G304 - Potential file inclusion via variable (gosec)
-  # G404 - "Use of weak random number generator (math/rand instead of crypto/rand)"
-  #        We only use this in tests.
-  text = "G304|G404"
+  # we want to enable almost all govet rules. It is easier to just filter out
+  # the ones we don't want:
+  #
+  # * fieldalignment - way too noisy. Although it is very useful in particular
+  #   cases where we are trying to use as little memory as possible, having
+  #   it go off on every struct isn't helpful.
+  # * shadow - although often useful, it complains about _many_ err
+  #   shadowing assignments and some others where shadowing is clear.
+  text = "^(fieldalignment|shadow)"
diff --git a/debian/changelog b/debian/changelog
index b9ea5f6..3e58bff 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,3 +1,9 @@
+golang-github-oschwald-maxminddb-golang (1.10.0-1) UNRELEASED; urgency=low
+
+  * New upstream release.
+
+ -- Debian Janitor <janitor@jelmer.uk>  Wed, 14 Dec 2022 13:52:47 -0000
+
 golang-github-oschwald-maxminddb-golang (1.8.0-1) unstable; urgency=medium
 
   [ Cyril Brulebois ]
diff --git a/decoder.go b/decoder.go
index ebf01c3..828c57f 100644
--- a/decoder.go
+++ b/decoder.go
@@ -29,20 +29,22 @@ const (
 	_Slice
 	// We don't use the next two. They are placeholders. See the spec
 	// for more details.
-	_Container // nolint: deadcode, varcheck
-	_Marker    // nolint: deadcode, varcheck
+	_Container //nolint: deadcode, varcheck // above
+	_Marker    //nolint: deadcode, varcheck // above
 	_Bool
 	_Float32
 )
 
 const (
-	// This is the value used in libmaxminddb
+	// This is the value used in libmaxminddb.
 	maximumDataStructureDepth = 512
 )
 
 func (d *decoder) decode(offset uint, result reflect.Value, depth int) (uint, error) {
 	if depth > maximumDataStructureDepth {
-		return 0, newInvalidDatabaseError("exceeded maximum data structure depth; database is likely corrupt")
+		return 0, newInvalidDatabaseError(
+			"exceeded maximum data structure depth; database is likely corrupt",
+		)
 	}
 	typeNum, size, newOffset, err := d.decodeCtrlData(offset)
 	if err != nil {
@@ -56,22 +58,32 @@ func (d *decoder) decode(offset uint, result reflect.Value, depth int) (uint, er
 	return d.decodeFromType(typeNum, size, newOffset, result, depth+1)
 }
 
-func (d *decoder) decodeToDeserializer(offset uint, dser deserializer, depth int) (uint, error) {
+func (d *decoder) decodeToDeserializer(
+	offset uint,
+	dser deserializer,
+	depth int,
+	getNext bool,
+) (uint, error) {
 	if depth > maximumDataStructureDepth {
-		return 0, newInvalidDatabaseError("exceeded maximum data structure depth; database is likely corrupt")
+		return 0, newInvalidDatabaseError(
+			"exceeded maximum data structure depth; database is likely corrupt",
+		)
 	}
-	typeNum, size, newOffset, err := d.decodeCtrlData(offset)
+	skip, err := dser.ShouldSkip(uintptr(offset))
 	if err != nil {
 		return 0, err
 	}
+	if skip {
+		if getNext {
+			return d.nextValueOffset(offset, 1)
+		}
+		return 0, nil
+	}
 
-	skip, err := dser.ShouldSkip(uintptr(offset))
+	typeNum, size, newOffset, err := d.decodeCtrlData(offset)
 	if err != nil {
 		return 0, err
 	}
-	if skip {
-		return d.nextValueOffset(offset, 1)
-	}
 
 	return d.decodeFromTypeToDeserializer(typeNum, size, newOffset, dser, depth+1)
 }
@@ -97,7 +109,11 @@ func (d *decoder) decodeCtrlData(offset uint) (dataType, uint, uint, error) {
 	return typeNum, size, newOffset, err
 }
 
-func (d *decoder) sizeFromCtrlByte(ctrlByte byte, offset uint, typeNum dataType) (uint, uint, error) {
+func (d *decoder) sizeFromCtrlByte(
+	ctrlByte byte,
+	offset uint,
+	typeNum dataType,
+) (uint, uint, error) {
 	size := uint(ctrlByte & 0x1f)
 	if typeNum == _Extended {
 		return size, offset, nil
@@ -196,7 +212,7 @@ func (d *decoder) decodeFromTypeToDeserializer(
 		if err != nil {
 			return 0, err
 		}
-		_, err = d.decodeToDeserializer(pointer, dser, depth)
+		_, err = d.decodeToDeserializer(pointer, dser, depth, false)
 		return newOffset, err
 	case _Slice:
 		return d.decodeSliceToDeserializer(size, offset, dser, depth)
@@ -241,7 +257,10 @@ func (d *decoder) decodeFromTypeToDeserializer(
 
 func (d *decoder) unmarshalBool(size, offset uint, result reflect.Value) (uint, error) {
 	if size > 1 {
-		return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (bool size of %v)", size)
+		return 0, newInvalidDatabaseError(
+			"the MaxMind DB file's data section contains bad data (bool size of %v)",
+			size,
+		)
 	}
 	value, newOffset := d.decodeBool(size, offset)
 
@@ -309,7 +328,10 @@ func (d *decoder) unmarshalBytes(size, offset uint, result reflect.Value) (uint,
 
 func (d *decoder) unmarshalFloat32(size, offset uint, result reflect.Value) (uint, error) {
 	if size != 4 {
-		return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (float32 size of %v)", size)
+		return 0, newInvalidDatabaseError(
+			"the MaxMind DB file's data section contains bad data (float32 size of %v)",
+			size,
+		)
 	}
 	value, newOffset := d.decodeFloat32(size, offset)
 
@@ -328,7 +350,10 @@ func (d *decoder) unmarshalFloat32(size, offset uint, result reflect.Value) (uin
 
 func (d *decoder) unmarshalFloat64(size, offset uint, result reflect.Value) (uint, error) {
 	if size != 8 {
-		return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (float 64 size of %v)", size)
+		return 0, newInvalidDatabaseError(
+			"the MaxMind DB file's data section contains bad data (float 64 size of %v)",
+			size,
+		)
 	}
 	value, newOffset := d.decodeFloat64(size, offset)
 
@@ -350,7 +375,10 @@ func (d *decoder) unmarshalFloat64(size, offset uint, result reflect.Value) (uin
 
 func (d *decoder) unmarshalInt32(size, offset uint, result reflect.Value) (uint, error) {
 	if size > 4 {
-		return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (int32 size of %v)", size)
+		return 0, newInvalidDatabaseError(
+			"the MaxMind DB file's data section contains bad data (int32 size of %v)",
+			size,
+		)
 	}
 	value, newOffset := d.decodeInt(size, offset)
 
@@ -361,7 +389,12 @@ func (d *decoder) unmarshalInt32(size, offset uint, result reflect.Value) (uint,
 			result.SetInt(n)
 			return newOffset, nil
 		}
-	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+	case reflect.Uint,
+		reflect.Uint8,
+		reflect.Uint16,
+		reflect.Uint32,
+		reflect.Uint64,
+		reflect.Uintptr:
 		n := uint64(value)
 		if !result.OverflowUint(n) {
 			result.SetUint(n)
@@ -401,7 +434,11 @@ func (d *decoder) unmarshalMap(
 	}
 }
 
-func (d *decoder) unmarshalPointer(size, offset uint, result reflect.Value, depth int) (uint, error) {
+func (d *decoder) unmarshalPointer(
+	size, offset uint,
+	result reflect.Value,
+	depth int,
+) (uint, error) {
 	pointer, newOffset, err := d.decodePointer(size, offset)
 	if err != nil {
 		return 0, err
@@ -447,9 +484,17 @@ func (d *decoder) unmarshalString(size, offset uint, result reflect.Value) (uint
 	return newOffset, newUnmarshalTypeError(value, result.Type())
 }
 
-func (d *decoder) unmarshalUint(size, offset uint, result reflect.Value, uintType uint) (uint, error) {
+func (d *decoder) unmarshalUint(
+	size, offset uint,
+	result reflect.Value,
+	uintType uint,
+) (uint, error) {
 	if size > uintType/8 {
-		return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (uint%v size of %v)", uintType, size)
+		return 0, newInvalidDatabaseError(
+			"the MaxMind DB file's data section contains bad data (uint%v size of %v)",
+			uintType,
+			size,
+		)
 	}
 
 	value, newOffset := d.decodeUint(size, offset)
@@ -461,7 +506,12 @@ func (d *decoder) unmarshalUint(size, offset uint, result reflect.Value, uintTyp
 			result.SetInt(n)
 			return newOffset, nil
 		}
-	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+	case reflect.Uint,
+		reflect.Uint8,
+		reflect.Uint16,
+		reflect.Uint32,
+		reflect.Uint64,
+		reflect.Uintptr:
 		if !result.OverflowUint(value) {
 			result.SetUint(value)
 			return newOffset, nil
@@ -479,7 +529,10 @@ var bigIntType = reflect.TypeOf(big.Int{})
 
 func (d *decoder) unmarshalUint128(size, offset uint, result reflect.Value) (uint, error) {
 	if size > 16 {
-		return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (uint128 size of %v)", size)
+		return 0, newInvalidDatabaseError(
+			"the MaxMind DB file's data section contains bad data (uint128 size of %v)",
+			size,
+		)
 	}
 	value, newOffset := d.decodeUint128(size, offset)
 
@@ -581,12 +634,12 @@ func (d *decoder) decodeMapToDeserializer(
 	}
 	for i := uint(0); i < size; i++ {
 		// TODO - implement key/value skipping?
-		offset, err = d.decodeToDeserializer(offset, dser, depth)
+		offset, err = d.decodeToDeserializer(offset, dser, depth, true)
 		if err != nil {
 			return 0, err
 		}
 
-		offset, err = d.decodeToDeserializer(offset, dser, depth)
+		offset, err = d.decodeToDeserializer(offset, dser, depth, true)
 		if err != nil {
 			return 0, err
 		}
@@ -661,7 +714,7 @@ func (d *decoder) decodeSliceToDeserializer(
 		return 0, err
 	}
 	for i := uint(0); i < size; i++ {
-		offset, err = d.decodeToDeserializer(offset, dser, depth)
+		offset, err = d.decodeToDeserializer(offset, dser, depth, true)
 		if err != nil {
 			return 0, err
 		}
@@ -817,7 +870,7 @@ func (d *decoder) decodeKey(offset uint) ([]byte, uint, error) {
 
 // This function is used to skip ahead to the next value without decoding
 // the one at the offset passed in. The size bits have different meanings for
-// different data types
+// different data types.
 func (d *decoder) nextValueOffset(offset, numberToSkip uint) (uint, error) {
 	if numberToSkip == 0 {
 		return offset, nil
diff --git a/decoder_test.go b/decoder_test.go
index c5b6230..730e42c 100644
--- a/decoder_test.go
+++ b/decoder_test.go
@@ -2,8 +2,8 @@ package maxminddb
 
 import (
 	"encoding/hex"
-	"io/ioutil"
 	"math/big"
+	"os"
 	"reflect"
 	"strings"
 	"testing"
@@ -51,7 +51,7 @@ func TestFloat(t *testing.T) {
 }
 
 func TestInt32(t *testing.T) {
-	int32 := map[string]interface{}{
+	int32s := map[string]interface{}{
 		"0001":         0,
 		"0401ffffffff": -1,
 		"0101ff":       255,
@@ -65,7 +65,7 @@ func TestInt32(t *testing.T) {
 		"04017fffffff": 2147483647,
 		"040180000001": -2147483647,
 	}
-	validateDecoding(t, int32)
+	validateDecoding(t, int32s)
 }
 
 func TestMap(t *testing.T) {
@@ -73,8 +73,12 @@ func TestMap(t *testing.T) {
 		"e0":                             map[string]interface{}{},
 		"e142656e43466f6f":               map[string]interface{}{"en": "Foo"},
 		"e242656e43466f6f427a6843e4baba": map[string]interface{}{"en": "Foo", "zh": "人"},
-		"e1446e616d65e242656e43466f6f427a6843e4baba": map[string]interface{}{"name": map[string]interface{}{"en": "Foo", "zh": "人"}},
-		"e1496c616e677561676573020442656e427a68":     map[string]interface{}{"languages": []interface{}{"en", "zh"}},
+		"e1446e616d65e242656e43466f6f427a6843e4baba": map[string]interface{}{
+			"name": map[string]interface{}{"en": "Foo", "zh": "人"},
+		},
+		"e1496c616e677561676573020442656e427a68": map[string]interface{}{
+			"languages": []interface{}{"en", "zh"},
+		},
 	}
 	validateDecoding(t, maps)
 }
@@ -116,7 +120,8 @@ func TestString(t *testing.T) {
 func TestByte(t *testing.T) {
 	b := make(map[string]interface{})
 	for key, val := range testStrings {
-		oldCtrl, _ := hex.DecodeString(key[0:2])
+		oldCtrl, err := hex.DecodeString(key[0:2])
+		require.NoError(t, err)
 		newCtrl := []byte{oldCtrl[0] ^ 0xc0}
 		key = strings.Replace(key, hex.EncodeToString(oldCtrl), hex.EncodeToString(newCtrl), 1)
 		b[key] = []byte(val.(string))
@@ -126,18 +131,18 @@ func TestByte(t *testing.T) {
 }
 
 func TestUint16(t *testing.T) {
-	uint16 := map[string]interface{}{
+	uint16s := map[string]interface{}{
 		"a0":     uint64(0),
 		"a1ff":   uint64(255),
 		"a201f4": uint64(500),
 		"a22a78": uint64(10872),
 		"a2ffff": uint64(65535),
 	}
-	validateDecoding(t, uint16)
+	validateDecoding(t, uint16s)
 }
 
 func TestUint32(t *testing.T) {
-	uint32 := map[string]interface{}{
+	uint32s := map[string]interface{}{
 		"c0":         uint64(0),
 		"c1ff":       uint64(255),
 		"c201f4":     uint64(500),
@@ -146,7 +151,7 @@ func TestUint32(t *testing.T) {
 		"c3ffffff":   uint64(16777215),
 		"c4ffffffff": uint64(4294967295),
 	}
-	validateDecoding(t, uint32)
+	validateDecoding(t, uint32s)
 }
 
 func TestUint64(t *testing.T) {
@@ -168,7 +173,7 @@ func TestUint64(t *testing.T) {
 	validateDecoding(t, uints)
 }
 
-// Dedup with above somehow
+// Dedup with above somehow.
 func TestUint128(t *testing.T) {
 	ctrlByte := "03"
 	bits := uint(128)
@@ -190,7 +195,7 @@ func TestUint128(t *testing.T) {
 }
 
 // No pow or bit shifting for big int, apparently :-(
-// This is _not_ meant to be a comprehensive power function
+// This is _not_ meant to be a comprehensive power function.
 func powBigInt(bi *big.Int, pow uint) *big.Int {
 	newInt := big.NewInt(1)
 	for i := uint(0); i < pow; i++ {
@@ -201,11 +206,12 @@ func powBigInt(bi *big.Int, pow uint) *big.Int {
 
 func validateDecoding(t *testing.T, tests map[string]interface{}) {
 	for inputStr, expected := range tests {
-		inputBytes, _ := hex.DecodeString(inputStr)
+		inputBytes, err := hex.DecodeString(inputStr)
+		require.NoError(t, err)
 		d := decoder{inputBytes}
 
 		var result interface{}
-		_, err := d.decode(0, reflect.ValueOf(&result), 0)
+		_, err = d.decode(0, reflect.ValueOf(&result), 0)
 		assert.NoError(t, err)
 
 		if !reflect.DeepEqual(result, expected) {
@@ -216,7 +222,7 @@ func validateDecoding(t *testing.T, tests map[string]interface{}) {
 }
 
 func TestPointers(t *testing.T) {
-	bytes, err := ioutil.ReadFile(testFile("maps-with-pointers.raw"))
+	bytes, err := os.ReadFile(testFile("maps-with-pointers.raw"))
 	require.NoError(t, err)
 	d := decoder{bytes}
 
diff --git a/deserializer.go deserializer_test.go b/deserializer_test.go
similarity index 92%
rename from deserializer.go deserializer_test.go
rename to deserializer_test.go
index 8b15fee..2412248 100644
--- a/deserializer.go deserializer_test.go	
+++ b/deserializer_test.go
@@ -30,7 +30,7 @@ type testDeserializer struct {
 	key   *string
 }
 
-func (d *testDeserializer) ShouldSkip(offset uintptr) (bool, error) {
+func (d *testDeserializer) ShouldSkip(_ uintptr) (bool, error) {
 	return false, nil
 }
 
@@ -38,10 +38,11 @@ func (d *testDeserializer) StartSlice(size uint) error {
 	return d.add(make([]interface{}, size))
 }
 
-func (d *testDeserializer) StartMap(size uint) error {
+func (d *testDeserializer) StartMap(_ uint) error {
 	return d.add(map[string]interface{}{})
 }
 
+//nolint:unparam // This is to meet the requirements of the interface.
 func (d *testDeserializer) End() error {
 	d.stack = d.stack[:len(d.stack)-1]
 	return nil
diff --git a/example_test.go b/example_test.go
index 2f0ca99..8e8587c 100644
--- a/example_test.go
+++ b/example_test.go
@@ -8,7 +8,7 @@ import (
 	"github.com/oschwald/maxminddb-golang"
 )
 
-// This example shows how to decode to a struct
+// This example shows how to decode to a struct.
 func ExampleReader_Lookup_struct() {
 	db, err := maxminddb.Open("test-data/test-data/GeoIP2-City-Test.mmdb")
 	if err != nil {
@@ -26,14 +26,14 @@ func ExampleReader_Lookup_struct() {
 
 	err = db.Lookup(ip, &record)
 	if err != nil {
-		log.Fatal(err)
+		log.Panic(err)
 	}
 	fmt.Print(record.Country.ISOCode)
 	// Output:
 	// GB
 }
 
-// This example demonstrates how to decode to an interface{}
+// This example demonstrates how to decode to an interface{}.
 func ExampleReader_Lookup_interface() {
 	db, err := maxminddb.Open("test-data/test-data/GeoIP2-City-Test.mmdb")
 	if err != nil {
@@ -46,13 +46,13 @@ func ExampleReader_Lookup_interface() {
 	var record interface{}
 	err = db.Lookup(ip, &record)
 	if err != nil {
-		log.Fatal(err)
+		log.Panic(err)
 	}
 	fmt.Printf("%v", record)
 }
 
 // This example demonstrates how to iterate over all networks in the
-// database
+// database.
 func ExampleReader_Networks() {
 	db, err := maxminddb.Open("test-data/test-data/GeoIP2-Connection-Type-Test.mmdb")
 	if err != nil {
@@ -68,33 +68,37 @@ func ExampleReader_Networks() {
 	for networks.Next() {
 		subnet, err := networks.Network(&record)
 		if err != nil {
-			log.Fatal(err)
+			log.Panic(err)
 		}
 		fmt.Printf("%s: %s\n", subnet.String(), record.Domain)
 	}
 	if networks.Err() != nil {
-		log.Fatal(networks.Err())
+		log.Panic(networks.Err())
 	}
 	// Output:
-	// 1.0.0.0/24: Dialup
-	// 1.0.1.0/24: Cable/DSL
-	// 1.0.2.0/23: Dialup
-	// 1.0.4.0/22: Dialup
-	// 1.0.8.0/21: Dialup
-	// 1.0.16.0/20: Dialup
-	// 1.0.32.0/19: Dialup
-	// 1.0.64.0/18: Dialup
-	// 1.0.128.0/17: Dialup
+	// 1.0.0.0/24: Cable/DSL
+	// 1.0.1.0/24: Cellular
+	// 1.0.2.0/23: Cable/DSL
+	// 1.0.4.0/22: Cable/DSL
+	// 1.0.8.0/21: Cable/DSL
+	// 1.0.16.0/20: Cable/DSL
+	// 1.0.32.0/19: Cable/DSL
+	// 1.0.64.0/18: Cable/DSL
+	// 1.0.128.0/17: Cable/DSL
+	// 2.125.160.216/29: Cable/DSL
+	// 67.43.156.0/24: Cellular
 	// 80.214.0.0/20: Cellular
 	// 96.1.0.0/16: Cable/DSL
 	// 96.10.0.0/15: Cable/DSL
 	// 96.69.0.0/16: Cable/DSL
 	// 96.94.0.0/15: Cable/DSL
 	// 108.96.0.0/11: Cellular
-	// 175.16.199.0/24: Dialup
+	// 149.101.100.0/28: Cellular
+	// 175.16.199.0/24: Cable/DSL
 	// 187.156.138.0/24: Cable/DSL
 	// 201.243.200.0/24: Corporate
 	// 207.179.48.0/20: Cellular
+	// 216.160.83.56/29: Corporate
 	// 2003::/24: Cable/DSL
 }
 
@@ -113,29 +117,29 @@ func ExampleReader_NetworksWithin() {
 
 	_, network, err := net.ParseCIDR("1.0.0.0/8")
 	if err != nil {
-		log.Fatal(err)
+		log.Panic(err)
 	}
 
 	networks := db.NetworksWithin(network, maxminddb.SkipAliasedNetworks)
 	for networks.Next() {
 		subnet, err := networks.Network(&record)
 		if err != nil {
-			log.Fatal(err)
+			log.Panic(err)
 		}
 		fmt.Printf("%s: %s\n", subnet.String(), record.Domain)
 	}
 	if networks.Err() != nil {
-		log.Fatal(networks.Err())
+		log.Panic(networks.Err())
 	}
 
 	// Output:
-	// 1.0.0.0/24: Dialup
-	// 1.0.1.0/24: Cable/DSL
-	// 1.0.2.0/23: Dialup
-	// 1.0.4.0/22: Dialup
-	// 1.0.8.0/21: Dialup
-	// 1.0.16.0/20: Dialup
-	// 1.0.32.0/19: Dialup
-	// 1.0.64.0/18: Dialup
-	// 1.0.128.0/17: Dialup
+	// 1.0.0.0/24: Cable/DSL
+	// 1.0.1.0/24: Cellular
+	// 1.0.2.0/23: Cable/DSL
+	// 1.0.4.0/22: Cable/DSL
+	// 1.0.8.0/21: Cable/DSL
+	// 1.0.16.0/20: Cable/DSL
+	// 1.0.32.0/19: Cable/DSL
+	// 1.0.64.0/18: Cable/DSL
+	// 1.0.128.0/17: Cable/DSL
 }
diff --git a/go.mod b/go.mod
index 4918a8c..dcce7e4 100644
--- a/go.mod
+++ b/go.mod
@@ -1,8 +1,14 @@
 module github.com/oschwald/maxminddb-golang
 
-go 1.9
+go 1.18
 
 require (
-	github.com/stretchr/testify v1.6.1
-	golang.org/x/sys v0.0.0-20191224085550-c709ea063b76
+	github.com/stretchr/testify v1.7.3
+	golang.org/x/sys v0.0.0-20220804214406-8e32c043e418
+)
+
+require (
+	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
+	gopkg.in/yaml.v3 v3.0.1 // indirect
 )
diff --git a/go.sum b/go.sum
index ae4f4e7..0afa47d 100644
--- a/go.sum
+++ b/go.sum
@@ -1,14 +1,19 @@
-github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
-github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
-github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
-github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
-golang.org/x/sys v0.0.0-20191224085550-c709ea063b76 h1:Dho5nD6R3PcW2SH1or8vS0dszDaXRxIw55lBX7XiE5g=
-golang.org/x/sys v0.0.0-20191224085550-c709ea063b76/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.3 h1:dAm0YRdRQlWojc3CrCRgPBzG5f941d0zvAKu7qY4e+I=
+github.com/stretchr/testify v1.7.3/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+golang.org/x/sys v0.0.0-20220325203850-36772127a21f h1:TrmogKRsSOxRMJbLYGrB4SBbW+LJcEllYBLME5Zk5pU=
+golang.org/x/sys v0.0.0-20220325203850-36772127a21f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220804214406-8e32c043e418 h1:9vYwv7OjYaky/tlAeD7C4oC9EsPTlaFl1H2jS++V+ME=
+golang.org/x/sys v0.0.0-20220804214406-8e32c043e418/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
 gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/mmap_unix.go b/mmap_unix.go
index d41af1e..eeb2e05 100644
--- a/mmap_unix.go
+++ b/mmap_unix.go
@@ -1,3 +1,4 @@
+//go:build !windows && !appengine && !plan9
 // +build !windows,!appengine,!plan9
 
 package maxminddb
diff --git a/node.go b/node.go
index 68990db..16e8b5f 100644
--- a/node.go
+++ b/node.go
@@ -10,11 +10,15 @@ type nodeReader24 struct {
 }
 
 func (n nodeReader24) readLeft(nodeNumber uint) uint {
-	return (uint(n.buffer[nodeNumber]) << 16) | (uint(n.buffer[nodeNumber+1]) << 8) | uint(n.buffer[nodeNumber+2])
+	return (uint(n.buffer[nodeNumber]) << 16) |
+		(uint(n.buffer[nodeNumber+1]) << 8) |
+		uint(n.buffer[nodeNumber+2])
 }
 
 func (n nodeReader24) readRight(nodeNumber uint) uint {
-	return (uint(n.buffer[nodeNumber+3]) << 16) | (uint(n.buffer[nodeNumber+4]) << 8) | uint(n.buffer[nodeNumber+5])
+	return (uint(n.buffer[nodeNumber+3]) << 16) |
+		(uint(n.buffer[nodeNumber+4]) << 8) |
+		uint(n.buffer[nodeNumber+5])
 }
 
 type nodeReader28 struct {
@@ -22,11 +26,17 @@ type nodeReader28 struct {
 }
 
 func (n nodeReader28) readLeft(nodeNumber uint) uint {
-	return ((uint(n.buffer[nodeNumber+3]) & 0xF0) << 20) | (uint(n.buffer[nodeNumber]) << 16) | (uint(n.buffer[nodeNumber+1]) << 8) | uint(n.buffer[nodeNumber+2])
+	return ((uint(n.buffer[nodeNumber+3]) & 0xF0) << 20) |
+		(uint(n.buffer[nodeNumber]) << 16) |
+		(uint(n.buffer[nodeNumber+1]) << 8) |
+		uint(n.buffer[nodeNumber+2])
 }
 
 func (n nodeReader28) readRight(nodeNumber uint) uint {
-	return ((uint(n.buffer[nodeNumber+3]) & 0x0F) << 24) | (uint(n.buffer[nodeNumber+4]) << 16) | (uint(n.buffer[nodeNumber+5]) << 8) | uint(n.buffer[nodeNumber+6])
+	return ((uint(n.buffer[nodeNumber+3]) & 0x0F) << 24) |
+		(uint(n.buffer[nodeNumber+4]) << 16) |
+		(uint(n.buffer[nodeNumber+5]) << 8) |
+		uint(n.buffer[nodeNumber+6])
 }
 
 type nodeReader32 struct {
@@ -34,9 +44,15 @@ type nodeReader32 struct {
 }
 
 func (n nodeReader32) readLeft(nodeNumber uint) uint {
-	return (uint(n.buffer[nodeNumber]) << 24) | (uint(n.buffer[nodeNumber+1]) << 16) | (uint(n.buffer[nodeNumber+2]) << 8) | uint(n.buffer[nodeNumber+3])
+	return (uint(n.buffer[nodeNumber]) << 24) |
+		(uint(n.buffer[nodeNumber+1]) << 16) |
+		(uint(n.buffer[nodeNumber+2]) << 8) |
+		uint(n.buffer[nodeNumber+3])
 }
 
 func (n nodeReader32) readRight(nodeNumber uint) uint {
-	return (uint(n.buffer[nodeNumber+4]) << 24) | (uint(n.buffer[nodeNumber+5]) << 16) | (uint(n.buffer[nodeNumber+6]) << 8) | uint(n.buffer[nodeNumber+7])
+	return (uint(n.buffer[nodeNumber+4]) << 24) |
+		(uint(n.buffer[nodeNumber+5]) << 16) |
+		(uint(n.buffer[nodeNumber+6]) << 8) |
+		uint(n.buffer[nodeNumber+7])
 }
diff --git a/reader.go b/reader.go
index 7b33ace..263cf64 100644
--- a/reader.go
+++ b/reader.go
@@ -150,7 +150,10 @@ func (r *Reader) Lookup(ip net.IP, result interface{}) error {
 // database record cannot be stored in result because of type differences, an
 // UnmarshalTypeError is returned. If the database is invalid or otherwise
 // cannot be read, an InvalidDatabaseError is returned.
-func (r *Reader) LookupNetwork(ip net.IP, result interface{}) (network *net.IPNet, ok bool, err error) {
+func (r *Reader) LookupNetwork(
+	ip net.IP,
+	result interface{},
+) (network *net.IPNet, ok bool, err error) {
 	if r.buffer == nil {
 		return nil, false, errors.New("cannot call Lookup on a closed database")
 	}
@@ -228,7 +231,7 @@ func (r *Reader) decode(offset uintptr, result interface{}) error {
 	}
 
 	if dser, ok := result.(deserializer); ok {
-		_, err := r.decoder.decodeToDeserializer(uint(offset), dser, 0)
+		_, err := r.decoder.decodeToDeserializer(uint(offset), dser, 0, false)
 		return err
 	}
 
@@ -238,7 +241,7 @@ func (r *Reader) decode(offset uintptr, result interface{}) error {
 
 func (r *Reader) lookupPointer(ip net.IP) (uint, int, net.IP, error) {
 	if ip == nil {
-		return 0, 0, ip, errors.New("IP passed to Lookup cannot be nil")
+		return 0, 0, nil, errors.New("IP passed to Lookup cannot be nil")
 	}
 
 	ipV4Address := ip.To4()
@@ -246,7 +249,10 @@ func (r *Reader) lookupPointer(ip net.IP) (uint, int, net.IP, error) {
 		ip = ipV4Address
 	}
 	if len(ip) == 16 && r.Metadata.IPVersion == 4 {
-		return 0, 0, ip, fmt.Errorf("error looking up '%s': you attempted to look up an IPv6 address in an IPv4-only database", ip.String())
+		return 0, 0, ip, fmt.Errorf(
+			"error looking up '%s': you attempted to look up an IPv6 address in an IPv4-only database",
+			ip.String(),
+		)
 	}
 
 	bitCount := uint(len(ip) * 8)
diff --git a/reader_other.go b/reader_other.go
index ee2e5e8..0ed9de1 100644
--- a/reader_other.go
+++ b/reader_other.go
@@ -1,3 +1,4 @@
+//go:build !appengine && !plan9
 // +build !appengine,!plan9
 
 package maxminddb
@@ -33,13 +34,15 @@ func Open(file string) (*Reader, error) {
 	}
 
 	if err := mapFile.Close(); err != nil {
-		_ = munmap(mmap)
+		//nolint:errcheck // we prefer to return the original error
+		munmap(mmap)
 		return nil, err
 	}
 
 	reader, err := FromBytes(mmap)
 	if err != nil {
-		_ = munmap(mmap)
+		//nolint:errcheck // we prefer to return the original error
+		munmap(mmap)
 		return nil, err
 	}
 
diff --git a/reader_test.go b/reader_test.go
index 19031c2..9eabea1 100644
--- a/reader_test.go
+++ b/reader_test.go
@@ -3,10 +3,10 @@ package maxminddb
 import (
 	"errors"
 	"fmt"
-	"io/ioutil"
 	"math/big"
 	"math/rand"
 	"net"
+	"os"
 	"path/filepath"
 	"testing"
 	"time"
@@ -18,7 +18,11 @@ import (
 func TestReader(t *testing.T) {
 	for _, recordSize := range []uint{24, 28, 32} {
 		for _, ipVersion := range []uint{4, 6} {
-			fileName := fmt.Sprintf(testFile("MaxMind-DB-test-ipv%d-%d.mmdb"), ipVersion, recordSize)
+			fileName := fmt.Sprintf(
+				testFile("MaxMind-DB-test-ipv%d-%d.mmdb"),
+				ipVersion,
+				recordSize,
+			)
 			reader, err := Open(fileName)
 			require.NoError(t, err, "unexpected error while opening database: %v", err)
 			checkMetadata(t, reader, ipVersion, recordSize)
@@ -35,8 +39,13 @@ func TestReader(t *testing.T) {
 func TestReaderBytes(t *testing.T) {
 	for _, recordSize := range []uint{24, 28, 32} {
 		for _, ipVersion := range []uint{4, 6} {
-			fileName := fmt.Sprintf(testFile("MaxMind-DB-test-ipv%d-%d.mmdb"), ipVersion, recordSize)
-			bytes, _ := ioutil.ReadFile(fileName)
+			fileName := fmt.Sprintf(
+				testFile("MaxMind-DB-test-ipv%d-%d.mmdb"),
+				ipVersion,
+				recordSize,
+			)
+			bytes, err := os.ReadFile(fileName)
+			require.NoError(t, err)
 			reader, err := FromBytes(bytes)
 			require.NoError(t, err, "unexpected error while opening bytes: %v", err)
 
@@ -206,6 +215,11 @@ func TestDecodingToInterface(t *testing.T) {
 	checkDecodingToInterface(t, recordInterface)
 }
 
+func TestMetadataPointer(t *testing.T) {
+	_, err := Open(testFile("MaxMind-DB-test-metadata-pointers.mmdb"))
+	require.NoError(t, err, "unexpected error while opening database: %v", err)
+}
+
 func checkDecodingToInterface(t *testing.T, recordInterface interface{}) {
 	record := recordInterface.(map[string]interface{})
 	assert.Equal(t, []interface{}{uint64(1), uint64(2), uint64(3)}, record["array"])
@@ -233,7 +247,6 @@ func checkDecodingToInterface(t *testing.T, recordInterface interface{}) {
 	assert.Equal(t, bigInt, record["uint128"])
 }
 
-// nolint: maligned
 type TestType struct {
 	Array      []uint                 `maxminddb:"array"`
 	Boolean    bool                   `maxminddb:"boolean"`
@@ -326,7 +339,11 @@ func TestNonEmptyNilInterface(t *testing.T) {
 	require.NoError(t, err)
 
 	err = reader.Lookup(net.ParseIP("::1.1.1.0"), &result)
-	assert.Equal(t, "maxminddb: cannot unmarshal map into type maxminddb.TestInterface", err.Error())
+	assert.Equal(
+		t,
+		"maxminddb: cannot unmarshal map into type maxminddb.TestInterface",
+		err.Error(),
+	)
 }
 
 type CityTraits struct {
@@ -499,7 +516,9 @@ func TestIpv6inIpv4(t *testing.T) {
 	var emptyResult TestType
 	assert.Equal(t, emptyResult, result)
 
-	expected := errors.New("error looking up '2001::': you attempted to look up an IPv6 address in an IPv4-only database")
+	expected := errors.New(
+		"error looking up '2001::': you attempted to look up an IPv6 address in an IPv4-only database",
+	)
 	assert.Equal(t, expected, err)
 	assert.NoError(t, reader.Close(), "error on close")
 }
@@ -511,7 +530,9 @@ func TestBrokenDoubleDatabase(t *testing.T) {
 	var result interface{}
 	err = reader.Lookup(net.ParseIP("2001:220::"), &result)
 
-	expected := newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (float 64 size of 2)")
+	expected := newInvalidDatabaseError(
+		"the MaxMind DB file's data section contains bad data (float 64 size of 2)",
+	)
 	assert.Equal(t, expected, err)
 	assert.NoError(t, reader.Close(), "error on close")
 }
@@ -536,30 +557,33 @@ func TestNonDatabase(t *testing.T) {
 }
 
 func TestDecodingToNonPointer(t *testing.T) {
-	reader, _ := Open(testFile("MaxMind-DB-test-decoder.mmdb"))
+	reader, err := Open(testFile("MaxMind-DB-test-decoder.mmdb"))
+	require.NoError(t, err)
 
 	var recordInterface interface{}
-	err := reader.Lookup(net.ParseIP("::1.1.1.0"), recordInterface)
+	err = reader.Lookup(net.ParseIP("::1.1.1.0"), recordInterface)
 	assert.Equal(t, "result param must be a pointer", err.Error())
 	assert.NoError(t, reader.Close(), "error on close")
 }
 
 func TestNilLookup(t *testing.T) {
-	reader, _ := Open(testFile("MaxMind-DB-test-decoder.mmdb"))
+	reader, err := Open(testFile("MaxMind-DB-test-decoder.mmdb"))
+	require.NoError(t, err)
 
 	var recordInterface interface{}
-	err := reader.Lookup(nil, recordInterface)
+	err = reader.Lookup(nil, recordInterface)
 	assert.Equal(t, "IP passed to Lookup cannot be nil", err.Error())
 	assert.NoError(t, reader.Close(), "error on close")
 }
 
 func TestUsingClosedDatabase(t *testing.T) {
-	reader, _ := Open(testFile("MaxMind-DB-test-decoder.mmdb"))
+	reader, err := Open(testFile("MaxMind-DB-test-decoder.mmdb"))
+	require.NoError(t, err)
 	require.NoError(t, reader.Close())
 
 	var recordInterface interface{}
 
-	err := reader.Lookup(nil, recordInterface)
+	err = reader.Lookup(nil, recordInterface)
 	assert.Equal(t, "cannot call Lookup on a closed database", err.Error())
 
 	_, err = reader.LookupOffset(nil)
@@ -693,6 +717,7 @@ func BenchmarkInterfaceLookup(b *testing.B) {
 	db, err := Open("GeoLite2-City.mmdb")
 	require.NoError(b, err)
 
+	//nolint:gosec // this is a test
 	r := rand.New(rand.NewSource(time.Now().UnixNano()))
 	var result interface{}
 
@@ -711,6 +736,7 @@ func BenchmarkInterfaceLookupNetwork(b *testing.B) {
 	db, err := Open("GeoLite2-City.mmdb")
 	require.NoError(b, err)
 
+	//nolint:gosec // this is a test
 	r := rand.New(rand.NewSource(time.Now().UnixNano()))
 	var result interface{}
 
@@ -779,6 +805,7 @@ func BenchmarkCityLookup(b *testing.B) {
 	db, err := Open("GeoLite2-City.mmdb")
 	require.NoError(b, err)
 
+	//nolint:gosec // this is a test
 	r := rand.New(rand.NewSource(time.Now().UnixNano()))
 	var result fullCity
 
@@ -797,6 +824,7 @@ func BenchmarkCityLookupNetwork(b *testing.B) {
 	db, err := Open("GeoLite2-City.mmdb")
 	require.NoError(b, err)
 
+	//nolint:gosec // this is a test
 	r := rand.New(rand.NewSource(time.Now().UnixNano()))
 	var result fullCity
 
@@ -821,6 +849,7 @@ func BenchmarkCountryCode(b *testing.B) {
 		} `maxminddb:"country"`
 	}
 
+	//nolint:gosec // this is a test
 	r := rand.New(rand.NewSource(0))
 	var result MinCountry
 
diff --git a/traverse.go b/traverse.go
index 9b9b25b..7009ec1 100644
--- a/traverse.go
+++ b/traverse.go
@@ -27,7 +27,7 @@ var (
 	allIPv6 = &net.IPNet{IP: make(net.IP, 16), Mask: net.CIDRMask(0, 128)}
 )
 
-// NetworksOption are options for Networks and NetworksWithin
+// NetworksOption are options for Networks and NetworksWithin.
 type NetworksOption func(*Networks)
 
 // SkipAliasedNetworks is an option for Networks and NetworksWithin that
diff --git a/traverse_test.go b/traverse_test.go
index 2c55581..a38a8f5 100644
--- a/traverse_test.go
+++ b/traverse_test.go
@@ -12,7 +12,9 @@ import (
 func TestNetworks(t *testing.T) {
 	for _, recordSize := range []uint{24, 28, 32} {
 		for _, ipVersion := range []uint{4, 6} {
-			fileName := testFile(fmt.Sprintf("MaxMind-DB-test-ipv%d-%d.mmdb", ipVersion, recordSize))
+			fileName := testFile(
+				fmt.Sprintf("MaxMind-DB-test-ipv%d-%d.mmdb", ipVersion, recordSize),
+			)
 			reader, err := Open(fileName)
 			require.Nil(t, err, "unexpected error while opening database: %v", err)
 
diff --git a/verifier.go b/verifier.go
index 45d25c8..88381d7 100644
--- a/verifier.go
+++ b/verifier.go
@@ -141,19 +141,30 @@ func (v *verifier) verifyDataSection(offsets map[uint]bool) error {
 		rv := reflect.ValueOf(&data)
 		newOffset, err := decoder.decode(offset, rv, 0)
 		if err != nil {
-			return newInvalidDatabaseError("received decoding error (%v) at offset of %v", err, offset)
+			return newInvalidDatabaseError(
+				"received decoding error (%v) at offset of %v",
+				err,
+				offset,
+			)
 		}
 		if newOffset <= offset {
-			return newInvalidDatabaseError("data section offset unexpectedly went from %v to %v", offset, newOffset)
+			return newInvalidDatabaseError(
+				"data section offset unexpectedly went from %v to %v",
+				offset,
+				newOffset,
+			)
 		}
 
 		pointer := offset
 
-		if _, ok := offsets[pointer]; ok {
-			delete(offsets, pointer)
-		} else {
-			return newInvalidDatabaseError("found data (%v) at %v that the search tree does not point to", data, pointer)
+		if _, ok := offsets[pointer]; !ok {
+			return newInvalidDatabaseError(
+				"found data (%v) at %v that the search tree does not point to",
+				data,
+				pointer,
+			)
 		}
+		delete(offsets, pointer)
 
 		offset = newOffset
 	}
diff --git a/verifier_test.go b/verifier_test.go
index fc62563..dfdbd63 100644
--- a/verifier_test.go
+++ b/verifier_test.go
@@ -36,7 +36,13 @@ func TestVerifyOnGoodDatabases(t *testing.T) {
 			reader, err := Open(testFile(database))
 			require.NoError(t, err)
 
-			assert.NoError(t, reader.Verify(), "Received error (%v) when verifying %v", err, database)
+			assert.NoError(
+				t,
+				reader.Verify(),
+				"Received error (%v) when verifying %v",
+				err,
+				database,
+			)
 		})
 	}
 }
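
As a reference for the expected-output changes in example_test.go above (the Connection-Type test data now yields Cable/DSL and Cellular where 1.8.0 reported Dialup), here is a minimal sketch of the same public iteration API; the database path is a placeholder for a locally available MMDB file:

package main

import (
	"fmt"
	"log"

	"github.com/oschwald/maxminddb-golang"
)

func main() {
	// Placeholder path; substitute any MaxMind DB file you have locally.
	db, err := maxminddb.Open("GeoIP2-Connection-Type-Test.mmdb")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Walk every network in the database and decode each record generically.
	var record interface{}
	networks := db.Networks()
	for networks.Next() {
		subnet, err := networks.Network(&record)
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("%s: %v\n", subnet, record)
	}
	if err := networks.Err(); err != nil {
		log.Fatal(err)
	}
}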

Debdiff

[The following lists of changes regard files as different if they have different names, permissions or owners.]

Files in second set of .debs but not in first

-rw-r--r--  root/root   /usr/share/gocode/src/github.com/oschwald/maxminddb-golang/deserializer_test.go

Files in first set of .debs but not in second

-rw-r--r--  root/root   /usr/share/gocode/src/github.com/oschwald/maxminddb-golang/deserializer.go deserializer_test.go

No differences were encountered in the control files
