Skip to content
Snippets Groups Projects
Commit b8e2a070 authored by Anton Gladky's avatar Anton Gladky
Browse files

New upstream version 1.12.0

parent 92e3c743
No related branches found
No related tags found
No related merge requests found
version: 2
updates:
- package-ecosystem: gomod
directory: "/"
schedule:
interval: daily
time: "13:00"
open-pull-requests-limit: 10
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
name: "Code scanning - action"
on:
push:
branches-ignore:
- 'dependabot/**'
pull_request:
schedule:
- cron: '0 13 * * 4'
jobs:
CodeQL-Build:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v3
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
fetch-depth: 2
# If this run was triggered by a pull request event, then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
name: Go
on: [push, pull_request]
jobs:
build:
name: Build
strategy:
matrix:
go-version: [1.19.x, 1.20.x]
platform: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.platform }}
steps:
- name: Set up Go 1.x
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
id: go
- name: Check out code into the Go module directory
uses: actions/checkout@v3
with:
submodules: true
- name: Get dependencies
run: go get -v -t -d ./...
- name: Build
run: go build -v .
- name: Test
run: go test -race -v ./...
name: golangci-lint
on: [push, pull_request]
jobs:
golangci:
name: lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: golangci-lint
uses: golangci/golangci-lint-action@v3
with:
version: latest
[submodule "test-data"]
path = test-data
url = https://github.com/maxmind/MaxMind-DB.git
[run]
deadline = "10m"
tests = true
[linters]
disable-all = true
enable = [
"asasalint",
"asciicheck",
"bidichk",
"bodyclose",
"containedctx",
"contextcheck",
"depguard",
"dupword",
"durationcheck",
"errcheck",
"errchkjson",
"errname",
"errorlint",
# "exhaustive",
"exportloopref",
"forbidigo",
"goconst",
"gocyclo",
"gocritic",
"godot",
"gofumpt",
"gomodguard",
"gosec",
"gosimple",
"govet",
"grouper",
"ineffassign",
"lll",
"makezero",
"maintidx",
"misspell",
"nakedret",
"nilerr",
"noctx",
"nolintlint",
"nosprintfhostport",
"predeclared",
"revive",
"rowserrcheck",
"sqlclosecheck",
"staticcheck",
"stylecheck",
"tenv",
"tparallel",
"typecheck",
"unconvert",
"unparam",
"unused",
"usestdlibvars",
"vetshadow",
"wastedassign",
]
[[linters-settings.depguard.rules.main.deny]]
pkg = "io/ioutil"
desc = "Deprecated. Functions have been moved elsewhere."
[linters-settings.errcheck]
check-blank = true
# Ignoring Close so that we don't have to have a bunch of
# `defer func() { _ = r.Close() }()` constructs when we
# don't actually care about the error.
ignore = "Close,fmt:.*"
[linters-settings.errorlint]
errorf = true
asserts = true
comparison = true
[linters-settings.exhaustive]
default-signifies-exhaustive = true
[linters-settings.forbidigo]
# Forbid the following identifiers
forbid = [
"Geoip", # use "GeoIP"
"^geoIP", # use "geoip"
"Maxmind", # use "MaxMind"
"^maxMind", # use "maxmind"
]
[linters-settings.gocritic]
enabled-checks = [
"appendAssign",
"appendCombine",
"argOrder",
"assignOp",
"badCall",
"badCond",
"badLock",
"badRegexp",
"badSorting",
"boolExprSimplify",
"builtinShadow",
"builtinShadowDecl",
"captLocal",
"caseOrder",
"codegenComment",
"commentedOutCode",
"commentedOutImport",
"commentFormatting",
"defaultCaseOrder",
"deferInLoop",
"deferUnlambda",
"deprecatedComment",
"docStub",
"dupArg",
"dupBranchBody",
"dupCase",
"dupImport",
"dupSubExpr",
"dynamicFmtString",
"elseif",
"emptyDecl",
"emptyFallthrough",
"emptyStringTest",
"equalFold",
"evalOrder",
"exitAfterDefer",
"exposedSyncMutex",
"externalErrorReassign",
"filepathJoin",
"flagDeref",
"flagName",
"hexLiteral",
"httpNoBody",
"hugeParam",
"ifElseChain",
"importShadow",
"indexAlloc",
"initClause",
"mapKey",
"methodExprCall",
"nestingReduce",
"newDeref",
"nilValReturn",
"octalLiteral",
"offBy1",
"paramTypeCombine",
"preferDecodeRune",
"preferFilepathJoin",
"preferFprint",
"preferStringWriter",
"preferWriteByte",
"ptrToRefParam",
"rangeExprCopy",
"rangeValCopy",
"redundantSprint",
"regexpMust",
"regexpPattern",
"regexpSimplify",
"returnAfterHttpError",
"ruleguard",
"singleCaseSwitch",
"sliceClear",
"sloppyLen",
"sloppyReassign",
"sloppyTestFuncName",
"sloppyTypeAssert",
"sortSlice",
"sprintfQuotedString",
"sqlQuery",
"stringsCompare",
"stringConcatSimplify",
"stringXbytes",
"switchTrue",
"syncMapLoadAndDelete",
"timeExprSimplify",
"todoCommentWithoutDetail",
"tooManyResultsChecker",
"truncateCmp",
"typeAssertChain",
"typeDefFirst",
"typeSwitchVar",
"typeUnparen",
"underef",
"unlabelStmt",
"unlambda",
# "unnamedResult",
"unnecessaryBlock",
"unnecessaryDefer",
"unslice",
"valSwap",
"weakCond",
# Covered by nolintlint
# "whyNoLint"
"wrapperFunc",
"yodaStyleExpr",
]
[linters-settings.gofumpt]
extra-rules = true
lang-version = "1.19"
[linters-settings.gosec]
excludes = [
# G104 - "Audit errors not checked." We use errcheck for this.
"G104",
# G304 - "Potential file inclusion via variable"
"G304",
# G306 - "Expect WriteFile permissions to be 0600 or less".
"G306",
# Prohibits defer (*os.File).Close, which we allow when reading from file.
"G307",
]
[linters-settings.govet]
"enable-all" = true
disable = ["shadow"]
[linters-settings.lll]
line-length = 120
tab-width = 4
[linters-settings.nolintlint]
allow-leading-space = false
allow-unused = false
allow-no-explanation = ["lll", "misspell"]
require-explanation = true
require-specific = true
[linters-settings.revive]
ignore-generated-header = true
severity = "warning"
# [[linters-settings.revive.rules]]
# name = "add-constant"
# [[linters-settings.revive.rules]]
# name = "argument-limit"
[[linters-settings.revive.rules]]
name = "atomic"
[[linters-settings.revive.rules]]
name = "bare-return"
[[linters-settings.revive.rules]]
name = "blank-imports"
[[linters-settings.revive.rules]]
name = "bool-literal-in-expr"
[[linters-settings.revive.rules]]
name = "call-to-gc"
# [[linters-settings.revive.rules]]
# name = "cognitive-complexity"
[[linters-settings.revive.rules]]
name = "comment-spacings"
arguments = ["easyjson", "nolint"]
# [[linters-settings.revive.rules]]
# name = "confusing-naming"
# [[linters-settings.revive.rules]]
# name = "confusing-results"
[[linters-settings.revive.rules]]
name = "constant-logical-expr"
[[linters-settings.revive.rules]]
name = "context-as-argument"
[[linters-settings.revive.rules]]
name = "context-keys-type"
# [[linters-settings.revive.rules]]
# name = "cyclomatic"
[[linters-settings.revive.rules]]
name = "datarace"
# [[linters-settings.revive.rules]]
# name = "deep-exit"
[[linters-settings.revive.rules]]
name = "defer"
[[linters-settings.revive.rules]]
name = "dot-imports"
[[linters-settings.revive.rules]]
name = "duplicated-imports"
[[linters-settings.revive.rules]]
name = "early-return"
[[linters-settings.revive.rules]]
name = "empty-block"
[[linters-settings.revive.rules]]
name = "empty-lines"
[[linters-settings.revive.rules]]
name = "errorf"
[[linters-settings.revive.rules]]
name = "error-naming"
[[linters-settings.revive.rules]]
name = "error-return"
[[linters-settings.revive.rules]]
name = "error-strings"
[[linters-settings.revive.rules]]
name = "exported"
# [[linters-settings.revive.rules]]
# name = "file-header"
# [[linters-settings.revive.rules]]
# name = "flag-parameter"
# [[linters-settings.revive.rules]]
# name = "function-result-limit"
[[linters-settings.revive.rules]]
name = "get-return"
[[linters-settings.revive.rules]]
name = "identical-branches"
[[linters-settings.revive.rules]]
name = "if-return"
[[linters-settings.revive.rules]]
name = "imports-blacklist"
[[linters-settings.revive.rules]]
name = "import-shadowing"
[[linters-settings.revive.rules]]
name = "increment-decrement"
[[linters-settings.revive.rules]]
name = "indent-error-flow"
# [[linters-settings.revive.rules]]
# name = "line-length-limit"
# [[linters-settings.revive.rules]]
# name = "max-public-structs"
[[linters-settings.revive.rules]]
name = "modifies-parameter"
[[linters-settings.revive.rules]]
name = "modifies-value-receiver"
# [[linters-settings.revive.rules]]
# name = "nested-structs"
[[linters-settings.revive.rules]]
name = "optimize-operands-order"
[[linters-settings.revive.rules]]
name = "package-comments"
[[linters-settings.revive.rules]]
name = "range"
[[linters-settings.revive.rules]]
name = "range-val-address"
[[linters-settings.revive.rules]]
name = "range-val-in-closure"
[[linters-settings.revive.rules]]
name = "receiver-naming"
[[linters-settings.revive.rules]]
name = "redefines-builtin-id"
[[linters-settings.revive.rules]]
name = "string-of-int"
[[linters-settings.revive.rules]]
name = "struct-tag"
[[linters-settings.revive.rules]]
name = "superfluous-else"
[[linters-settings.revive.rules]]
name = "time-equal"
[[linters-settings.revive.rules]]
name = "time-naming"
[[linters-settings.revive.rules]]
name = "unconditional-recursion"
[[linters-settings.revive.rules]]
name = "unexported-naming"
[[linters-settings.revive.rules]]
name = "unexported-return"
# [[linters-settings.revive.rules]]
# name = "unhandled-error"
[[linters-settings.revive.rules]]
name = "unnecessary-stmt"
[[linters-settings.revive.rules]]
name = "unreachable-code"
[[linters-settings.revive.rules]]
name = "unused-parameter"
[[linters-settings.revive.rules]]
name = "unused-receiver"
[[linters-settings.revive.rules]]
name = "use-any"
[[linters-settings.revive.rules]]
name = "useless-break"
[[linters-settings.revive.rules]]
name = "var-declaration"
[[linters-settings.revive.rules]]
name = "var-naming"
[[linters-settings.revive.rules]]
name = "waitgroup-by-value"
[linters-settings.unparam]
check-exported = true
[issues]
exclude-use-default = false
[[issues.exclude-rules]]
linters = [
"govet"
]
path = "_test.go"
text = "^fieldalignment"
language: go
go:
- 1.4
- 1.5
- 1.6
- 1.7
- 1.8
- tip
before_install:
- "if [[ $TRAVIS_GO_VERSION == 1.7 ]]; then go get -v github.com/golang/lint/golint; fi"
install:
- go get -v -t ./...
script:
- go test -race -cpu 1,4 -v
- go test -race -v -tags appengine
- "if [[ $TRAVIS_GO_VERSION == 1.7 ]]; then go vet ./...; fi"
- "if [[ $TRAVIS_GO_VERSION == 1.7 ]]; then golint .; fi"
sudo: false
# MaxMind DB Reader for Go #
[![Build Status](https://travis-ci.org/oschwald/maxminddb-golang.png?branch=master)](https://travis-ci.org/oschwald/maxminddb-golang)
[![Windows Build Status](https://ci.appveyor.com/api/projects/status/4j2f9oep8nnfrmov/branch/master?svg=true)](https://ci.appveyor.com/project/oschwald/maxminddb-golang/branch/master)
[![GoDoc](https://godoc.org/github.com/oschwald/maxminddb-golang?status.svg)](https://godoc.org/github.com/oschwald/maxminddb-golang)
This is a Go reader for the MaxMind DB format. Although this can be used to
read [GeoLite2](http://dev.maxmind.com/geoip/geoip2/geolite2/) and
......
version: "{build}"
os: Windows Server 2012 R2
clone_folder: c:\gopath\src\github.com\oschwald\maxminddb-golang
environment:
GOPATH: c:\gopath
install:
- echo %PATH%
- echo %GOPATH%
- git submodule update --init --recursive
- go version
- go env
- go get -v -t ./...
build_script:
- go test -v ./...
......@@ -27,20 +27,24 @@ const (
_Uint64
_Uint128
_Slice
_Container
_Marker
// We don't use the next two. They are placeholders. See the spec
// for more details.
_Container //nolint: deadcode, varcheck // above
_Marker //nolint: deadcode, varcheck // above
_Bool
_Float32
)
const (
// This is the value used in libmaxminddb
// This is the value used in libmaxminddb.
maximumDataStructureDepth = 512
)
func (d *decoder) decode(offset uint, result reflect.Value, depth int) (uint, error) {
if depth > maximumDataStructureDepth {
return 0, newInvalidDatabaseError("exceeded maximum data structure depth; database is likely corrupt")
return 0, newInvalidDatabaseError(
"exceeded maximum data structure depth; database is likely corrupt",
)
}
typeNum, size, newOffset, err := d.decodeCtrlData(offset)
if err != nil {
......@@ -54,6 +58,36 @@ func (d *decoder) decode(offset uint, result reflect.Value, depth int) (uint, er
return d.decodeFromType(typeNum, size, newOffset, result, depth+1)
}
// decodeToDeserializer decodes the value at offset, emitting it through
// dser instead of into a reflect.Value. When the deserializer elects to
// skip the value and getNext is true, the offset of the following value
// is returned; with getNext false a zero offset is returned instead.
func (d *decoder) decodeToDeserializer(
	offset uint,
	dser deserializer,
	depth int,
	getNext bool,
) (uint, error) {
	if depth > maximumDataStructureDepth {
		return 0, newInvalidDatabaseError(
			"exceeded maximum data structure depth; database is likely corrupt",
		)
	}

	// Give the deserializer a chance to skip this value entirely.
	if skip, err := dser.ShouldSkip(uintptr(offset)); err != nil || skip {
		if err != nil {
			return 0, err
		}
		if !getNext {
			return 0, nil
		}
		return d.nextValueOffset(offset, 1)
	}

	typeNum, size, newOffset, err := d.decodeCtrlData(offset)
	if err != nil {
		return 0, err
	}

	return d.decodeFromTypeToDeserializer(typeNum, size, newOffset, dser, depth+1)
}
func (d *decoder) decodeCtrlData(offset uint) (dataType, uint, uint, error) {
newOffset := offset + 1
if offset >= uint(len(d.buffer)) {
......@@ -75,7 +109,11 @@ func (d *decoder) decodeCtrlData(offset uint) (dataType, uint, uint, error) {
return typeNum, size, newOffset, err
}
func (d *decoder) sizeFromCtrlByte(ctrlByte byte, offset uint, typeNum dataType) (uint, uint, error) {
func (d *decoder) sizeFromCtrlByte(
ctrlByte byte,
offset uint,
typeNum dataType,
) (uint, uint, error) {
size := uint(ctrlByte & 0x1f)
if typeNum == _Extended {
return size, offset, nil
......@@ -113,12 +151,12 @@ func (d *decoder) decodeFromType(
result reflect.Value,
depth int,
) (uint, error) {
result = d.indirect(result)
result = indirect(result)
// For these types, size has a special meaning
switch dtype {
case _Bool:
return d.unmarshalBool(size, offset, result)
return unmarshalBool(size, offset, result)
case _Map:
return d.unmarshalMap(size, offset, result, depth)
case _Pointer:
......@@ -155,14 +193,77 @@ func (d *decoder) decodeFromType(
}
}
func (d *decoder) unmarshalBool(size uint, offset uint, result reflect.Value) (uint, error) {
if size > 1 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (bool size of %v)", size)
}
value, newOffset, err := d.decodeBool(size, offset)
// decodeFromTypeToDeserializer dispatches on the control type of the
// value at offset and feeds the decoded value to dser, returning the
// offset immediately following the value.
func (d *decoder) decodeFromTypeToDeserializer(
	dtype dataType,
	size uint,
	offset uint,
	dser deserializer,
	depth int,
) (uint, error) {
	// These types interpret the size bits specially rather than as a
	// byte length.
	switch dtype {
	case _Bool:
		val, next := decodeBool(size, offset)
		return next, dser.Bool(val)
	case _Map:
		return d.decodeMapToDeserializer(size, offset, dser, depth)
	case _Pointer:
		pointer, newOffset, err := d.decodePointer(size, offset)
		if err != nil {
			return 0, err
		}
		_, err = d.decodeToDeserializer(pointer, dser, depth, false)
		return newOffset, err
	case _Slice:
		return d.decodeSliceToDeserializer(size, offset, dser, depth)
	}

	// Every remaining type stores exactly size bytes; verify they lie
	// within the buffer before decoding.
	if offset+size > uint(len(d.buffer)) {
		return 0, newOffsetError()
	}

	switch dtype {
	case _Bytes:
		val, next := d.decodeBytes(size, offset)
		return next, dser.Bytes(val)
	case _Float32:
		val, next := d.decodeFloat32(size, offset)
		return next, dser.Float32(val)
	case _Float64:
		val, next := d.decodeFloat64(size, offset)
		return next, dser.Float64(val)
	case _Int32:
		val, next := d.decodeInt(size, offset)
		return next, dser.Int32(int32(val))
	case _String:
		val, next := d.decodeString(size, offset)
		return next, dser.String(val)
	case _Uint16:
		val, next := d.decodeUint(size, offset)
		return next, dser.Uint16(uint16(val))
	case _Uint32:
		val, next := d.decodeUint(size, offset)
		return next, dser.Uint32(uint32(val))
	case _Uint64:
		val, next := d.decodeUint(size, offset)
		return next, dser.Uint64(val)
	case _Uint128:
		val, next := d.decodeUint128(size, offset)
		return next, dser.Uint128(val)
	default:
		return 0, newInvalidDatabaseError("unknown type: %d", dtype)
	}
}
func unmarshalBool(size, offset uint, result reflect.Value) (uint, error) {
if size > 1 {
return 0, newInvalidDatabaseError(
"the MaxMind DB file's data section contains bad data (bool size of %v)",
size,
)
}
value, newOffset := decodeBool(size, offset)
switch result.Kind() {
case reflect.Bool:
result.SetBool(value)
......@@ -180,7 +281,7 @@ func (d *decoder) unmarshalBool(size uint, offset uint, result reflect.Value) (u
// heavily based on encoding/json as my original version had a subtle
// bug. This method should be considered to be licensed under
// https://golang.org/LICENSE
func (d *decoder) indirect(result reflect.Value) reflect.Value {
func indirect(result reflect.Value) reflect.Value {
for {
// Load value from interface, but only if the result will be
// usefully addressable.
......@@ -199,6 +300,7 @@ func (d *decoder) indirect(result reflect.Value) reflect.Value {
if result.IsNil() {
result.Set(reflect.New(result.Type().Elem()))
}
result = result.Elem()
}
return result
......@@ -206,11 +308,9 @@ func (d *decoder) indirect(result reflect.Value) reflect.Value {
// sliceType is the reflect.Type of []byte, used to fast-path byte-slice
// targets in unmarshalBytes.
var sliceType = reflect.TypeOf([]byte{})
func (d *decoder) unmarshalBytes(size uint, offset uint, result reflect.Value) (uint, error) {
value, newOffset, err := d.decodeBytes(size, offset)
if err != nil {
return 0, err
}
func (d *decoder) unmarshalBytes(size, offset uint, result reflect.Value) (uint, error) {
value, newOffset := d.decodeBytes(size, offset)
switch result.Kind() {
case reflect.Slice:
if result.Type() == sliceType {
......@@ -226,14 +326,14 @@ func (d *decoder) unmarshalBytes(size uint, offset uint, result reflect.Value) (
return newOffset, newUnmarshalTypeError(value, result.Type())
}
func (d *decoder) unmarshalFloat32(size uint, offset uint, result reflect.Value) (uint, error) {
func (d *decoder) unmarshalFloat32(size, offset uint, result reflect.Value) (uint, error) {
if size != 4 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (float32 size of %v)", size)
}
value, newOffset, err := d.decodeFloat32(size, offset)
if err != nil {
return 0, err
return 0, newInvalidDatabaseError(
"the MaxMind DB file's data section contains bad data (float32 size of %v)",
size,
)
}
value, newOffset := d.decodeFloat32(size, offset)
switch result.Kind() {
case reflect.Float32, reflect.Float64:
......@@ -248,15 +348,15 @@ func (d *decoder) unmarshalFloat32(size uint, offset uint, result reflect.Value)
return newOffset, newUnmarshalTypeError(value, result.Type())
}
func (d *decoder) unmarshalFloat64(size uint, offset uint, result reflect.Value) (uint, error) {
func (d *decoder) unmarshalFloat64(size, offset uint, result reflect.Value) (uint, error) {
if size != 8 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (float 64 size of %v)", size)
}
value, newOffset, err := d.decodeFloat64(size, offset)
if err != nil {
return 0, err
return 0, newInvalidDatabaseError(
"the MaxMind DB file's data section contains bad data (float 64 size of %v)",
size,
)
}
value, newOffset := d.decodeFloat64(size, offset)
switch result.Kind() {
case reflect.Float32, reflect.Float64:
if result.OverflowFloat(value) {
......@@ -273,14 +373,14 @@ func (d *decoder) unmarshalFloat64(size uint, offset uint, result reflect.Value)
return newOffset, newUnmarshalTypeError(value, result.Type())
}
func (d *decoder) unmarshalInt32(size uint, offset uint, result reflect.Value) (uint, error) {
func (d *decoder) unmarshalInt32(size, offset uint, result reflect.Value) (uint, error) {
if size > 4 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (int32 size of %v)", size)
}
value, newOffset, err := d.decodeInt(size, offset)
if err != nil {
return 0, err
return 0, newInvalidDatabaseError(
"the MaxMind DB file's data section contains bad data (int32 size of %v)",
size,
)
}
value, newOffset := d.decodeInt(size, offset)
switch result.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
......@@ -289,7 +389,12 @@ func (d *decoder) unmarshalInt32(size uint, offset uint, result reflect.Value) (
result.SetInt(n)
return newOffset, nil
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
case reflect.Uint,
reflect.Uint8,
reflect.Uint16,
reflect.Uint32,
reflect.Uint64,
reflect.Uintptr:
n := uint64(value)
if !result.OverflowUint(n) {
result.SetUint(n)
......@@ -310,7 +415,7 @@ func (d *decoder) unmarshalMap(
result reflect.Value,
depth int,
) (uint, error) {
result = d.indirect(result)
result = indirect(result)
switch result.Kind() {
default:
return 0, newUnmarshalTypeError("map", result.Type())
......@@ -320,7 +425,7 @@ func (d *decoder) unmarshalMap(
return d.decodeMap(size, offset, result, depth)
case reflect.Interface:
if result.NumMethod() == 0 {
rv := reflect.ValueOf(make(map[string]interface{}, size))
rv := reflect.ValueOf(make(map[string]any, size))
newOffset, err := d.decodeMap(size, offset, rv, depth)
result.Set(rv)
return newOffset, err
......@@ -329,7 +434,11 @@ func (d *decoder) unmarshalMap(
}
}
func (d *decoder) unmarshalPointer(size uint, offset uint, result reflect.Value, depth int) (uint, error) {
func (d *decoder) unmarshalPointer(
size, offset uint,
result reflect.Value,
depth int,
) (uint, error) {
pointer, newOffset, err := d.decodePointer(size, offset)
if err != nil {
return 0, err
......@@ -349,7 +458,7 @@ func (d *decoder) unmarshalSlice(
return d.decodeSlice(size, offset, result, depth)
case reflect.Interface:
if result.NumMethod() == 0 {
a := []interface{}{}
a := []any{}
rv := reflect.ValueOf(&a).Elem()
newOffset, err := d.decodeSlice(size, offset, rv, depth)
result.Set(rv)
......@@ -359,12 +468,9 @@ func (d *decoder) unmarshalSlice(
return 0, newUnmarshalTypeError("array", result.Type())
}
func (d *decoder) unmarshalString(size uint, offset uint, result reflect.Value) (uint, error) {
value, newOffset, err := d.decodeString(size, offset)
func (d *decoder) unmarshalString(size, offset uint, result reflect.Value) (uint, error) {
value, newOffset := d.decodeString(size, offset)
if err != nil {
return 0, err
}
switch result.Kind() {
case reflect.String:
result.SetString(value)
......@@ -376,18 +482,22 @@ func (d *decoder) unmarshalString(size uint, offset uint, result reflect.Value)
}
}
return newOffset, newUnmarshalTypeError(value, result.Type())
}
func (d *decoder) unmarshalUint(size uint, offset uint, result reflect.Value, uintType uint) (uint, error) {
func (d *decoder) unmarshalUint(
size, offset uint,
result reflect.Value,
uintType uint,
) (uint, error) {
if size > uintType/8 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (uint%v size of %v)", uintType, size)
return 0, newInvalidDatabaseError(
"the MaxMind DB file's data section contains bad data (uint%v size of %v)",
uintType,
size,
)
}
value, newOffset, err := d.decodeUint(size, offset)
if err != nil {
return 0, err
}
value, newOffset := d.decodeUint(size, offset)
switch result.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
......@@ -396,7 +506,12 @@ func (d *decoder) unmarshalUint(size uint, offset uint, result reflect.Value, ui
result.SetInt(n)
return newOffset, nil
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
case reflect.Uint,
reflect.Uint8,
reflect.Uint16,
reflect.Uint32,
reflect.Uint64,
reflect.Uintptr:
if !result.OverflowUint(value) {
result.SetUint(value)
return newOffset, nil
......@@ -412,14 +527,14 @@ func (d *decoder) unmarshalUint(size uint, offset uint, result reflect.Value, ui
var bigIntType = reflect.TypeOf(big.Int{})
func (d *decoder) unmarshalUint128(size uint, offset uint, result reflect.Value) (uint, error) {
func (d *decoder) unmarshalUint128(size, offset uint, result reflect.Value) (uint, error) {
if size > 16 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (uint128 size of %v)", size)
}
value, newOffset, err := d.decodeUint128(size, offset)
if err != nil {
return 0, err
return 0, newInvalidDatabaseError(
"the MaxMind DB file's data section contains bad data (uint128 size of %v)",
size,
)
}
value, newOffset := d.decodeUint128(size, offset)
switch result.Kind() {
case reflect.Struct:
......@@ -436,36 +551,36 @@ func (d *decoder) unmarshalUint128(size uint, offset uint, result reflect.Value)
return newOffset, newUnmarshalTypeError(value, result.Type())
}
// decodeBool interprets the size bits of a bool control byte: a nonzero
// size means true. The offset is returned unchanged because booleans
// occupy no payload bytes.
//
// Note: the diff residue here contained both the pre- and post-refactor
// signatures; this is the post-refactor, receiver-free form used by the
// deserializer path.
func decodeBool(size, offset uint) (bool, uint) {
	return size != 0, offset
}
func (d *decoder) decodeBytes(size uint, offset uint) ([]byte, uint, error) {
func (d *decoder) decodeBytes(size, offset uint) ([]byte, uint) {
newOffset := offset + size
bytes := make([]byte, size)
copy(bytes, d.buffer[offset:newOffset])
return bytes, newOffset, nil
return bytes, newOffset
}
func (d *decoder) decodeFloat64(size uint, offset uint) (float64, uint, error) {
func (d *decoder) decodeFloat64(size, offset uint) (float64, uint) {
newOffset := offset + size
bits := binary.BigEndian.Uint64(d.buffer[offset:newOffset])
return math.Float64frombits(bits), newOffset, nil
return math.Float64frombits(bits), newOffset
}
func (d *decoder) decodeFloat32(size uint, offset uint) (float32, uint, error) {
func (d *decoder) decodeFloat32(size, offset uint) (float32, uint) {
newOffset := offset + size
bits := binary.BigEndian.Uint32(d.buffer[offset:newOffset])
return math.Float32frombits(bits), newOffset, nil
return math.Float32frombits(bits), newOffset
}
func (d *decoder) decodeInt(size uint, offset uint) (int, uint, error) {
func (d *decoder) decodeInt(size, offset uint) (int, uint) {
newOffset := offset + size
var val int32
for _, b := range d.buffer[offset:newOffset] {
val = (val << 8) | int32(b)
}
return int(val), newOffset, nil
return int(val), newOffset
}
func (d *decoder) decodeMap(
......@@ -475,24 +590,65 @@ func (d *decoder) decodeMap(
depth int,
) (uint, error) {
if result.IsNil() {
result.Set(reflect.MakeMap(result.Type()))
result.Set(reflect.MakeMapWithSize(result.Type(), int(size)))
}
mapType := result.Type()
keyValue := reflect.New(mapType.Key()).Elem()
elemType := mapType.Elem()
var elemValue reflect.Value
for i := uint(0); i < size; i++ {
var key []byte
var err error
key, offset, err = d.decodeKey(offset)
if err != nil {
return 0, err
}
if elemValue.IsValid() {
// After 1.20 is the minimum supported version, this can just be
// elemValue.SetZero()
reflectSetZero(elemValue)
} else {
elemValue = reflect.New(elemType).Elem()
}
offset, err = d.decode(offset, elemValue, depth)
if err != nil {
return 0, err
}
keyValue.SetString(string(key))
result.SetMapIndex(keyValue, elemValue)
}
return offset, nil
}
func (d *decoder) decodeMapToDeserializer(
size uint,
offset uint,
dser deserializer,
depth int,
) (uint, error) {
err := dser.StartMap(size)
if err != nil {
return 0, err
}
for i := uint(0); i < size; i++ {
// TODO - implement key/value skipping?
offset, err = d.decodeToDeserializer(offset, dser, depth, true)
if err != nil {
return 0, err
}
value := reflect.New(result.Type().Elem())
offset, err = d.decode(offset, value, depth)
offset, err = d.decodeToDeserializer(offset, dser, depth, true)
if err != nil {
return 0, err
}
result.SetMapIndex(reflect.ValueOf(string(key)), value.Elem())
}
err = dser.End()
if err != nil {
return 0, err
}
return offset, nil
}
......@@ -511,7 +667,7 @@ func (d *decoder) decodePointer(
if pointerSize == 4 {
prefix = 0
} else {
prefix = uint(size & 0x7)
prefix = size & 0x7
}
unpacked := uintFromBytes(prefix, pointerBytes)
......@@ -549,20 +705,33 @@ func (d *decoder) decodeSlice(
return offset, nil
}
func (d *decoder) decodeString(size uint, offset uint) (string, uint, error) {
newOffset := offset + size
return string(d.buffer[offset:newOffset]), newOffset, nil
func (d *decoder) decodeSliceToDeserializer(
size uint,
offset uint,
dser deserializer,
depth int,
) (uint, error) {
err := dser.StartSlice(size)
if err != nil {
return 0, err
}
for i := uint(0); i < size; i++ {
offset, err = d.decodeToDeserializer(offset, dser, depth, true)
if err != nil {
return 0, err
}
type fieldsType struct {
namedFields map[string]int
anonymousFields []int
}
err = dser.End()
if err != nil {
return 0, err
}
return offset, nil
}
var (
fieldMap = map[reflect.Type]*fieldsType{}
fieldMapMu sync.RWMutex
)
// decodeString returns the size-byte string starting at offset together
// with the offset of the next value. The conversion copies out of the
// database buffer, so the result does not alias the mmapped data.
func (d *decoder) decodeString(size, offset uint) (string, uint) {
	end := offset + size
	return string(d.buffer[offset:end]), end
}
func (d *decoder) decodeStruct(
size uint,
......@@ -570,39 +739,10 @@ func (d *decoder) decodeStruct(
result reflect.Value,
depth int,
) (uint, error) {
resultType := result.Type()
fieldMapMu.RLock()
fields, ok := fieldMap[resultType]
fieldMapMu.RUnlock()
if !ok {
numFields := resultType.NumField()
namedFields := make(map[string]int, numFields)
var anonymous []int
for i := 0; i < numFields; i++ {
field := resultType.Field(i)
fieldName := field.Name
if tag := field.Tag.Get("maxminddb"); tag != "" {
if tag == "-" {
continue
}
fieldName = tag
}
if field.Anonymous {
anonymous = append(anonymous, i)
continue
}
namedFields[fieldName] = i
}
fieldMapMu.Lock()
fields = &fieldsType{namedFields, anonymous}
fieldMap[resultType] = fields
fieldMapMu.Unlock()
}
fields := cachedFields(result)
// This fills in embedded structs
for i := range fields.anonymousFields {
for _, i := range fields.anonymousFields {
_, err := d.unmarshalMap(size, offset, result.Field(i), depth)
if err != nil {
return 0, err
......@@ -638,7 +778,45 @@ func (d *decoder) decodeStruct(
return offset, nil
}
func (d *decoder) decodeUint(size uint, offset uint) (uint64, uint, error) {
type fieldsType struct {
namedFields map[string]int
anonymousFields []int
}
var fieldsMap sync.Map
func cachedFields(result reflect.Value) *fieldsType {
resultType := result.Type()
if fields, ok := fieldsMap.Load(resultType); ok {
return fields.(*fieldsType)
}
numFields := resultType.NumField()
namedFields := make(map[string]int, numFields)
var anonymous []int
for i := 0; i < numFields; i++ {
field := resultType.Field(i)
fieldName := field.Name
if tag := field.Tag.Get("maxminddb"); tag != "" {
if tag == "-" {
continue
}
fieldName = tag
}
if field.Anonymous {
anonymous = append(anonymous, i)
continue
}
namedFields[fieldName] = i
}
fields := &fieldsType{namedFields, anonymous}
fieldsMap.Store(resultType, fields)
return fields
}
func (d *decoder) decodeUint(size, offset uint) (uint64, uint) {
newOffset := offset + size
bytes := d.buffer[offset:newOffset]
......@@ -646,15 +824,15 @@ func (d *decoder) decodeUint(size uint, offset uint) (uint64, uint, error) {
for _, b := range bytes {
val = (val << 8) | uint64(b)
}
return val, newOffset, nil
return val, newOffset
}
func (d *decoder) decodeUint128(size uint, offset uint) (*big.Int, uint, error) {
func (d *decoder) decodeUint128(size, offset uint) (*big.Int, uint) {
newOffset := offset + size
val := new(big.Int)
val.SetBytes(d.buffer[offset:newOffset])
return val, newOffset, nil
return val, newOffset
}
func uintFromBytes(prefix uint, uintBytes []byte) uint {
......@@ -694,8 +872,8 @@ func (d *decoder) decodeKey(offset uint) ([]byte, uint, error) {
// This function is used to skip ahead to the next value without decoding
// the one at the offset passed in. The size bits have different meanings for
// different data types
func (d *decoder) nextValueOffset(offset uint, numberToSkip uint) (uint, error) {
// different data types.
func (d *decoder) nextValueOffset(offset, numberToSkip uint) (uint, error) {
if numberToSkip == 0 {
return offset, nil
}
......
......@@ -2,17 +2,18 @@ package maxminddb
import (
"encoding/hex"
"io/ioutil"
"math/big"
"os"
"reflect"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestBool(t *testing.T) {
bools := map[string]interface{}{
bools := map[string]any{
"0007": false,
"0107": true,
}
......@@ -21,7 +22,7 @@ func TestBool(t *testing.T) {
}
func TestDouble(t *testing.T) {
doubles := map[string]interface{}{
doubles := map[string]any{
"680000000000000000": 0.0,
"683FE0000000000000": 0.5,
"68400921FB54442EEA": 3.14159265359,
......@@ -35,7 +36,7 @@ func TestDouble(t *testing.T) {
}
func TestFloat(t *testing.T) {
floats := map[string]interface{}{
floats := map[string]any{
"040800000000": float32(0.0),
"04083F800000": float32(1.0),
"04083F8CCCCD": float32(1.1),
......@@ -50,7 +51,7 @@ func TestFloat(t *testing.T) {
}
func TestInt32(t *testing.T) {
int32 := map[string]interface{}{
int32s := map[string]any{
"0001": 0,
"0401ffffffff": -1,
"0101ff": 255,
......@@ -64,33 +65,37 @@ func TestInt32(t *testing.T) {
"04017fffffff": 2147483647,
"040180000001": -2147483647,
}
validateDecoding(t, int32)
validateDecoding(t, int32s)
}
func TestMap(t *testing.T) {
maps := map[string]interface{}{
"e0": map[string]interface{}{},
"e142656e43466f6f": map[string]interface{}{"en": "Foo"},
"e242656e43466f6f427a6843e4baba": map[string]interface{}{"en": "Foo", "zh": "人"},
"e1446e616d65e242656e43466f6f427a6843e4baba": map[string]interface{}{"name": map[string]interface{}{"en": "Foo", "zh": "人"}},
"e1496c616e677561676573020442656e427a68": map[string]interface{}{"languages": []interface{}{"en", "zh"}},
maps := map[string]any{
"e0": map[string]any{},
"e142656e43466f6f": map[string]any{"en": "Foo"},
"e242656e43466f6f427a6843e4baba": map[string]any{"en": "Foo", "zh": "人"},
"e1446e616d65e242656e43466f6f427a6843e4baba": map[string]any{
"name": map[string]any{"en": "Foo", "zh": "人"},
},
"e1496c616e677561676573020442656e427a68": map[string]any{
"languages": []any{"en", "zh"},
},
}
validateDecoding(t, maps)
}
func TestSlice(t *testing.T) {
slice := map[string]interface{}{
"0004": []interface{}{},
"010443466f6f": []interface{}{"Foo"},
"020443466f6f43e4baba": []interface{}{"Foo", "人"},
slice := map[string]any{
"0004": []any{},
"010443466f6f": []any{"Foo"},
"020443466f6f43e4baba": []any{"Foo", "人"},
}
validateDecoding(t, slice)
}
var testStrings = makeTestStrings()
func makeTestStrings() map[string]interface{} {
str := map[string]interface{}{
func makeTestStrings() map[string]any {
str := map[string]any{
"40": "",
"4131": "1",
"43E4BABA": "人",
......@@ -113,9 +118,10 @@ func TestString(t *testing.T) {
}
func TestByte(t *testing.T) {
b := make(map[string]interface{})
b := make(map[string]any)
for key, val := range testStrings {
oldCtrl, _ := hex.DecodeString(key[0:2])
oldCtrl, err := hex.DecodeString(key[0:2])
require.NoError(t, err)
newCtrl := []byte{oldCtrl[0] ^ 0xc0}
key = strings.Replace(key, hex.EncodeToString(oldCtrl), hex.EncodeToString(newCtrl), 1)
b[key] = []byte(val.(string))
......@@ -125,18 +131,18 @@ func TestByte(t *testing.T) {
}
func TestUint16(t *testing.T) {
uint16 := map[string]interface{}{
uint16s := map[string]any{
"a0": uint64(0),
"a1ff": uint64(255),
"a201f4": uint64(500),
"a22a78": uint64(10872),
"a2ffff": uint64(65535),
}
validateDecoding(t, uint16)
validateDecoding(t, uint16s)
}
func TestUint32(t *testing.T) {
uint32 := map[string]interface{}{
uint32s := map[string]any{
"c0": uint64(0),
"c1ff": uint64(255),
"c201f4": uint64(500),
......@@ -145,14 +151,14 @@ func TestUint32(t *testing.T) {
"c3ffffff": uint64(16777215),
"c4ffffffff": uint64(4294967295),
}
validateDecoding(t, uint32)
validateDecoding(t, uint32s)
}
func TestUint64(t *testing.T) {
ctrlByte := "02"
bits := uint64(64)
uints := map[string]interface{}{
uints := map[string]any{
"00" + ctrlByte: uint64(0),
"02" + ctrlByte + "01f4": uint64(500),
"02" + ctrlByte + "2a78": uint64(10872),
......@@ -167,12 +173,12 @@ func TestUint64(t *testing.T) {
validateDecoding(t, uints)
}
// Dedup with above somehow
// Dedup with above somehow.
func TestUint128(t *testing.T) {
ctrlByte := "03"
bits := uint(128)
uints := map[string]interface{}{
uints := map[string]any{
"00" + ctrlByte: big.NewInt(0),
"02" + ctrlByte + "01f4": big.NewInt(500),
"02" + ctrlByte + "2a78": big.NewInt(10872),
......@@ -189,7 +195,7 @@ func TestUint128(t *testing.T) {
}
// No pow or bit shifting for big int, apparently :-(
// This is _not_ meant to be a comprehensive power function
// This is _not_ meant to be a comprehensive power function.
func powBigInt(bi *big.Int, pow uint) *big.Int {
newInt := big.NewInt(1)
for i := uint(0); i < pow; i++ {
......@@ -198,14 +204,15 @@ func powBigInt(bi *big.Int, pow uint) *big.Int {
return newInt
}
func validateDecoding(t *testing.T, tests map[string]interface{}) {
func validateDecoding(t *testing.T, tests map[string]any) {
for inputStr, expected := range tests {
inputBytes, _ := hex.DecodeString(inputStr)
inputBytes, err := hex.DecodeString(inputStr)
require.NoError(t, err)
d := decoder{inputBytes}
var result interface{}
_, err := d.decode(0, reflect.ValueOf(&result), 0)
assert.Nil(t, err)
var result any
_, err = d.decode(0, reflect.ValueOf(&result), 0)
assert.NoError(t, err)
if !reflect.DeepEqual(result, expected) {
// A big case statement would produce nicer errors
......@@ -215,8 +222,8 @@ func validateDecoding(t *testing.T, tests map[string]interface{}) {
}
func TestPointers(t *testing.T) {
bytes, err := ioutil.ReadFile("test-data/test-data/maps-with-pointers.raw")
assert.Nil(t, err)
bytes, err := os.ReadFile(testFile("maps-with-pointers.raw"))
require.NoError(t, err)
d := decoder{bytes}
expected := map[uint]map[string]string{
......@@ -231,10 +238,9 @@ func TestPointers(t *testing.T) {
for offset, expectedValue := range expected {
var actual map[string]string
_, err := d.decode(offset, reflect.ValueOf(&actual), 0)
assert.Nil(t, err)
assert.NoError(t, err)
if !reflect.DeepEqual(actual, expectedValue) {
t.Errorf("Decode for pointer at %d failed", offset)
}
}
}
package maxminddb
import "math/big"
// deserializer is an interface for a type that deserializes an MaxMind DB
// data record to some other type. This exists as an alternative to the
// standard reflection API.
//
// This is fundamentally different than the Unmarshaler interface that
// several packages provide. A Deserializer will generally create the
// final struct or value rather than unmarshaling to itself.
//
// This interface and the associated unmarshaling code is EXPERIMENTAL!
// It is not currently covered by any Semantic Versioning guarantees.
// Use at your own risk.
type deserializer interface {
	// ShouldSkip reports whether the value at the given data-section
	// offset should be skipped rather than deserialized.
	ShouldSkip(offset uintptr) (bool, error)
	// StartSlice signals the start of an array with size elements.
	StartSlice(size uint) error
	// StartMap signals the start of a map with size key/value pairs.
	StartMap(size uint) error
	// End closes the most recently started slice or map.
	End() error
	// The remaining methods deliver one scalar value each.
	String(string) error
	Float64(float64) error
	Bytes([]byte) error
	Uint16(uint16) error
	Uint32(uint32) error
	Int32(int32) error
	Uint64(uint64) error
	Uint128(*big.Int) error
	Bool(bool) error
	Float32(float32) error
}
package maxminddb
import (
"math/big"
"net"
"testing"
"github.com/stretchr/testify/require"
)
// TestDecodingToDeserializer decodes a known record through the
// experimental deserializer interface and validates the reconstructed
// value with checkDecodingToInterface (defined elsewhere in this package).
func TestDecodingToDeserializer(t *testing.T) {
	reader, err := Open(testFile("MaxMind-DB-test-decoder.mmdb"))
	require.NoError(t, err, "unexpected error while opening database: %v", err)
	dser := testDeserializer{}
	err = reader.Lookup(net.ParseIP("::1.1.1.0"), &dser)
	require.NoError(t, err, "unexpected error while doing lookup: %v", err)
	checkDecodingToInterface(t, dser.rv)
}
// stackValue is one level of the container currently being built by
// testDeserializer: the container itself and, for slices, the index of
// the next element to fill.
type stackValue struct {
	value any
	// curNum is the next index to assign when value is a []any.
	curNum int
}
// testDeserializer implements the deserializer interface by rebuilding
// the decoded record as nested map[string]any / []any values.
type testDeserializer struct {
	stack []*stackValue // open containers, innermost last
	rv    any           // the completed root value
	key   *string       // pending map key awaiting its value, if any
}
// ShouldSkip never skips: every value is deserialized in this test helper.
func (*testDeserializer) ShouldSkip(_ uintptr) (bool, error) {
	return false, nil
}

// StartSlice opens a new slice container pre-sized to hold size elements.
func (d *testDeserializer) StartSlice(size uint) error {
	return d.add(make([]any, size))
}

// StartMap opens a new map container; the size hint is ignored.
func (d *testDeserializer) StartMap(_ uint) error {
	return d.add(map[string]any{})
}

//nolint:unparam // This is to meet the requirements of the interface.
func (d *testDeserializer) End() error {
	// Pop the innermost open container.
	d.stack = d.stack[:len(d.stack)-1]
	return nil
}

// The scalar methods below all funnel into add; integer widths are
// normalized the same way the reflection decoder would produce them
// (uint16/uint32 -> uint64, int32 -> int).

func (d *testDeserializer) String(v string) error {
	return d.add(v)
}

func (d *testDeserializer) Float64(v float64) error {
	return d.add(v)
}

func (d *testDeserializer) Bytes(v []byte) error {
	return d.add(v)
}

func (d *testDeserializer) Uint16(v uint16) error {
	return d.add(uint64(v))
}

func (d *testDeserializer) Uint32(v uint32) error {
	return d.add(uint64(v))
}

func (d *testDeserializer) Int32(v int32) error {
	return d.add(int(v))
}

func (d *testDeserializer) Uint64(v uint64) error {
	return d.add(v)
}

func (d *testDeserializer) Uint128(v *big.Int) error {
	return d.add(v)
}

func (d *testDeserializer) Bool(v bool) error {
	return d.add(v)
}

func (d *testDeserializer) Float32(v float32) error {
	return d.add(v)
}
// add incorporates a newly decoded value v into the structure being
// built. Scalars are stored into the innermost open container (or become
// the root value when no container is open); maps and slices are
// additionally pushed so that subsequent values land inside them.
func (d *testDeserializer) add(v any) error {
	if len(d.stack) == 0 {
		// No open container: v is the root value of the record.
		d.rv = v
	} else {
		top := d.stack[len(d.stack)-1]
		switch parent := top.value.(type) {
		case map[string]any:
			// Map contents arrive as alternating key/value calls; hold
			// the key until the matching value shows up.
			if d.key == nil {
				key := v.(string)
				d.key = &key
			} else {
				parent[*d.key] = v
				d.key = nil
			}
		case []any:
			parent[top.curNum] = v
			top.curNum++
		default:
			// Unreachable in practice: only maps and slices are pushed
			// onto the stack (see below).
		}
	}
	switch v := v.(type) {
	case map[string]any, []any:
		// Containers become the new innermost level; End() pops them.
		d.stack = append(d.stack, &stackValue{value: v})
	default:
	}
	return nil
}
......@@ -15,7 +15,7 @@ func newOffsetError() InvalidDatabaseError {
return InvalidDatabaseError{"unexpected end of database"}
}
func newInvalidDatabaseError(format string, args ...interface{}) InvalidDatabaseError {
func newInvalidDatabaseError(format string, args ...any) InvalidDatabaseError {
return InvalidDatabaseError{fmt.Sprintf(format, args...)}
}
......@@ -26,11 +26,11 @@ func (e InvalidDatabaseError) Error() string {
// UnmarshalTypeError is returned when the value in the database cannot be
// assigned to the specified data type.
type UnmarshalTypeError struct {
Value string // stringified copy of the database value that caused the error
Type reflect.Type // type of the value that could not be assign to
Type reflect.Type
Value string
}
func newUnmarshalTypeError(value interface{}, rType reflect.Type) UnmarshalTypeError {
func newUnmarshalTypeError(value any, rType reflect.Type) UnmarshalTypeError {
return UnmarshalTypeError{
Value: fmt.Sprintf("%v", value),
Type: rType,
......
......@@ -8,7 +8,7 @@ import (
"github.com/oschwald/maxminddb-golang"
)
// This example shows how to decode to a struct
// This example shows how to decode to a struct.
func ExampleReader_Lookup_struct() {
db, err := maxminddb.Open("test-data/test-data/GeoIP2-City-Test.mmdb")
if err != nil {
......@@ -26,14 +26,14 @@ func ExampleReader_Lookup_struct() {
err = db.Lookup(ip, &record)
if err != nil {
log.Fatal(err)
log.Panic(err)
}
fmt.Print(record.Country.ISOCode)
// Output:
// GB
}
// This example demonstrates how to decode to an interface{}
// This example demonstrates how to decode to an any.
func ExampleReader_Lookup_interface() {
db, err := maxminddb.Open("test-data/test-data/GeoIP2-City-Test.mmdb")
if err != nil {
......@@ -43,16 +43,16 @@ func ExampleReader_Lookup_interface() {
ip := net.ParseIP("81.2.69.142")
var record interface{}
var record any
err = db.Lookup(ip, &record)
if err != nil {
log.Fatal(err)
log.Panic(err)
}
fmt.Printf("%v", record)
}
// This example demonstrates how to iterate over all networks in the
// database
// database.
func ExampleReader_Networks() {
db, err := maxminddb.Open("test-data/test-data/GeoIP2-Connection-Type-Test.mmdb")
if err != nil {
......@@ -64,94 +64,82 @@ func ExampleReader_Networks() {
Domain string `maxminddb:"connection_type"`
}{}
networks := db.Networks()
networks := db.Networks(maxminddb.SkipAliasedNetworks)
for networks.Next() {
subnet, err := networks.Network(&record)
if err != nil {
log.Fatal(err)
log.Panic(err)
}
fmt.Printf("%s: %s\n", subnet.String(), record.Domain)
}
if networks.Err() != nil {
log.Fatal(networks.Err())
log.Panic(networks.Err())
}
// Output:
// ::100:0/120: Dialup
// ::100:100/120: Cable/DSL
// ::100:200/119: Dialup
// ::100:400/118: Dialup
// ::100:800/117: Dialup
// ::100:1000/116: Dialup
// ::100:2000/115: Dialup
// ::100:4000/114: Dialup
// ::100:8000/113: Dialup
// ::50d6:0/116: Cellular
// ::6001:0/112: Cable/DSL
// ::600a:0/111: Cable/DSL
// ::6045:0/112: Cable/DSL
// ::605e:0/111: Cable/DSL
// ::6c60:0/107: Cellular
// ::af10:c700/120: Dialup
// ::bb9c:8a00/120: Cable/DSL
// ::c9f3:c800/120: Corporate
// ::cfb3:3000/116: Cellular
// 1.0.0.0/24: Dialup
// 1.0.1.0/24: Cable/DSL
// 1.0.2.0/23: Dialup
// 1.0.4.0/22: Dialup
// 1.0.8.0/21: Dialup
// 1.0.16.0/20: Dialup
// 1.0.32.0/19: Dialup
// 1.0.64.0/18: Dialup
// 1.0.128.0/17: Dialup
// 1.0.0.0/24: Cable/DSL
// 1.0.1.0/24: Cellular
// 1.0.2.0/23: Cable/DSL
// 1.0.4.0/22: Cable/DSL
// 1.0.8.0/21: Cable/DSL
// 1.0.16.0/20: Cable/DSL
// 1.0.32.0/19: Cable/DSL
// 1.0.64.0/18: Cable/DSL
// 1.0.128.0/17: Cable/DSL
// 2.125.160.216/29: Cable/DSL
// 67.43.156.0/24: Cellular
// 80.214.0.0/20: Cellular
// 96.1.0.0/16: Cable/DSL
// 96.10.0.0/15: Cable/DSL
// 96.69.0.0/16: Cable/DSL
// 96.94.0.0/15: Cable/DSL
// 108.96.0.0/11: Cellular
// 175.16.199.0/24: Dialup
// 149.101.100.0/28: Cellular
// 175.16.199.0/24: Cable/DSL
// 187.156.138.0/24: Cable/DSL
// 201.243.200.0/24: Corporate
// 207.179.48.0/20: Cellular
// 2001:0:100::/56: Dialup
// 2001:0:100:100::/56: Cable/DSL
// 2001:0:100:200::/55: Dialup
// 2001:0:100:400::/54: Dialup
// 2001:0:100:800::/53: Dialup
// 2001:0:100:1000::/52: Dialup
// 2001:0:100:2000::/51: Dialup
// 2001:0:100:4000::/50: Dialup
// 2001:0:100:8000::/49: Dialup
// 2001:0:50d6::/52: Cellular
// 2001:0:6001::/48: Cable/DSL
// 2001:0:600a::/47: Cable/DSL
// 2001:0:6045::/48: Cable/DSL
// 2001:0:605e::/47: Cable/DSL
// 2001:0:6c60::/43: Cellular
// 2001:0:af10:c700::/56: Dialup
// 2001:0:bb9c:8a00::/56: Cable/DSL
// 2001:0:c9f3:c800::/56: Corporate
// 2001:0:cfb3:3000::/52: Cellular
// 2002:100::/40: Dialup
// 2002:100:100::/40: Cable/DSL
// 2002:100:200::/39: Dialup
// 2002:100:400::/38: Dialup
// 2002:100:800::/37: Dialup
// 2002:100:1000::/36: Dialup
// 2002:100:2000::/35: Dialup
// 2002:100:4000::/34: Dialup
// 2002:100:8000::/33: Dialup
// 2002:50d6::/36: Cellular
// 2002:6001::/32: Cable/DSL
// 2002:600a::/31: Cable/DSL
// 2002:6045::/32: Cable/DSL
// 2002:605e::/31: Cable/DSL
// 2002:6c60::/27: Cellular
// 2002:af10:c700::/40: Dialup
// 2002:bb9c:8a00::/40: Cable/DSL
// 2002:c9f3:c800::/40: Corporate
// 2002:cfb3:3000::/36: Cellular
// 216.160.83.56/29: Corporate
// 2003::/24: Cable/DSL
}
// This example demonstrates how to iterate over all networks in the
// database which are contained within an arbitrary network.
func ExampleReader_NetworksWithin() {
	db, err := maxminddb.Open("test-data/test-data/GeoIP2-Connection-Type-Test.mmdb")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	record := struct {
		Domain string `maxminddb:"connection_type"`
	}{}
	// Iterate only over networks contained within 1.0.0.0/8.
	_, network, err := net.ParseCIDR("1.0.0.0/8")
	if err != nil {
		log.Panic(err)
	}
	// NOTE(review): SkipAliasedNetworks presumably suppresses aliased
	// (e.g. 6to4/Teredo) networks during iteration — confirm against the
	// maxminddb package documentation.
	networks := db.NetworksWithin(network, maxminddb.SkipAliasedNetworks)
	for networks.Next() {
		subnet, err := networks.Network(&record)
		if err != nil {
			log.Panic(err)
		}
		fmt.Printf("%s: %s\n", subnet.String(), record.Domain)
	}
	if networks.Err() != nil {
		log.Panic(networks.Err())
	}
	// Output:
	// 1.0.0.0/24: Cable/DSL
	// 1.0.1.0/24: Cellular
	// 1.0.2.0/23: Cable/DSL
	// 1.0.4.0/22: Cable/DSL
	// 1.0.8.0/21: Cable/DSL
	// 1.0.16.0/20: Cable/DSL
	// 1.0.32.0/19: Cable/DSL
	// 1.0.64.0/18: Cable/DSL
	// 1.0.128.0/17: Cable/DSL
}
go.mod 0 → 100644
module github.com/oschwald/maxminddb-golang
go 1.19
require (
github.com/stretchr/testify v1.8.4
golang.org/x/sys v0.10.0
)
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
// +build !windows,!appengine
//go:build !windows && !appengine && !plan9 && !js && !wasip1 && !wasi
// +build !windows,!appengine,!plan9,!js,!wasip1,!wasi
package maxminddb
import (
"syscall"
"golang.org/x/sys/unix"
)
func mmap(fd int, length int) (data []byte, err error) {
return unix.Mmap(fd, 0, length, syscall.PROT_READ, syscall.MAP_SHARED)
func mmap(fd, length int) (data []byte, err error) {
return unix.Mmap(fd, 0, length, unix.PROT_READ, unix.MAP_SHARED)
}
func munmap(b []byte) (err error) {
......
//go:build windows && !appengine
// +build windows,!appengine
package maxminddb
......
node.go 0 → 100644
package maxminddb
// nodeReader reads the left and right records of a search-tree node
// identified by its byte offset in the tree buffer. Implementations
// differ only in record width (24, 28, or 32 bits per record).
type nodeReader interface {
	readLeft(uint) uint
	readRight(uint) uint
}
// nodeReader24 reads 24-bit records from a search tree in which each
// node stores two 3-byte big-endian records back to back.
type nodeReader24 struct {
	buffer []byte
}

// readLeft returns the first (left) 3-byte record of the node starting
// at nodeNumber.
func (n nodeReader24) readLeft(nodeNumber uint) uint {
	rec := n.buffer[nodeNumber : nodeNumber+3]
	return uint(rec[0])<<16 | uint(rec[1])<<8 | uint(rec[2])
}

// readRight returns the second (right) 3-byte record of the node
// starting at nodeNumber.
func (n nodeReader24) readRight(nodeNumber uint) uint {
	rec := n.buffer[nodeNumber+3 : nodeNumber+6]
	return uint(rec[0])<<16 | uint(rec[1])<<8 | uint(rec[2])
}
// nodeReader28 reads 28-bit records: each node occupies 7 bytes, and the
// two records share the middle byte (its high nibble belongs to the left
// record, its low nibble to the right).
type nodeReader28 struct {
	buffer []byte
}

// readLeft returns the left 28-bit record of the node at nodeNumber.
func (n nodeReader28) readLeft(nodeNumber uint) uint {
	rec := n.buffer[nodeNumber : nodeNumber+4]
	// The high nibble of the shared byte supplies bits 24-27.
	return uint(rec[3]&0xF0)<<20 | uint(rec[0])<<16 | uint(rec[1])<<8 | uint(rec[2])
}

// readRight returns the right 28-bit record of the node at nodeNumber.
func (n nodeReader28) readRight(nodeNumber uint) uint {
	rec := n.buffer[nodeNumber+3 : nodeNumber+7]
	// The low nibble of the shared byte supplies bits 24-27.
	return uint(rec[0]&0x0F)<<24 | uint(rec[1])<<16 | uint(rec[2])<<8 | uint(rec[3])
}
// nodeReader32 reads 32-bit records from a search tree in which each
// node stores two 4-byte big-endian records back to back.
type nodeReader32 struct {
	buffer []byte
}

// readLeft returns the first (left) 4-byte record of the node starting
// at nodeNumber.
func (n nodeReader32) readLeft(nodeNumber uint) uint {
	rec := n.buffer[nodeNumber : nodeNumber+4]
	return uint(rec[0])<<24 | uint(rec[1])<<16 | uint(rec[2])<<8 | uint(rec[3])
}

// readRight returns the second (right) 4-byte record of the node
// starting at nodeNumber.
func (n nodeReader32) readRight(nodeNumber uint) uint {
	rec := n.buffer[nodeNumber+4 : nodeNumber+8]
	return uint(rec[0])<<24 | uint(rec[1])<<16 | uint(rec[2])<<8 | uint(rec[3])
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment.