Browse source

update vendor

lijian 2 years ago
parent
commit
d66444b60f
36 changed files with 9196 additions and 0 deletions
1. .DS_Store (BIN)
2. vendor/.DS_Store (BIN)
3. vendor/github.com/.DS_Store (BIN)
4. vendor/github.com/deepmap/oapi-codegen/LICENSE (+201, -0)
5. vendor/github.com/deepmap/oapi-codegen/Makefile (+29, -0)
6. vendor/github.com/deepmap/oapi-codegen/README.md (+762, -0)
7. vendor/github.com/deepmap/oapi-codegen/go.mod (+64, -0)
8. vendor/github.com/deepmap/oapi-codegen/go.sum (+202, -0)
9. vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bind.go (+24, -0)
10. vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindform.go (+309, -0)
11. vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindparam.go (+526, -0)
12. vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindstring.go (+174, -0)
13. vendor/github.com/deepmap/oapi-codegen/pkg/runtime/deepobject.go (+358, -0)
14. vendor/github.com/deepmap/oapi-codegen/pkg/runtime/jsonmerge.go (+26, -0)
15. vendor/github.com/deepmap/oapi-codegen/pkg/runtime/styleparam.go (+473, -0)
16. vendor/github.com/deepmap/oapi-codegen/pkg/types/date.go (+43, -0)
17. vendor/github.com/deepmap/oapi-codegen/pkg/types/email.go (+27, -0)
18. vendor/github.com/deepmap/oapi-codegen/pkg/types/file.go (+71, -0)
19. vendor/github.com/deepmap/oapi-codegen/pkg/types/regexes.go (+11, -0)
20. vendor/github.com/deepmap/oapi-codegen/pkg/types/uuid.go (+7, -0)
21. vendor/github.com/influxdata/.DS_Store (BIN)
22. vendor/github.com/influxdata/influxdb-client-go/.DS_Store (BIN)
23. vendor/github.com/influxdata/influxdb-client-go/v2/.DS_Store (BIN)
24. vendor/github.com/influxdata/line-protocol/LICENSE (+20, -0)
25. vendor/github.com/influxdata/line-protocol/README.md (+24, -0)
26. vendor/github.com/influxdata/line-protocol/encoder.go (+303, -0)
27. vendor/github.com/influxdata/line-protocol/escape.go (+264, -0)
28. vendor/github.com/influxdata/line-protocol/go.mod (+3, -0)
29. vendor/github.com/influxdata/line-protocol/go.sum (+0, -0)
30. vendor/github.com/influxdata/line-protocol/handler.go (+128, -0)
31. vendor/github.com/influxdata/line-protocol/machine.go (+3828, -0)
32. vendor/github.com/influxdata/line-protocol/machine.go.rl (+549, -0)
33. vendor/github.com/influxdata/line-protocol/metric.go (+428, -0)
34. vendor/github.com/influxdata/line-protocol/parser.go (+192, -0)
35. vendor/github.com/influxdata/line-protocol/writer.go (+130, -0)
36. vendor/vendor.json (+20, -0)

BIN
.DS_Store


BIN
vendor/.DS_Store


BIN
vendor/github.com/.DS_Store


+ 201 - 0
vendor/github.com/deepmap/oapi-codegen/LICENSE

@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

+ 29 - 0
vendor/github.com/deepmap/oapi-codegen/Makefile

@@ -0,0 +1,29 @@
+GOBASE=$(shell pwd)
+GOBIN=$(GOBASE)/bin
+
+help:
+	@echo "This is a helper makefile for oapi-codegen"
+	@echo "Targets:"
+	@echo "    generate:    regenerate all generated files"
+	@echo "    test:        run all tests"
+	@echo "    gin_example  generate gin example server code"
+	@echo "    tidy         tidy go mod"
+
+$(GOBIN)/golangci-lint:
+	curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOBIN) v1.50.1
+
+.PHONY: tools
+tools: $(GOBIN)/golangci-lint
+
+lint: tools
+	$(GOBIN)/golangci-lint run ./...
+
+generate:
+	go generate ./...
+
+test:
+	go test -cover ./...
+
+tidy:
+	@echo "tidy..."
+	go mod tidy

+ 762 - 0
vendor/github.com/deepmap/oapi-codegen/README.md

@@ -0,0 +1,762 @@
+OpenAPI Client and Server Code Generator
+----------------------------------------
+
+⚠️ This README may be for the latest development version, which may contain
+unreleased changes. Please ensure you're looking at the README for the latest
+release version.
+
+This package contains a set of utilities for generating Go boilerplate code for
+services based on
+[OpenAPI 3.0](https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.0.md)
+API definitions. When working with services, it's important to have an API
+contract which servers and clients both implement to minimize the chances of
+incompatibilities. It's tedious to generate Go models which precisely correspond to
+OpenAPI specifications, so let our code generator do that work for you, so that
+you can focus on implementing the business logic for your service.
+
+We have chosen to focus on [Echo](https://github.com/labstack/echo) as
+our default HTTP routing engine, due to its speed and simplicity for the generated
+stubs; [Chi](https://github.com/go-chi/chi) and [Gin](https://github.com/gin-gonic/gin)
+have also been added by contributors as additional routers. We chose Echo because
+the `Context` object is a mockable interface, and it allows for some advanced
+testing.
+
+This package would rather be too simple than too generic, so we've made some
+design decisions in favor of simplicity, knowing that we can't generate strongly
+typed Go code for all possible OpenAPI Schemas. If there is a way to accomplish
+something via utility code or reflection, it's probably a better approach than
+code generation, which is fragile due to the very dynamic nature of OpenAPI and
+the very static nature of Go.
+
+## Overview
+
+We're going to use the OpenAPI example of the
+[Expanded Petstore](https://github.com/OAI/OpenAPI-Specification/blob/master/examples/v3.0/petstore-expanded.yaml)
+in the descriptions below; please have a look at it.
+
+In order to create a Go server to serve this exact schema, you would have to
+write a lot of boilerplate code to perform all the marshalling and unmarshalling
+into objects which match the OpenAPI 3.0 definition. The code generator in this
+directory does a lot of that for you. You would run it like so:
+
+    go install github.com/deepmap/oapi-codegen/cmd/oapi-codegen@latest
+    oapi-codegen petstore-expanded.yaml > petstore.gen.go
+
+Let's go through that `petstore.gen.go` file to show you everything which was
+generated.
+
+
+## Generated Server Boilerplate
+
+The `/components/schemas` section in OpenAPI defines reusable objects, so Go
+types are generated for these. The Pet Store example defines `Error`, `Pet`,
+`Pets` and `NewPet`, so we do the same in Go:
+```go
+// Error defines model for Error.
+type Error struct {
+    // Error code
+    Code int32 `json:"code"`
+
+    // Error message
+    Message string `json:"message"`
+}
+
+// NewPet defines model for NewPet.
+type NewPet struct {
+    // Name of the pet
+    Name string `json:"name"`
+
+    // Type of the pet
+    Tag *string `json:"tag,omitempty"`
+}
+
+// Pet defines model for Pet.
+type Pet struct {
+    // Unique id of the pet
+    Id int64 `json:"id"`
+
+    // Name of the pet
+    Name string `json:"name"`
+
+    // Type of the pet
+    Tag *string `json:"tag,omitempty"`
+}
+
+// Type definition for component schema "Pets"
+type Pets []Pet
+```
+
+It's best to define objects under `/components` field in the schema, since
+those will be turned into named Go types. If you use inline types in your
+handler definitions, we will generate inline, anonymous Go types, but those
+are more tedious to deal with since you will have to redeclare them at every
+point of use.
+
+For each element in the `paths` map in OpenAPI, we will generate a Go handler
+function in an interface object. Here is the generated Go interface for our
+Echo server.
+
+```go
+type ServerInterface interface {
+    //  (GET /pets)
+    FindPets(ctx echo.Context, params FindPetsParams) error
+    //  (POST /pets)
+    AddPet(ctx echo.Context) error
+    //  (DELETE /pets/{id})
+    DeletePet(ctx echo.Context, id int64) error
+    //  (GET /pets/{id})
+    FindPetById(ctx echo.Context, id int64) error
+}
+```
+
+These are the functions which you will implement yourself in order to create
+a server conforming to the API specification. Normally, all the arguments and
+parameters are stored on the `echo.Context` in handlers, so we do the tedious
+work of unmarshalling the JSON automatically, simply passing values into
+your handlers.
+
+Notice that `FindPetById` takes a parameter `id int64`. All path arguments
+will be passed as arguments to your function, since they are mandatory.
+
+Remaining arguments can be passed in headers, query arguments or cookies. Those
+will be written to a `params` object. Look at the `FindPets` function above, it
+takes as input `FindPetsParams`, which is defined as follows:
+```go
+// Parameters object for FindPets
+type FindPetsParams struct {
+    Tags  *[]string `json:"tags,omitempty"`
+    Limit *int32   `json:"limit,omitempty"`
+}
+```
+
+The HTTP query parameter `limit` turns into a Go field named `Limit`. It is
+passed by pointer, since it is an optional parameter. If the parameter is
+specified, the pointer will be non-`nil`, and you can read its value.
+
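+For illustration, a hand-written `FindPets` implementation might check the pointer like
+this (a minimal sketch assuming a hypothetical `PetStoreImpl` backed by an in-memory
+slice; imports of `net/http` and `echo` are omitted, as in the other snippets):
+```go
+// PetStoreImpl is a hypothetical implementation of ServerInterface.
+type PetStoreImpl struct {
+    pets Pets
+}
+
+// FindPets returns every stored pet, truncating the result when the
+// optional limit query parameter was supplied.
+func (p *PetStoreImpl) FindPets(ctx echo.Context, params FindPetsParams) error {
+    result := p.pets
+    if params.Limit != nil && int(*params.Limit) < len(result) {
+        result = result[:int(*params.Limit)]
+    }
+    return ctx.JSON(http.StatusOK, result)
+}
+```
+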
+If you changed the OpenAPI specification to make the parameter required, the
+`FindPetsParams` structure will contain the type by value:
+```go
+type FindPetsParams struct {
+    Tags  *[]string `json:"tags,omitempty"`
+    Limit int32     `json:"limit"`
+}
+```
+
+### Registering handlers
+There are a few ways of registering your HTTP handler, based on the type of server generated, i.e. `-generate server` or `-generate chi-server`.
+
+<details><summary><code>Echo</code></summary>
+
+Code generated using `-generate server`.
+
+The usage of `Echo` is out of scope of this doc, but once you have an
+echo instance, we generate a utility function to help you associate your handlers
+with this autogenerated code. For the pet store, it looks like this:
+```go
+func RegisterHandlers(router codegen.EchoRouter, si ServerInterface) {
+    wrapper := ServerInterfaceWrapper{
+        Handler: si,
+    }
+    router.GET("/pets", wrapper.FindPets)
+    router.POST("/pets", wrapper.AddPet)
+    router.DELETE("/pets/:id", wrapper.DeletePet)
+    router.GET("/pets/:id", wrapper.FindPetById)
+}
+```
+
+The wrapper functions referenced above contain generated code which pulls
+parameters off the `Echo` request context, and unmarshals them into Go objects.
+
+You would register the generated handlers as follows:
+```go
+func SetupHandler() {
+    var myApi PetStoreImpl  // This implements the pet store interface
+    e := echo.New()
+    petstore.RegisterHandlers(e, &myApi)
+    ...
+}
+```
+
+</details>
+
+<details><summary><code>Chi</code></summary>
+
+Code generated using `-generate chi-server`.
+
+```go
+type PetStoreImpl struct {}
+func (*PetStoreImpl) GetPets(w http.ResponseWriter, r *http.Request) {
+    // Implement me
+}
+
+func SetupHandler() {
+    var myApi PetStoreImpl
+
+    r := chi.NewRouter()
+    r.Mount("/", Handler(&myApi))
+}
+```
+</details>
+
+<details><summary><code>Gin</code></summary>
+
+Code generated using `-generate gin`.
+
+The usage of `gin` is out of the scope of this doc, but once you have a
+gin instance, we generate a utility function to help you associate your handlers
+with this autogenerated code. For the pet store, it looks like this:
+```go
+// RegisterHandlersWithOptions creates http.Handler with additional options
+func RegisterHandlersWithOptions(router *gin.Engine, si ServerInterface, options GinServerOptions) *gin.Engine {
+	wrapper := ServerInterfaceWrapper{
+		Handler:            si,
+		HandlerMiddlewares: options.Middlewares,
+	}
+
+	router.GET(options.BaseURL+"/pets", wrapper.FindPets)
+	router.POST(options.BaseURL+"/pets", wrapper.AddPet)
+	router.DELETE(options.BaseURL+"/pets/:id", wrapper.DeletePet)
+	router.GET(options.BaseURL+"/pets/:id", wrapper.FindPetByID)
+	return router
+}
+```
+
+```go
+import (
+	"github.com/gin-gonic/gin"
+	"github.com/deepmap/oapi-codegen/examples/petstore-expanded/gin/api"
+	middleware "github.com/deepmap/oapi-codegen/pkg/gin-middleware"
+)
+
+type PetStoreImpl struct {}
+func (*PetStoreImpl) GetPets(w http.ResponseWriter, r *http.Request) {
+    // Implement me
+}
+
+func SetupHandler() {
+    var myApi PetStoreImpl
+
+    r := gin.Default()
+    r.Use(middleware.OapiRequestValidator(swagger))
+    r = api.RegisterHandlers(r, &myApi)
+}
+```
+</details>
+
+<details><summary><code>net/http</code></summary>
+
+[Chi](https://github.com/go-chi/chi) is 100% compatible with `net/http`, allowing the following with code generated using `-generate chi-server`.
+
+```go
+type PetStoreImpl struct {}
+func (*PetStoreImpl) GetPets(w http.ResponseWriter, r *http.Request) {
+    // Implement me
+}
+
+func SetupHandler() {
+    var myApi PetStoreImpl
+
+    http.Handle("/", Handler(&myApi))
+}
+```
+
+Alternatively, [Gorilla](https://github.com/gorilla/mux) is also 100% compatible with `net/http` and can be generated with `-generate gorilla`.
+
+</details>
+
+#### Strict server generation
+
+oapi-codegen also supports generating an RPC-inspired strict server, which will parse request bodies and encode responses.
+The main point of this code is to automate some parsing, abstract user code away from server-specific code,
+and force user code to comply with the schema.
+It supports binding `application/json` and `application/x-www-form-urlencoded` bodies to a struct; for `multipart` requests
+it generates a `multipart.Reader`, which can be used either to iterate over parts manually or to bind the form to a struct
+with the `runtime.BindMultipart` function. All other content types are represented by an `io.Reader` interface.
+
+To form a response, simply return one of the generated structs with the corresponding status code and content type. For example,
+to return a status code 200 JSON response for `AddPet`, use the `AddPet200JSONResponse` struct, which will set the correct
+Content-Type header and status code and marshal the response data. You can also return an error, which will
+cause an `Internal Server Error` response.
+
+Short example:
+```go
+type PetStoreImpl struct {}
+func (*PetStoreImpl) GetPets(ctx context.Context, request GetPetsRequestObject) (GetPetsResponseObject, error) {
+    var result []Pet
+	// Implement me
+    return GetPets200JSONResponse(result), nil
+}
+```
+For a complete example see `/examples/petstore-expanded/strict`.
+
+Code is generated with a configuration flag `generate: strict-server: true` along with any other server (echo, chi, gin and gorilla are supported).
+The generated strict wrapper can then be used as an implementation for `ServerInterface`. Setup example:
+```go
+func SetupHandler() {
+    var myApi PetStoreImpl
+	myStrictApiHandler := api.NewStrictHandler(myApi, nil)
+    e := echo.New()
+    petstore.RegisterHandlers(e, &myStrictApiHandler)
+}
+```
+
+The strict server also has its own middlewares. They can access both the request and response structs,
+as well as the raw request/response data. They can be used for logging the parsed request/response objects, transforming Go errors into response structs,
+authorization, etc. Note that these middlewares are server-specific.
+
+#### Additional Properties in type definitions
+
+[OpenAPI Schemas](https://swagger.io/specification/#schemaObject) implicitly
+accept `additionalProperties`, meaning that any fields provided, but not explicitly
+defined via properties on the schema are accepted as input, and propagated. When
+unspecified, the `additionalProperties` field is assumed to be `true`.
+
+Additional properties are tricky to support in Go with typing, and require
+lots of boilerplate code, so in this library, we assume that `additionalProperties`
+defaults to `false` and we don't generate this boilerplate. If you would like
+an object to accept `additionalProperties`, specify a schema for `additionalProperties`.
+
+Say we declared `NewPet` above like so:
+```yaml
+    NewPet:
+      required:
+        - name
+      properties:
+        name:
+          type: string
+        tag:
+          type: string
+      additionalProperties:
+        type: string
+```
+
+The Go code for `NewPet` would now look like this:
+```go
+// NewPet defines model for NewPet.
+type NewPet struct {
+	Name                 string            `json:"name"`
+	Tag                  *string           `json:"tag,omitempty"`
+	AdditionalProperties map[string]string `json:"-"`
+}
+```
+
+The additional properties, of type `string`, become a `map[string]string`, which maps
+field names to instances of the `additionalProperties` schema.
+```go
+// Getter for additional properties for NewPet. Returns the specified
+// element and whether it was found
+func (a NewPet) Get(fieldName string) (value string, found bool) {...}
+
+// Setter for additional properties for NewPet
+func (a *NewPet) Set(fieldName string, value string) {...}
+
+// Override default JSON handling for NewPet to handle additionalProperties
+func (a *NewPet) UnmarshalJSON(b []byte) error {...}
+
+// Override default JSON handling for NewPet to handle additionalProperties
+func (a NewPet) MarshalJSON() ([]byte, error) {...}
+```
+
+There are many special cases for `additionalProperties`, such as having to
+define types for inner fields which themselves support additionalProperties, and
+all of them are tested via the `internal/test/components` schemas and tests. Please
+look through those tests for more usage examples.
+
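+A short usage sketch of these accessors, assuming the generated `NewPet` above (the
+`describe` helper and the `fmt` output are purely illustrative):
+```go
+func describe(pet *NewPet) {
+    // Store an extra field that is not declared as a schema property.
+    pet.Set("color", "brown")
+
+    // Read it back; found reports whether the key was present.
+    if color, found := pet.Get("color"); found {
+        fmt.Println("color:", color)
+    }
+}
+```
+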
+#### oneOf/anyOf/allOf support
+
+- `oneOf` and `anyOf` are implemented using delayed parsing with the help of `json.RawMessage`.
+The following schema will result in a type that has methods such as `AsCat`, `AsDog`, `FromCat`, `FromDog`, `MergeCat`, and `MergeDog`. If the schema also includes a discriminator, the generated code will also have methods such as `Discriminator` and `ValueByDiscriminator`, and the `From` methods will set the discriminator value.
+```yaml
+schema:
+  oneOf:
+    - $ref: '#/components/schemas/Cat'
+    - $ref: '#/components/schemas/Dog'
+```
+- `allOf` is supported, by taking the union of all the fields in all the
+    component schemas. This is the most useful of these operations, and is
+    commonly used to merge objects with an identifier, as in the
+    `petstore-expanded` example.
+
+## Generated Client Boilerplate
+
+Once your server is up and running, you probably want to make requests to it. If
+you're going to do those requests from your Go code, we also generate a client
+which is conformant with your schema to help in marshaling objects to JSON. It
+uses the same types and similar function signatures to your request handlers.
+
+The interface for the pet store looks like this:
+
+```go
+// The interface specification for the client above.
+type ClientInterface interface {
+
+	// FindPets request
+	FindPets(ctx context.Context, params *FindPetsParams, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+	// AddPet request with JSON body
+	AddPet(ctx context.Context, body NewPet, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+	// DeletePet request
+	DeletePet(ctx context.Context, id int64, reqEditors ...RequestEditorFn) (*http.Response, error)
+
+	// FindPetById request
+	FindPetById(ctx context.Context, id int64, reqEditors ...RequestEditorFn) (*http.Response, error)
+}
+```
+
+A Client object which implements the above interface is also generated:
+
+```go
+// Client which conforms to the OpenAPI3 specification for this service.
+type Client struct {
+    // The endpoint of the server conforming to this interface, with scheme,
+    // https://api.deepmap.com for example.
+    Server string
+
+    // HTTP client with any customized settings, such as certificate chains.
+    Client http.Client
+
+    // A callback for modifying requests which are generated before sending over
+    // the network.
+    RequestEditors []func(ctx context.Context, req *http.Request) error
+}
+```
+
+Each operation in your OpenAPI spec will result in a client function which
+takes the same arguments. It's difficult to handle any arbitrary body that
+Swagger supports, so we've done some special casing for bodies, and you may get
+more than one function for an operation with a request body.
+
+1) If you have more than one request body type, meaning more than one media
+ type, you will have a generic handler of this form:
+
+        AddPet(ctx context.Context, contentType string, body io.Reader)
+
+2) If you have only a JSON request body, you will get:
+
+        AddPet(ctx context.Context, body NewPet)
+
+3) If you have multiple request body types, which include a JSON type you will
+ get two functions. We've chosen to give the JSON version a shorter name, as
+ we work with JSON and don't want to wear out our keyboards.
+
+        AddPet(ctx context.Context, body NewPet)
+        AddPetWithBody(ctx context.Context, contentType string, body io.Reader)
+
+The Client object above is fairly flexible, since you can pass in your own
+`http.Client` and a request editing callback. You can use that callback to add
+headers. In our middleware stack, we annotate the context with additional
+information such as the request ID and function tracing information, and we
+use the callback to propagate that information into the request headers. Still, we
+can't foresee all possible usages, so those functions call through to helper
+functions which create requests. In the case of the pet store, we have:
+
+```go
+// Request generator for FindPets
+func NewFindPetsRequest(server string, params *FindPetsParams) (*http.Request, error) {...}
+
+// Request generator for AddPet with JSON body
+func NewAddPetRequest(server string, body NewPet) (*http.Request, error) {...}
+
+// Request generator for AddPet with non-JSON body
+func NewAddPetRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) {...}
+
+// Request generator for DeletePet
+func NewDeletePetRequest(server string, id int64) (*http.Request, error) {...}
+
+// Request generator for FindPetById
+func NewFindPetByIdRequest(server string, id int64) (*http.Request, error) {...}
+```
+
+You can call these functions to build an `http.Request` from Go objects, which
+will correspond to your request schema. They map one-to-one to the functions on
+the client, except that we always generate the generic non-JSON body handler.
+
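+Putting the pieces together, a minimal client call could look like the sketch below
+(the header name, the limit value, and the `listPets` helper are illustrative and not
+part of the generated code):
+
+```go
+func listPets(ctx context.Context) error {
+    // A request editor that adds a header to every outgoing request.
+    addHeader := func(ctx context.Context, req *http.Request) error {
+        req.Header.Set("X-Request-Id", "example")
+        return nil
+    }
+
+    client, err := NewClient("https://api.deepmap.com", WithRequestEditorFn(addHeader))
+    if err != nil {
+        return err
+    }
+
+    limit := int32(10)
+    resp, err := client.FindPets(ctx, &FindPetsParams{Limit: &limit})
+    if err != nil {
+        return err
+    }
+    defer resp.Body.Close()
+
+    // resp is a plain *http.Response; decode resp.Body as needed.
+    return nil
+}
+```
+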
+There are some caveats to using this code.
+- exploded, form style query arguments, which are the default argument format
+ in OpenAPI 3.0, are undecidable. Say that I have two objects, one composed of
+ the fields `(name=bob, id=5)` and another which has `(name=shoe, color=brown)`.
+ The first parameter is named `person` and the second is named `item`. The
+ default marshaling style for query args would result in
+ `/path/?name=bob,id=5&name=shoe,color=brown`. In order to tell what belongs
+ to which object, we'd have to look at all the parameters and try to deduce it,
+ but we're lazy, so we didn't. Don't use exploded form style arguments if
+ you're passing around objects which have similar field names. If you
+ used unexploded form parameters, you'd have
+ `/path/?person=name,bob,id,5&item=name,shoe,color,brown`, which can be
+ parsed unambiguously.
+
+- Parameters can be defined via `schema` or via `content`. Use the `content` form
+ for anything other than trivial objects; they can marshal to arbitrary JSON
+ structures. When you send them as cookie (`in: cookie`) arguments, we will
+ URL encode them, since JSON delimiters aren't allowed in cookies.
+
+## Using SecurityProviders
+
+If you generate client code, you can use some of the default-provided security providers,
+which help you use the various OpenAPI 3 authentication mechanisms.
+
+
+```go
+    import (
+        "github.com/deepmap/oapi-codegen/pkg/securityprovider"
+    )
+
+    func CreateSampleProviders() error {
+        // Example BasicAuth
+        // See: https://swagger.io/docs/specification/authentication/basic-authentication/
+        basicAuthProvider, basicAuthProviderErr := securityprovider.NewSecurityProviderBasicAuth("MY_USER", "MY_PASS")
+        if basicAuthProviderErr != nil {
+            panic(basicAuthProviderErr)
+        }
+
+        // Example BearerToken
+        // See: https://swagger.io/docs/specification/authentication/bearer-authentication/
+        bearerTokenProvider, bearerTokenProviderErr := securityprovider.NewSecurityProviderBearerToken("MY_TOKEN")
+        if bearerTokenProviderErr != nil {
+            panic(bearerTokenProviderErr)
+        }
+
+        // Example ApiKey provider
+        // See: https://swagger.io/docs/specification/authentication/api-keys/
+        apiKeyProvider, apiKeyProviderErr := securityprovider.NewSecurityProviderApiKey("query", "myApiKeyParam", "MY_API_KEY")
+        if apiKeyProviderErr != nil {
+            panic(apiKeyProviderErr)
+        }
+
+        // Example of providing your own provider using an anonymous function wrapped in the
+        // InterceptorFn adapter. The relationship between InterceptorFn and the Interceptor
+        // interface is the same as between http.HandlerFunc and http.Handler.
+        customProvider := func(ctx context.Context, req *http.Request) error {
+            // Just log the request header, nothing else.
+            log.Println(req.Header)
+            return nil
+        }
+
+        // Exhaustive list of some defaults you can use to initialize a Client.
+        // If you need to override the underlying httpClient, you can use the option
+        //
+        // WithHTTPClient(httpClient *http.Client)
+        //
+        client, clientErr := NewClient("https://api.deepmap.com", WithRequestEditorFn(apiKeyProvider.Intercept))
+
+        return nil
+    }
+```
+
+## Extensions
+
+`oapi-codegen` supports the following extended properties:
+
+- `x-go-type`: specifies Go type name. It allows you to specify the type name for a schema, and
+  will override any default value. This extended property isn't supported in all parts of
+  OpenAPI, so please refer to the spec as to where it's allowed. Swagger validation tools will
+  flag incorrect usage of this property.
+- `x-go-name`: specifies Go field name. It allows you to specify the field name for a schema, and
+  will override any default value. This extended property isn't supported in all parts of
+  OpenAPI, so please refer to the spec as to where it's allowed. Swagger validation tools will
+  flag incorrect usage of this property.
+- `x-go-json-ignore`: sets the `json` tag to `-` to ignore the field in JSON completely.
+- `x-oapi-codegen-extra-tags`: adds extra Go field tags to the generated struct field. This is
+  useful for interfacing with tag based ORM or validation libraries. The extra tags that
+  are added are in addition to the regular json tags that are generated. If you specify your 
+  own `json` tag, you will override the default one. 
+
+    ```yaml
+    components:
+      schemas:
+        Object:
+          properties:
+            name:
+              type: string
+              x-oapi-codegen-extra-tags:
+                tag1: value1
+                tag2: value2
+    ```
+  In the example above, field `name` will be declared as: 
+  
+  ```
+  Name string `json:"name" tag1:"value1" tag2:"value2"`
+  ```
+- `x-go-type-import`: adds extra Go imports to your generated code. It can help when you want to
+   choose your own import package for `x-go-type`.
+
+  ```yaml
+    schemas:
+      Pet:
+        properties:
+          age:
+            x-go-type: myuuid.UUID
+            x-go-type-import:
+              name: myuuid
+              path: github.com/google/uuid
+  ```
+  After code generation you will get this:
+  ```go
+    import (
+        ...
+        myuuid "github.com/google/uuid"
+    )
+    
+    // Pet defines model for Pet.
+    type Pet struct {
+        Age *myuuid.UUID `json:"age,omitempty"`
+    }
+
+  ```
+  `name` is an optional parameter. Example:
+
+  ```yaml
+  components:
+    schemas:
+      Pet:
+        properties:
+          age:
+            x-go-type: uuid.UUID
+            x-go-type-import:
+              path: github.com/google/uuid
+        required:
+          - age
+  ```
+
+  After code generation you will get this result:
+
+  ```go
+  import (
+	  "github.com/google/uuid"
+  )
+
+  // Pet defines model for Pet.
+  type Pet struct {
+	  Age uuid.UUID `json:"age"`
+  }
+  ```
+
+
+## Using `oapi-codegen`
+
+The default options for `oapi-codegen` will generate everything: client, server,
+type definitions, and the embedded swagger spec, but you can generate subsets of
+those via the `-generate` flag. It defaults to `types,client,server,spec`, but
+you can specify any combination of those.
+
+- `types`: generate all type definitions for all types in the OpenAPI spec. This
+ will be everything under `#components`, as well as request parameter, request
+ body, and response type objects.
+- `server`: generate the Echo server boilerplate. `server` requires the types in the
+ same package to compile.
+- `chi-server`: generate the Chi server boilerplate. This code is dependent on
+ that produced by the `types` target.
+- `client`: generate the client boilerplate. It, too, requires the types to be
+ present in its package.
+- `spec`: embed the OpenAPI spec into the generated code as a gzipped blob.
+  This is then usable with the `OapiRequestValidator`, or by other
+  methods that need access to the parsed OpenAPI specification.
+- `skip-fmt`: skip running `goimports` on the generated code. This is useful for debugging
+ the generated file in case the spec contains weird strings.
+- `skip-prune`: skip pruning unused components from the spec prior to generating
+ the code.
+- `import-mapping`: specifies a map from references to external OpenAPI specs to
+ Go include paths. Please see below.
+
+So, for example, if you would like to produce only the server code, you could
+run `oapi-codegen -generate types,server`. You could generate `types` and
+`server` into separate files, but both are required for the server code.
+
+`oapi-codegen` can filter paths based on their tags in the OpenAPI definition.
+Use either `-include-tags` or `-exclude-tags` followed by a comma-separated list
+of tags. For instance, to generate a server that serves all paths except those
+tagged with `auth` or `admin`, use the argument, `-exclude-tags="auth,admin"`.
+To generate a server that only handles `admin` paths, use the argument
+`-include-tags="admin"`. When neither of these arguments is present, all paths
+are generated.
+
+`oapi-codegen` can filter schemas based on the option `--exclude-schemas`, which is
+a comma-separated list of schema names. For instance, `--exclude-schemas=Pet,NewPet`
+will exclude the schemas `Pet` and `NewPet` from generation. This allows you to have
+a manually defined structure or interface in the same package and refer to it
+in the OpenAPI spec.
+
+Since `go generate` commands must be a single line, all the options above can make
+them pretty unwieldy, so you can specify all of the options in a configuration
+file via the `--config` option. Please see the test under
+[`/internal/test/externalref/`](https://github.com/deepmap/oapi-codegen/blob/master/internal/test/externalref/externalref.cfg.yaml)
+for an example. The structure of the file is as follows:
+    
+```yaml
+package: externalref
+generate:
+  models: true
+  embedded-spec: true
+import-mapping:
+  ./packageA/spec.yaml: github.com/deepmap/oapi-codegen/internal/test/externalref/packageA
+  ./packageB/spec.yaml: github.com/deepmap/oapi-codegen/internal/test/externalref/packageB
+output: externalref.gen.go
+output-options:
+  skip-prune: true
+```
+
+Have a look at [`cmd/oapi-codegen/oapi-codegen.go`](https://github.com/deepmap/oapi-codegen/blob/master/cmd/oapi-codegen/oapi-codegen.go#L48) 
+to see all the fields on the configuration structure.
+
+### Import Mappings
+
+OpenAPI specifications may contain references to other OpenAPI specifications,
+and we need some additional information in order to be able to generate correct
+Go code.
+
+An external reference looks like this:
+
+    $ref: ./some_spec.yaml#/components/schemas/Type
+
+We assume that you have already generated the boilerplate code for `./some_spec.yaml`
+using `oapi-codegen`, and you have a package which contains the generated code,
+let's call it `github.com/deepmap/some-package`. You need to tell `oapi-codegen` that
+`some_spec.yaml` corresponds to this package, and you would do it by specifying
+this command line argument:
+
+    -import-mapping=./some_spec.yaml:github.com/deepmap/some-package
+
+This tells us that in order to resolve references generated from `some_spec.yaml` we
+need to import `github.com/deepmap/some-package`. You may specify multiple mappings
+by comma separating them in the form `key1:value1,key2:value2`.
+
+## What's missing or incomplete
+
+This code is still young, and not complete, since we're filling it in as we
+need it. We've not yet implemented several things:
+
+- `patternProperties` isn't yet supported and will exit with an error. Pattern
+ properties were defined in JSONSchema, and the `kin-openapi` Swagger object
+ knows how to parse them, but they're not part of OpenAPI 3.0, so we've left
+ them out, as support is very complicated.
+
+
+## Making changes to code generation
+
+The code generator uses a tool to inline all the template definitions into
+code, so that we don't have to deal with the location of the template files.
+When you update any of the files under the `templates/` directory, you will
+need to regenerate the template inlines:
+
+    go generate ./pkg/codegen/templates
+
+All this command does is inline the files ending in `.tmpl` into the specified
+Go file.
+
+Afterwards you should run `go generate ./...`, and the templates will be updated
+ accordingly.
+
+Alternatively, you can provide custom templates to override built-in ones using
+the `-templates` flag specifying a path to a directory containing templates
+files. These files **must** be named identically to built-in template files
+(see `pkg/codegen/templates/*.tmpl` in the source code), and will be interpreted
+on-the-fly at run time. Example:
+
+    $ ls -1 my-templates/
+    client.tmpl
+    typedef.tmpl
+    $ oapi-codegen \
+        -templates my-templates/ \
+        -generate types,client \
+        petstore-expanded.yaml

+ 64 - 0
vendor/github.com/deepmap/oapi-codegen/go.mod

@@ -0,0 +1,64 @@
+module github.com/deepmap/oapi-codegen
+
+require (
+	github.com/apapsch/go-jsonmerge/v2 v2.0.0
+	github.com/getkin/kin-openapi v0.107.0
+	github.com/gin-gonic/gin v1.8.1
+	github.com/go-chi/chi/v5 v5.0.7
+	github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219
+	github.com/google/uuid v1.3.0
+	github.com/gorilla/mux v1.8.0
+	github.com/labstack/echo/v4 v4.9.1
+	github.com/lestrrat-go/jwx v1.2.25
+	github.com/matryer/moq v0.2.7
+	github.com/stretchr/testify v1.8.1
+	golang.org/x/text v0.4.0
+	golang.org/x/tools v0.3.0
+	gopkg.in/yaml.v2 v2.4.0
+)
+
+require (
+	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 // indirect
+	github.com/gin-contrib/sse v0.1.0 // indirect
+	github.com/go-openapi/jsonpointer v0.19.5 // indirect
+	github.com/go-openapi/swag v0.21.1 // indirect
+	github.com/go-playground/locales v0.14.0 // indirect
+	github.com/go-playground/universal-translator v0.18.0 // indirect
+	github.com/go-playground/validator/v10 v10.11.1 // indirect
+	github.com/goccy/go-json v0.9.11 // indirect
+	github.com/golang-jwt/jwt v3.2.2+incompatible // indirect
+	github.com/invopop/yaml v0.1.0 // indirect
+	github.com/josharian/intern v1.0.0 // indirect
+	github.com/json-iterator/go v1.1.12 // indirect
+	github.com/labstack/gommon v0.4.0 // indirect
+	github.com/leodido/go-urn v1.2.1 // indirect
+	github.com/lestrrat-go/backoff/v2 v2.0.8 // indirect
+	github.com/lestrrat-go/blackmagic v1.0.0 // indirect
+	github.com/lestrrat-go/httpcc v1.0.1 // indirect
+	github.com/lestrrat-go/iter v1.0.1 // indirect
+	github.com/lestrrat-go/option v1.0.0 // indirect
+	github.com/mailru/easyjson v0.7.7 // indirect
+	github.com/mattn/go-colorable v0.1.13 // indirect
+	github.com/mattn/go-isatty v0.0.16 // indirect
+	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
+	github.com/modern-go/reflect2 v1.0.2 // indirect
+	github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
+	github.com/pelletier/go-toml/v2 v2.0.5 // indirect
+	github.com/pkg/errors v0.9.1 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
+	github.com/stretchr/objx v0.5.0 // indirect
+	github.com/ugorji/go/codec v1.2.7 // indirect
+	github.com/valyala/bytebufferpool v1.0.0 // indirect
+	github.com/valyala/fasttemplate v1.2.2 // indirect
+	golang.org/x/crypto v0.1.0 // indirect
+	golang.org/x/mod v0.7.0 // indirect
+	golang.org/x/net v0.2.0 // indirect
+	golang.org/x/sys v0.2.0 // indirect
+	golang.org/x/time v0.0.0-20220411224347-583f2d630306 // indirect
+	golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect
+	google.golang.org/protobuf v1.28.1 // indirect
+	gopkg.in/yaml.v3 v3.0.1 // indirect
+)
+
+go 1.18

+ 202 - 0
vendor/github.com/deepmap/oapi-codegen/go.sum

@@ -0,0 +1,202 @@
+github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk=
+github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ=
+github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk=
+github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w=
+github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc=
+github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.0-20210816181553-5444fa50b93d/go.mod h1:tmAIfUFEirG/Y8jhZ9M+h36obRZAk/1fcSpXwAVlfqE=
+github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 h1:YLtO71vCjJRCBcrPMtQ9nqBsqpA1m5sE92cU+pd5Mcc=
+github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs=
+github.com/getkin/kin-openapi v0.107.0 h1:bxhL6QArW7BXQj8NjXfIJQy680NsMKd25nwhvpCXchg=
+github.com/getkin/kin-openapi v0.107.0/go.mod h1:9Dhr+FasATJZjS4iOLvB0hkaxgYdulrNYm2e9epLWOo=
+github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
+github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
+github.com/gin-gonic/gin v1.8.1 h1:4+fr/el88TOO3ewCmQr8cx/CtZ/umlIRIs5M4NTNjf8=
+github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk=
+github.com/go-chi/chi/v5 v5.0.7 h1:rDTPXLDHGATaeHvVlLcR4Qe0zftYethFucbjVQ1PxU8=
+github.com/go-chi/chi/v5 v5.0.7/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
+github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY=
+github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
+github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
+github.com/go-openapi/swag v0.21.1 h1:wm0rhTb5z7qpJRHBdPOMuY4QjVUMbF6/kwoYeRAOrKU=
+github.com/go-openapi/swag v0.21.1/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ=
+github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=
+github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
+github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU=
+github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
+github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho=
+github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
+github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ=
+github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU=
+github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
+github.com/goccy/go-json v0.9.11 h1:/pAaQDLHEoCq/5FFmSKBswWmK6H0e8g4159Kc/X/nqk=
+github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
+github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
+github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
+github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
+github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219 h1:utua3L2IbQJmauC5IXdEA547bcoU5dozgQAfc8Onsg4=
+github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y=
+github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
+github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
+github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
+github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
+github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
+github.com/invopop/yaml v0.1.0 h1:YW3WGUoJEXYfzWBjn00zIlrw7brGVD0fUKRYDPAPhrc=
+github.com/invopop/yaml v0.1.0/go.mod h1:2XuRLgs/ouIrW3XNzuNj7J3Nvu/Dig5MXvbCEdiBN3Q=
+github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
+github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
+github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
+github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
+github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
+github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
+github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/labstack/echo/v4 v4.9.1 h1:GliPYSpzGKlyOhqIbG8nmHBo3i1saKWFOgh41AN3b+Y=
+github.com/labstack/echo/v4 v4.9.1/go.mod h1:Pop5HLc+xoc4qhTZ1ip6C0RtP7Z+4VzRLWZZFKqbbjo=
+github.com/labstack/gommon v0.4.0 h1:y7cvthEAEbU0yHOf4axH8ZG2NH8knB9iNSoTO8dyIk8=
+github.com/labstack/gommon v0.4.0/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM=
+github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w=
+github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
+github.com/lestrrat-go/backoff/v2 v2.0.8 h1:oNb5E5isby2kiro9AgdHLv5N5tint1AnDVVf2E2un5A=
+github.com/lestrrat-go/backoff/v2 v2.0.8/go.mod h1:rHP/q/r9aT27n24JQLa7JhSQZCKBBOiM/uP402WwN8Y=
+github.com/lestrrat-go/blackmagic v1.0.0 h1:XzdxDbuQTz0RZZEmdU7cnQxUtFUzgCSPq8RCz4BxIi4=
+github.com/lestrrat-go/blackmagic v1.0.0/go.mod h1:TNgH//0vYSs8VXDCfkZLgIrVTTXQELZffUV0tz3MtdQ=
+github.com/lestrrat-go/httpcc v1.0.1 h1:ydWCStUeJLkpYyjLDHihupbn2tYmZ7m22BGkcvZZrIE=
+github.com/lestrrat-go/httpcc v1.0.1/go.mod h1:qiltp3Mt56+55GPVCbTdM9MlqhvzyuL6W/NMDA8vA5E=
+github.com/lestrrat-go/iter v1.0.1 h1:q8faalr2dY6o8bV45uwrxq12bRa1ezKrB6oM9FUgN4A=
+github.com/lestrrat-go/iter v1.0.1/go.mod h1:zIdgO1mRKhn8l9vrZJZz9TUMMFbQbLeTsbqPDrJ/OJc=
+github.com/lestrrat-go/jwx v1.2.25 h1:tAx93jN2SdPvFn08fHNAhqFJazn5mBBOB8Zli0g0otA=
+github.com/lestrrat-go/jwx v1.2.25/go.mod h1:zoNuZymNl5lgdcu6P7K6ie2QRll5HVfF4xwxBBK1NxY=
+github.com/lestrrat-go/option v1.0.0 h1:WqAWL8kh8VcSoD6xjSH34/1m8yxluXQbDeKNfvFeEO4=
+github.com/lestrrat-go/option v1.0.0/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
+github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
+github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
+github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
+github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
+github.com/matryer/moq v0.2.7 h1:RtpiPUM8L7ZSCbSwK+QcZH/E9tgqAkFjKQxsRs25b4w=
+github.com/matryer/moq v0.2.7/go.mod h1:kITsx543GOENm48TUAQyJ9+SAvFSr7iGQXPoth/VUBk=
+github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
+github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
+github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
+github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
+github.com/mattn/go-isatty v0.0.16 h1:bq3VjFmv/sOjHtdEhmkEV4x1AJtvUvOJ2PFAZ5+peKQ=
+github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
+github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
+github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
+github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
+github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=
+github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
+github.com/pelletier/go-toml/v2 v2.0.5 h1:ipoSadvV8oGUjnUbMub59IDPPwfxF694nG/jwbMiyQg=
+github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas=
+github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
+github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8=
+github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
+github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
+github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
+github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0=
+github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
+github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
+github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
+github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
+github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo=
+github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
+github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/crypto v0.0.0-20220427172511-eb4f295cb31f/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/crypto v0.1.0 h1:MDRAIl0xIo9Io2xV565hzXHw3zVseKrJKodhohM5CjU=
+golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
+golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
+golang.org/x/mod v0.7.0 h1:LapD9S96VoQRhi/GrNTqeBJFrUjs5UHCAtTlgwA5oZA=
+golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.2.0 h1:sZfSu1wtKLGlWI4ZZayP0ck9Y73K1ynO6gqzTdBVdPU=
+golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0 h1:ljd4t30dBnAvMZaQCevtY0xLLD0A+bRZXbgLMLU1F/A=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.4.0 h1:BrVqGRd7+k1DiOgtnFvAkoQEWQvBc25ouMJM6429SFg=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/time v0.0.0-20220411224347-583f2d630306 h1:+gHMid33q6pen7kv9xvT+JRinntgeXO2AeZVd0AWD3w=
+golang.org/x/time v0.0.0-20220411224347-583f2d630306/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
+golang.org/x/tools v0.3.0 h1:SrNbZl6ECOS1qFzgTdQfWXZM9XBkiA6tkFrH9YSTPHM=
+golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U=
+golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
+google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
+google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

+ 24 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bind.go

@@ -0,0 +1,24 @@
+// Copyright 2021 DeepMap, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package runtime
+
+// Binder is the interface implemented by types that can be bound to a query string or a parameter string.
+// The input can be assumed to be a valid string. If you define a Bind method, you are responsible for all
+// data being completely bound to the type.
+//
+// By convention, to approximate the behavior of Bind functions themselves,
+// Binder implements Bind("") as a no-op.
+type Binder interface {
+	Bind(src string) error
+}
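
A minimal usage sketch for this interface; the Color type and its normalisation rule are hypothetical, and only the Bind signature comes from the interface above.

package main

import (
	"fmt"
	"strings"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

// Color is a hypothetical parameter type; only the Bind signature is
// dictated by runtime.Binder.
type Color struct {
	Name string
}

// Bind satisfies runtime.Binder: an empty string is a no-op, any other
// value is normalised and stored.
func (c *Color) Bind(src string) error {
	if src == "" {
		return nil
	}
	c.Name = strings.ToLower(src)
	return nil
}

// Compile-time check that *Color implements the interface.
var _ runtime.Binder = (*Color)(nil)

func main() {
	var c Color
	if err := c.Bind("RED"); err != nil {
		fmt.Println("bind failed:", err)
		return
	}
	fmt.Println(c.Name) // prints: red
}

The compile-time assertion is a common way to catch interface drift early; the runtime's binding helpers check for this interface before falling back to reflection.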

+ 309 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindform.go

@@ -0,0 +1,309 @@
+package runtime
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+	"mime/multipart"
+	"net/url"
+	"reflect"
+	"strconv"
+	"strings"
+
+	"github.com/deepmap/oapi-codegen/pkg/types"
+)
+
+const tagName = "json"
+const jsonContentType = "application/json"
+
+type RequestBodyEncoding struct {
+	ContentType string
+	Style       string
+	Explode     *bool
+}
+
+func BindMultipart(ptr interface{}, reader multipart.Reader) error {
+	const defaultMemory = 32 << 20
+	form, err := reader.ReadForm(defaultMemory)
+	if err != nil {
+		return err
+	}
+	return BindForm(ptr, form.Value, form.File, nil)
+}
+
+func BindForm(ptr interface{}, form map[string][]string, files map[string][]*multipart.FileHeader, encodings map[string]RequestBodyEncoding) error {
+	ptrVal := reflect.Indirect(reflect.ValueOf(ptr))
+	if ptrVal.Kind() != reflect.Struct {
+		return errors.New("form data body should be a struct")
+	}
+	tValue := ptrVal.Type()
+
+	for i := 0; i < tValue.NumField(); i++ {
+		field := ptrVal.Field(i)
+		tag := tValue.Field(i).Tag.Get(tagName)
+		if !field.CanInterface() || tag == "-" {
+			continue
+		}
+		tag = strings.Split(tag, ",")[0] // extract the name of the tag
+		if encoding, ok := encodings[tag]; ok {
+			// custom encoding
+			values := form[tag]
+			if len(values) == 0 {
+				continue
+			}
+			value := values[0]
+			if encoding.ContentType != "" {
+				if !strings.HasPrefix(encoding.ContentType, jsonContentType) {
+					return errors.New("unsupported encoding, only application/json is supported")
+				}
+				if err := json.Unmarshal([]byte(value), ptr); err != nil {
+					return err
+				}
+			} else {
+				var explode bool
+				if encoding.Explode != nil {
+					explode = *encoding.Explode
+				}
+				if err := BindStyledParameterWithLocation(encoding.Style, explode, tag, ParamLocationUndefined, value, field.Addr().Interface()); err != nil {
+					return err
+				}
+			}
+		} else {
+			// regular form data
+			if _, err := bindFormImpl(field, form, files, tag); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
+func MarshalForm(ptr interface{}, encodings map[string]RequestBodyEncoding) (url.Values, error) {
+	ptrVal := reflect.Indirect(reflect.ValueOf(ptr))
+	if ptrVal.Kind() != reflect.Struct {
+		return nil, errors.New("form data body should be a struct")
+	}
+	tValue := ptrVal.Type()
+	result := make(url.Values)
+	for i := 0; i < tValue.NumField(); i++ {
+		field := ptrVal.Field(i)
+		tag := tValue.Field(i).Tag.Get(tagName)
+		if !field.CanInterface() || tag == "-" {
+			continue
+		}
+		omitEmpty := strings.HasSuffix(tag, ",omitempty")
+		if omitEmpty && field.IsZero() {
+			continue
+		}
+		tag = strings.Split(tag, ",")[0] // extract the name of the tag
+		if encoding, ok := encodings[tag]; ok && encoding.ContentType != "" {
+			if !strings.HasPrefix(encoding.ContentType, jsonContentType) {
+				return nil, errors.New("unsupported encoding, only application/json is supported")
+			}
+			// Marshal the underlying field value, not the reflect.Value wrapper.
+			data, err := json.Marshal(field.Interface())
+			if err != nil {
+				return nil, err
+			}
+			result[tag] = append(result[tag], string(data))
+		} else {
+			marshalFormImpl(field, result, tag)
+		}
+	}
+	return result, nil
+}
+
+func bindFormImpl(v reflect.Value, form map[string][]string, files map[string][]*multipart.FileHeader, name string) (bool, error) {
+	var hasData bool
+	switch v.Kind() {
+	case reflect.Interface:
+		return bindFormImpl(v.Elem(), form, files, name)
+	case reflect.Ptr:
+		ptrData := v.Elem()
+		if !ptrData.IsValid() {
+			ptrData = reflect.New(v.Type().Elem())
+		}
+		ptrHasData, err := bindFormImpl(ptrData, form, files, name)
+		if err == nil && ptrHasData && !v.Elem().IsValid() {
+			v.Set(ptrData)
+		}
+		return ptrHasData, err
+	case reflect.Slice:
+		if files := append(files[name], files[name+"[]"]...); len(files) != 0 {
+			if _, ok := v.Interface().([]types.File); ok {
+				result := make([]types.File, len(files))
+				for i, file := range files {
+					result[i].InitFromMultipart(file)
+				}
+				v.Set(reflect.ValueOf(result))
+				hasData = true
+			}
+		}
+		indexedElementsCount := indexedElementsCount(form, files, name)
+		items := append(form[name], form[name+"[]"]...)
+		if indexedElementsCount+len(items) != 0 {
+			result := reflect.MakeSlice(v.Type(), indexedElementsCount+len(items), indexedElementsCount+len(items))
+			for i := 0; i < indexedElementsCount; i++ {
+				if _, err := bindFormImpl(result.Index(i), form, files, fmt.Sprintf("%s[%v]", name, i)); err != nil {
+					return false, err
+				}
+			}
+			for i, item := range items {
+				if err := BindStringToObject(item, result.Index(indexedElementsCount+i).Addr().Interface()); err != nil {
+					return false, err
+				}
+			}
+			v.Set(result)
+			hasData = true
+		}
+	case reflect.Struct:
+		if files := files[name]; len(files) != 0 {
+			if file, ok := v.Interface().(types.File); ok {
+				file.InitFromMultipart(files[0])
+				v.Set(reflect.ValueOf(file))
+				return true, nil
+			}
+		}
+		for i := 0; i < v.NumField(); i++ {
+			field := v.Type().Field(i)
+			tag := field.Tag.Get(tagName)
+			if field.Name == "AdditionalProperties" && field.Type.Kind() == reflect.Map && tag == "-" {
+				additionalPropertiesHasData, err := bindAdditionalProperties(v.Field(i), v, form, files, name)
+				if err != nil {
+					return false, err
+				}
+				hasData = hasData || additionalPropertiesHasData
+			}
+			if !v.Field(i).CanInterface() || tag == "-" {
+				continue
+			}
+			tag = strings.Split(tag, ",")[0] // extract the name of the tag
+			fieldHasData, err := bindFormImpl(v.Field(i), form, files, fmt.Sprintf("%s[%s]", name, tag))
+			if err != nil {
+				return false, err
+			}
+			hasData = hasData || fieldHasData
+		}
+		return hasData, nil
+	default:
+		value := form[name]
+		if len(value) != 0 {
+			return true, BindStringToObject(value[0], v.Addr().Interface())
+		}
+	}
+	return hasData, nil
+}
+
+func indexedElementsCount(form map[string][]string, files map[string][]*multipart.FileHeader, name string) int {
+	name += "["
+	maxIndex := -1
+	for k := range form {
+		if strings.HasPrefix(k, name) {
+			str := strings.TrimPrefix(k, name)
+			str = str[:strings.Index(str, "]")]
+			if idx, err := strconv.Atoi(str); err == nil {
+				if idx > maxIndex {
+					maxIndex = idx
+				}
+			}
+		}
+	}
+	for k := range files {
+		if strings.HasPrefix(k, name) {
+			str := strings.TrimPrefix(k, name)
+			str = str[:strings.Index(str, "]")]
+			if idx, err := strconv.Atoi(str); err == nil {
+				if idx > maxIndex {
+					maxIndex = idx
+				}
+			}
+		}
+	}
+	return maxIndex + 1
+}
+
+func bindAdditionalProperties(additionalProperties reflect.Value, parentStruct reflect.Value, form map[string][]string, files map[string][]*multipart.FileHeader, name string) (bool, error) {
+	hasData := false
+	valueType := additionalProperties.Type().Elem()
+
+	// store all fixed properties in a set
+	fieldsSet := make(map[string]struct{})
+	for i := 0; i < parentStruct.NumField(); i++ {
+		tag := parentStruct.Type().Field(i).Tag.Get(tagName)
+		if !parentStruct.Field(i).CanInterface() || tag == "-" {
+			continue
+		}
+		tag = strings.Split(tag, ",")[0]
+		fieldsSet[tag] = struct{}{}
+	}
+
+	result := reflect.MakeMap(additionalProperties.Type())
+	for k := range form {
+		if strings.HasPrefix(k, name+"[") {
+			key := strings.TrimPrefix(k, name+"[")
+			key = key[:strings.Index(key, "]")]
+			if _, ok := fieldsSet[key]; ok {
+				continue
+			}
+			value := reflect.New(valueType)
+			ptrHasData, err := bindFormImpl(value, form, files, fmt.Sprintf("%s[%s]", name, key))
+			if err != nil {
+				return false, err
+			}
+			result.SetMapIndex(reflect.ValueOf(key), value.Elem())
+			hasData = hasData || ptrHasData
+		}
+	}
+	for k := range files {
+		if strings.HasPrefix(k, name+"[") {
+			key := strings.TrimPrefix(k, name+"[")
+			key = key[:strings.Index(key, "]")]
+			if _, ok := fieldsSet[key]; ok {
+				continue
+			}
+			value := reflect.New(valueType)
+			result.SetMapIndex(reflect.ValueOf(key), value)
+			ptrHasData, err := bindFormImpl(value, form, files, fmt.Sprintf("%s[%s]", name, key))
+			if err != nil {
+				return false, err
+			}
+			result.SetMapIndex(reflect.ValueOf(key), value.Elem())
+			hasData = hasData || ptrHasData
+		}
+	}
+	if hasData {
+		additionalProperties.Set(result)
+	}
+	return hasData, nil
+}
+
+func marshalFormImpl(v reflect.Value, result url.Values, name string) {
+	switch v.Kind() {
+	case reflect.Interface, reflect.Ptr:
+		marshalFormImpl(v.Elem(), result, name)
+	case reflect.Slice:
+		for i := 0; i < v.Len(); i++ {
+			elem := v.Index(i)
+			marshalFormImpl(elem, result, fmt.Sprintf("%s[%v]", name, i))
+		}
+	case reflect.Struct:
+		for i := 0; i < v.NumField(); i++ {
+			field := v.Type().Field(i)
+			tag := field.Tag.Get(tagName)
+			if field.Name == "AdditionalProperties" && tag == "-" {
+				iter := v.MapRange()
+				for iter.Next() {
+					marshalFormImpl(iter.Value(), result, fmt.Sprintf("%s[%s]", name, iter.Key().String()))
+				}
+				continue
+			}
+			if !v.Field(i).CanInterface() || tag == "-" {
+				continue
+			}
+			tag = strings.Split(tag, ",")[0] // extract the name of the tag
+			marshalFormImpl(v.Field(i), result, fmt.Sprintf("%s[%s]", name, tag))
+		}
+	default:
+		result[name] = append(result[name], fmt.Sprint(v.Interface()))
+	}
+}
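
A rough sketch of driving BindForm directly; the Profile struct and form values are hypothetical, while the map shapes mirror what a parsed multipart.Form exposes as Value and File.

package main

import (
	"fmt"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

// Profile is a hypothetical form body; json tags map fields to form keys,
// which is how BindForm looks them up.
type Profile struct {
	Name string   `json:"name"`
	Age  int      `json:"age"`
	Tags []string `json:"tags"`
}

func main() {
	// Values as they would arrive from a parsed urlencoded or multipart
	// body; repeated slice items use the bracket suffix.
	form := map[string][]string{
		"name":   {"Alex"},
		"age":    {"42"},
		"tags[]": {"admin", "user"},
	}

	var p Profile
	// nil is fine for the files and encodings maps when only plain
	// value fields are bound.
	if err := runtime.BindForm(&p, form, nil, nil); err != nil {
		fmt.Println("bind failed:", err)
		return
	}
	fmt.Printf("%+v\n", p) // {Name:Alex Age:42 Tags:[admin user]}
}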

+ 526 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindparam.go

@@ -0,0 +1,526 @@
+// Copyright 2019 DeepMap, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package runtime
+
+import (
+	"encoding"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"net/url"
+	"reflect"
+	"strings"
+	"time"
+
+	"github.com/deepmap/oapi-codegen/pkg/types"
+)
+
+// BindStyledParameter binds a parameter as described in the Path Parameters
+// section here to a Go object:
+// https://swagger.io/docs/specification/serialization/
+// It is a backward compatible function to clients generated with codegen
+// up to version v1.5.5. v1.5.6+ calls the function below.
+func BindStyledParameter(style string, explode bool, paramName string,
+	value string, dest interface{}) error {
+	return BindStyledParameterWithLocation(style, explode, paramName, ParamLocationUndefined, value, dest)
+}
+
+// BindStyledParameterWithLocation binds a parameter as described in the Path Parameters
+// section here to a Go object:
+// https://swagger.io/docs/specification/serialization/
+func BindStyledParameterWithLocation(style string, explode bool, paramName string,
+	paramLocation ParamLocation, value string, dest interface{}) error {
+
+	if value == "" {
+		return fmt.Errorf("parameter '%s' is empty, can't bind its value", paramName)
+	}
+
+	// Based on the location of the parameter, we need to unescape it properly.
+	var err error
+	switch paramLocation {
+	case ParamLocationQuery, ParamLocationUndefined:
+		// We unescape undefined parameter locations here for older generated code,
+		// since prior to this refactoring, they were always query-unescaped.
+		value, err = url.QueryUnescape(value)
+		if err != nil {
+			return fmt.Errorf("error unescaping query parameter '%s': %v", paramName, err)
+		}
+	case ParamLocationPath:
+		value, err = url.PathUnescape(value)
+		if err != nil {
+			return fmt.Errorf("error unescaping path parameter '%s': %v", paramName, err)
+		}
+	default:
+		// Headers and cookies aren't escaped.
+	}
+
+	// If the destination implements encoding.TextUnmarshaler we use it for binding
+	if tu, ok := dest.(encoding.TextUnmarshaler); ok {
+		if err := tu.UnmarshalText([]byte(value)); err != nil {
+			return fmt.Errorf("error unmarshalling '%s' text as %T: %s", value, dest, err)
+		}
+
+		return nil
+	}
+
+	// Everything comes in by pointer, dereference it
+	v := reflect.Indirect(reflect.ValueOf(dest))
+
+	// This is the basic type of the destination object.
+	t := v.Type()
+
+	if t.Kind() == reflect.Struct {
+		// We've got a destination object, we'll create a JSON representation
+		// of the input value, and let the json library deal with the unmarshalling
+		parts, err := splitStyledParameter(style, explode, true, paramName, value)
+		if err != nil {
+			return err
+		}
+
+		return bindSplitPartsToDestinationStruct(paramName, parts, explode, dest)
+	}
+
+	if t.Kind() == reflect.Slice {
+		// Chop up the parameter into parts based on its style
+		parts, err := splitStyledParameter(style, explode, false, paramName, value)
+		if err != nil {
+			return fmt.Errorf("error splitting input '%s' into parts: %s", value, err)
+		}
+
+		return bindSplitPartsToDestinationArray(parts, dest)
+	}
+
+	// Try to bind the remaining types as a base type.
+	return BindStringToObject(value, dest)
+}
+
+// This is a complex set of operations, but each given parameter style can be
+// packed together in multiple ways, using different styles of separators, and
+// different packing strategies based on the explode flag. This function takes
+// as input any parameter format, and unpacks it to a simple list of strings
+// or key-values which we can then treat generically.
+// Why, oh why, great Swagger gods, did you have to make this so complicated?
+func splitStyledParameter(style string, explode bool, object bool, paramName string, value string) ([]string, error) {
+	switch style {
+	case "simple":
+		// In the simple case, we always split on comma
+		parts := strings.Split(value, ",")
+		return parts, nil
+	case "label":
+		// In the label case, it's more tricky. In the no explode case, we have
+		// /users/.3,4,5 for arrays
+		// /users/.role,admin,firstName,Alex for objects
+		// in the explode case, we have:
+		// /users/.3.4.5
+		// /users/.role=admin.firstName=Alex
+		if explode {
+			// In the exploded case, split everything on periods.
+			parts := strings.Split(value, ".")
+			// The first part should be an empty string because we have a
+			// leading period.
+			if parts[0] != "" {
+				return nil, fmt.Errorf("invalid format for label parameter '%s', should start with '.'", paramName)
+			}
+			return parts[1:], nil
+
+		} else {
+			// In the unexploded case, we strip off the leading period.
+			if value[0] != '.' {
+				return nil, fmt.Errorf("invalid format for label parameter '%s', should start with '.'", paramName)
+			}
+			// The rest is comma separated.
+			return strings.Split(value[1:], ","), nil
+		}
+
+	case "matrix":
+		if explode {
+			// In the exploded case, we break everything up on semicolon
+			parts := strings.Split(value, ";")
+			// The first part should always be empty string, since we started
+			// with ;something
+			if parts[0] != "" {
+				return nil, fmt.Errorf("invalid format for matrix parameter '%s', should start with ';'", paramName)
+			}
+			parts = parts[1:]
+			// Now, if we have an object, we just have a list of x=y statements.
+			// For a non-object, like an array, we have id=x, id=y, id=z, etc.,
+			// so we need to strip the prefix from each of them.
+			if !object {
+				prefix := paramName + "="
+				for i := range parts {
+					parts[i] = strings.TrimPrefix(parts[i], prefix)
+				}
+			}
+			return parts, nil
+		} else {
+			// In the unexploded case, parameters will start with ;paramName=
+			prefix := ";" + paramName + "="
+			if !strings.HasPrefix(value, prefix) {
+				return nil, fmt.Errorf("expected parameter '%s' to start with %s", paramName, prefix)
+			}
+			str := strings.TrimPrefix(value, prefix)
+			return strings.Split(str, ","), nil
+		}
+	case "form":
+		var parts []string
+		if explode {
+			parts = strings.Split(value, "&")
+			if !object {
+				prefix := paramName + "="
+				for i := range parts {
+					parts[i] = strings.TrimPrefix(parts[i], prefix)
+				}
+			}
+			return parts, nil
+		} else {
+			parts = strings.Split(value, ",")
+			prefix := paramName + "="
+			for i := range parts {
+				parts[i] = strings.TrimPrefix(parts[i], prefix)
+			}
+		}
+		return parts, nil
+	}
+
+	return nil, fmt.Errorf("unhandled parameter style: %s", style)
+}
+
+// Given a set of values as a slice, create a slice to hold them all, and
+// assign to each one by one.
+func bindSplitPartsToDestinationArray(parts []string, dest interface{}) error {
+	// Everything comes in by pointer, dereference it
+	v := reflect.Indirect(reflect.ValueOf(dest))
+
+	// This is the basic type of the destination object.
+	t := v.Type()
+
+	// We've got a destination array, bind each object one by one.
+	// This generates a slice of the correct element type and length to
+	// hold all the parts.
+	newArray := reflect.MakeSlice(t, len(parts), len(parts))
+	for i, p := range parts {
+		err := BindStringToObject(p, newArray.Index(i).Addr().Interface())
+		if err != nil {
+			return fmt.Errorf("error setting array element: %s", err)
+		}
+	}
+	v.Set(newArray)
+	return nil
+}
+
+// Given a set of chopped up parameter parts, bind them to a destination
+// struct. The exploded parameter controls whether we send key value pairs
+// in the exploded case, or a sequence of values which are interpreted as
+// tuples.
+// Given the struct Id { firstName string, role string }, as in the canonical
+// swagger examples, in the exploded case, we would pass
+// ["firstName=Alex", "role=admin"], where in the non-exploded case, we would
+// pass "firstName", "Alex", "role", "admin"]
+//
+// We punt the hard work of binding these values to the object to the json
+// library. We'll turn those arrays into JSON strings, and unmarshal
+// into the struct.
+func bindSplitPartsToDestinationStruct(paramName string, parts []string, explode bool, dest interface{}) error {
+	// We've got a destination object, we'll create a JSON representation
+	// of the input value, and let the json library deal with the unmarshalling
+	var fields []string
+	if explode {
+		fields = make([]string, len(parts))
+		for i, property := range parts {
+			propertyParts := strings.Split(property, "=")
+			if len(propertyParts) != 2 {
+				return fmt.Errorf("parameter '%s' has invalid exploded format", paramName)
+			}
+			fields[i] = "\"" + propertyParts[0] + "\":\"" + propertyParts[1] + "\""
+		}
+	} else {
+		if len(parts)%2 != 0 {
+			return fmt.Errorf("parameter '%s' has invalid format, property/values need to be pairs", paramName)
+		}
+		fields = make([]string, len(parts)/2)
+		for i := 0; i < len(parts); i += 2 {
+			key := parts[i]
+			value := parts[i+1]
+			fields[i/2] = "\"" + key + "\":\"" + value + "\""
+		}
+	}
+	jsonParam := "{" + strings.Join(fields, ",") + "}"
+	err := json.Unmarshal([]byte(jsonParam), dest)
+	if err != nil {
+		return fmt.Errorf("error binding parameter %s fields: %s", paramName, err)
+	}
+	return nil
+}
+
+// BindQueryParameter works much like BindStyledParameter, however it takes a query argument
+// input array from the url package, since query arguments come through a
+// different path than the styled arguments. They're also exceptionally fussy.
+// For example, consider the exploded and unexploded form parameter examples:
+// (exploded) /users?role=admin&firstName=Alex
+// (unexploded) /users?id=role,admin,firstName,Alex
+//
+// In the unexploded case, we can pull the "id" parameter off the query string
+// and unmarshal via JSON as an intermediate. Easy. In the exploded case, the
+// "id" parameter isn't present at all; we must find "role" and "firstName"
+// instead. What if there is another parameter named "role" that has nothing to
+// do with "id"? We can't tell them apart. This code tries to fail in that case,
+// but the moral of the story is that you shouldn't pass objects via form-styled
+// query arguments; use the Content parameter form instead.
+func BindQueryParameter(style string, explode bool, required bool, paramName string,
+	queryParams url.Values, dest interface{}) error {
+
+	// dv = destination value.
+	dv := reflect.Indirect(reflect.ValueOf(dest))
+
+	// intermediate value form which is either dv or dv dereferenced.
+	v := dv
+
+	// inner code will bind the string's value to this interface.
+	var output interface{}
+
+	if required {
+		// If the parameter is required, then the generated code will pass us
+		// a pointer to it: &int, &object, and so forth. We can directly set
+		// them.
+		output = dest
+	} else {
+		// For optional parameters, we have an extra indirect. An optional
+		// parameter of type "int" will be *int on the struct. We pass that
+		// in by pointer, and have **int.
+
+		// If the destination, is a nil pointer, we need to allocate it.
+		if v.IsNil() {
+			t := v.Type()
+			newValue := reflect.New(t.Elem())
+			// for now, hang onto the output buffer separately from destination,
+			// as we don't want to write anything to destination until we can
+			// unmarshal successfully, and check whether a field is required.
+			output = newValue.Interface()
+		} else {
+			// If the destination isn't nil, just use that.
+			output = v.Interface()
+		}
+
+		// Get rid of that extra indirect as compared to the required case,
+		// so the code below doesn't have to care.
+		v = reflect.Indirect(reflect.ValueOf(output))
+	}
+
+	// This is the basic type of the destination object.
+	t := v.Type()
+	k := t.Kind()
+
+	switch style {
+	case "form":
+		var parts []string
+		if explode {
+			// ok, the explode case in query arguments is very, very annoying,
+			// because an exploded object, such as /users?role=admin&firstName=Alex
+			// isn't actually present in the parameter array. We have to do
+			// different things based on destination type.
+			values, found := queryParams[paramName]
+			var err error
+
+			switch k {
+			case reflect.Slice:
+				// In the slice case, we simply use the arguments provided by
+				// http library.
+
+				if !found {
+					if required {
+						return fmt.Errorf("query parameter '%s' is required", paramName)
+					} else {
+						// If an optional parameter is not found, we do nothing,
+						return nil
+					}
+				}
+				err = bindSplitPartsToDestinationArray(values, output)
+			case reflect.Struct:
+				// This case is really annoying, and error prone, but the
+				// form style object binding doesn't tell us which arguments
+				// in the query string correspond to the object's fields. We'll
+				// try to bind field by field.
+				var fieldsPresent bool
+				fieldsPresent, err = bindParamsToExplodedObject(paramName, queryParams, output)
+				// If no fields were set, and there is no error, we will not fall
+				// through to assign the destination.
+				if !fieldsPresent {
+					return nil
+				}
+			default:
+				// Primitive object case. We expect to have 1 value to
+				// unmarshal.
+				if len(values) == 0 {
+					if required {
+						return fmt.Errorf("query parameter '%s' is required", paramName)
+					} else {
+						return nil
+					}
+				}
+				if len(values) != 1 {
+					return fmt.Errorf("multiple values for single value parameter '%s'", paramName)
+				}
+
+				if !found {
+					if required {
+						return fmt.Errorf("query parameter '%s' is required", paramName)
+					} else {
+						// If an optional parameter is not found, we do nothing,
+						return nil
+					}
+				}
+				err = BindStringToObject(values[0], output)
+			}
+			if err != nil {
+				return err
+			}
+			// If the parameter is required, and we've successfully unmarshaled
+			// it, this assigns the new object to the pointer pointer.
+			if !required {
+				dv.Set(reflect.ValueOf(output))
+			}
+			return nil
+		} else {
+			values, found := queryParams[paramName]
+			if !found {
+				if required {
+					return fmt.Errorf("query parameter '%s' is required", paramName)
+				} else {
+					return nil
+				}
+			}
+			if len(values) != 1 {
+				return fmt.Errorf("parameter '%s' is not exploded, but is specified multiple times", paramName)
+			}
+			parts = strings.Split(values[0], ",")
+		}
+		var err error
+		switch k {
+		case reflect.Slice:
+			err = bindSplitPartsToDestinationArray(parts, output)
+		case reflect.Struct:
+			err = bindSplitPartsToDestinationStruct(paramName, parts, explode, output)
+		default:
+			if len(parts) == 0 {
+				if required {
+					return fmt.Errorf("query parameter '%s' is required", paramName)
+				} else {
+					return nil
+				}
+			}
+			if len(parts) != 1 {
+				return fmt.Errorf("multiple values for single value parameter '%s'", paramName)
+			}
+			err = BindStringToObject(parts[0], output)
+		}
+		if err != nil {
+			return err
+		}
+		if !required {
+			dv.Set(reflect.ValueOf(output))
+		}
+		return nil
+	case "deepObject":
+		if !explode {
+			return errors.New("deepObjects must be exploded")
+		}
+		return UnmarshalDeepObject(dest, paramName, queryParams)
+	case "spaceDelimited", "pipeDelimited":
+		return fmt.Errorf("query arguments of style '%s' aren't yet supported", style)
+	default:
+		return fmt.Errorf("style '%s' on parameter '%s' is invalid", style, paramName)
+
+	}
+}
+
+// bindParamsToExplodedObject reflects the destination structure, and pulls the value for
+// each settable field from the given parameters map. This is to deal with the
+// exploded form styled object which may occupy any number of parameter names.
+// We don't try to be smart here, if the field exists as a query argument,
+// set its value. This function returns a boolean, telling us whether there was
+// anything to bind. There will be nothing to bind if a parameter isn't found by name,
+// or none of an exploded object's fields are present.
+func bindParamsToExplodedObject(paramName string, values url.Values, dest interface{}) (bool, error) {
+	// Dereference pointers to their destination values
+	binder, v, t := indirect(dest)
+	if binder != nil {
+		_, found := values[paramName]
+		if !found {
+			return false, nil
+		}
+		return true, BindStringToObject(values.Get(paramName), dest)
+	}
+	if t.Kind() != reflect.Struct {
+		return false, fmt.Errorf("unmarshalling query arg '%s' into wrong type", paramName)
+	}
+
+	fieldsPresent := false
+	for i := 0; i < t.NumField(); i++ {
+		fieldT := t.Field(i)
+
+		// Skip unsettable fields, such as internal ones.
+		if !v.Field(i).CanSet() {
+			continue
+		}
+
+		// Find the json annotation on the field, and use the json specified
+		// name if available, otherwise, just the field name.
+		tag := fieldT.Tag.Get("json")
+		fieldName := fieldT.Name
+		if tag != "" {
+			tagParts := strings.Split(tag, ",")
+			name := tagParts[0]
+			if name != "" {
+				fieldName = name
+			}
+		}
+
+		// At this point, we look up field name in the parameter list.
+		fieldVal, found := values[fieldName]
+		if found {
+			if len(fieldVal) != 1 {
+				return false, fmt.Errorf("field '%s' specified multiple times for param '%s'", fieldName, paramName)
+			}
+			err := BindStringToObject(fieldVal[0], v.Field(i).Addr().Interface())
+			if err != nil {
+				return false, fmt.Errorf("could not bind query arg '%s' to request object: %s'", paramName, err)
+			}
+			fieldsPresent = true
+		}
+	}
+	return fieldsPresent, nil
+}
+
+// indirect dereferences the destination and returns its value and type. If the
+// destination implements Binder, or is convertible to time.Time or types.Date,
+// the destination itself is returned so the caller binds it as a primitive
+// rather than field by field.
+func indirect(dest interface{}) (interface{}, reflect.Value, reflect.Type) {
+	v := reflect.ValueOf(dest)
+	if v.Type().NumMethod() > 0 && v.CanInterface() {
+		if u, ok := v.Interface().(Binder); ok {
+			return u, reflect.Value{}, nil
+		}
+	}
+	v = reflect.Indirect(v)
+	t := v.Type()
+	// special handling for custom types which might look like an object. We
+	// don't want to use object binding on them, but rather treat them as
+	// primitive types. time.Time{} is a unique case since we can't add a Binder
+	// to it without changing the underlying generated code.
+	if t.ConvertibleTo(reflect.TypeOf(time.Time{})) {
+		return dest, reflect.Value{}, nil
+	}
+	if t.ConvertibleTo(reflect.TypeOf(types.Date{})) {
+		return dest, reflect.Value{}, nil
+	}
+	return nil, v, t
+}
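
A small sketch of binding styled path parameters with BindStyledParameterWithLocation; the Id struct stands in for a generated parameter object and is purely illustrative.

package main

import (
	"fmt"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

func main() {
	// "simple" style array, e.g. the path segment /users/3,4,5.
	var ids []int
	err := runtime.BindStyledParameterWithLocation(
		"simple", false, "ids", runtime.ParamLocationPath, "3,4,5", &ids)
	if err != nil {
		fmt.Println("bind failed:", err)
		return
	}
	fmt.Println(ids) // [3 4 5]

	// "label" style exploded object, e.g. /users/.role=admin.firstName=Alex.
	// Id is a hypothetical stand-in for a generated struct.
	type Id struct {
		Role      string `json:"role"`
		FirstName string `json:"firstName"`
	}
	var id Id
	err = runtime.BindStyledParameterWithLocation(
		"label", true, "id", runtime.ParamLocationPath, ".role=admin.firstName=Alex", &id)
	if err != nil {
		fmt.Println("bind failed:", err)
		return
	}
	fmt.Printf("%+v\n", id) // {Role:admin FirstName:Alex}
}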

+ 174 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/runtime/bindstring.go

@@ -0,0 +1,174 @@
+// Copyright 2019 DeepMap, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package runtime
+
+import (
+	"encoding"
+	"errors"
+	"fmt"
+	"reflect"
+	"strconv"
+	"time"
+
+	"github.com/deepmap/oapi-codegen/pkg/types"
+)
+
+// BindStringToObject takes a string, and attempts to assign it to the destination
+// interface via whatever type conversion is necessary. We have to do this
+// via reflection instead of a much simpler type switch so that we can handle
+// type aliases. This function was the easy way out; the better way, since we
+// know the destination type at each place this is used, would be to generate
+// code that reads each specific type.
+func BindStringToObject(src string, dst interface{}) error {
+	var err error
+
+	v := reflect.ValueOf(dst)
+	t := reflect.TypeOf(dst)
+
+	// We need to dereference pointers
+	if t.Kind() == reflect.Ptr {
+		v = reflect.Indirect(v)
+		t = v.Type()
+	}
+
+	// For some optional args
+	if t.Kind() == reflect.Ptr {
+		if v.IsNil() {
+			v.Set(reflect.New(t.Elem()))
+		}
+
+		v = reflect.Indirect(v)
+		t = v.Type()
+	}
+
+	// The resulting type must be settable. reflect will catch issues like
+	// passing the destination by value.
+	if !v.CanSet() {
+		return errors.New("destination is not settable")
+	}
+
+	switch t.Kind() {
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		var val int64
+		val, err = strconv.ParseInt(src, 10, 64)
+		if err == nil {
+			if v.OverflowInt(val) {
+				err = fmt.Errorf("value '%s' overflows destination of type: %s", src, t.Kind())
+			}
+			if err == nil {
+				v.SetInt(val)
+			}
+		}
+	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
+		var val uint64
+		val, err = strconv.ParseUint(src, 10, 64)
+		if err == nil {
+			if v.OverflowUint(val) {
+				err = fmt.Errorf("value '%s' overflows destination of type: %s", src, t.Kind())
+			}
+			v.SetUint(val)
+		}
+	case reflect.String:
+		v.SetString(src)
+		err = nil
+	case reflect.Float64, reflect.Float32:
+		var val float64
+		val, err = strconv.ParseFloat(src, 64)
+		if err == nil {
+			if v.OverflowFloat(val) {
+				err = fmt.Errorf("value '%s' overflows destination of type: %s", src, t.Kind())
+			}
+			v.SetFloat(val)
+		}
+	case reflect.Bool:
+		var val bool
+		val, err = strconv.ParseBool(src)
+		if err == nil {
+			v.SetBool(val)
+		}
+	case reflect.Array:
+		if tu, ok := dst.(encoding.TextUnmarshaler); ok {
+			if err := tu.UnmarshalText([]byte(src)); err != nil {
+				return fmt.Errorf("error unmarshalling '%s' text as %T: %s", src, dst, err)
+			}
+
+			return nil
+		}
+		fallthrough
+	case reflect.Struct:
+		// if this is not of type Time or of type Date look to see if this is of type Binder.
+		if dstType, ok := dst.(Binder); ok {
+			return dstType.Bind(src)
+		}
+
+		if t.ConvertibleTo(reflect.TypeOf(time.Time{})) {
+			// Don't fail on empty string.
+			if src == "" {
+				return nil
+			}
+			// Time is a special case of a struct that we handle
+			parsedTime, err := time.Parse(time.RFC3339Nano, src)
+			if err != nil {
+				parsedTime, err = time.Parse(types.DateFormat, src)
+				if err != nil {
+					return fmt.Errorf("error parsing '%s' as RFC3339 or 2006-01-02 time: %s", src, err)
+				}
+			}
+			// So, assigning this gets a little fun. We have a value to the
+			// dereference destination. We can't do a conversion to
+			// time.Time because the result isn't assignable, so we need to
+			// convert pointers.
+			if t != reflect.TypeOf(time.Time{}) {
+				vPtr := v.Addr()
+				vtPtr := vPtr.Convert(reflect.TypeOf(&time.Time{}))
+				v = reflect.Indirect(vtPtr)
+			}
+			v.Set(reflect.ValueOf(parsedTime))
+			return nil
+		}
+
+		if t.ConvertibleTo(reflect.TypeOf(types.Date{})) {
+			// Don't fail on empty string.
+			if src == "" {
+				return nil
+			}
+			parsedTime, err := time.Parse(types.DateFormat, src)
+			if err != nil {
+				return fmt.Errorf("error parsing '%s' as date: %s", src, err)
+			}
+			parsedDate := types.Date{Time: parsedTime}
+
+			// We have to do the same dance here to assign, just like with times
+			// above.
+			if t != reflect.TypeOf(types.Date{}) {
+				vPtr := v.Addr()
+				vtPtr := vPtr.Convert(reflect.TypeOf(&types.Date{}))
+				v = reflect.Indirect(vtPtr)
+			}
+			v.Set(reflect.ValueOf(parsedDate))
+			return nil
+		}
+
+		// We fall through to the error case below if we haven't handled the
+		// destination type above.
+		fallthrough
+	default:
+		// We've got a bunch of types unimplemented, don't fail silently.
+		err = fmt.Errorf("can not bind to destination of type: %s", t.Kind())
+	}
+	if err != nil {
+		return fmt.Errorf("error binding string parameter: %s", err)
+	}
+	return nil
+}
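
A brief sketch of the conversions BindStringToObject performs, here for a sized integer (with overflow checking) and a time.Time destination.

package main

import (
	"fmt"
	"time"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

func main() {
	// Integers are parsed with strconv and checked against the width of
	// the destination type before being assigned.
	var n int32
	if err := runtime.BindStringToObject("12345", &n); err != nil {
		fmt.Println("bind failed:", err)
		return
	}
	fmt.Println(n) // 12345

	// time.Time is special-cased: RFC3339 is tried first, then the plain
	// 2006-01-02 date format.
	var ts time.Time
	if err := runtime.BindStringToObject("2023-01-02T15:04:05Z", &ts); err != nil {
		fmt.Println("bind failed:", err)
		return
	}
	fmt.Println(ts.Year()) // 2023
}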

+ 358 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/runtime/deepobject.go

@@ -0,0 +1,358 @@
+package runtime
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+	"net/url"
+	"reflect"
+	"sort"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/deepmap/oapi-codegen/pkg/types"
+)
+
+func marshalDeepObject(in interface{}, path []string) ([]string, error) {
+	var result []string
+
+	switch t := in.(type) {
+	case []interface{}:
+		// For the array, we will use numerical subscripts of the form [x],
+		// in the same order as the array.
+		for i, iface := range t {
+			newPath := append(path, strconv.Itoa(i))
+			fields, err := marshalDeepObject(iface, newPath)
+			if err != nil {
+				return nil, fmt.Errorf("error traversing array: %w", err)
+			}
+			result = append(result, fields...)
+		}
+	case map[string]interface{}:
+		// For a map, each key (field name) becomes a member of the path, and
+		// we recurse. First, sort the keys.
+		keys := make([]string, len(t))
+		i := 0
+		for k := range t {
+			keys[i] = k
+			i++
+		}
+		sort.Strings(keys)
+
+		// Now, for each key, we recursively marshal it.
+		for _, k := range keys {
+			newPath := append(path, k)
+			fields, err := marshalDeepObject(t[k], newPath)
+			if err != nil {
+				return nil, fmt.Errorf("error traversing map: %w", err)
+			}
+			result = append(result, fields...)
+		}
+	default:
+		// Now, for a concrete value, we will turn the path elements
+		// into a deepObject style set of subscripts. [a, b, c] turns into
+		// [a][b][c]
+		prefix := "[" + strings.Join(path, "][") + "]"
+		result = []string{
+			prefix + fmt.Sprintf("=%v", t),
+		}
+	}
+	return result, nil
+}
+
+func MarshalDeepObject(i interface{}, paramName string) (string, error) {
+	// We're going to marshal to JSON and unmarshal into an interface{},
+	// which will use the json pkg to deal with all the field annotations. We
+	// can then walk the generic object structure to produce a deepObject. This
+	// isn't efficient and it would be more efficient to reflect on our own,
+	// but it's complicated, error-prone code.
+	buf, err := json.Marshal(i)
+	if err != nil {
+		return "", fmt.Errorf("failed to marshal input to JSON: %w", err)
+	}
+	var i2 interface{}
+	err = json.Unmarshal(buf, &i2)
+	if err != nil {
+		return "", fmt.Errorf("failed to unmarshal JSON: %w", err)
+	}
+	fields, err := marshalDeepObject(i2, nil)
+	if err != nil {
+		return "", fmt.Errorf("error traversing JSON structure: %w", err)
+	}
+
+	// Prefix the param name to each subscripted field.
+	for i := range fields {
+		fields[i] = paramName + fields[i]
+	}
+	return strings.Join(fields, "&"), nil
+}
+
+type fieldOrValue struct {
+	fields map[string]fieldOrValue
+	value  string
+}
+
+func (f *fieldOrValue) appendPathValue(path []string, value string) {
+	fieldName := path[0]
+	if len(path) == 1 {
+		f.fields[fieldName] = fieldOrValue{value: value}
+		return
+	}
+
+	pv, found := f.fields[fieldName]
+	if !found {
+		pv = fieldOrValue{
+			fields: make(map[string]fieldOrValue),
+		}
+		f.fields[fieldName] = pv
+	}
+	pv.appendPathValue(path[1:], value)
+}
+
+func makeFieldOrValue(paths [][]string, values []string) fieldOrValue {
+
+	f := fieldOrValue{
+		fields: make(map[string]fieldOrValue),
+	}
+	for i := range paths {
+		path := paths[i]
+		value := values[i]
+		f.appendPathValue(path, value)
+	}
+	return f
+}
+
+func UnmarshalDeepObject(dst interface{}, paramName string, params url.Values) error {
+	// Params are all the query args, so we need those that look like
+	// "paramName["...
+	var fieldNames []string
+	var fieldValues []string
+	searchStr := paramName + "["
+	for pName, pValues := range params {
+		if strings.HasPrefix(pName, searchStr) {
+			// trim the parameter name from the full name.
+			pName = pName[len(paramName):]
+			fieldNames = append(fieldNames, pName)
+			if len(pValues) != 1 {
+				return fmt.Errorf("%s has multiple values", pName)
+			}
+			fieldValues = append(fieldValues, pValues[0])
+		}
+	}
+
+	// Now, for each field, reconstruct its subscript path and value
+	paths := make([][]string, len(fieldNames))
+	for i, path := range fieldNames {
+		path = strings.TrimLeft(path, "[")
+		path = strings.TrimRight(path, "]")
+		paths[i] = strings.Split(path, "][")
+	}
+
+	fieldPaths := makeFieldOrValue(paths, fieldValues)
+	err := assignPathValues(dst, fieldPaths)
+	if err != nil {
+		return fmt.Errorf("error assigning value to destination: %w", err)
+	}
+
+	return nil
+}
+
+// This returns a field name, either using the variable name, or the json
+// annotation if that exists.
+func getFieldName(f reflect.StructField) string {
+	n := f.Name
+	tag, found := f.Tag.Lookup("json")
+	if found {
+		// If we have a json field, and the first part of it before the
+		// first comma is non-empty, that's our field name.
+		parts := strings.Split(tag, ",")
+		if parts[0] != "" {
+			n = parts[0]
+		}
+	}
+	return n
+}
+
+// Create a map of field names that we'll see in the deepObject to reflect
+// field indices on the given type.
+func fieldIndicesByJsonTag(i interface{}) (map[string]int, error) {
+	t := reflect.TypeOf(i)
+	if t.Kind() != reflect.Struct {
+		return nil, errors.New("expected a struct as input")
+	}
+
+	n := t.NumField()
+	fieldMap := make(map[string]int)
+	for i := 0; i < n; i++ {
+		field := t.Field(i)
+		fieldName := getFieldName(field)
+		fieldMap[fieldName] = i
+	}
+	return fieldMap, nil
+}
+
+func assignPathValues(dst interface{}, pathValues fieldOrValue) error {
+	//t := reflect.TypeOf(dst)
+	v := reflect.ValueOf(dst)
+
+	iv := reflect.Indirect(v)
+	it := iv.Type()
+
+	switch it.Kind() {
+	case reflect.Slice:
+		sliceLength := len(pathValues.fields)
+		dstSlice := reflect.MakeSlice(it, sliceLength, sliceLength)
+		err := assignSlice(dstSlice, pathValues)
+		if err != nil {
+			return fmt.Errorf("error assigning slice: %w", err)
+		}
+		iv.Set(dstSlice)
+		return nil
+	case reflect.Struct:
+		// Some special types we care about are structs. Handle them
+		// here. They may be redefined, so we need to do some hoop
+		// jumping. If the types are aliased, we need to type convert
+		// the pointer, then set the value of the dereference pointer.
+
+		// We check to see if the object implements the Binder interface first.
+		if dst, isBinder := v.Interface().(Binder); isBinder {
+			return dst.Bind(pathValues.value)
+		}
+		// Then check the legacy types
+		if it.ConvertibleTo(reflect.TypeOf(types.Date{})) {
+			var date types.Date
+			var err error
+			date.Time, err = time.Parse(types.DateFormat, pathValues.value)
+			if err != nil {
+				return fmt.Errorf("invalid date format: %w", err)
+			}
+			dst := iv
+			if it != reflect.TypeOf(types.Date{}) {
+				// Types are aliased, convert the pointers.
+				ivPtr := iv.Addr()
+				aPtr := ivPtr.Convert(reflect.TypeOf(&types.Date{}))
+				dst = reflect.Indirect(aPtr)
+			}
+			dst.Set(reflect.ValueOf(date))
+		}
+		if it.ConvertibleTo(reflect.TypeOf(time.Time{})) {
+			var tm time.Time
+			var err error
+			tm, err = time.Parse(time.RFC3339Nano, pathValues.value)
+			if err != nil {
+				// Fall back to parsing it as a date.
+				tm, err = time.Parse(types.DateFormat, pathValues.value)
+				if err != nil {
+					return fmt.Errorf("error parsing '%s' as RFC3339 or 2006-01-02 time: %s", pathValues.value, err)
+				}
+			}
+			dst := iv
+			if it != reflect.TypeOf(time.Time{}) {
+				// Types are aliased, convert the pointers.
+				ivPtr := iv.Addr()
+				aPtr := ivPtr.Convert(reflect.TypeOf(&time.Time{}))
+				dst = reflect.Indirect(aPtr)
+			}
+			dst.Set(reflect.ValueOf(tm))
+		}
+		fieldMap, err := fieldIndicesByJsonTag(iv.Interface())
+		if err != nil {
+			return fmt.Errorf("failed enumerating fields: %w", err)
+		}
+		for _, fieldName := range sortedFieldOrValueKeys(pathValues.fields) {
+			fieldValue := pathValues.fields[fieldName]
+			fieldIndex, found := fieldMap[fieldName]
+			if !found {
+				return fmt.Errorf("field [%s] is not present in destination object", fieldName)
+			}
+			field := iv.Field(fieldIndex)
+			err = assignPathValues(field.Addr().Interface(), fieldValue)
+			if err != nil {
+				return fmt.Errorf("error assigning field [%s]: %w", fieldName, err)
+			}
+		}
+		return nil
+	case reflect.Ptr:
+		// If we have a pointer after redirecting, it means we're dealing with
+		// an optional field, such as *string, which was passed in as &foo. We
+		// will allocate it if necessary, and call ourselves with a different
+		// interface.
+		dstVal := reflect.New(it.Elem())
+		dstPtr := dstVal.Interface()
+		err := assignPathValues(dstPtr, pathValues)
+		iv.Set(dstVal)
+		return err
+	case reflect.Bool:
+		val, err := strconv.ParseBool(pathValues.value)
+		if err != nil {
+			return fmt.Errorf("expected a valid bool, got %s", pathValues.value)
+		}
+		iv.SetBool(val)
+		return nil
+	case reflect.Float32:
+		val, err := strconv.ParseFloat(pathValues.value, 32)
+		if err != nil {
+			return fmt.Errorf("expected a valid float, got %s", pathValues.value)
+		}
+		iv.SetFloat(val)
+		return nil
+	case reflect.Float64:
+		val, err := strconv.ParseFloat(pathValues.value, 64)
+		if err != nil {
+			return fmt.Errorf("expected a valid float, got %s", pathValues.value)
+		}
+		iv.SetFloat(val)
+		return nil
+	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+		val, err := strconv.ParseInt(pathValues.value, 10, 64)
+		if err != nil {
+			return fmt.Errorf("expected a valid int, got %s", pathValues.value)
+		}
+		iv.SetInt(val)
+		return nil
+	case reflect.String:
+		iv.SetString(pathValues.value)
+		return nil
+	default:
+		return errors.New("unhandled type: " + it.String())
+	}
+}
+
+func assignSlice(dst reflect.Value, pathValues fieldOrValue) error {
+	// Gather up the values
+	nValues := len(pathValues.fields)
+	values := make([]string, nValues)
+	// We expect to have consecutive array indices in the map
+	for i := 0; i < nValues; i++ {
+		indexStr := strconv.Itoa(i)
+		fv, found := pathValues.fields[indexStr]
+		if !found {
+			return errors.New("array deepObjects must have consecutive indices")
+		}
+		values[i] = fv.value
+	}
+
+	// This could be cleaner, but we can call into assignPathValues to
+	// avoid recreating this logic.
+	for i := 0; i < nValues; i++ {
+		dstElem := dst.Index(i).Addr()
+		err := assignPathValues(dstElem.Interface(), fieldOrValue{value: values[i]})
+		if err != nil {
+			return fmt.Errorf("error binding array: %w", err)
+		}
+	}
+
+	return nil
+}
+
+func sortedFieldOrValueKeys(m map[string]fieldOrValue) []string {
+	keys := make([]string, 0, len(m))
+	for k := range m {
+		keys = append(keys, k)
+	}
+	sort.Strings(keys)
+	return keys
+}
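
A sketch of a deepObject round trip through the two exported helpers above; the Filter struct is a hypothetical query object.

package main

import (
	"fmt"
	"net/url"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

// Filter is a hypothetical query object serialized with style=deepObject.
type Filter struct {
	Name string `json:"name"`
	Age  int    `json:"age"`
}

func main() {
	// Marshal produces name-subscripted fragments with sorted keys,
	// e.g. filter[age]=30&filter[name]=Alex.
	q, err := runtime.MarshalDeepObject(Filter{Name: "Alex", Age: 30}, "filter")
	if err != nil {
		fmt.Println("marshal failed:", err)
		return
	}
	fmt.Println(q)

	// Unmarshal reverses the process, starting from parsed query values.
	values, err := url.ParseQuery(q)
	if err != nil {
		fmt.Println("parse failed:", err)
		return
	}
	var f Filter
	if err := runtime.UnmarshalDeepObject(&f, "filter", values); err != nil {
		fmt.Println("unmarshal failed:", err)
		return
	}
	fmt.Printf("%+v\n", f) // {Name:Alex Age:30}
}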

+ 26 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/runtime/jsonmerge.go

@@ -0,0 +1,26 @@
+package runtime
+
+import (
+	"encoding/json"
+
+	"github.com/apapsch/go-jsonmerge/v2"
+)
+
+// JsonMerge merges two JSON representations into a single object. `data` is the
+// existing representation and `patch` is the new data to be merged in
+func JsonMerge(data, patch json.RawMessage) (json.RawMessage, error) {
+	merger := jsonmerge.Merger{
+		CopyNonexistent: true,
+	}
+	if data == nil {
+		data = []byte(`{}`)
+	}
+	if patch == nil {
+		patch = []byte(`{}`)
+	}
+	merged, err := merger.MergeBytes(data, patch)
+	if err != nil {
+		return nil, err
+	}
+	return merged, nil
+}
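
A short usage sketch for JsonMerge; the merged key order depends on go-jsonmerge's re-marshalling, so the comment only describes the expected content.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

func main() {
	// Existing representation plus a patch: keys in the patch override
	// matching keys in data, and new keys are copied in.
	data := json.RawMessage(`{"name":"Alex","role":"user"}`)
	patch := json.RawMessage(`{"role":"admin","active":true}`)

	merged, err := runtime.JsonMerge(data, patch)
	if err != nil {
		fmt.Println("merge failed:", err)
		return
	}
	// Expect name "Alex", role "admin", active true (key order may vary).
	fmt.Println(string(merged))
}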

+ 473 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/runtime/styleparam.go

@@ -0,0 +1,473 @@
+// Copyright 2019 DeepMap, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+package runtime
+
+import (
+	"bytes"
+	"encoding"
+	"encoding/json"
+	"errors"
+	"fmt"
+	"net/url"
+	"reflect"
+	"sort"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/deepmap/oapi-codegen/pkg/types"
+)
+
+// Parameter escaping works differently based on where the parameter is found
+
+type ParamLocation int
+
+const (
+	ParamLocationUndefined ParamLocation = iota
+	ParamLocationQuery
+	ParamLocationPath
+	ParamLocationHeader
+	ParamLocationCookie
+)
+
+// StyleParam is used by older generated code, and must remain compatible
+// with that code. It is not to be used in new templates. Please see the
+// function below, which can specialize its output based on the location of
+// the parameter.
+func StyleParam(style string, explode bool, paramName string, value interface{}) (string, error) {
+	return StyleParamWithLocation(style, explode, paramName, ParamLocationUndefined, value)
+}
+
+// StyleParamWithLocation takes an input value, such as a primitive type, array
+// or object, and turns it into a parameter based on the style/explode
+// definition, performing whatever escaping is necessary based on the parameter
+// location.
+func StyleParamWithLocation(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) {
+	t := reflect.TypeOf(value)
+	v := reflect.ValueOf(value)
+
+	// Values may be passed in by pointer; we need to dereference them, and
+	// return an error on nil.
+	if t.Kind() == reflect.Ptr {
+		if v.IsNil() {
+			return "", fmt.Errorf("value is a nil pointer")
+		}
+		v = reflect.Indirect(v)
+		t = v.Type()
+	}
+
+	// If the value implements encoding.TextMarshaler we use it for marshaling
+	// https://github.com/deepmap/oapi-codegen/issues/504
+	if tu, ok := value.(encoding.TextMarshaler); ok {
+		t := reflect.Indirect(reflect.ValueOf(value)).Type()
+		convertableToTime := t.ConvertibleTo(reflect.TypeOf(time.Time{}))
+		convertableToDate := t.ConvertibleTo(reflect.TypeOf(types.Date{}))
+
+		// Since both time.Time and types.Date implement encoding.TextMarshaler,
+		// we should avoid calling their MarshalText() here.
+		if !convertableToTime && !convertableToDate {
+			b, err := tu.MarshalText()
+			if err != nil {
+				return "", fmt.Errorf("error marshaling '%s' as text: %s", value, err)
+			}
+
+			return stylePrimitive(style, explode, paramName, paramLocation, string(b))
+		}
+	}
+
+	switch t.Kind() {
+	case reflect.Slice:
+		n := v.Len()
+		sliceVal := make([]interface{}, n)
+		for i := 0; i < n; i++ {
+			sliceVal[i] = v.Index(i).Interface()
+		}
+		return styleSlice(style, explode, paramName, paramLocation, sliceVal)
+	case reflect.Struct:
+		return styleStruct(style, explode, paramName, paramLocation, value)
+	case reflect.Map:
+		return styleMap(style, explode, paramName, paramLocation, value)
+	default:
+		return stylePrimitive(style, explode, paramName, paramLocation, value)
+	}
+}
+
+func styleSlice(style string, explode bool, paramName string, paramLocation ParamLocation, values []interface{}) (string, error) {
+	if style == "deepObject" {
+		if !explode {
+			return "", errors.New("deepObjects must be exploded")
+		}
+		return MarshalDeepObject(values, paramName)
+	}
+
+	var prefix string
+	var separator string
+
+	switch style {
+	case "simple":
+		separator = ","
+	case "label":
+		prefix = "."
+		if explode {
+			separator = "."
+		} else {
+			separator = ","
+		}
+	case "matrix":
+		prefix = fmt.Sprintf(";%s=", paramName)
+		if explode {
+			separator = prefix
+		} else {
+			separator = ","
+		}
+	case "form":
+		prefix = fmt.Sprintf("%s=", paramName)
+		if explode {
+			separator = "&" + prefix
+		} else {
+			separator = ","
+		}
+	case "spaceDelimited":
+		prefix = fmt.Sprintf("%s=", paramName)
+		if explode {
+			separator = "&" + prefix
+		} else {
+			separator = " "
+		}
+	case "pipeDelimited":
+		prefix = fmt.Sprintf("%s=", paramName)
+		if explode {
+			separator = "&" + prefix
+		} else {
+			separator = "|"
+		}
+	default:
+		return "", fmt.Errorf("unsupported style '%s'", style)
+	}
+
+	// We're going to assume here that the array contains only simple types.
+	var err error
+	var part string
+	parts := make([]string, len(values))
+	for i, v := range values {
+		part, err = primitiveToString(v)
+		part = escapeParameterString(part, paramLocation)
+		parts[i] = part
+		if err != nil {
+			return "", fmt.Errorf("error formatting '%s': %s", paramName, err)
+		}
+	}
+	return prefix + strings.Join(parts, separator), nil
+}
+
+func sortedKeys(strMap map[string]string) []string {
+	keys := make([]string, len(strMap))
+	i := 0
+	for k := range strMap {
+		keys[i] = k
+		i++
+	}
+	sort.Strings(keys)
+	return keys
+}
+
+// These are special cases. The value may be a date, time, or UUID, in which
+// case we marshal it into the correct format.
+func marshalKnownTypes(value interface{}) (string, bool) {
+	v := reflect.Indirect(reflect.ValueOf(value))
+	t := v.Type()
+
+	if t.ConvertibleTo(reflect.TypeOf(time.Time{})) {
+		tt := v.Convert(reflect.TypeOf(time.Time{}))
+		timeVal := tt.Interface().(time.Time)
+		return timeVal.Format(time.RFC3339Nano), true
+	}
+
+	if t.ConvertibleTo(reflect.TypeOf(types.Date{})) {
+		d := v.Convert(reflect.TypeOf(types.Date{}))
+		dateVal := d.Interface().(types.Date)
+		return dateVal.Format(types.DateFormat), true
+	}
+
+	if t.ConvertibleTo(reflect.TypeOf(types.UUID{})) {
+		u := v.Convert(reflect.TypeOf(types.UUID{}))
+		uuidVal := u.Interface().(types.UUID)
+		return uuidVal.String(), true
+	}
+
+	return "", false
+}
+
+func styleStruct(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) {
+	if timeVal, ok := marshalKnownTypes(value); ok {
+		styledVal, err := stylePrimitive(style, explode, paramName, paramLocation, timeVal)
+		if err != nil {
+			return "", fmt.Errorf("failed to style time: %w", err)
+		}
+		return styledVal, nil
+	}
+
+	if style == "deepObject" {
+		if !explode {
+			return "", errors.New("deepObjects must be exploded")
+		}
+		return MarshalDeepObject(value, paramName)
+	}
+
+	// If the input has a Marshaler (for example, an object with additional
+	// properties or anyOf), we use that Marshaler and convert the result into
+	// an interface{} before styling.
+	if m, ok := value.(json.Marshaler); ok {
+		buf, err := m.MarshalJSON()
+		if err != nil {
+			return "", fmt.Errorf("failed to marshal input to JSON: %w", err)
+		}
+		e := json.NewDecoder(bytes.NewReader(buf))
+		e.UseNumber()
+		var i2 interface{}
+		err = e.Decode(&i2)
+		if err != nil {
+			return "", fmt.Errorf("failed to unmarshal JSON: %w", err)
+		}
+		s, err := StyleParamWithLocation(style, explode, paramName, paramLocation, i2)
+		if err != nil {
+			return "", fmt.Errorf("error style JSON structure: %w", err)
+		}
+		return s, nil
+	}
+
+	// Otherwise, we need to build a dictionary of the struct's fields. Each
+	// field may only be a primitive value.
+	v := reflect.ValueOf(value)
+	t := reflect.TypeOf(value)
+	fieldDict := make(map[string]string)
+
+	for i := 0; i < t.NumField(); i++ {
+		fieldT := t.Field(i)
+		// Find the json tag on the field and use the JSON-specified name if
+		// available; otherwise, fall back to the Go field name.
+		tag := fieldT.Tag.Get("json")
+		fieldName := fieldT.Name
+		if tag != "" {
+			tagParts := strings.Split(tag, ",")
+			name := tagParts[0]
+			if name != "" {
+				fieldName = name
+			}
+		}
+		f := v.Field(i)
+
+		// Unset optional fields will be nil pointers, skip over those.
+		if f.Type().Kind() == reflect.Ptr && f.IsNil() {
+			continue
+		}
+		str, err := primitiveToString(f.Interface())
+		if err != nil {
+			return "", fmt.Errorf("error formatting '%s': %s", paramName, err)
+		}
+		fieldDict[fieldName] = str
+	}
+
+	return processFieldDict(style, explode, paramName, paramLocation, fieldDict)
+}
+
+func styleMap(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) {
+	if style == "deepObject" {
+		if !explode {
+			return "", errors.New("deepObjects must be exploded")
+		}
+		return MarshalDeepObject(value, paramName)
+	}
+
+	dict, ok := value.(map[string]interface{})
+	if !ok {
+		return "", errors.New("map not of type map[string]interface{}")
+	}
+
+	fieldDict := make(map[string]string)
+	for fieldName, value := range dict {
+		str, err := primitiveToString(value)
+		if err != nil {
+			return "", fmt.Errorf("error formatting '%s': %s", paramName, err)
+		}
+		fieldDict[fieldName] = str
+	}
+	return processFieldDict(style, explode, paramName, paramLocation, fieldDict)
+}
+
+func processFieldDict(style string, explode bool, paramName string, paramLocation ParamLocation, fieldDict map[string]string) (string, error) {
+	var parts []string
+
+	// This works for everything except deepObject. We'll handle that one
+	// separately.
+	if style != "deepObject" {
+		if explode {
+			for _, k := range sortedKeys(fieldDict) {
+				v := escapeParameterString(fieldDict[k], paramLocation)
+				parts = append(parts, k+"="+v)
+			}
+		} else {
+			for _, k := range sortedKeys(fieldDict) {
+				v := escapeParameterString(fieldDict[k], paramLocation)
+				parts = append(parts, k)
+				parts = append(parts, v)
+			}
+		}
+	}
+
+	var prefix string
+	var separator string
+
+	switch style {
+	case "simple":
+		separator = ","
+	case "label":
+		prefix = "."
+		if explode {
+			separator = prefix
+		} else {
+			separator = ","
+		}
+	case "matrix":
+		if explode {
+			separator = ";"
+			prefix = ";"
+		} else {
+			separator = ","
+			prefix = fmt.Sprintf(";%s=", paramName)
+		}
+	case "form":
+		if explode {
+			separator = "&"
+		} else {
+			prefix = fmt.Sprintf("%s=", paramName)
+			separator = ","
+		}
+	case "deepObject":
+		{
+			if !explode {
+				return "", fmt.Errorf("deepObject parameters must be exploded")
+			}
+			for _, k := range sortedKeys(fieldDict) {
+				v := fieldDict[k]
+				part := fmt.Sprintf("%s[%s]=%s", paramName, k, v)
+				parts = append(parts, part)
+			}
+			separator = "&"
+		}
+	default:
+		return "", fmt.Errorf("unsupported style '%s'", style)
+	}
+
+	return prefix + strings.Join(parts, separator), nil
+}
+
+func stylePrimitive(style string, explode bool, paramName string, paramLocation ParamLocation, value interface{}) (string, error) {
+	strVal, err := primitiveToString(value)
+	if err != nil {
+		return "", err
+	}
+
+	var prefix string
+	switch style {
+	case "simple":
+	case "label":
+		prefix = "."
+	case "matrix":
+		prefix = fmt.Sprintf(";%s=", paramName)
+	case "form":
+		prefix = fmt.Sprintf("%s=", paramName)
+	default:
+		return "", fmt.Errorf("unsupported style '%s'", style)
+	}
+	return prefix + escapeParameterString(strVal, paramLocation), nil
+}
+
+// Converts a primitive value to a string. We need to do this based on the
+// Kind of an interface, not the Type to work with aliased types.
+func primitiveToString(value interface{}) (string, error) {
+	var output string
+
+	// Time and date values are sometimes used like primitive types; this can
+	// happen when a parameter is an object that has a time or date field.
+	if res, ok := marshalKnownTypes(value); ok {
+		return res, nil
+	}
+
+	// Values may come in by pointer for optionals, so make sure to dereference.
+	v := reflect.Indirect(reflect.ValueOf(value))
+	t := v.Type()
+	kind := t.Kind()
+
+	switch kind {
+	case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+		output = strconv.FormatInt(v.Int(), 10)
+	case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
+		output = strconv.FormatUint(v.Uint(), 10)
+	case reflect.Float64:
+		output = strconv.FormatFloat(v.Float(), 'f', -1, 64)
+	case reflect.Float32:
+		output = strconv.FormatFloat(v.Float(), 'f', -1, 32)
+	case reflect.Bool:
+		if v.Bool() {
+			output = "true"
+		} else {
+			output = "false"
+		}
+	case reflect.String:
+		output = v.String()
+	case reflect.Struct:
+		// If the input has a Marshaler (for example, an object with additional
+		// properties or anyOf), we use that Marshaler and convert the result
+		// into an interface{} before styling.
+		if m, ok := value.(json.Marshaler); ok {
+			buf, err := m.MarshalJSON()
+			if err != nil {
+				return "", fmt.Errorf("failed to marshal input to JSON: %w", err)
+			}
+			e := json.NewDecoder(bytes.NewReader(buf))
+			e.UseNumber()
+			var i2 interface{}
+			err = e.Decode(&i2)
+			if err != nil {
+				return "", fmt.Errorf("failed to unmarshal JSON: %w", err)
+			}
+			output, err = primitiveToString(i2)
+			if err != nil {
+				return "", fmt.Errorf("error convert JSON structure: %w", err)
+			}
+			break
+		}
+		fallthrough
+	default:
+		v, ok := value.(fmt.Stringer)
+		if !ok {
+			return "", fmt.Errorf("unsupported type %s", reflect.TypeOf(value).String())
+		}
+
+		output = v.String()
+	}
+	return output, nil
+}
+
+// escapeParameterString escapes a parameter value based on the location of
+// that parameter. Query params and path params need different kinds of
+// escaping, while header and cookie params do not appear to need escaping.
+func escapeParameterString(value string, paramLocation ParamLocation) string {
+	switch paramLocation {
+	case ParamLocationQuery:
+		return url.QueryEscape(value)
+	case ParamLocationPath:
+		return url.PathEscape(value)
+	default:
+		return value
+	}
+}

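A minimal sketch of `StyleParamWithLocation`, styling the same slice as an exploded form-style query parameter and as a non-exploded label-style path parameter; the parameter name and values are arbitrary examples.

```go
package main

import (
	"fmt"

	"github.com/deepmap/oapi-codegen/pkg/runtime"
)

func main() {
	ids := []int{3, 4, 5}

	// form style, exploded: id=3&id=4&id=5
	q, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, ids)
	if err != nil {
		panic(err)
	}
	fmt.Println(q)

	// label style, not exploded: .3,4,5
	p, err := runtime.StyleParamWithLocation("label", false, "id", runtime.ParamLocationPath, ids)
	if err != nil {
		panic(err)
	}
	fmt.Println(p)
}
```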
+ 43 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/types/date.go

@@ -0,0 +1,43 @@
+package types
+
+import (
+	"encoding/json"
+	"time"
+)
+
+const DateFormat = "2006-01-02"
+
+type Date struct {
+	time.Time
+}
+
+func (d Date) MarshalJSON() ([]byte, error) {
+	return json.Marshal(d.Time.Format(DateFormat))
+}
+
+func (d *Date) UnmarshalJSON(data []byte) error {
+	var dateStr string
+	err := json.Unmarshal(data, &dateStr)
+	if err != nil {
+		return err
+	}
+	parsed, err := time.Parse(DateFormat, dateStr)
+	if err != nil {
+		return err
+	}
+	d.Time = parsed
+	return nil
+}
+
+func (d Date) String() string {
+	return d.Time.Format(DateFormat)
+}
+
+func (d *Date) UnmarshalText(data []byte) error {
+	parsed, err := time.Parse(DateFormat, string(data))
+	if err != nil {
+		return err
+	}
+	d.Time = parsed
+	return nil
+}

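A small, hedged round-trip sketch for `types.Date`, assuming nothing beyond the methods defined above; the date literal is arbitrary.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/deepmap/oapi-codegen/pkg/types"
)

func main() {
	var d types.Date
	// Dates are (un)marshaled using the "2006-01-02" layout defined above.
	if err := json.Unmarshal([]byte(`"2023-04-01"`), &d); err != nil {
		panic(err)
	}
	out, err := json.Marshal(d)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out), d.String()) // "2023-04-01" 2023-04-01
}
```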
+ 27 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/types/email.go

@@ -0,0 +1,27 @@
+package types
+
+import (
+	"encoding/json"
+	"errors"
+)
+
+type Email string
+
+func (e Email) MarshalJSON() ([]byte, error) {
+	if !emailRegex.MatchString(string(e)) {
+		return nil, errors.New("email: failed to pass regex validation")
+	}
+	return json.Marshal(string(e))
+}
+
+func (e *Email) UnmarshalJSON(data []byte) error {
+	var s string
+	if err := json.Unmarshal(data, &s); err != nil {
+		return err
+	}
+	if !emailRegex.MatchString(s) {
+		return errors.New("email: failed to pass regex validation")
+	}
+	*e = Email(s)
+	return nil
+}

+ 71 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/types/file.go

@@ -0,0 +1,71 @@
+package types
+
+import (
+	"bytes"
+	"encoding/json"
+	"io"
+	"mime/multipart"
+)
+
+type File struct {
+	multipart *multipart.FileHeader
+	data      []byte
+	filename  string
+}
+
+func (file *File) InitFromMultipart(header *multipart.FileHeader) {
+	file.multipart = header
+	file.data = nil
+	file.filename = ""
+}
+
+func (file *File) InitFromBytes(data []byte, filename string) {
+	file.data = data
+	file.filename = filename
+	file.multipart = nil
+}
+
+func (file File) MarshalJSON() ([]byte, error) {
+	b, err := file.Bytes()
+	if err != nil {
+		return nil, err
+	}
+	return json.Marshal(b)
+}
+
+func (file *File) UnmarshalJSON(data []byte) error {
+	return json.Unmarshal(data, &file.data)
+}
+
+func (file File) Bytes() ([]byte, error) {
+	if file.multipart != nil {
+		f, err := file.multipart.Open()
+		if err != nil {
+			return nil, err
+		}
+		defer func() { _ = f.Close() }()
+		return io.ReadAll(f)
+	}
+	return file.data, nil
+}
+
+func (file File) Reader() (io.ReadCloser, error) {
+	if file.multipart != nil {
+		return file.multipart.Open()
+	}
+	return io.NopCloser(bytes.NewReader(file.data)), nil
+}
+
+func (file File) Filename() string {
+	if file.multipart != nil {
+		return file.multipart.Filename
+	}
+	return file.filename
+}
+
+func (file File) FileSize() int64 {
+	if file.multipart != nil {
+		return file.multipart.Size
+	}
+	return int64(len(file.data))
+}

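A brief sketch of the in-memory path of `types.File` (`InitFromBytes` followed by the accessors above); the payload and filename are made up for illustration.

```go
package main

import (
	"fmt"
	"io"

	"github.com/deepmap/oapi-codegen/pkg/types"
)

func main() {
	var f types.File
	f.InitFromBytes([]byte("hello"), "greeting.txt")

	b, err := f.Bytes() // raw contents
	if err != nil {
		panic(err)
	}
	r, err := f.Reader() // same contents as a ReadCloser
	if err != nil {
		panic(err)
	}
	defer r.Close()
	data, _ := io.ReadAll(r)

	fmt.Println(f.Filename(), f.FileSize(), string(b), string(data))
	// greeting.txt 5 hello hello
}
```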
+ 11 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/types/regexes.go

@@ -0,0 +1,11 @@
+package types
+
+import "regexp"
+
+const (
+	emailRegexString = "^(?:(?:(?:(?:[a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(?:\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|(?:(?:\\x22)(?:(?:(?:(?:\\x20|\\x09)*(?:\\x0d\\x0a))?(?:\\x20|\\x09)+)?(?:(?:[\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(?:(?:(?:\\x20|\\x09)*(?:\\x0d\\x0a))?(\\x20|\\x09)+)?(?:\\x22))))@(?:(?:(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])(?:[a-zA-Z]|\\d|-|\\.|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*(?:[a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(?:(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(?:(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])(?:[a-zA-Z]|\\d|-|\\.|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*(?:[a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$"
+)
+
+var (
+	emailRegex = regexp.MustCompile(emailRegexString)
+)

+ 7 - 0
vendor/github.com/deepmap/oapi-codegen/pkg/types/uuid.go

@@ -0,0 +1,7 @@
+package types
+
+import (
+	"github.com/google/uuid"
+)
+
+type UUID = uuid.UUID

BIN
vendor/github.com/influxdata/.DS_Store


BIN
vendor/github.com/influxdata/influxdb-client-go/.DS_Store


BIN
vendor/github.com/influxdata/influxdb-client-go/v2/.DS_Store


+ 20 - 0
vendor/github.com/influxdata/line-protocol/LICENSE

@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2013-2018 InfluxData Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

+ 24 - 0
vendor/github.com/influxdata/line-protocol/README.md

@@ -0,0 +1,24 @@
+# line-protocol
+
+[![Go Reference](https://pkg.go.dev/badge/github.com/influxdata/line-protocol/v2.svg)](https://pkg.go.dev/github.com/influxdata/line-protocol/v2)
+
+This is an encoder for the InfluxDB [line protocol](https://docs.influxdata.com/influxdb/latest/reference/syntax/line-protocol/).
+
+It has an interface similar to the standard library's `json.Encoder`.
+
+
+### Some caveats
+- It is not concurrency-safe. If you want to make multiple calls to `Encoder.Encode` concurrently, you have to manage the concurrency yourself.
+- It can only encode values that are uint64, int64, int, float32, float64, string, []byte, or bool.
+- Ints are converted to int64, and float32s to float64.
+- If UintSupport is not set, uint64s are converted to int64s; values larger than the maximum int64 are truncated to the maximum int64 instead of overflowing.
+
+
+### Example:
+```go
+buf := &bytes.Buffer{}
+serializer := protocol.NewEncoder(buf)
+serializer.SetMaxLineBytes(1024)
+serializer.SetFieldTypeSupport(protocol.UintSupport)
+serializer.Encode(e) // where e is something that implements the protocol.Metric interface
+```

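Expanding on the README example above, here is a hedged, self-contained sketch. It assumes the package's `New` constructor (its signature is inferred from its use in handler.go further down: name, tag map, field map, timestamp); the measurement, tags, and values are illustrative.

```go
package main

import (
	"bytes"
	"fmt"
	"time"

	protocol "github.com/influxdata/line-protocol"
)

func main() {
	// Assumed constructor (inferred from handler.go): New(name, tags, fields, time).
	m, err := protocol.New(
		"cpu",
		map[string]string{"host": "web-01"},
		map[string]interface{}{"usage": 12.5, "count": uint64(7)},
		time.Unix(0, 1658250000000000000),
	)
	if err != nil {
		panic(err)
	}

	buf := &bytes.Buffer{}
	enc := protocol.NewEncoder(buf)
	enc.SetMaxLineBytes(1024)
	enc.SetFieldSortOrder(protocol.SortFields)    // deterministic field order
	enc.SetFieldTypeSupport(protocol.UintSupport) // keep uint64 fields as "7u"
	if _, err := enc.Encode(m); err != nil {
		panic(err)
	}
	fmt.Print(buf.String())
	// Expected shape: cpu,host=web-01 count=7u,usage=12.5 1658250000000000000
}
```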
+ 303 - 0
vendor/github.com/influxdata/line-protocol/encoder.go

@@ -0,0 +1,303 @@
+package protocol
+
+import (
+	"fmt"
+	"io"
+	"math"
+	"sort"
+	"strconv"
+	"time"
+)
+
+// ErrIsNaN is a field error for when a float field is NaN.
+var ErrIsNaN = &FieldError{"is NaN"}
+
+// ErrIsInf is a field error for when a float field is Inf.
+var ErrIsInf = &FieldError{"is Inf"}
+
+// Encoder marshals Metrics into InfluxDB line protocol.
+// It is not safe for concurrent use; make a new one per goroutine.
+// The default behavior when encountering a field error is to ignore the field and move on.
+// If you wish it to error out on field errors, use Encoder.FailOnFieldErr(true).
+type Encoder struct {
+	w                io.Writer
+	fieldSortOrder   FieldSortOrder
+	fieldTypeSupport FieldTypeSupport
+	failOnFieldError bool
+	maxLineBytes     int
+	fieldList        []*Field
+	header           []byte
+	footer           []byte
+	pair             []byte
+	precision        time.Duration
+}
+
+// SetMaxLineBytes sets a maximum length for a line; Encode will return an error if the generated line is longer.
+func (e *Encoder) SetMaxLineBytes(i int) {
+	e.maxLineBytes = i
+}
+
+// SetFieldSortOrder sets a sort order for the data.
+// The options are:
+// NoSortFields (doesn't sort the fields)
+// SortFields (sorts the keys in alphabetical order)
+func (e *Encoder) SetFieldSortOrder(s FieldSortOrder) {
+	e.fieldSortOrder = s
+}
+
+// SetFieldTypeSupport sets flags for if the encoder supports certain optional field types such as uint64
+func (e *Encoder) SetFieldTypeSupport(s FieldTypeSupport) {
+	e.fieldTypeSupport = s
+}
+
+// FailOnFieldErr sets whether to fail on a field error or to skip the field and move on.
+// The default behavior is to move on.
+func (e *Encoder) FailOnFieldErr(s bool) {
+	e.failOnFieldError = s
+}
+
+// SetPrecision sets the time precision for writes.
+// The default is nanosecond precision.
+func (e *Encoder) SetPrecision(p time.Duration) {
+	e.precision = p
+}
+
+// NewEncoder gives us an encoder that marshals to a writer in influxdb line protocol
+// as defined by:
+// https://docs.influxdata.com/influxdb/v1.5/write_protocols/line_protocol_reference/
+func NewEncoder(w io.Writer) *Encoder {
+	return &Encoder{
+		w:         w,
+		header:    make([]byte, 0, 128),
+		footer:    make([]byte, 0, 128),
+		pair:      make([]byte, 0, 128),
+		fieldList: make([]*Field, 0, 16),
+		precision: time.Nanosecond,
+	}
+}
+
+// This is here to significantly reduce allocations; it would be nice if Go had
+// a constant/immutable keyword that applied to more complex objects.
+var comma = []byte(",")
+
+// Encode marshals a Metric to the io.Writer in the Encoder
+func (e *Encoder) Encode(m Metric) (int, error) {
+	err := e.buildHeader(m)
+	if err != nil {
+		return 0, err
+	}
+
+	e.buildFooter(m.Time())
+
+	// here we make a copy of the *fields so we can do an in-place sort
+	e.fieldList = append(e.fieldList[:0], m.FieldList()...)
+
+	if e.fieldSortOrder == SortFields {
+		sort.Slice(e.fieldList, func(i, j int) bool {
+			return e.fieldList[i].Key < e.fieldList[j].Key
+		})
+	}
+	i := 0
+	totalWritten := 0
+	pairsLen := 0
+	firstField := true
+	for _, field := range e.fieldList {
+		err = e.buildFieldPair(field.Key, field.Value)
+		if err != nil {
+			if e.failOnFieldError {
+				return 0, err
+			}
+			continue
+		}
+
+		bytesNeeded := len(e.header) + pairsLen + len(e.pair) + len(e.footer)
+
+		// Additional length needed for field separator `,`
+		if !firstField {
+			bytesNeeded++
+		}
+
+		if e.maxLineBytes > 0 && bytesNeeded > e.maxLineBytes {
+			// Need at least one field per line
+			if firstField {
+				return 0, ErrNeedMoreSpace
+			}
+
+			i, err = e.w.Write(e.footer)
+			if err != nil {
+				return 0, err
+			}
+			pairsLen = 0
+			totalWritten += i
+
+			bytesNeeded = len(e.header) + len(e.pair) + len(e.footer)
+
+			if e.maxLineBytes > 0 && bytesNeeded > e.maxLineBytes {
+				return 0, ErrNeedMoreSpace
+			}
+
+			i, err = e.w.Write(e.header)
+			if err != nil {
+				return 0, err
+			}
+			totalWritten += i
+
+			i, err = e.w.Write(e.pair)
+			if err != nil {
+				return 0, err
+			}
+			totalWritten += i
+
+			pairsLen += len(e.pair)
+			firstField = false
+			continue
+		}
+
+		if firstField {
+			i, err = e.w.Write(e.header)
+			if err != nil {
+				return 0, err
+			}
+			totalWritten += i
+
+		} else {
+			i, err = e.w.Write(comma)
+			if err != nil {
+				return 0, err
+			}
+			totalWritten += i
+
+		}
+
+		i, err = e.w.Write(e.pair)
+		if err != nil {
+			return 0, err
+		}
+		totalWritten += i
+
+		pairsLen += len(e.pair)
+		firstField = false
+	}
+
+	if firstField {
+		return 0, ErrNoFields
+	}
+	i, err = e.w.Write(e.footer)
+	if err != nil {
+		return 0, err
+	}
+	totalWritten += i
+	return totalWritten, nil
+
+}
+
+func (e *Encoder) buildHeader(m Metric) error {
+	e.header = e.header[:0]
+	name := nameEscape(m.Name())
+	if name == "" {
+		return ErrInvalidName
+	}
+	e.header = append(e.header, name...)
+
+	for _, tag := range m.TagList() {
+		key := escape(tag.Key)
+		value := escape(tag.Value)
+
+		// Some keys and values are not encodeable as line protocol, such as
+		// those with a trailing '\' or empty strings.
+		if key == "" || value == "" {
+			continue
+		}
+
+		e.header = append(e.header, ',')
+		e.header = append(e.header, key...)
+		e.header = append(e.header, '=')
+		e.header = append(e.header, value...)
+	}
+
+	e.header = append(e.header, ' ')
+	return nil
+}
+
+func (e *Encoder) buildFieldVal(value interface{}) error {
+	switch v := value.(type) {
+	case uint64:
+		if e.fieldTypeSupport&UintSupport != 0 {
+			e.pair = append(strconv.AppendUint(e.pair, v, 10), 'u')
+		} else if v <= uint64(math.MaxInt64) {
+			e.pair = append(strconv.AppendInt(e.pair, int64(v), 10), 'i')
+		} else {
+			e.pair = append(strconv.AppendInt(e.pair, math.MaxInt64, 10), 'i')
+		}
+	case int64:
+		e.pair = append(strconv.AppendInt(e.pair, v, 10), 'i')
+	case int:
+		e.pair = append(strconv.AppendInt(e.pair, int64(v), 10), 'i')
+	case float64:
+		if math.IsNaN(v) {
+			return ErrIsNaN
+		}
+
+		if math.IsInf(v, 0) {
+			return ErrIsInf
+		}
+
+		e.pair = strconv.AppendFloat(e.pair, v, 'f', -1, 64)
+	case float32:
+		v32 := float64(v)
+		if math.IsNaN(v32) {
+			return ErrIsNaN
+		}
+
+		if math.IsInf(v32, 0) {
+			return ErrIsInf
+		}
+
+		e.pair = strconv.AppendFloat(e.pair, v32, 'f', -1, 64)
+
+	case string:
+		e.pair = append(e.pair, '"')
+		e.pair = append(e.pair, stringFieldEscape(v)...)
+		e.pair = append(e.pair, '"')
+	case []byte:
+		e.pair = append(e.pair, '"')
+		stringFieldEscapeBytes(&e.pair, v)
+		e.pair = append(e.pair, '"')
+	case bool:
+		e.pair = strconv.AppendBool(e.pair, v)
+	default:
+		return &FieldError{fmt.Sprintf("invalid value type: %T", v)}
+	}
+	return nil
+}
+
+func (e *Encoder) buildFieldPair(key string, value interface{}) error {
+	e.pair = e.pair[:0]
+	key = escape(key)
+	// Some keys are not encodeable as line protocol, such as those with a
+	// trailing '\' or empty strings.
+	if key == "" || key[:len(key)-1] == "\\" {
+		return &FieldError{"invalid field key"}
+	}
+	e.pair = append(e.pair, key...)
+	e.pair = append(e.pair, '=')
+	return e.buildFieldVal(value)
+}
+
+func (e *Encoder) buildFooter(t time.Time) {
+	e.footer = e.footer[:0]
+	if !t.IsZero() {
+		e.footer = append(e.footer, ' ')
+		switch e.precision {
+		case time.Microsecond:
+			e.footer = strconv.AppendInt(e.footer, t.UnixNano()/1000, 10)
+		case time.Millisecond:
+			e.footer = strconv.AppendInt(e.footer, t.UnixNano()/1000000, 10)
+		case time.Second:
+			e.footer = strconv.AppendInt(e.footer, t.Unix(), 10)
+		default:
+			e.footer = strconv.AppendInt(e.footer, t.UnixNano(), 10)
+		}
+	}
+	e.footer = append(e.footer, '\n')
+}

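A second hedged sketch focused on the knobs documented above: `SetPrecision` changes how `buildFooter` renders the timestamp, and `FailOnFieldErr(true)` surfaces field errors such as `ErrIsNaN` instead of silently skipping the field. It reuses the assumed `New` constructor from the previous sketch.

```go
package main

import (
	"bytes"
	"fmt"
	"math"
	"time"

	protocol "github.com/influxdata/line-protocol"
)

func main() {
	// Assumed constructor, as in the previous sketch.
	m, _ := protocol.New("cpu", nil,
		map[string]interface{}{"usage": 12.5}, time.Unix(1658250000, 0))

	buf := &bytes.Buffer{}
	enc := protocol.NewEncoder(buf)
	enc.SetPrecision(time.Second) // footer now holds Unix seconds
	if _, err := enc.Encode(m); err != nil {
		panic(err)
	}
	fmt.Print(buf.String()) // expected shape: cpu usage=12.5 1658250000

	// A NaN float is a field error: skipped by default, fatal with FailOnFieldErr.
	bad, _ := protocol.New("cpu", nil,
		map[string]interface{}{"usage": math.NaN()}, time.Unix(1658250000, 0))
	enc.FailOnFieldErr(true)
	if _, err := enc.Encode(bad); err != nil {
		fmt.Println("field error:", err)
	}
}
```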
+ 264 - 0
vendor/github.com/influxdata/line-protocol/escape.go

@@ -0,0 +1,264 @@
+package protocol
+
+import (
+	"bytes"
+	"reflect"
+	"strconv"
+	"strings"
+	"unicode/utf8"
+	"unsafe"
+)
+
+const (
+	escapes            = "\t\n\f\r ,="
+	nameEscapes        = "\t\n\f\r ,"
+	stringFieldEscapes = "\t\n\f\r\\\""
+)
+
+var (
+	stringEscaper = strings.NewReplacer(
+		"\t", `\t`,
+		"\n", `\n`,
+		"\f", `\f`,
+		"\r", `\r`,
+		`,`, `\,`,
+		` `, `\ `,
+		`=`, `\=`,
+	)
+
+	nameEscaper = strings.NewReplacer(
+		"\t", `\t`,
+		"\n", `\n`,
+		"\f", `\f`,
+		"\r", `\r`,
+		`,`, `\,`,
+		` `, `\ `,
+	)
+
+	stringFieldEscaper = strings.NewReplacer(
+		"\t", `\t`,
+		"\n", `\n`,
+		"\f", `\f`,
+		"\r", `\r`,
+		`"`, `\"`,
+		`\`, `\\`,
+	)
+)
+
+var (
+	unescaper = strings.NewReplacer(
+		`\,`, `,`,
+		`\"`, `"`, // ???
+		`\ `, ` `,
+		`\=`, `=`,
+	)
+
+	nameUnescaper = strings.NewReplacer(
+		`\,`, `,`,
+		`\ `, ` `,
+	)
+
+	stringFieldUnescaper = strings.NewReplacer(
+		`\"`, `"`,
+		`\\`, `\`,
+	)
+)
+
+// The various escape functions allocate, I'd like to fix that.
+// TODO: make escape not allocate
+
+// Escape a tagkey, tagvalue, or fieldkey
+func escape(s string) string {
+	if strings.ContainsAny(s, escapes) {
+		return stringEscaper.Replace(s)
+	}
+	return s
+}
+
+// Escape a measurement name
+func nameEscape(s string) string {
+	if strings.ContainsAny(s, nameEscapes) {
+		return nameEscaper.Replace(s)
+	}
+	return s
+}
+
+// Escape a string field
+func stringFieldEscape(s string) string {
+	if strings.ContainsAny(s, stringFieldEscapes) {
+		return stringFieldEscaper.Replace(s)
+	}
+	return s
+}
+
+const (
+	utf8mask  = byte(0x3F)
+	utf8bytex = byte(0x80) // 1000 0000
+	utf8len2  = byte(0xC0) // 1100 0000
+	utf8len3  = byte(0xE0) // 1110 0000
+	utf8len4  = byte(0xF0) // 1111 0000
+)
+
+func escapeBytes(dest *[]byte, b []byte) {
+	if bytes.ContainsAny(b, escapes) {
+		var r rune
+		for i, j := 0, 0; i < len(b); i += j {
+			r, j = utf8.DecodeRune(b[i:])
+			switch {
+			case r == '\t':
+				*dest = append(*dest, `\t`...)
+			case r == '\n':
+				*dest = append(*dest, `\n`...)
+			case r == '\f':
+				*dest = append(*dest, `\f`...)
+			case r == '\r':
+				*dest = append(*dest, `\r`...)
+			case r == ',':
+				*dest = append(*dest, `\,`...)
+			case r == ' ':
+				*dest = append(*dest, `\ `...)
+			case r == '=':
+				*dest = append(*dest, `\=`...)
+			case r <= 1<<7-1:
+				*dest = append(*dest, byte(r))
+			case r <= 1<<11-1:
+				*dest = append(*dest, utf8len2|byte(r>>6), utf8bytex|byte(r)&utf8mask)
+			case r <= 1<<16-1:
+				*dest = append(*dest, utf8len3|byte(r>>12), utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask)
+			default:
+				*dest = append(*dest, utf8len4|byte(r>>18), utf8bytex|byte(r>>12)&utf8mask, utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask)
+			}
+		}
+		return
+	}
+	*dest = append(*dest, b...)
+}
+
+// Escape a measurement name
+func nameEscapeBytes(dest *[]byte, b []byte) {
+	if bytes.ContainsAny(b, nameEscapes) {
+		var r rune
+		for i, j := 0, 0; i < len(b); i += j {
+			r, j = utf8.DecodeRune(b[i:])
+			switch {
+			case r == '\t':
+				*dest = append(*dest, `\t`...)
+			case r == '\n':
+				*dest = append(*dest, `\n`...)
+			case r == '\f':
+				*dest = append(*dest, `\f`...)
+			case r == '\r':
+				*dest = append(*dest, `\r`...)
+			case r == ',':
+				*dest = append(*dest, `\,`...)
+			case r == ' ':
+				*dest = append(*dest, `\ `...)
+			case r == '\\':
+				*dest = append(*dest, `\\`...)
+			case r <= 1<<7-1:
+				*dest = append(*dest, byte(r))
+			case r <= 1<<11-1:
+				*dest = append(*dest, utf8len2|byte(r>>6), utf8bytex|byte(r)&utf8mask)
+			case r <= 1<<16-1:
+				*dest = append(*dest, utf8len3|byte(r>>12), utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask)
+			default:
+				*dest = append(*dest, utf8len4|byte(r>>18), utf8bytex|byte(r>>12)&utf8mask, utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask)
+			}
+		}
+		return
+	}
+	*dest = append(*dest, b...)
+}
+
+func stringFieldEscapeBytes(dest *[]byte, b []byte) {
+	if bytes.ContainsAny(b, stringFieldEscapes) {
+		var r rune
+		for i, j := 0, 0; i < len(b); i += j {
+			r, j = utf8.DecodeRune(b[i:])
+			switch {
+			case r == '\t':
+				*dest = append(*dest, `\t`...)
+			case r == '\n':
+				*dest = append(*dest, `\n`...)
+			case r == '\f':
+				*dest = append(*dest, `\f`...)
+			case r == '\r':
+				*dest = append(*dest, `\r`...)
+			case r == ',':
+				*dest = append(*dest, `\,`...)
+			case r == ' ':
+				*dest = append(*dest, `\ `...)
+			case r == '\\':
+				*dest = append(*dest, `\\`...)
+			case r <= 1<<7-1:
+				*dest = append(*dest, byte(r))
+			case r <= 1<<11-1:
+				*dest = append(*dest, utf8len2|byte(r>>6), utf8bytex|byte(r)&utf8mask)
+			case r <= 1<<16-1:
+				*dest = append(*dest, utf8len3|byte(r>>12), utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask)
+			default:
+				*dest = append(*dest, utf8len4|byte(r>>18), utf8bytex|byte(r>>12)&utf8mask, utf8bytex|byte(r>>6)&utf8mask, utf8bytex|byte(r)&utf8mask)
+			}
+		}
+		return
+	}
+	*dest = append(*dest, b...)
+}
+
+func unescape(b []byte) string {
+	if bytes.ContainsAny(b, escapes) {
+		return unescaper.Replace(unsafeBytesToString(b))
+	}
+	return string(b)
+}
+
+func nameUnescape(b []byte) string {
+	if bytes.ContainsAny(b, nameEscapes) {
+		return nameUnescaper.Replace(unsafeBytesToString(b))
+	}
+	return string(b)
+}
+
+// unsafeBytesToString converts a []byte to a string without a heap allocation.
+//
+// It is unsafe, and is intended to prepare input to short-lived functions
+// that require strings.
+func unsafeBytesToString(in []byte) string {
+	src := *(*reflect.SliceHeader)(unsafe.Pointer(&in))
+	dst := reflect.StringHeader{
+		Data: src.Data,
+		Len:  src.Len,
+	}
+	s := *(*string)(unsafe.Pointer(&dst))
+	return s
+}
+
+// parseIntBytes is a zero-alloc wrapper around strconv.ParseInt.
+func parseIntBytes(b []byte, base int, bitSize int) (i int64, err error) {
+	s := unsafeBytesToString(b)
+	return strconv.ParseInt(s, base, bitSize)
+}
+
+// parseUintBytes is a zero-alloc wrapper around strconv.ParseUint.
+func parseUintBytes(b []byte, base int, bitSize int) (i uint64, err error) {
+	s := unsafeBytesToString(b)
+	return strconv.ParseUint(s, base, bitSize)
+}
+
+// parseFloatBytes is a zero-alloc wrapper around strconv.ParseFloat.
+func parseFloatBytes(b []byte, bitSize int) (float64, error) {
+	s := unsafeBytesToString(b)
+	return strconv.ParseFloat(s, bitSize)
+}
+
+// parseBoolBytes is a zero-alloc wrapper around strconv.ParseBool.
+func parseBoolBytes(b []byte) (bool, error) {
+	return strconv.ParseBool(unsafeBytesToString(b))
+}
+
+func stringFieldUnescape(b []byte) string {
+	if bytes.ContainsAny(b, stringFieldEscapes) {
+		return stringFieldUnescaper.Replace(unsafeBytesToString(b))
+	}
+	return string(b)
+}

+ 3 - 0
vendor/github.com/influxdata/line-protocol/go.mod

@@ -0,0 +1,3 @@
+module github.com/influxdata/line-protocol
+
+go 1.13

+ 0 - 0
vendor/github.com/influxdata/line-protocol/go.sum


+ 128 - 0
vendor/github.com/influxdata/line-protocol/handler.go

@@ -0,0 +1,128 @@
+package protocol
+
+import (
+	"bytes"
+	"errors"
+	"strconv"
+	"time"
+)
+
+// MetricHandler implements the Handler interface and produces Metric.
+type MetricHandler struct {
+	timePrecision time.Duration
+	timeFunc      TimeFunc
+	metric        MutableMetric
+}
+
+func NewMetricHandler() *MetricHandler {
+	return &MetricHandler{
+		timePrecision: time.Nanosecond,
+		timeFunc:      time.Now,
+	}
+}
+
+func (h *MetricHandler) SetTimePrecision(p time.Duration) {
+	h.timePrecision = p
+	// When the timestamp is omitted from the metric, the timestamp
+	// comes from the server clock, truncated to the nearest unit of
+	// measurement provided in precision.
+	//
+	// When a timestamp is provided in the metric, precision is
+	// overloaded to hold the unit of measurement of the timestamp.
+}
+
+func (h *MetricHandler) SetTimeFunc(f TimeFunc) {
+	h.timeFunc = f
+}
+
+func (h *MetricHandler) Metric() (Metric, error) {
+	if h.metric.Time().IsZero() {
+		h.metric.SetTime(h.timeFunc().Truncate(h.timePrecision))
+	}
+	return h.metric, nil
+}
+
+func (h *MetricHandler) SetMeasurement(name []byte) error {
+	var err error
+	h.metric, err = New(nameUnescape(name),
+		nil, nil, time.Time{})
+	return err
+}
+
+func (h *MetricHandler) AddTag(key []byte, value []byte) error {
+	tk := unescape(key)
+	tv := unescape(value)
+	h.metric.AddTag(tk, tv)
+	return nil
+}
+
+func (h *MetricHandler) AddInt(key []byte, value []byte) error {
+	fk := unescape(key)
+	fv, err := parseIntBytes(bytes.TrimSuffix(value, []byte("i")), 10, 64)
+	if err != nil {
+		if numerr, ok := err.(*strconv.NumError); ok {
+			return numerr.Err
+		}
+		return err
+	}
+	h.metric.AddField(fk, fv)
+	return nil
+}
+
+func (h *MetricHandler) AddUint(key []byte, value []byte) error {
+	fk := unescape(key)
+	fv, err := parseUintBytes(bytes.TrimSuffix(value, []byte("u")), 10, 64)
+	if err != nil {
+		if numerr, ok := err.(*strconv.NumError); ok {
+			return numerr.Err
+		}
+		return err
+	}
+	h.metric.AddField(fk, fv)
+	return nil
+}
+
+func (h *MetricHandler) AddFloat(key []byte, value []byte) error {
+	fk := unescape(key)
+	fv, err := parseFloatBytes(value, 64)
+	if err != nil {
+		if numerr, ok := err.(*strconv.NumError); ok {
+			return numerr.Err
+		}
+		return err
+	}
+	h.metric.AddField(fk, fv)
+	return nil
+}
+
+func (h *MetricHandler) AddString(key []byte, value []byte) error {
+	fk := unescape(key)
+	fv := stringFieldUnescape(value)
+	h.metric.AddField(fk, fv)
+	return nil
+}
+
+func (h *MetricHandler) AddBool(key []byte, value []byte) error {
+	fk := unescape(key)
+	fv, err := parseBoolBytes(value)
+	if err != nil {
+		return errors.New("unparseable bool")
+	}
+	h.metric.AddField(fk, fv)
+	return nil
+}
+
+func (h *MetricHandler) SetTimestamp(tm []byte) error {
+	v, err := parseIntBytes(tm, 10, 64)
+	if err != nil {
+		if numerr, ok := err.(*strconv.NumError); ok {
+			return numerr.Err
+		}
+		return err
+	}
+
+	// Time precision is overloaded to mean the time unit here.
+	ns := v * int64(h.timePrecision)
+	h.metric.SetTime(time.Unix(0, ns))
+	return nil
+}

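A hedged sketch of the decode path, using only what appears in this diff: `NewMetricHandler` plus the Ragel-generated machine below (`NewMachine`, `SetData`, `Next`) and the handler's `Metric()` accessor. The input line is illustrative.

```go
package main

import (
	"fmt"

	protocol "github.com/influxdata/line-protocol"
)

func main() {
	handler := protocol.NewMetricHandler()
	machine := protocol.NewMachine(handler)
	machine.SetData([]byte("cpu,host=web-01 usage=12.5 1658250000000000000\n"))

	for {
		err := machine.Next()
		if err == protocol.EOF {
			break // no more lines
		}
		if err != nil {
			panic(err) // syntax error in the input
		}
		m, err := handler.Metric()
		if err != nil {
			panic(err)
		}
		fmt.Println(m.Name(), m.Time().UnixNano())
		for _, f := range m.FieldList() {
			fmt.Println("  ", f.Key, "=", f.Value)
		}
	}
}
```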
+ 3828 - 0
vendor/github.com/influxdata/line-protocol/machine.go

@@ -0,0 +1,3828 @@
+//line machine.go.rl:1
+package protocol
+
+import (
+	"errors"
+	"io"
+)
+
+var (
+	ErrNameParse      = errors.New("expected measurement name")
+	ErrFieldParse     = errors.New("expected field")
+	ErrTagParse       = errors.New("expected tag")
+	ErrTimestampParse = errors.New("expected timestamp")
+	ErrParse          = errors.New("parse error")
+	EOF               = errors.New("EOF")
+)
+
+//line machine.go.rl:310
+
+//line machine.go:25
+const LineProtocol_start int = 47
+const LineProtocol_first_final int = 47
+const LineProtocol_error int = 0
+
+const LineProtocol_en_main int = 47
+const LineProtocol_en_discard_line int = 35
+const LineProtocol_en_align int = 86
+const LineProtocol_en_series int = 38
+
+//line machine.go.rl:313
+
+type Handler interface {
+	SetMeasurement(name []byte) error
+	AddTag(key []byte, value []byte) error
+	AddInt(key []byte, value []byte) error
+	AddUint(key []byte, value []byte) error
+	AddFloat(key []byte, value []byte) error
+	AddString(key []byte, value []byte) error
+	AddBool(key []byte, value []byte) error
+	SetTimestamp(tm []byte) error
+}
+
+type machine struct {
+	data         []byte
+	cs           int
+	p, pe, eof   int
+	pb           int
+	lineno       int
+	sol          int
+	handler      Handler
+	initState    int
+	key          []byte
+	beginMetric  bool
+	finishMetric bool
+}
+
+func NewMachine(handler Handler) *machine {
+	m := &machine{
+		handler:   handler,
+		initState: LineProtocol_en_align,
+	}
+
+//line machine.go.rl:346
+
+//line machine.go.rl:347
+
+//line machine.go.rl:348
+
+//line machine.go.rl:349
+
+//line machine.go.rl:350
+
+//line machine.go.rl:351
+
+//line machine.go:82
+	{
+		(m.cs) = LineProtocol_start
+	}
+
+//line machine.go.rl:352
+
+	return m
+}
+
+func NewSeriesMachine(handler Handler) *machine {
+	m := &machine{
+		handler:   handler,
+		initState: LineProtocol_en_series,
+	}
+
+//line machine.go.rl:363
+
+//line machine.go.rl:364
+
+//line machine.go.rl:365
+
+//line machine.go.rl:366
+
+//line machine.go.rl:367
+
+//line machine.go:109
+	{
+		(m.cs) = LineProtocol_start
+	}
+
+//line machine.go.rl:368
+
+	return m
+}
+
+func (m *machine) SetData(data []byte) {
+	m.data = data
+	m.p = 0
+	m.pb = 0
+	m.lineno = 1
+	m.sol = 0
+	m.pe = len(data)
+	m.eof = len(data)
+	m.key = nil
+	m.beginMetric = false
+	m.finishMetric = false
+
+//line machine.go:132
+	{
+		(m.cs) = LineProtocol_start
+	}
+
+//line machine.go.rl:385
+	m.cs = m.initState
+}
+
+// Next parses the next metric line and returns nil if it was successfully
+// processed. If the line contains a syntax error, an error is returned;
+// otherwise, if the end of file is reached before finding a metric line,
+// EOF is returned.
+func (m *machine) Next() error {
+	if m.p == m.pe && m.pe == m.eof {
+		return EOF
+	}
+
+	m.key = nil
+	m.beginMetric = false
+	m.finishMetric = false
+
+	return m.exec()
+}
+
+func (m *machine) exec() error {
+	var err error
+
+//line machine.go:160
+	{
+		if (m.p) == (m.pe) {
+			goto _test_eof
+		}
+		goto _resume
+
+	_again:
+		switch m.cs {
+		case 47:
+			goto st47
+		case 1:
+			goto st1
+		case 2:
+			goto st2
+		case 3:
+			goto st3
+		case 0:
+			goto st0
+		case 4:
+			goto st4
+		case 5:
+			goto st5
+		case 6:
+			goto st6
+		case 7:
+			goto st7
+		case 48:
+			goto st48
+		case 49:
+			goto st49
+		case 50:
+			goto st50
+		case 8:
+			goto st8
+		case 9:
+			goto st9
+		case 10:
+			goto st10
+		case 11:
+			goto st11
+		case 51:
+			goto st51
+		case 52:
+			goto st52
+		case 53:
+			goto st53
+		case 54:
+			goto st54
+		case 55:
+			goto st55
+		case 56:
+			goto st56
+		case 57:
+			goto st57
+		case 58:
+			goto st58
+		case 59:
+			goto st59
+		case 60:
+			goto st60
+		case 61:
+			goto st61
+		case 62:
+			goto st62
+		case 63:
+			goto st63
+		case 64:
+			goto st64
+		case 65:
+			goto st65
+		case 66:
+			goto st66
+		case 67:
+			goto st67
+		case 68:
+			goto st68
+		case 69:
+			goto st69
+		case 70:
+			goto st70
+		case 12:
+			goto st12
+		case 13:
+			goto st13
+		case 14:
+			goto st14
+		case 15:
+			goto st15
+		case 16:
+			goto st16
+		case 71:
+			goto st71
+		case 17:
+			goto st17
+		case 18:
+			goto st18
+		case 72:
+			goto st72
+		case 73:
+			goto st73
+		case 74:
+			goto st74
+		case 75:
+			goto st75
+		case 76:
+			goto st76
+		case 77:
+			goto st77
+		case 78:
+			goto st78
+		case 79:
+			goto st79
+		case 80:
+			goto st80
+		case 19:
+			goto st19
+		case 20:
+			goto st20
+		case 21:
+			goto st21
+		case 81:
+			goto st81
+		case 22:
+			goto st22
+		case 23:
+			goto st23
+		case 24:
+			goto st24
+		case 82:
+			goto st82
+		case 25:
+			goto st25
+		case 26:
+			goto st26
+		case 83:
+			goto st83
+		case 84:
+			goto st84
+		case 27:
+			goto st27
+		case 28:
+			goto st28
+		case 29:
+			goto st29
+		case 30:
+			goto st30
+		case 31:
+			goto st31
+		case 32:
+			goto st32
+		case 33:
+			goto st33
+		case 34:
+			goto st34
+		case 35:
+			goto st35
+		case 85:
+			goto st85
+		case 38:
+			goto st38
+		case 87:
+			goto st87
+		case 88:
+			goto st88
+		case 39:
+			goto st39
+		case 40:
+			goto st40
+		case 41:
+			goto st41
+		case 42:
+			goto st42
+		case 89:
+			goto st89
+		case 43:
+			goto st43
+		case 90:
+			goto st90
+		case 44:
+			goto st44
+		case 45:
+			goto st45
+		case 46:
+			goto st46
+		case 86:
+			goto st86
+		case 36:
+			goto st36
+		case 37:
+			goto st37
+		}
+
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof
+		}
+	_resume:
+		switch m.cs {
+		case 47:
+			goto st_case_47
+		case 1:
+			goto st_case_1
+		case 2:
+			goto st_case_2
+		case 3:
+			goto st_case_3
+		case 0:
+			goto st_case_0
+		case 4:
+			goto st_case_4
+		case 5:
+			goto st_case_5
+		case 6:
+			goto st_case_6
+		case 7:
+			goto st_case_7
+		case 48:
+			goto st_case_48
+		case 49:
+			goto st_case_49
+		case 50:
+			goto st_case_50
+		case 8:
+			goto st_case_8
+		case 9:
+			goto st_case_9
+		case 10:
+			goto st_case_10
+		case 11:
+			goto st_case_11
+		case 51:
+			goto st_case_51
+		case 52:
+			goto st_case_52
+		case 53:
+			goto st_case_53
+		case 54:
+			goto st_case_54
+		case 55:
+			goto st_case_55
+		case 56:
+			goto st_case_56
+		case 57:
+			goto st_case_57
+		case 58:
+			goto st_case_58
+		case 59:
+			goto st_case_59
+		case 60:
+			goto st_case_60
+		case 61:
+			goto st_case_61
+		case 62:
+			goto st_case_62
+		case 63:
+			goto st_case_63
+		case 64:
+			goto st_case_64
+		case 65:
+			goto st_case_65
+		case 66:
+			goto st_case_66
+		case 67:
+			goto st_case_67
+		case 68:
+			goto st_case_68
+		case 69:
+			goto st_case_69
+		case 70:
+			goto st_case_70
+		case 12:
+			goto st_case_12
+		case 13:
+			goto st_case_13
+		case 14:
+			goto st_case_14
+		case 15:
+			goto st_case_15
+		case 16:
+			goto st_case_16
+		case 71:
+			goto st_case_71
+		case 17:
+			goto st_case_17
+		case 18:
+			goto st_case_18
+		case 72:
+			goto st_case_72
+		case 73:
+			goto st_case_73
+		case 74:
+			goto st_case_74
+		case 75:
+			goto st_case_75
+		case 76:
+			goto st_case_76
+		case 77:
+			goto st_case_77
+		case 78:
+			goto st_case_78
+		case 79:
+			goto st_case_79
+		case 80:
+			goto st_case_80
+		case 19:
+			goto st_case_19
+		case 20:
+			goto st_case_20
+		case 21:
+			goto st_case_21
+		case 81:
+			goto st_case_81
+		case 22:
+			goto st_case_22
+		case 23:
+			goto st_case_23
+		case 24:
+			goto st_case_24
+		case 82:
+			goto st_case_82
+		case 25:
+			goto st_case_25
+		case 26:
+			goto st_case_26
+		case 83:
+			goto st_case_83
+		case 84:
+			goto st_case_84
+		case 27:
+			goto st_case_27
+		case 28:
+			goto st_case_28
+		case 29:
+			goto st_case_29
+		case 30:
+			goto st_case_30
+		case 31:
+			goto st_case_31
+		case 32:
+			goto st_case_32
+		case 33:
+			goto st_case_33
+		case 34:
+			goto st_case_34
+		case 35:
+			goto st_case_35
+		case 85:
+			goto st_case_85
+		case 38:
+			goto st_case_38
+		case 87:
+			goto st_case_87
+		case 88:
+			goto st_case_88
+		case 39:
+			goto st_case_39
+		case 40:
+			goto st_case_40
+		case 41:
+			goto st_case_41
+		case 42:
+			goto st_case_42
+		case 89:
+			goto st_case_89
+		case 43:
+			goto st_case_43
+		case 90:
+			goto st_case_90
+		case 44:
+			goto st_case_44
+		case 45:
+			goto st_case_45
+		case 46:
+			goto st_case_46
+		case 86:
+			goto st_case_86
+		case 36:
+			goto st_case_36
+		case 37:
+			goto st_case_37
+		}
+		goto st_out
+	st47:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof47
+		}
+	st_case_47:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr33
+		case 13:
+			goto tr33
+		case 32:
+			goto tr82
+		case 35:
+			goto tr33
+		case 44:
+			goto tr33
+		case 92:
+			goto tr83
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr82
+		}
+		goto tr81
+	tr31:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st1
+	tr81:
+//line machine.go.rl:74
+
+		m.beginMetric = true
+
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st1
+	st1:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof1
+		}
+	st_case_1:
+//line machine.go:586
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr2
+		case 13:
+			goto tr2
+		case 32:
+			goto tr1
+		case 44:
+			goto tr3
+		case 92:
+			goto st9
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr1
+		}
+		goto st1
+	tr1:
+		(m.cs) = 2
+//line machine.go.rl:78
+
+		err = m.handler.SetMeasurement(m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr58:
+		(m.cs) = 2
+//line machine.go.rl:91
+
+		err = m.handler.AddTag(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	st2:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof2
+		}
+	st_case_2:
+//line machine.go:634
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr7
+		case 13:
+			goto tr7
+		case 32:
+			goto st2
+		case 44:
+			goto tr7
+		case 61:
+			goto tr7
+		case 92:
+			goto tr8
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto st2
+		}
+		goto tr5
+	tr5:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st3
+	st3:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof3
+		}
+	st_case_3:
+//line machine.go:664
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr7
+		case 44:
+			goto tr7
+		case 61:
+			goto tr10
+		case 92:
+			goto st13
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr7
+		}
+		goto st3
+	tr2:
+		(m.cs) = 0
+//line machine.go.rl:38
+
+		err = ErrTagParse
+		(m.p)--
+
+		(m.cs) = 35
+		{
+			(m.p)++
+			goto _out
+		}
+
+		goto _again
+	tr7:
+		(m.cs) = 0
+//line machine.go.rl:31
+
+		err = ErrFieldParse
+		(m.p)--
+
+		(m.cs) = 35
+		{
+			(m.p)++
+			goto _out
+		}
+
+		goto _again
+	tr33:
+		(m.cs) = 0
+//line machine.go.rl:24
+
+		err = ErrNameParse
+		(m.p)--
+
+		(m.cs) = 35
+		{
+			(m.p)++
+			goto _out
+		}
+
+		goto _again
+	tr37:
+		(m.cs) = 0
+//line machine.go.rl:45
+
+		err = ErrTimestampParse
+		(m.p)--
+
+		(m.cs) = 35
+		{
+			(m.p)++
+			goto _out
+		}
+
+		goto _again
+	tr84:
+		(m.cs) = 0
+//line machine.go.rl:31
+
+		err = ErrFieldParse
+		(m.p)--
+
+		(m.cs) = 35
+		{
+			(m.p)++
+			goto _out
+		}
+
+//line machine.go.rl:45
+
+		err = ErrTimestampParse
+		(m.p)--
+
+		(m.cs) = 35
+		{
+			(m.p)++
+			goto _out
+		}
+
+		goto _again
+	tr137:
+//line machine.go.rl:65
+
+		(m.p)--
+
+		{
+			goto st47
+		}
+
+		goto st0
+//line machine.go:750
+	st_case_0:
+	st0:
+		(m.cs) = 0
+		goto _out
+	tr10:
+//line machine.go.rl:100
+
+		m.key = m.text()
+
+		goto st4
+	st4:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof4
+		}
+	st_case_4:
+//line machine.go:766
+		switch (m.data)[(m.p)] {
+		case 34:
+			goto st5
+		case 45:
+			goto tr13
+		case 46:
+			goto tr14
+		case 48:
+			goto tr15
+		case 70:
+			goto tr17
+		case 84:
+			goto tr18
+		case 102:
+			goto tr19
+		case 116:
+			goto tr20
+		}
+		if 49 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+			goto tr16
+		}
+		goto tr7
+	st5:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof5
+		}
+	st_case_5:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr22
+		case 12:
+			goto tr7
+		case 13:
+			goto tr23
+		case 34:
+			goto tr24
+		case 92:
+			goto tr25
+		}
+		goto tr21
+	tr21:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st6
+	tr22:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto st6
+	tr27:
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto st6
+	st6:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof6
+		}
+	st_case_6:
+//line machine.go:838
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr27
+		case 12:
+			goto tr7
+		case 13:
+			goto st7
+		case 34:
+			goto tr29
+		case 92:
+			goto st14
+		}
+		goto st6
+	tr23:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st7
+	st7:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof7
+		}
+	st_case_7:
+//line machine.go:863
+		if (m.data)[(m.p)] == 10 {
+			goto tr27
+		}
+		goto tr7
+	tr24:
+		(m.cs) = 48
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+//line machine.go.rl:140
+
+		err = m.handler.AddString(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr29:
+		(m.cs) = 48
+//line machine.go.rl:140
+
+		err = m.handler.AddString(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	st48:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof48
+		}
+	st_case_48:
+//line machine.go:903
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr36
+		case 13:
+			goto st10
+		case 32:
+			goto st49
+		case 44:
+			goto st12
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto st49
+		}
+		goto tr84
+	tr112:
+		(m.cs) = 49
+//line machine.go.rl:122
+
+		err = m.handler.AddFloat(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr119:
+		(m.cs) = 49
+//line machine.go.rl:104
+
+		err = m.handler.AddInt(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr124:
+		(m.cs) = 49
+//line machine.go.rl:113
+
+		err = m.handler.AddUint(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr129:
+		(m.cs) = 49
+//line machine.go.rl:131
+
+		err = m.handler.AddBool(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	st49:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof49
+		}
+	st_case_49:
+//line machine.go:975
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr36
+		case 13:
+			goto st10
+		case 32:
+			goto st49
+		case 45:
+			goto tr88
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto tr89
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto st49
+		}
+		goto tr37
+	tr36:
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto st50
+	tr91:
+		(m.cs) = 50
+//line machine.go.rl:149
+
+		err = m.handler.SetTimestamp(m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto _again
+	tr113:
+		(m.cs) = 50
+//line machine.go.rl:122
+
+		err = m.handler.AddFloat(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto _again
+	tr120:
+		(m.cs) = 50
+//line machine.go.rl:104
+
+		err = m.handler.AddInt(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto _again
+	tr125:
+		(m.cs) = 50
+//line machine.go.rl:113
+
+		err = m.handler.AddUint(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto _again
+	tr130:
+		(m.cs) = 50
+//line machine.go.rl:131
+
+		err = m.handler.AddBool(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto _again
+	st50:
+//line machine.go.rl:164
+
+		m.finishMetric = true
+		(m.cs) = 86
+		{
+			(m.p)++
+			goto _out
+		}
+
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof50
+		}
+	st_case_50:
+//line machine.go:1109
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr33
+		case 13:
+			goto tr33
+		case 32:
+			goto st8
+		case 35:
+			goto tr33
+		case 44:
+			goto tr33
+		case 92:
+			goto tr34
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto st8
+		}
+		goto tr31
+	tr82:
+//line machine.go.rl:74
+
+		m.beginMetric = true
+
+		goto st8
+	st8:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof8
+		}
+	st_case_8:
+//line machine.go:1139
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr33
+		case 13:
+			goto tr33
+		case 32:
+			goto st8
+		case 35:
+			goto tr33
+		case 44:
+			goto tr33
+		case 92:
+			goto tr34
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto st8
+		}
+		goto tr31
+	tr34:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st9
+	tr83:
+//line machine.go.rl:74
+
+		m.beginMetric = true
+
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st9
+	st9:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof9
+		}
+	st_case_9:
+//line machine.go:1179
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto st0
+		}
+		goto st1
+	tr92:
+		(m.cs) = 10
+//line machine.go.rl:149
+
+		err = m.handler.SetTimestamp(m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr114:
+		(m.cs) = 10
+//line machine.go.rl:122
+
+		err = m.handler.AddFloat(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr121:
+		(m.cs) = 10
+//line machine.go.rl:104
+
+		err = m.handler.AddInt(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr126:
+		(m.cs) = 10
+//line machine.go.rl:113
+
+		err = m.handler.AddUint(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr131:
+		(m.cs) = 10
+//line machine.go.rl:131
+
+		err = m.handler.AddBool(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	st10:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof10
+		}
+	st_case_10:
+//line machine.go:1254
+		if (m.data)[(m.p)] == 10 {
+			goto tr36
+		}
+		goto st0
+	tr88:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st11
+	st11:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof11
+		}
+	st_case_11:
+//line machine.go:1270
+		if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+			goto st51
+		}
+		goto tr37
+	tr89:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st51
+	st51:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof51
+		}
+	st_case_51:
+//line machine.go:1286
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st53
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	tr90:
+		(m.cs) = 52
+//line machine.go.rl:149
+
+		err = m.handler.SetTimestamp(m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	st52:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof52
+		}
+	st_case_52:
+//line machine.go:1322
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr36
+		case 13:
+			goto st10
+		case 32:
+			goto st52
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto st52
+		}
+		goto st0
+	st53:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof53
+		}
+	st_case_53:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st54
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st54:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof54
+		}
+	st_case_54:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st55
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st55:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof55
+		}
+	st_case_55:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st56
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st56:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof56
+		}
+	st_case_56:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st57
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st57:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof57
+		}
+	st_case_57:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st58
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st58:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof58
+		}
+	st_case_58:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st59
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st59:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof59
+		}
+	st_case_59:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st60
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st60:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof60
+		}
+	st_case_60:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st61
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st61:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof61
+		}
+	st_case_61:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st62
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st62:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof62
+		}
+	st_case_62:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st63
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st63:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof63
+		}
+	st_case_63:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st64
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st64:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof64
+		}
+	st_case_64:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st65
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st65:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof65
+		}
+	st_case_65:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st66
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st66:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof66
+		}
+	st_case_66:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st67
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st67:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof67
+		}
+	st_case_67:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st68
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st68:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof68
+		}
+	st_case_68:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st69
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st69:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof69
+		}
+	st_case_69:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st70
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr90
+		}
+		goto tr37
+	st70:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof70
+		}
+	st_case_70:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr91
+		case 13:
+			goto tr92
+		case 32:
+			goto tr90
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr90
+		}
+		goto tr37
+	tr115:
+		(m.cs) = 12
+//line machine.go.rl:122
+
+		err = m.handler.AddFloat(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr122:
+		(m.cs) = 12
+//line machine.go.rl:104
+
+		err = m.handler.AddInt(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr127:
+		(m.cs) = 12
+//line machine.go.rl:113
+
+		err = m.handler.AddUint(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr132:
+		(m.cs) = 12
+//line machine.go.rl:131
+
+		err = m.handler.AddBool(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	st12:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof12
+		}
+	st_case_12:
+//line machine.go:1783
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr7
+		case 44:
+			goto tr7
+		case 61:
+			goto tr7
+		case 92:
+			goto tr8
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr7
+		}
+		goto tr5
+	tr8:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st13
+	st13:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof13
+		}
+	st_case_13:
+//line machine.go:1809
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr7
+		}
+		goto st3
+	tr25:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st14
+	st14:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof14
+		}
+	st_case_14:
+//line machine.go:1825
+		switch (m.data)[(m.p)] {
+		case 34:
+			goto st6
+		case 92:
+			goto st6
+		}
+		goto tr7
+	tr13:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st15
+	st15:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof15
+		}
+	st_case_15:
+//line machine.go:1844
+		switch (m.data)[(m.p)] {
+		case 46:
+			goto st16
+		case 48:
+			goto st73
+		}
+		if 49 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+			goto st76
+		}
+		goto tr7
+	tr14:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st16
+	st16:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof16
+		}
+	st_case_16:
+//line machine.go:1866
+		if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+			goto st71
+		}
+		goto tr7
+	st71:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof71
+		}
+	st_case_71:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr113
+		case 13:
+			goto tr114
+		case 32:
+			goto tr112
+		case 44:
+			goto tr115
+		case 69:
+			goto st17
+		case 101:
+			goto st17
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st71
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr112
+		}
+		goto tr84
+	st17:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof17
+		}
+	st_case_17:
+		switch (m.data)[(m.p)] {
+		case 34:
+			goto st18
+		case 43:
+			goto st18
+		case 45:
+			goto st18
+		}
+		if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+			goto st72
+		}
+		goto tr7
+	st18:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof18
+		}
+	st_case_18:
+		if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+			goto st72
+		}
+		goto tr7
+	st72:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof72
+		}
+	st_case_72:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr113
+		case 13:
+			goto tr114
+		case 32:
+			goto tr112
+		case 44:
+			goto tr115
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st72
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr112
+		}
+		goto tr84
+	st73:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof73
+		}
+	st_case_73:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr113
+		case 13:
+			goto tr114
+		case 32:
+			goto tr112
+		case 44:
+			goto tr115
+		case 46:
+			goto st71
+		case 69:
+			goto st17
+		case 101:
+			goto st17
+		case 105:
+			goto st75
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st74
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr112
+		}
+		goto tr84
+	st74:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof74
+		}
+	st_case_74:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr113
+		case 13:
+			goto tr114
+		case 32:
+			goto tr112
+		case 44:
+			goto tr115
+		case 46:
+			goto st71
+		case 69:
+			goto st17
+		case 101:
+			goto st17
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st74
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr112
+		}
+		goto tr84
+	st75:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof75
+		}
+	st_case_75:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr120
+		case 13:
+			goto tr121
+		case 32:
+			goto tr119
+		case 44:
+			goto tr122
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr119
+		}
+		goto tr84
+	st76:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof76
+		}
+	st_case_76:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr113
+		case 13:
+			goto tr114
+		case 32:
+			goto tr112
+		case 44:
+			goto tr115
+		case 46:
+			goto st71
+		case 69:
+			goto st17
+		case 101:
+			goto st17
+		case 105:
+			goto st75
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st76
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr112
+		}
+		goto tr84
+	tr15:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st77
+	st77:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof77
+		}
+	st_case_77:
+//line machine.go:2073
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr113
+		case 13:
+			goto tr114
+		case 32:
+			goto tr112
+		case 44:
+			goto tr115
+		case 46:
+			goto st71
+		case 69:
+			goto st17
+		case 101:
+			goto st17
+		case 105:
+			goto st75
+		case 117:
+			goto st78
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st74
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr112
+		}
+		goto tr84
+	st78:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof78
+		}
+	st_case_78:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr125
+		case 13:
+			goto tr126
+		case 32:
+			goto tr124
+		case 44:
+			goto tr127
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr124
+		}
+		goto tr84
+	tr16:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st79
+	st79:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof79
+		}
+	st_case_79:
+//line machine.go:2133
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr113
+		case 13:
+			goto tr114
+		case 32:
+			goto tr112
+		case 44:
+			goto tr115
+		case 46:
+			goto st71
+		case 69:
+			goto st17
+		case 101:
+			goto st17
+		case 105:
+			goto st75
+		case 117:
+			goto st78
+		}
+		switch {
+		case (m.data)[(m.p)] > 12:
+			if 48 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 57 {
+				goto st79
+			}
+		case (m.data)[(m.p)] >= 9:
+			goto tr112
+		}
+		goto tr84
+	tr17:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st80
+	st80:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof80
+		}
+	st_case_80:
+//line machine.go:2174
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr130
+		case 13:
+			goto tr131
+		case 32:
+			goto tr129
+		case 44:
+			goto tr132
+		case 65:
+			goto st19
+		case 97:
+			goto st22
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr129
+		}
+		goto tr84
+	st19:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof19
+		}
+	st_case_19:
+		if (m.data)[(m.p)] == 76 {
+			goto st20
+		}
+		goto tr7
+	st20:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof20
+		}
+	st_case_20:
+		if (m.data)[(m.p)] == 83 {
+			goto st21
+		}
+		goto tr7
+	st21:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof21
+		}
+	st_case_21:
+		if (m.data)[(m.p)] == 69 {
+			goto st81
+		}
+		goto tr7
+	st81:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof81
+		}
+	st_case_81:
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr130
+		case 13:
+			goto tr131
+		case 32:
+			goto tr129
+		case 44:
+			goto tr132
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr129
+		}
+		goto tr84
+	st22:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof22
+		}
+	st_case_22:
+		if (m.data)[(m.p)] == 108 {
+			goto st23
+		}
+		goto tr7
+	st23:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof23
+		}
+	st_case_23:
+		if (m.data)[(m.p)] == 115 {
+			goto st24
+		}
+		goto tr7
+	st24:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof24
+		}
+	st_case_24:
+		if (m.data)[(m.p)] == 101 {
+			goto st81
+		}
+		goto tr7
+	tr18:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st82
+	st82:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof82
+		}
+	st_case_82:
+//line machine.go:2277
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr130
+		case 13:
+			goto tr131
+		case 32:
+			goto tr129
+		case 44:
+			goto tr132
+		case 82:
+			goto st25
+		case 114:
+			goto st26
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr129
+		}
+		goto tr84
+	st25:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof25
+		}
+	st_case_25:
+		if (m.data)[(m.p)] == 85 {
+			goto st21
+		}
+		goto tr7
+	st26:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof26
+		}
+	st_case_26:
+		if (m.data)[(m.p)] == 117 {
+			goto st24
+		}
+		goto tr7
+	tr19:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st83
+	st83:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof83
+		}
+	st_case_83:
+//line machine.go:2325
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr130
+		case 13:
+			goto tr131
+		case 32:
+			goto tr129
+		case 44:
+			goto tr132
+		case 97:
+			goto st22
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr129
+		}
+		goto tr84
+	tr20:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st84
+	st84:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof84
+		}
+	st_case_84:
+//line machine.go:2353
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr130
+		case 13:
+			goto tr131
+		case 32:
+			goto tr129
+		case 44:
+			goto tr132
+		case 114:
+			goto st26
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr129
+		}
+		goto tr84
+	tr3:
+		(m.cs) = 27
+//line machine.go.rl:78
+
+		err = m.handler.SetMeasurement(m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr59:
+		(m.cs) = 27
+//line machine.go.rl:91
+
+		err = m.handler.AddTag(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	st27:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof27
+		}
+	st_case_27:
+//line machine.go:2401
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr2
+		case 44:
+			goto tr2
+		case 61:
+			goto tr2
+		case 92:
+			goto tr51
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto tr50
+	tr50:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st28
+	st28:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof28
+		}
+	st_case_28:
+//line machine.go:2427
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr2
+		case 44:
+			goto tr2
+		case 61:
+			goto tr53
+		case 92:
+			goto st33
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto st28
+	tr53:
+//line machine.go.rl:87
+
+		m.key = m.text()
+
+		goto st29
+	st29:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof29
+		}
+	st_case_29:
+//line machine.go:2453
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr2
+		case 44:
+			goto tr2
+		case 61:
+			goto tr2
+		case 92:
+			goto tr56
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto tr55
+	tr55:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st30
+	st30:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof30
+		}
+	st_case_30:
+//line machine.go:2479
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr2
+		case 13:
+			goto tr2
+		case 32:
+			goto tr58
+		case 44:
+			goto tr59
+		case 61:
+			goto tr2
+		case 92:
+			goto st31
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr58
+		}
+		goto st30
+	tr56:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st31
+	st31:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof31
+		}
+	st_case_31:
+//line machine.go:2509
+		if (m.data)[(m.p)] == 92 {
+			goto st32
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto st30
+	st32:
+//line machine.go.rl:240
+		(m.p)--
+
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof32
+		}
+	st_case_32:
+//line machine.go:2525
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr2
+		case 13:
+			goto tr2
+		case 32:
+			goto tr58
+		case 44:
+			goto tr59
+		case 61:
+			goto tr2
+		case 92:
+			goto st31
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr58
+		}
+		goto st30
+	tr51:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st33
+	st33:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof33
+		}
+	st_case_33:
+//line machine.go:2555
+		if (m.data)[(m.p)] == 92 {
+			goto st34
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto st28
+	st34:
+//line machine.go.rl:240
+		(m.p)--
+
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof34
+		}
+	st_case_34:
+//line machine.go:2571
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr2
+		case 44:
+			goto tr2
+		case 61:
+			goto tr53
+		case 92:
+			goto st33
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto st28
+	st35:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof35
+		}
+	st_case_35:
+		if (m.data)[(m.p)] == 10 {
+			goto tr64
+		}
+		goto st35
+	tr64:
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+//line machine.go.rl:70
+
+		{
+			goto st86
+		}
+
+		goto st85
+	st85:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof85
+		}
+	st_case_85:
+//line machine.go:2612
+		goto st0
+	st38:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof38
+		}
+	st_case_38:
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr33
+		case 35:
+			goto tr33
+		case 44:
+			goto tr33
+		case 92:
+			goto tr68
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr33
+		}
+		goto tr67
+	tr67:
+//line machine.go.rl:74
+
+		m.beginMetric = true
+
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st87
+	st87:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof87
+		}
+	st_case_87:
+//line machine.go:2648
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr140
+		case 13:
+			goto tr141
+		case 32:
+			goto tr2
+		case 44:
+			goto tr142
+		case 92:
+			goto st46
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr2
+		}
+		goto st87
+	tr69:
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto st88
+	tr140:
+		(m.cs) = 88
+//line machine.go.rl:78
+
+		err = m.handler.SetMeasurement(m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto _again
+	tr144:
+		(m.cs) = 88
+//line machine.go.rl:91
+
+		err = m.handler.AddTag(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto _again
+	st88:
+//line machine.go.rl:164
+
+		m.finishMetric = true
+		(m.cs) = 86
+		{
+			(m.p)++
+			goto _out
+		}
+
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof88
+		}
+	st_case_88:
+//line machine.go:2722
+		goto st0
+	tr141:
+		(m.cs) = 39
+//line machine.go.rl:78
+
+		err = m.handler.SetMeasurement(m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr145:
+		(m.cs) = 39
+//line machine.go.rl:91
+
+		err = m.handler.AddTag(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	st39:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof39
+		}
+	st_case_39:
+//line machine.go:2755
+		if (m.data)[(m.p)] == 10 {
+			goto tr69
+		}
+		goto st0
+	tr142:
+		(m.cs) = 40
+//line machine.go.rl:78
+
+		err = m.handler.SetMeasurement(m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	tr146:
+		(m.cs) = 40
+//line machine.go.rl:91
+
+		err = m.handler.AddTag(m.key, m.text())
+		if err != nil {
+			(m.p)--
+
+			(m.cs) = 35
+			{
+				(m.p)++
+				goto _out
+			}
+		}
+
+		goto _again
+	st40:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof40
+		}
+	st_case_40:
+//line machine.go:2791
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr2
+		case 44:
+			goto tr2
+		case 61:
+			goto tr2
+		case 92:
+			goto tr71
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto tr70
+	tr70:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st41
+	st41:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof41
+		}
+	st_case_41:
+//line machine.go:2817
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr2
+		case 44:
+			goto tr2
+		case 61:
+			goto tr73
+		case 92:
+			goto st44
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto st41
+	tr73:
+//line machine.go.rl:87
+
+		m.key = m.text()
+
+		goto st42
+	st42:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof42
+		}
+	st_case_42:
+//line machine.go:2843
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr2
+		case 44:
+			goto tr2
+		case 61:
+			goto tr2
+		case 92:
+			goto tr76
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto tr75
+	tr75:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st89
+	st89:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof89
+		}
+	st_case_89:
+//line machine.go:2869
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr144
+		case 13:
+			goto tr145
+		case 32:
+			goto tr2
+		case 44:
+			goto tr146
+		case 61:
+			goto tr2
+		case 92:
+			goto st43
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr2
+		}
+		goto st89
+	tr76:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st43
+	st43:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof43
+		}
+	st_case_43:
+//line machine.go:2899
+		if (m.data)[(m.p)] == 92 {
+			goto st90
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto st89
+	st90:
+//line machine.go.rl:240
+		(m.p)--
+
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof90
+		}
+	st_case_90:
+//line machine.go:2915
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr144
+		case 13:
+			goto tr145
+		case 32:
+			goto tr2
+		case 44:
+			goto tr146
+		case 61:
+			goto tr2
+		case 92:
+			goto st43
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto tr2
+		}
+		goto st89
+	tr71:
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st44
+	st44:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof44
+		}
+	st_case_44:
+//line machine.go:2945
+		if (m.data)[(m.p)] == 92 {
+			goto st45
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto st41
+	st45:
+//line machine.go.rl:240
+		(m.p)--
+
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof45
+		}
+	st_case_45:
+//line machine.go:2961
+		switch (m.data)[(m.p)] {
+		case 32:
+			goto tr2
+		case 44:
+			goto tr2
+		case 61:
+			goto tr73
+		case 92:
+			goto st44
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto tr2
+		}
+		goto st41
+	tr68:
+//line machine.go.rl:74
+
+		m.beginMetric = true
+
+//line machine.go.rl:20
+
+		m.pb = m.p
+
+		goto st46
+	st46:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof46
+		}
+	st_case_46:
+//line machine.go:2991
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 13 {
+			goto st0
+		}
+		goto st87
+	tr65:
+//line machine.go.rl:158
+
+		m.lineno++
+		m.sol = m.p
+		m.sol++ // next char will be the first column in the line
+
+		goto st86
+	st86:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof86
+		}
+	st_case_86:
+//line machine.go:3009
+		switch (m.data)[(m.p)] {
+		case 10:
+			goto tr65
+		case 13:
+			goto st36
+		case 32:
+			goto st86
+		case 35:
+			goto st37
+		}
+		if 9 <= (m.data)[(m.p)] && (m.data)[(m.p)] <= 12 {
+			goto st86
+		}
+		goto tr137
+	st36:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof36
+		}
+	st_case_36:
+		if (m.data)[(m.p)] == 10 {
+			goto tr65
+		}
+		goto st0
+	st37:
+		if (m.p)++; (m.p) == (m.pe) {
+			goto _test_eof37
+		}
+	st_case_37:
+		if (m.data)[(m.p)] == 10 {
+			goto tr65
+		}
+		goto st37
+	st_out:
+	_test_eof47:
+		(m.cs) = 47
+		goto _test_eof
+	_test_eof1:
+		(m.cs) = 1
+		goto _test_eof
+	_test_eof2:
+		(m.cs) = 2
+		goto _test_eof
+	_test_eof3:
+		(m.cs) = 3
+		goto _test_eof
+	_test_eof4:
+		(m.cs) = 4
+		goto _test_eof
+	_test_eof5:
+		(m.cs) = 5
+		goto _test_eof
+	_test_eof6:
+		(m.cs) = 6
+		goto _test_eof
+	_test_eof7:
+		(m.cs) = 7
+		goto _test_eof
+	_test_eof48:
+		(m.cs) = 48
+		goto _test_eof
+	_test_eof49:
+		(m.cs) = 49
+		goto _test_eof
+	_test_eof50:
+		(m.cs) = 50
+		goto _test_eof
+	_test_eof8:
+		(m.cs) = 8
+		goto _test_eof
+	_test_eof9:
+		(m.cs) = 9
+		goto _test_eof
+	_test_eof10:
+		(m.cs) = 10
+		goto _test_eof
+	_test_eof11:
+		(m.cs) = 11
+		goto _test_eof
+	_test_eof51:
+		(m.cs) = 51
+		goto _test_eof
+	_test_eof52:
+		(m.cs) = 52
+		goto _test_eof
+	_test_eof53:
+		(m.cs) = 53
+		goto _test_eof
+	_test_eof54:
+		(m.cs) = 54
+		goto _test_eof
+	_test_eof55:
+		(m.cs) = 55
+		goto _test_eof
+	_test_eof56:
+		(m.cs) = 56
+		goto _test_eof
+	_test_eof57:
+		(m.cs) = 57
+		goto _test_eof
+	_test_eof58:
+		(m.cs) = 58
+		goto _test_eof
+	_test_eof59:
+		(m.cs) = 59
+		goto _test_eof
+	_test_eof60:
+		(m.cs) = 60
+		goto _test_eof
+	_test_eof61:
+		(m.cs) = 61
+		goto _test_eof
+	_test_eof62:
+		(m.cs) = 62
+		goto _test_eof
+	_test_eof63:
+		(m.cs) = 63
+		goto _test_eof
+	_test_eof64:
+		(m.cs) = 64
+		goto _test_eof
+	_test_eof65:
+		(m.cs) = 65
+		goto _test_eof
+	_test_eof66:
+		(m.cs) = 66
+		goto _test_eof
+	_test_eof67:
+		(m.cs) = 67
+		goto _test_eof
+	_test_eof68:
+		(m.cs) = 68
+		goto _test_eof
+	_test_eof69:
+		(m.cs) = 69
+		goto _test_eof
+	_test_eof70:
+		(m.cs) = 70
+		goto _test_eof
+	_test_eof12:
+		(m.cs) = 12
+		goto _test_eof
+	_test_eof13:
+		(m.cs) = 13
+		goto _test_eof
+	_test_eof14:
+		(m.cs) = 14
+		goto _test_eof
+	_test_eof15:
+		(m.cs) = 15
+		goto _test_eof
+	_test_eof16:
+		(m.cs) = 16
+		goto _test_eof
+	_test_eof71:
+		(m.cs) = 71
+		goto _test_eof
+	_test_eof17:
+		(m.cs) = 17
+		goto _test_eof
+	_test_eof18:
+		(m.cs) = 18
+		goto _test_eof
+	_test_eof72:
+		(m.cs) = 72
+		goto _test_eof
+	_test_eof73:
+		(m.cs) = 73
+		goto _test_eof
+	_test_eof74:
+		(m.cs) = 74
+		goto _test_eof
+	_test_eof75:
+		(m.cs) = 75
+		goto _test_eof
+	_test_eof76:
+		(m.cs) = 76
+		goto _test_eof
+	_test_eof77:
+		(m.cs) = 77
+		goto _test_eof
+	_test_eof78:
+		(m.cs) = 78
+		goto _test_eof
+	_test_eof79:
+		(m.cs) = 79
+		goto _test_eof
+	_test_eof80:
+		(m.cs) = 80
+		goto _test_eof
+	_test_eof19:
+		(m.cs) = 19
+		goto _test_eof
+	_test_eof20:
+		(m.cs) = 20
+		goto _test_eof
+	_test_eof21:
+		(m.cs) = 21
+		goto _test_eof
+	_test_eof81:
+		(m.cs) = 81
+		goto _test_eof
+	_test_eof22:
+		(m.cs) = 22
+		goto _test_eof
+	_test_eof23:
+		(m.cs) = 23
+		goto _test_eof
+	_test_eof24:
+		(m.cs) = 24
+		goto _test_eof
+	_test_eof82:
+		(m.cs) = 82
+		goto _test_eof
+	_test_eof25:
+		(m.cs) = 25
+		goto _test_eof
+	_test_eof26:
+		(m.cs) = 26
+		goto _test_eof
+	_test_eof83:
+		(m.cs) = 83
+		goto _test_eof
+	_test_eof84:
+		(m.cs) = 84
+		goto _test_eof
+	_test_eof27:
+		(m.cs) = 27
+		goto _test_eof
+	_test_eof28:
+		(m.cs) = 28
+		goto _test_eof
+	_test_eof29:
+		(m.cs) = 29
+		goto _test_eof
+	_test_eof30:
+		(m.cs) = 30
+		goto _test_eof
+	_test_eof31:
+		(m.cs) = 31
+		goto _test_eof
+	_test_eof32:
+		(m.cs) = 32
+		goto _test_eof
+	_test_eof33:
+		(m.cs) = 33
+		goto _test_eof
+	_test_eof34:
+		(m.cs) = 34
+		goto _test_eof
+	_test_eof35:
+		(m.cs) = 35
+		goto _test_eof
+	_test_eof85:
+		(m.cs) = 85
+		goto _test_eof
+	_test_eof38:
+		(m.cs) = 38
+		goto _test_eof
+	_test_eof87:
+		(m.cs) = 87
+		goto _test_eof
+	_test_eof88:
+		(m.cs) = 88
+		goto _test_eof
+	_test_eof39:
+		(m.cs) = 39
+		goto _test_eof
+	_test_eof40:
+		(m.cs) = 40
+		goto _test_eof
+	_test_eof41:
+		(m.cs) = 41
+		goto _test_eof
+	_test_eof42:
+		(m.cs) = 42
+		goto _test_eof
+	_test_eof89:
+		(m.cs) = 89
+		goto _test_eof
+	_test_eof43:
+		(m.cs) = 43
+		goto _test_eof
+	_test_eof90:
+		(m.cs) = 90
+		goto _test_eof
+	_test_eof44:
+		(m.cs) = 44
+		goto _test_eof
+	_test_eof45:
+		(m.cs) = 45
+		goto _test_eof
+	_test_eof46:
+		(m.cs) = 46
+		goto _test_eof
+	_test_eof86:
+		(m.cs) = 86
+		goto _test_eof
+	_test_eof36:
+		(m.cs) = 36
+		goto _test_eof
+	_test_eof37:
+		(m.cs) = 37
+		goto _test_eof
+
+	_test_eof:
+		{
+		}
+		if (m.p) == (m.eof) {
+			switch m.cs {
+			case 8, 38:
+//line machine.go.rl:24
+
+				err = ErrNameParse
+				(m.p)--
+
+				(m.cs) = 35
+				{
+					(m.p)++
+					(m.cs) = 0
+					goto _out
+				}
+
+			case 2, 3, 4, 5, 6, 7, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26:
+//line machine.go.rl:31
+
+				err = ErrFieldParse
+				(m.p)--
+
+				(m.cs) = 35
+				{
+					(m.p)++
+					(m.cs) = 0
+					goto _out
+				}
+
+			case 27, 28, 29, 31, 33, 34, 40, 41, 42, 43, 44, 45:
+//line machine.go.rl:38
+
+				err = ErrTagParse
+				(m.p)--
+
+				(m.cs) = 35
+				{
+					(m.p)++
+					(m.cs) = 0
+					goto _out
+				}
+
+			case 11:
+//line machine.go.rl:45
+
+				err = ErrTimestampParse
+				(m.p)--
+
+				(m.cs) = 35
+				{
+					(m.p)++
+					(m.cs) = 0
+					goto _out
+				}
+
+			case 87:
+//line machine.go.rl:78
+
+				err = m.handler.SetMeasurement(m.text())
+				if err != nil {
+					(m.p)--
+
+					(m.cs) = 35
+					{
+						(m.p)++
+						(m.cs) = 0
+						goto _out
+					}
+				}
+
+			case 89, 90:
+//line machine.go.rl:91
+
+				err = m.handler.AddTag(m.key, m.text())
+				if err != nil {
+					(m.p)--
+
+					(m.cs) = 35
+					{
+						(m.p)++
+						(m.cs) = 0
+						goto _out
+					}
+				}
+
+			case 48, 49, 50, 52:
+//line machine.go.rl:170
+
+				m.finishMetric = true
+
+			case 47:
+//line machine.go.rl:74
+
+				m.beginMetric = true
+
+//line machine.go.rl:170
+
+				m.finishMetric = true
+
+			case 1:
+//line machine.go.rl:78
+
+				err = m.handler.SetMeasurement(m.text())
+				if err != nil {
+					(m.p)--
+
+					(m.cs) = 35
+					{
+						(m.p)++
+						(m.cs) = 0
+						goto _out
+					}
+				}
+
+//line machine.go.rl:38
+
+				err = ErrTagParse
+				(m.p)--
+
+				(m.cs) = 35
+				{
+					(m.p)++
+					(m.cs) = 0
+					goto _out
+				}
+
+			case 30, 32:
+//line machine.go.rl:91
+
+				err = m.handler.AddTag(m.key, m.text())
+				if err != nil {
+					(m.p)--
+
+					(m.cs) = 35
+					{
+						(m.p)++
+						(m.cs) = 0
+						goto _out
+					}
+				}
+
+//line machine.go.rl:38
+
+				err = ErrTagParse
+				(m.p)--
+
+				(m.cs) = 35
+				{
+					(m.p)++
+					(m.cs) = 0
+					goto _out
+				}
+
+			case 75:
+//line machine.go.rl:104
+
+				err = m.handler.AddInt(m.key, m.text())
+				if err != nil {
+					(m.p)--
+
+					(m.cs) = 35
+					{
+						(m.p)++
+						(m.cs) = 0
+						goto _out
+					}
+				}
+
+//line machine.go.rl:170
+
+				m.finishMetric = true
+
+			case 78:
+//line machine.go.rl:113
+
+				err = m.handler.AddUint(m.key, m.text())
+				if err != nil {
+					(m.p)--
+
+					(m.cs) = 35
+					{
+						(m.p)++
+						(m.cs) = 0
+						goto _out
+					}
+				}
+
+//line machine.go.rl:170
+
+				m.finishMetric = true
+
+			case 71, 72, 73, 74, 76, 77, 79:
+//line machine.go.rl:122
+
+				err = m.handler.AddFloat(m.key, m.text())
+				if err != nil {
+					(m.p)--
+
+					(m.cs) = 35
+					{
+						(m.p)++
+						(m.cs) = 0
+						goto _out
+					}
+				}
+
+//line machine.go.rl:170
+
+				m.finishMetric = true
+
+			case 80, 81, 82, 83, 84:
+//line machine.go.rl:131
+
+				err = m.handler.AddBool(m.key, m.text())
+				if err != nil {
+					(m.p)--
+
+					(m.cs) = 35
+					{
+						(m.p)++
+						(m.cs) = 0
+						goto _out
+					}
+				}
+
+//line machine.go.rl:170
+
+				m.finishMetric = true
+
+			case 51, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70:
+//line machine.go.rl:149
+
+				err = m.handler.SetTimestamp(m.text())
+				if err != nil {
+					(m.p)--
+
+					(m.cs) = 35
+					{
+						(m.p)++
+						(m.cs) = 0
+						goto _out
+					}
+				}
+
+//line machine.go.rl:170
+
+				m.finishMetric = true
+
+//line machine.go:3322
+			}
+		}
+
+	_out:
+		{
+		}
+	}
+
+//line machine.go.rl:407
+
+	if err != nil {
+		return err
+	}
+
+	// This would indicate an error in the machine that was reported with a
+	// more specific error.  We return a generic error but this should
+	// possibly be a panic.
+	if m.cs == 0 {
+		m.cs = LineProtocol_en_discard_line
+		return ErrParse
+	}
+
+	// If we haven't found a metric line yet and we reached the EOF, report it
+	// now.  This happens when the data ends with a comment or whitespace.
+	//
+	// Otherwise we have successfully parsed a metric line, so if we are at
+	// the EOF, we will report it on the next call.
+	if !m.beginMetric && m.p == m.pe && m.pe == m.eof {
+		return EOF
+	}
+
+	return nil
+}
+
+// Position returns the current byte offset into the data.
+func (m *machine) Position() int {
+	return m.p
+}
+
+// LineOffset returns the byte offset of the current line.
+func (m *machine) LineOffset() int {
+	return m.sol
+}
+
+// LineNumber returns the current line number.  Lines are counted based on the
+// regular expression `\r?\n`.
+func (m *machine) LineNumber() int {
+	return m.lineno
+}
+
+// Column returns the current column.
+func (m *machine) Column() int {
+	lineOffset := m.p - m.sol
+	return lineOffset + 1
+}
+
+func (m *machine) text() []byte {
+	return m.data[m.pb:m.p]
+}
+
+type streamMachine struct {
+	machine *machine
+	reader  io.Reader
+}
+
+func NewStreamMachine(r io.Reader, handler Handler) *streamMachine {
+	m := &streamMachine{
+		machine: NewMachine(handler),
+		reader:  r,
+	}
+
+	m.machine.SetData(make([]byte, 1024))
+	m.machine.pe = 0
+	m.machine.eof = -1
+	return m
+}
+
+func (m *streamMachine) Next() error {
+	// Check if we are already at EOF; this should only happen if called again
+	// after already returning EOF.
+	if m.machine.p == m.machine.pe && m.machine.pe == m.machine.eof {
+		return EOF
+	}
+
+	copy(m.machine.data, m.machine.data[m.machine.p:])
+	m.machine.pe = m.machine.pe - m.machine.p
+	m.machine.sol = m.machine.sol - m.machine.p
+	m.machine.pb = 0
+	m.machine.p = 0
+	m.machine.eof = -1
+
+	m.machine.key = nil
+	m.machine.beginMetric = false
+	m.machine.finishMetric = false
+
+	for {
+		// Expand the buffer if it is full
+		if m.machine.pe == len(m.machine.data) {
+			expanded := make([]byte, 2*len(m.machine.data))
+			copy(expanded, m.machine.data)
+			m.machine.data = expanded
+		}
+
+		n, err := m.reader.Read(m.machine.data[m.machine.pe:])
+		if n == 0 && err == io.EOF {
+			m.machine.eof = m.machine.pe
+		} else if err != nil && err != io.EOF {
+			return err
+		}
+
+		m.machine.pe += n
+
+		err = m.machine.exec()
+		if err != nil {
+			return err
+		}
+
+		// If we have successfully parsed a full metric line, break out
+		if m.machine.finishMetric {
+			break
+		}
+
+	}
+
+	return nil
+}
+
+// Position returns the current byte offset into the data.
+func (m *streamMachine) Position() int {
+	return m.machine.Position()
+}
+
+// LineOffset returns the byte offset of the current line.
+func (m *streamMachine) LineOffset() int {
+	return m.machine.LineOffset()
+}
+
+// LineNumber returns the current line number.  Lines are counted based on the
+// regular expression `\r?\n`.
+func (m *streamMachine) LineNumber() int {
+	return m.machine.LineNumber()
+}
+
+// Column returns the current column.
+func (m *streamMachine) Column() int {
+	return m.machine.Column()
+}
+
+// LineText returns the text of the current line that has been parsed so far.
+func (m *streamMachine) LineText() string {
+	return string(m.machine.data[0:m.machine.p])
+}
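For orientation, here is a minimal sketch of how the stream machine above can be driven: the caller supplies an implementation of the Handler interface (declared in machine.go.rl, vendored next) and pulls metric lines with Next until EOF. The printHandler type and the sample input are illustrative assumptions, not part of the vendored package; in practice the package's own handler.go and parser.go, also added by this commit, presumably provide higher-level wrappers around this machinery.

package main

import (
	"fmt"
	"strings"

	protocol "github.com/influxdata/line-protocol"
)

// printHandler is a hypothetical Handler implementation that simply echoes
// every callback the parser makes while walking a metric line.
type printHandler struct{}

func (printHandler) SetMeasurement(name []byte) error  { fmt.Printf("measurement %s\n", name); return nil }
func (printHandler) AddTag(key, value []byte) error    { fmt.Printf("tag %s=%s\n", key, value); return nil }
func (printHandler) AddInt(key, value []byte) error    { fmt.Printf("int %s=%s\n", key, value); return nil }
func (printHandler) AddUint(key, value []byte) error   { fmt.Printf("uint %s=%s\n", key, value); return nil }
func (printHandler) AddFloat(key, value []byte) error  { fmt.Printf("float %s=%s\n", key, value); return nil }
func (printHandler) AddString(key, value []byte) error { fmt.Printf("string %s=%s\n", key, value); return nil }
func (printHandler) AddBool(key, value []byte) error   { fmt.Printf("bool %s=%s\n", key, value); return nil }
func (printHandler) SetTimestamp(tm []byte) error      { fmt.Printf("timestamp %s\n", tm); return nil }

func main() {
	input := "cpu,host=a usage=0.5 1465839830100400200\n"

	// NewStreamMachine reads from the io.Reader in chunks, growing its
	// internal buffer as needed (see streamMachine.Next above).
	sm := protocol.NewStreamMachine(strings.NewReader(input), printHandler{})

	for {
		err := sm.Next()
		if err == protocol.EOF {
			break // input exhausted
		}
		if err != nil {
			fmt.Printf("parse error on line %d: %v\n", sm.LineNumber(), err)
			break
		}
	}
}

Each successful Next call corresponds to one parsed metric line delivered through the handler callbacks; on errors, LineNumber, Column and LineText expose the current position for diagnostics.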

+ 549 - 0
vendor/github.com/influxdata/line-protocol/machine.go.rl

@@ -0,0 +1,549 @@
+package protocol
+
+import (
+	"errors"
+	"io"
+)
+
+var (
+	ErrNameParse = errors.New("expected measurement name")
+	ErrFieldParse = errors.New("expected field")
+	ErrTagParse = errors.New("expected tag")
+	ErrTimestampParse = errors.New("expected timestamp")
+	ErrParse = errors.New("parse error")
+	EOF = errors.New("EOF")
+)
+
+%%{
+machine LineProtocol;
+
+action begin {
+	m.pb = m.p
+}
+
+action name_error {
+	err = ErrNameParse
+	fhold;
+	fnext discard_line;
+	fbreak;
+}
+
+action field_error {
+	err = ErrFieldParse
+	fhold;
+	fnext discard_line;
+	fbreak;
+}
+
+action tagset_error {
+	err = ErrTagParse
+	fhold;
+	fnext discard_line;
+	fbreak;
+}
+
+action timestamp_error {
+	err = ErrTimestampParse
+	fhold;
+	fnext discard_line;
+	fbreak;
+}
+
+action parse_error {
+	err = ErrParse
+	fhold;
+	fnext discard_line;
+	fbreak;
+}
+
+action align_error {
+	err = ErrParse
+	fnext discard_line;
+	fbreak;
+}
+
+action hold_recover {
+	fhold;
+	fgoto main;
+}
+
+action goto_align {
+	fgoto align;
+}
+
+action begin_metric {
+	m.beginMetric = true
+}
+
+action name {
+	err = m.handler.SetMeasurement(m.text())
+	if err != nil {
+		fhold;
+		fnext discard_line;
+		fbreak;
+	}
+}
+
+action tagkey {
+	m.key = m.text()
+}
+
+action tagvalue {
+	err = m.handler.AddTag(m.key, m.text())
+	if err != nil {
+		fhold;
+		fnext discard_line;
+		fbreak;
+	}
+}
+
+action fieldkey {
+	m.key = m.text()
+}
+
+action integer {
+	err = m.handler.AddInt(m.key, m.text())
+	if err != nil {
+		fhold;
+		fnext discard_line;
+		fbreak;
+	}
+}
+
+action unsigned {
+	err = m.handler.AddUint(m.key, m.text())
+	if err != nil {
+		fhold;
+		fnext discard_line;
+		fbreak;
+	}
+}
+
+action float {
+	err = m.handler.AddFloat(m.key, m.text())
+	if err != nil {
+		fhold;
+		fnext discard_line;
+		fbreak;
+	}
+}
+
+action bool {
+	err = m.handler.AddBool(m.key, m.text())
+	if err != nil {
+		fhold;
+		fnext discard_line;
+		fbreak;
+	}
+}
+
+action string {
+	err = m.handler.AddString(m.key, m.text())
+	if err != nil {
+		fhold;
+		fnext discard_line;
+		fbreak;
+	}
+}
+
+action timestamp {
+	err = m.handler.SetTimestamp(m.text())
+	if err != nil {
+		fhold;
+		fnext discard_line;
+		fbreak;
+	}
+}
+
+action incr_newline {
+	m.lineno++
+	m.sol = m.p
+	m.sol++ // next char will be the first column in the line
+}
+
+action eol {
+	m.finishMetric = true
+	fnext align;
+	fbreak;
+}
+
+action finish_metric {
+	m.finishMetric = true
+}
+
+ws =
+	[\t\v\f ];
+
+newline =
+	'\r'? '\n' >incr_newline;
+
+non_zero_digit =
+	[1-9];
+
+integer =
+	'-'? ( digit | ( non_zero_digit digit* ) );
+
+unsigned =
+	( digit | ( non_zero_digit digit* ) );
+
+number =
+	'-'? (digit+ ('.' digit*)? | '.' digit+);
+
+scientific =
+	number 'e'i ["\-+"]? digit+;
+
+timestamp =
+	('-'? digit{1,19}) >begin %timestamp;
+
+fieldkeychar =
+	[^\t\n\v\f\r ,=\\] | ( '\\' [^\t\n\v\f\r] );
+
+fieldkey =
+	fieldkeychar+ >begin %fieldkey;
+
+fieldfloat =
+	(scientific | number) >begin %float;
+
+fieldinteger =
+	(integer 'i') >begin %integer;
+
+fieldunsigned =
+	(unsigned 'u') >begin %unsigned;
+
+false =
+	"false" | "FALSE" | "False" | "F" | "f";
+
+true =
+	"true" | "TRUE" | "True" | "T" | "t";
+
+fieldbool =
+	(true | false) >begin %bool;
+
+fieldstringchar =
+	[^\f\r\n\\"] | '\\' [\\"] | newline;
+
+fieldstring =
+	fieldstringchar* >begin %string;
+
+fieldstringquoted =
+	'"' fieldstring '"';
+
+fieldvalue = fieldinteger | fieldunsigned | fieldfloat | fieldstringquoted | fieldbool;
+
+field =
+	fieldkey '=' fieldvalue;
+
+fieldset =
+	field ( ',' field )*;
+
+tagchar =
+	[^\t\n\v\f\r ,=\\] | ( '\\' [^\t\n\v\f\r\\] ) | '\\\\' %to{ fhold; };
+
+tagkey =
+	tagchar+ >begin %tagkey;
+
+tagvalue =
+	tagchar+ >begin %eof(tagvalue) %tagvalue;
+
+tagset =
+	((',' tagkey '=' tagvalue) $err(tagset_error))*;
+
+measurement_chars =
+	[^\t\n\v\f\r ,\\] | ( '\\' [^\t\n\v\f\r] );
+
+measurement_start =
+	measurement_chars - '#';
+
+measurement =
+	(measurement_start measurement_chars*) >begin %eof(name) %name;
+
+eol_break =
+	newline %to(eol)
+	;
+
+metric =
+	measurement >err(name_error)
+	tagset
+	ws+ fieldset $err(field_error)
+	(ws+ timestamp)? $err(timestamp_error)
+	;
+
+line_with_term =
+	ws* metric ws* eol_break
+	;
+
+line_without_term =
+	ws* metric ws*
+	;
+
+main :=
+	(line_with_term*
+	(line_with_term | line_without_term?)
+    ) >begin_metric %eof(finish_metric)
+	;
+
+# The discard_line machine discards the current line.  Useful for recovering
+# on the next line when an error occurs.
+discard_line :=
+	(any -- newline)* newline @goto_align;
+
+commentline =
+	ws* '#' (any -- newline)* newline;
+
+emptyline =
+	ws* newline;
+
+# The align machine scans forward to the start of the next line.  This machine
+# is used to skip over whitespace and comments, keeping this logic out of the
+# main machine.
+#
+# Skip valid lines that don't contain line protocol; any other data will move
+# control to the main parser via the err action.
+align :=
+	(emptyline | commentline | ws+)* %err(hold_recover);
+
+# Series is a machine for matching measurement+tagset
+series :=
+	(measurement >err(name_error) tagset eol_break?)
+	>begin_metric
+	;
+}%%
+
+%% write data;
+
+type Handler interface {
+	SetMeasurement(name []byte) error
+	AddTag(key []byte, value []byte) error
+	AddInt(key []byte, value []byte) error
+	AddUint(key []byte, value []byte) error
+	AddFloat(key []byte, value []byte) error
+	AddString(key []byte, value []byte) error
+	AddBool(key []byte, value []byte) error
+	SetTimestamp(tm []byte) error
+}
+
+type machine struct {
+	data         []byte
+	cs           int
+	p, pe, eof   int
+	pb           int
+	lineno       int
+	sol          int
+	handler      Handler
+	initState    int
+	key          []byte
+	beginMetric  bool
+	finishMetric bool
+}
+
+func NewMachine(handler Handler) *machine {
+	m := &machine{
+		handler: handler,
+		initState: LineProtocol_en_align,
+	}
+
+	%% access m.;
+	%% variable p m.p;
+	%% variable cs m.cs;
+	%% variable pe m.pe;
+	%% variable eof m.eof;
+	%% variable data m.data;
+	%% write init;
+
+	return m
+}
+
+func NewSeriesMachine(handler Handler) *machine {
+	m := &machine{
+		handler: handler,
+		initState: LineProtocol_en_series,
+	}
+
+	%% access m.;
+	%% variable p m.p;
+	%% variable pe m.pe;
+	%% variable eof m.eof;
+	%% variable data m.data;
+	%% write init;
+
+	return m
+}
+
+func (m *machine) SetData(data []byte) {
+	m.data = data
+	m.p = 0
+	m.pb = 0
+	m.lineno = 1
+	m.sol = 0
+	m.pe = len(data)
+	m.eof = len(data)
+	m.key = nil
+	m.beginMetric = false
+	m.finishMetric = false
+
+	%% write init;
+	m.cs = m.initState
+}
+
+// Next parses the next metric line and returns nil if it was successfully
+// processed.  If the line contains a syntax error, an error is returned;
+// otherwise, if the end of file is reached before finding a metric line,
+// EOF is returned.
+func (m *machine) Next() error {
+	if m.p == m.pe && m.pe == m.eof {
+		return EOF
+	}
+
+	m.key = nil
+	m.beginMetric = false
+	m.finishMetric = false
+
+	return m.exec()
+}
+
+func (m *machine) exec() error {
+	var err error
+	%% write exec;
+
+	if err != nil {
+		return err
+	}
+
+	// This would indicate an error in the machine that was reported with a
+	// more specific error.  We return a generic error but this should
+	// possibly be a panic.
+	if m.cs == %%{ write error; }%% {
+		m.cs = LineProtocol_en_discard_line
+		return ErrParse
+	}
+
+	// If we haven't found a metric line yet and we reached the EOF, report it
+	// now.  This happens when the data ends with a comment or whitespace.
+	//
+	// Otherwise we have successfully parsed a metric line, so if we are at
+	// the EOF, we will report it on the next call.
+	if !m.beginMetric && m.p == m.pe && m.pe == m.eof {
+		return EOF
+	}
+
+	return nil
+}
+
+// Position returns the current byte offset into the data.
+func (m *machine) Position() int {
+	return m.p
+}
+
+// LineOffset returns the byte offset of the current line.
+func (m *machine) LineOffset() int {
+	return m.sol
+}
+
+// LineNumber returns the current line number.  Lines are counted based on the
+// regular expression `\r?\n`.
+func (m *machine) LineNumber() int {
+	return m.lineno
+}
+
+// Column returns the current column.
+func (m *machine) Column() int {
+	lineOffset := m.p - m.sol
+	return lineOffset + 1
+}
+
+func (m *machine) text() []byte {
+	return m.data[m.pb:m.p]
+}
+
+type streamMachine struct {
+	machine *machine
+	reader  io.Reader
+}
+
+func NewStreamMachine(r io.Reader, handler Handler) *streamMachine {
+	m := &streamMachine{
+		machine: NewMachine(handler),
+		reader: r,
+	}
+
+	m.machine.SetData(make([]byte, 1024))
+	m.machine.pe = 0
+	m.machine.eof = -1
+	return m
+}
+
+func (m *streamMachine) Next() error {
+	// Check if we are already at EOF; this should only happen if called again
+	// after already returning EOF.
+	if m.machine.p == m.machine.pe && m.machine.pe == m.machine.eof {
+		return EOF
+	}
+
+	copy(m.machine.data, m.machine.data[m.machine.p:])
+	m.machine.pe = m.machine.pe - m.machine.p
+	m.machine.sol = m.machine.sol - m.machine.p
+	m.machine.pb = 0
+	m.machine.p = 0
+	m.machine.eof = -1
+
+	m.machine.key = nil
+	m.machine.beginMetric = false
+	m.machine.finishMetric = false
+
+	for {
+		// Expand the buffer if it is full
+		if m.machine.pe == len(m.machine.data) {
+			expanded := make([]byte, 2 * len(m.machine.data))
+			copy(expanded, m.machine.data)
+			m.machine.data = expanded
+		}
+
+		n, err := m.reader.Read(m.machine.data[m.machine.pe:])
+		if n == 0 && err == io.EOF {
+			m.machine.eof = m.machine.pe
+		} else if err != nil && err != io.EOF {
+			return err
+		}
+
+		m.machine.pe += n
+
+		err = m.machine.exec()
+		if err != nil {
+			return err
+		}
+
+		// If we have successfully parsed a full metric line, break out
+		if m.machine.finishMetric {
+			break
+		}
+
+	}
+
+	return nil
+}
+
+// Position returns the current byte offset into the data.
+func (m *streamMachine) Position() int {
+	return m.machine.Position()
+}
+
+// LineOffset returns the byte offset of the current line.
+func (m *streamMachine) LineOffset() int {
+	return m.machine.LineOffset()
+}
+
+// LineNumber returns the current line number.  Lines are counted based on the
+// regular expression `\r?\n`.
+func (m *streamMachine) LineNumber() int {
+	return m.machine.LineNumber()
+}
+
+// Column returns the current column.
+func (m *streamMachine) Column() int {
+	return m.machine.Column()
+}
+
+// LineText returns the text of the current line that has been parsed so far.
+func (m *streamMachine) LineText() string {
+	return string(m.machine.data[0:m.machine.p])
+}
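Besides the streaming entry point, the grammar above is also exposed through a buffer-oriented one: NewMachine plus SetData parse a fixed byte slice, and the discard_line / align machines let parsing resume on the line following a syntax error. A small sketch under those assumptions follows; the nopHandler type and the sample data are illustrative only and not part of the vendored code.

package main

import (
	"fmt"

	protocol "github.com/influxdata/line-protocol"
)

// nopHandler is a placeholder Handler that ignores every callback.
type nopHandler struct{}

func (nopHandler) SetMeasurement(name []byte) error  { return nil }
func (nopHandler) AddTag(key, value []byte) error    { return nil }
func (nopHandler) AddInt(key, value []byte) error    { return nil }
func (nopHandler) AddUint(key, value []byte) error   { return nil }
func (nopHandler) AddFloat(key, value []byte) error  { return nil }
func (nopHandler) AddString(key, value []byte) error { return nil }
func (nopHandler) AddBool(key, value []byte) error   { return nil }
func (nopHandler) SetTimestamp(tm []byte) error      { return nil }

func main() {
	data := []byte("weather,location=us-midwest temperature=82 1465839830100400200\n" +
		"this is not line protocol\n" +
		"cpu,host=a usage=0.5,count=3i\n")

	m := protocol.NewMachine(nopHandler{})
	m.SetData(data)

	for {
		err := m.Next()
		if err == protocol.EOF {
			break // end of buffer
		}
		if err != nil {
			// Syntax errors (ErrNameParse, ErrFieldParse, ...) switch the
			// machine to discard_line, so the next call resumes on the
			// following line.
			fmt.Printf("line %d: %v\n", m.LineNumber(), err)
			continue
		}
		// One metric line was delivered through the handler callbacks.
	}
}

The series machine (NewSeriesMachine) is driven the same way but, per the grammar above, matches only the measurement and tagset.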

+ 428 - 0
vendor/github.com/influxdata/line-protocol/metric.go

@@ -0,0 +1,428 @@
+package protocol
+
+import (
+	"fmt"
+	"hash/fnv"
+	"sort"
+	"time"
+)
+
+// Tag holds the key and value of a single tag key/value pair.
+type Tag struct {
+	Key   string
+	Value string
+}
+
+// Field holds the key and value of a single metric field key/value pair, where Value can be a uint64, int64, int, float32, float64, string, or bool.
+type Field struct {
+	Key   string
+	Value interface{}
+}
+
+// Metric is the interface for marshaling; if you implement this interface, your type can be marshaled into the line protocol.  Woot!
+type Metric interface {
+	Time() time.Time
+	Name() string
+	TagList() []*Tag
+	FieldList() []*Field
+}
+
+// MutableMetric represents a metric that can be modified.
+type MutableMetric interface {
+	Metric
+	SetTime(time.Time)
+	AddTag(key, value string)
+	AddField(key string, value interface{})
+}
+
+// FieldSortOrder is a type for controlling whether fields are sorted.
+type FieldSortOrder int
+
+const (
+	// NoSortFields tells the Decoder to not sort the fields.
+	NoSortFields FieldSortOrder = iota
+
+	// SortFields tells the Decoder to sort the fields.
+	SortFields
+)
+
+// FieldTypeSupport is a bitmask describing which field types the parser supports natively.
+type FieldTypeSupport int
+
+const (
+	// UintSupport means the parser understands uint64s and can store them without having to convert to int64.
+	UintSupport FieldTypeSupport = 1 << iota
+)
+
+// MetricError is an error causing a metric to be unserializable.
+type MetricError struct {
+	s string
+}
+
+func (e MetricError) Error() string {
+	return e.s
+}
+
+// FieldError is an error causing a field to be unserializable.
+type FieldError struct {
+	s string
+}
+
+func (e FieldError) Error() string {
+	return e.s
+}
+
+var (
+	// ErrNeedMoreSpace tells us that the Decoder's io.Reader is full.
+	ErrNeedMoreSpace = &MetricError{"need more space"}
+
+	// ErrInvalidName tells us that the chosen name is invalid.
+	ErrInvalidName = &MetricError{"invalid name"}
+
+	// ErrNoFields tells us that there were no serializable fields in the line/metric.
+	ErrNoFields = &MetricError{"no serializable fields"}
+)
+
+type metric struct {
+	name   string
+	tags   []*Tag
+	fields []*Field
+	tm     time.Time
+}
+
+// New creates a new metric via maps.
+func New(
+	name string,
+	tags map[string]string,
+	fields map[string]interface{},
+	tm time.Time,
+) (MutableMetric, error) {
+	m := &metric{
+		name:   name,
+		tags:   nil,
+		fields: nil,
+		tm:     tm,
+	}
+
+	if len(tags) > 0 {
+		m.tags = make([]*Tag, 0, len(tags))
+		for k, v := range tags {
+			m.tags = append(m.tags,
+				&Tag{Key: k, Value: v})
+		}
+		sort.Slice(m.tags, func(i, j int) bool { return m.tags[i].Key < m.tags[j].Key })
+	}
+
+	if len(fields) > 0 {
+		m.fields = make([]*Field, 0, len(fields))
+		for k, v := range fields {
+			v := convertField(v)
+			if v == nil {
+				continue
+			}
+			m.AddField(k, v)
+		}
+	}
+
+	return m, nil
+}
+
+// FromMetric returns a deep copy of the metric with any tracking information
+// removed.
+func FromMetric(other Metric) Metric {
+	m := &metric{
+		name:   other.Name(),
+		tags:   make([]*Tag, len(other.TagList())),
+		fields: make([]*Field, len(other.FieldList())),
+		tm:     other.Time(),
+	}
+
+	for i, tag := range other.TagList() {
+		m.tags[i] = &Tag{Key: tag.Key, Value: tag.Value}
+	}
+
+	for i, field := range other.FieldList() {
+		m.fields[i] = &Field{Key: field.Key, Value: field.Value}
+	}
+	return m
+}
+
+func (m *metric) String() string {
+	return fmt.Sprintf("%s %v %v %d", m.name, m.Tags(), m.Fields(), m.tm.UnixNano())
+}
+
+func (m *metric) Name() string {
+	return m.name
+}
+
+func (m *metric) Tags() map[string]string {
+	tags := make(map[string]string, len(m.tags))
+	for _, tag := range m.tags {
+		tags[tag.Key] = tag.Value
+	}
+	return tags
+}
+
+func (m *metric) TagList() []*Tag {
+	return m.tags
+}
+
+func (m *metric) Fields() map[string]interface{} {
+	fields := make(map[string]interface{}, len(m.fields))
+	for _, field := range m.fields {
+		fields[field.Key] = field.Value
+	}
+
+	return fields
+}
+
+func (m *metric) FieldList() []*Field {
+	return m.fields
+}
+
+func (m *metric) Time() time.Time {
+	return m.tm
+}
+
+func (m *metric) SetName(name string) {
+	m.name = name
+}
+
+func (m *metric) AddPrefix(prefix string) {
+	m.name = prefix + m.name
+}
+
+func (m *metric) AddSuffix(suffix string) {
+	m.name = m.name + suffix
+}
+
+func (m *metric) AddTag(key, value string) {
+	for i, tag := range m.tags {
+		if key > tag.Key {
+			continue
+		}
+
+		if key == tag.Key {
+			tag.Value = value
+			return
+		}
+
+		m.tags = append(m.tags, nil)
+		copy(m.tags[i+1:], m.tags[i:])
+		m.tags[i] = &Tag{Key: key, Value: value}
+		return
+	}
+
+	m.tags = append(m.tags, &Tag{Key: key, Value: value})
+}
+
+func (m *metric) HasTag(key string) bool {
+	for _, tag := range m.tags {
+		if tag.Key == key {
+			return true
+		}
+	}
+	return false
+}
+
+func (m *metric) GetTag(key string) (string, bool) {
+	for _, tag := range m.tags {
+		if tag.Key == key {
+			return tag.Value, true
+		}
+	}
+	return "", false
+}
+
+func (m *metric) RemoveTag(key string) {
+	for i, tag := range m.tags {
+		if tag.Key == key {
+			copy(m.tags[i:], m.tags[i+1:])
+			m.tags[len(m.tags)-1] = nil
+			m.tags = m.tags[:len(m.tags)-1]
+			return
+		}
+	}
+}
+
+func (m *metric) AddField(key string, value interface{}) {
+	for i, field := range m.fields {
+		if key == field.Key {
+			m.fields[i] = &Field{Key: key, Value: convertField(value)}
+			return
+		}
+	}
+	m.fields = append(m.fields, &Field{Key: key, Value: convertField(value)})
+}
+
+func (m *metric) HasField(key string) bool {
+	for _, field := range m.fields {
+		if field.Key == key {
+			return true
+		}
+	}
+	return false
+}
+
+func (m *metric) GetField(key string) (interface{}, bool) {
+	for _, field := range m.fields {
+		if field.Key == key {
+			return field.Value, true
+		}
+	}
+	return nil, false
+}
+
+func (m *metric) RemoveField(key string) {
+	for i, field := range m.fields {
+		if field.Key == key {
+			copy(m.fields[i:], m.fields[i+1:])
+			m.fields[len(m.fields)-1] = nil
+			m.fields = m.fields[:len(m.fields)-1]
+			return
+		}
+	}
+}
+
+func (m *metric) SetTime(t time.Time) {
+	m.tm = t
+}
+
+func (m *metric) Copy() Metric {
+	m2 := &metric{
+		name:   m.name,
+		tags:   make([]*Tag, len(m.tags)),
+		fields: make([]*Field, len(m.fields)),
+		tm:     m.tm,
+	}
+
+	for i, tag := range m.tags {
+		m2.tags[i] = &Tag{Key: tag.Key, Value: tag.Value}
+	}
+
+	for i, field := range m.fields {
+		m2.fields[i] = &Field{Key: field.Key, Value: field.Value}
+	}
+	return m2
+}
+
+func (m *metric) HashID() uint64 {
+	h := fnv.New64a()
+	h.Write([]byte(m.name))
+	h.Write([]byte("\n"))
+	for _, tag := range m.tags {
+		h.Write([]byte(tag.Key))
+		h.Write([]byte("\n"))
+		h.Write([]byte(tag.Value))
+		h.Write([]byte("\n"))
+	}
+	return h.Sum64()
+}
+
+func (m *metric) Accept() {
+}
+
+func (m *metric) Reject() {
+}
+
+func (m *metric) Drop() {
+}
+
+// convertField converts v to a type supported by line protocol, or returns nil if it cannot be converted.
+func convertField(v interface{}) interface{} {
+	switch v := v.(type) {
+	case float64:
+		return v
+	case int64:
+		return v
+	case string:
+		return v
+	case bool:
+		return v
+	case int:
+		return int64(v)
+	case uint:
+		return uint64(v)
+	case uint64:
+		return v
+	case []byte:
+		return string(v)
+	case int32:
+		return int64(v)
+	case int16:
+		return int64(v)
+	case int8:
+		return int64(v)
+	case uint32:
+		return uint64(v)
+	case uint16:
+		return uint64(v)
+	case uint8:
+		return uint64(v)
+	case float32:
+		return float64(v)
+	case *float64:
+		if v != nil {
+			return *v
+		}
+	case *int64:
+		if v != nil {
+			return *v
+		}
+	case *string:
+		if v != nil {
+			return *v
+		}
+	case *bool:
+		if v != nil {
+			return *v
+		}
+	case *int:
+		if v != nil {
+			return int64(*v)
+		}
+	case *uint:
+		if v != nil {
+			return uint64(*v)
+		}
+	case *uint64:
+		if v != nil {
+			return *v
+		}
+	case *[]byte:
+		if v != nil {
+			return string(*v)
+		}
+	case *int32:
+		if v != nil {
+			return int64(*v)
+		}
+	case *int16:
+		if v != nil {
+			return int64(*v)
+		}
+	case *int8:
+		if v != nil {
+			return int64(*v)
+		}
+	case *uint32:
+		if v != nil {
+			return uint64(*v)
+		}
+	case *uint16:
+		if v != nil {
+			return uint64(*v)
+		}
+	case *uint8:
+		if v != nil {
+			return uint64(*v)
+		}
+	case *float32:
+		if v != nil {
+			return float64(*v)
+		}
+	default:
+		return nil
+	}
+	return nil
+}
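
For orientation, here is a minimal usage sketch for the constructor and accessors above; it is not part of the vendored file. It assumes the package imports as protocol (module github.com/influxdata/line-protocol) and that the MutableMetric interface, defined earlier in metric.go and not shown in this hunk, exposes AddTag; New, TagList, FieldList, and Time all appear in the code above.

package main

import (
	"fmt"
	"time"

	protocol "github.com/influxdata/line-protocol"
)

func main() {
	// Build a metric from maps; tags are stored sorted by key and
	// field values are normalized via convertField (int -> int64, etc.).
	m, err := protocol.New(
		"cpu",
		map[string]string{"host": "server01"},
		map[string]interface{}{"usage_idle": 91.5, "uptime": 3600},
		time.Now(),
	)
	if err != nil {
		panic(err)
	}

	// AddTag inserts in sorted position, or updates the value if the key exists.
	m.AddTag("region", "eu-west")

	for _, t := range m.TagList() {
		fmt.Printf("%s=%s ", t.Key, t.Value)
	}
	for _, f := range m.FieldList() {
		fmt.Printf("%s=%v ", f.Key, f.Value)
	}
	fmt.Println(m.Time().UnixNano())
}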

+ 192 - 0
vendor/github.com/influxdata/line-protocol/parser.go

@@ -0,0 +1,192 @@
+package protocol
+
+import (
+	"fmt"
+	"io"
+	"strings"
+	"sync"
+	"time"
+)
+
+const (
+	maxErrorBufferSize = 1024
+)
+
+// TimeFunc is used to override the default time for a metric
+// with no specified timestamp.
+type TimeFunc func() time.Time
+
+// ParseError indicates an error in parsing the line-protocol text.
+type ParseError struct {
+	Offset     int
+	LineOffset int
+	LineNumber int
+	Column     int
+	msg        string
+	buf        string
+}
+
+func (e *ParseError) Error() string {
+	buffer := e.buf[e.LineOffset:]
+	eol := strings.IndexAny(buffer, "\r\n")
+	if eol >= 0 {
+		buffer = buffer[:eol]
+	}
+	if len(buffer) > maxErrorBufferSize {
+		buffer = buffer[:maxErrorBufferSize] + "..."
+	}
+	return fmt.Sprintf("metric parse error: %s at %d:%d: %q", e.msg, e.LineNumber, e.Column, buffer)
+}
+
+// Parser is an InfluxDB Line Protocol parser that implements the
+// parsers.Parser interface.
+type Parser struct {
+	DefaultTags map[string]string
+
+	sync.Mutex
+	*machine
+	handler *MetricHandler
+}
+
+// NewParser returns a Parser that accepts line protocol.
+func NewParser(handler *MetricHandler) *Parser {
+	return &Parser{
+		machine: NewMachine(handler),
+		handler: handler,
+	}
+}
+
+// NewSeriesParser returns a Parser that accepts only a measurement and tagset.
+func NewSeriesParser(handler *MetricHandler) *Parser {
+	return &Parser{
+		machine: NewSeriesMachine(handler),
+		handler: handler,
+	}
+}
+
+// SetTimeFunc allows default times to be set when no time is specified
+// for a metric in line-protocol.
+func (p *Parser) SetTimeFunc(f TimeFunc) {
+	p.handler.SetTimeFunc(f)
+}
+
+// Parse interprets line-protocol bytes as many metrics.
+func (p *Parser) Parse(input []byte) ([]Metric, error) {
+	p.Lock()
+	defer p.Unlock()
+	metrics := make([]Metric, 0)
+	p.machine.SetData(input)
+
+	for {
+		err := p.machine.Next()
+		if err == EOF {
+			break
+		}
+
+		if err != nil {
+			return nil, &ParseError{
+				Offset:     p.machine.Position(),
+				LineOffset: p.machine.LineOffset(),
+				LineNumber: p.machine.LineNumber(),
+				Column:     p.machine.Column(),
+				msg:        err.Error(),
+				buf:        string(input),
+			}
+		}
+
+		metric, err := p.handler.Metric()
+		if err != nil {
+			return nil, err
+		}
+
+		if metric == nil {
+			continue
+		}
+
+		metrics = append(metrics, metric)
+	}
+
+	return metrics, nil
+}
+
+// StreamParser is an InfluxDB Line Protocol parser.  It is not safe for
+// concurrent use in multiple goroutines.
+type StreamParser struct {
+	machine *streamMachine
+	handler *MetricHandler
+}
+
+// NewStreamParser returns a StreamParser that reads from r and yields
+// metrics one at a time.  Not safe for concurrent use in multiple goroutines.
+func NewStreamParser(r io.Reader) *StreamParser {
+	handler := NewMetricHandler()
+	return &StreamParser{
+		machine: NewStreamMachine(r, handler),
+		handler: handler,
+	}
+}
+
+// SetTimeFunc changes the function used to determine the time of metrics
+// without a timestamp.  The default TimeFunc is time.Now.  Useful mostly for
+// testing, or perhaps if you want all metrics to have the same timestamp.
+func (p *StreamParser) SetTimeFunc(f TimeFunc) {
+	p.handler.SetTimeFunc(f)
+}
+
+// SetTimePrecision specifies the units used for timestamps.
+func (p *StreamParser) SetTimePrecision(u time.Duration) {
+	p.handler.SetTimePrecision(u)
+}
+
+// Next parses the next item from the stream.  You can repeat calls to this
+// function until it returns EOF.
+func (p *StreamParser) Next() (Metric, error) {
+	err := p.machine.Next()
+	if err == EOF {
+		return nil, EOF
+	}
+
+	if err != nil {
+		return nil, &ParseError{
+			Offset:     p.machine.Position(),
+			LineOffset: p.machine.LineOffset(),
+			LineNumber: p.machine.LineNumber(),
+			Column:     p.machine.Column(),
+			msg:        err.Error(),
+			buf:        p.machine.LineText(),
+		}
+	}
+
+	metric, err := p.handler.Metric()
+	if err != nil {
+		return nil, err
+	}
+
+	return metric, nil
+}
+
+// Position returns the current byte offset into the data.
+func (p *StreamParser) Position() int {
+	return p.machine.Position()
+}
+
+// LineOffset returns the byte offset of the current line.
+func (p *StreamParser) LineOffset() int {
+	return p.machine.LineOffset()
+}
+
+// LineNumber returns the current line number.  Lines are counted based on the
+// regular expression `\r?\n`.
+func (p *StreamParser) LineNumber() int {
+	return p.machine.LineNumber()
+}
+
+// Column returns the current column.
+func (p *StreamParser) Column() int {
+	return p.machine.Column()
+}
+
+// LineText returns the text of the current line that has been parsed so far.
+func (p *StreamParser) LineText() string {
+	return p.machine.LineText()
+}
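
A minimal sketch of driving the Parser and StreamParser defined above; it is not part of the vendored file. NewMetricHandler is defined in handler.go (part of this commit, not shown in this section) and is the same constructor NewStreamParser calls above; the exported EOF sentinel compared against in Next comes from machine.go. Those two names are the only assumptions beyond the code shown here.

package main

import (
	"fmt"
	"strings"

	protocol "github.com/influxdata/line-protocol"
)

func main() {
	// Batch parsing: Parse walks the whole byte slice and returns all metrics.
	handler := protocol.NewMetricHandler()
	parser := protocol.NewParser(handler)
	metrics, err := parser.Parse([]byte("cpu,host=server01 usage_idle=91.5 1556813561098000000\n"))
	if err != nil {
		// On bad input this is a *ParseError carrying line/column context.
		panic(err)
	}
	for _, m := range metrics {
		fmt.Println(m.Name(), m.FieldList())
	}

	// Stream parsing: iterate metric by metric until EOF.
	sp := protocol.NewStreamParser(strings.NewReader("mem,host=server01 used=4096i 1556813561098000000\n"))
	for {
		m, err := sp.Next()
		if err == protocol.EOF {
			break
		}
		if err != nil {
			panic(err)
		}
		fmt.Println(m.Name(), m.Time().UnixNano())
	}
}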

+ 130 - 0
vendor/github.com/influxdata/line-protocol/writer.go

@@ -0,0 +1,130 @@
+package protocol
+
+import (
+	"fmt"
+	"time"
+)
+
+// Write encodes a single metric to the underlying writer in line-protocol format.
+// Note: it does no sorting; the caller must pass tag keys and values already sorted.
+func (e *Encoder) Write(name []byte, ts time.Time, tagKeys, tagVals, fieldKeys [][]byte, fieldVals []interface{}) (int, error) {
+	e.header = e.header[:0]
+	if len(name) == 0 || name[len(name)-1] == byte('\\') {
+		return 0, ErrInvalidName
+	}
+	nameEscapeBytes(&e.header, name)
+	for i := range tagKeys {
+		// Some keys and values are not encodeable as line protocol, such as
+		// those with a trailing '\' or empty strings.
+		if len(tagKeys[i]) == 0 || len(tagVals[i]) == 0 || tagKeys[i][len(tagKeys[i])-1] == byte('\\') {
+			if e.failOnFieldError {
+				return 0, fmt.Errorf("invalid tag: key \"%s\", val \"%s\"", tagKeys[i], tagVals[i])
+			}
+			continue
+		}
+		e.header = append(e.header, byte(','))
+		escapeBytes(&e.header, tagKeys[i])
+		e.header = append(e.header, byte('='))
+		escapeBytes(&e.header, tagVals[i])
+	}
+	e.header = append(e.header, byte(' '))
+	e.buildFooter(ts)
+
+	i := 0
+	totalWritten := 0
+	pairsLen := 0
+	firstField := true
+	for i := range fieldKeys {
+		e.pair = e.pair[:0]
+		key := fieldKeys[i]
+		// Some field keys are not encodeable as line protocol, such as those
+		// with a trailing '\' or empty strings.
+		if len(key) == 0 || key[len(key)-1] == byte('\\') {
+			if e.failOnFieldError {
+				return 0, &FieldError{"invalid field key"}
+			}
+			continue
+		}
+		escapeBytes(&e.pair, key)
+		e.pair = append(e.pair, byte('='))
+		err := e.buildFieldVal(fieldVals[i])
+		if err != nil {
+			if e.failOnFieldError {
+				return 0, err
+			}
+			continue
+		}
+
+		bytesNeeded := len(e.header) + pairsLen + len(e.pair) + len(e.footer)
+
+		// Additional length needed for field separator `,`
+		if !firstField {
+			bytesNeeded++
+		}
+
+		if e.maxLineBytes > 0 && bytesNeeded > e.maxLineBytes {
+			// Need at least one field per line
+			if firstField {
+				return 0, ErrNeedMoreSpace
+			}
+
+			i, err = e.w.Write(e.footer)
+			if err != nil {
+				return 0, err
+			}
+			totalWritten += i
+
+			bytesNeeded = len(e.header) + len(e.pair) + len(e.footer)
+
+			if e.maxLineBytes > 0 && bytesNeeded > e.maxLineBytes {
+				return 0, ErrNeedMoreSpace
+			}
+
+			i, err = e.w.Write(e.header)
+			if err != nil {
+				return 0, err
+			}
+			totalWritten += i
+
+			i, err = e.w.Write(e.pair)
+			if err != nil {
+				return 0, err
+			}
+			totalWritten += i
+
+			pairsLen += len(e.pair)
+			firstField = false
+			continue
+		}
+
+		if firstField {
+			i, err = e.w.Write(e.header)
+			if err != nil {
+				return 0, err
+			}
+			totalWritten += i
+
+		} else {
+			i, err = e.w.Write(comma)
+			if err != nil {
+				return 0, err
+			}
+			totalWritten += i
+
+		}
+
+		i, err = e.w.Write(e.pair)
+		if err != nil {
+			return 0, err
+		}
+		totalWritten += i
+
+		pairsLen += len(e.pair)
+		firstField = false
+	}
+
+	if firstField {
+		return 0, ErrNoFields
+	}
+	i, err := e.w.Write(e.footer)
+	if err != nil {
+		return 0, err
+	}
+	totalWritten += i
+	return totalWritten, nil
+}
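
A minimal sketch of calling the Write method above; it is not part of the vendored file. The Write signature and its no-sorting requirement are exactly as defined in this hunk, but the NewEncoder constructor comes from encoder.go (listed in this commit, not shown in this section), so its existence and zero-configuration defaults are assumptions here.

package main

import (
	"bytes"
	"fmt"
	"time"

	protocol "github.com/influxdata/line-protocol"
)

func main() {
	var buf bytes.Buffer
	enc := protocol.NewEncoder(&buf) // assumed constructor from encoder.go

	// Write does no sorting, so tag keys (and their values) are passed pre-sorted.
	_, err := enc.Write(
		[]byte("cpu"),
		time.Unix(0, 1556813561098000000),
		[][]byte{[]byte("host"), []byte("region")},      // tag keys, already sorted
		[][]byte{[]byte("server01"), []byte("eu-west")}, // tag values, same order
		[][]byte{[]byte("usage_idle")},                  // field keys
		[]interface{}{91.5},                             // field values
	)
	if err != nil {
		panic(err)
	}
	// Expected output, roughly: cpu,host=server01,region=eu-west usage_idle=91.5 <timestamp>
	fmt.Print(buf.String())
}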

+ 20 - 0
vendor/vendor.json

@@ -116,6 +116,16 @@
 			"revision": "ebe99fcebbcedf6e7916320cce24c3e1832766ac",
 			"revisionTime": "2018-03-14T04:19:18Z"
 		},
+		{
+			"checksumSHA1": "LO7rA4jJwaHcvFuYzFktHPddfeM=",
+			"path": "github.com/deepmap/oapi-codegen",
+			"revision": "4ec8015070ad4ac5613ab12070c1b4dae48e34ad",
+			"revisionTime": "2022-11-25T18:16:03Z"
+		},
+		{
+			"path": "github.com/deepmap/oapi-codegen/pkg/runtime",
+			"revision": ""
+		},
 		{
 			"checksumSHA1": "mUeojTdLEyzYOki70VUAeeYr/wQ=",
 			"path": "github.com/dgrijalva/jwt-go",
@@ -709,6 +719,16 @@
 			"revision": "5971a9a0a9102b0c376dc6b5fe3f50906de04142",
 			"revisionTime": "2022-11-15T13:18:18Z"
 		},
+		{
+			"path": "github.com/influxdata/influxdb-client-go/v2",
+			"revision": ""
+		},
+		{
+			"checksumSHA1": "qlwAmOE/zFeVNYQeTQeWh3/MJmw=",
+			"path": "github.com/influxdata/line-protocol",
+			"revision": "b1ad95c89adff9d5cde6aba1ce81a5d8b905779a",
+			"revisionTime": "2021-09-22T20:33:50Z"
+		},
 		{
 			"checksumSHA1": "R8JkoEsr84IjUvngtp9ljjZXWVE=",
 			"origin": "github.com/kataras/iris/vendor/github.com/iris-contrib/formBinder",