From f6de69b5d7bb3ade349a6f63c70253b589daf41a Mon Sep 17 00:00:00 2001 From: gfleury Date: Thu, 15 Mar 2018 11:31:45 +0100 Subject: [PATCH] Principal call fixes and adding vendor --- api_response.go | 188 +++ cmd/main.go | 13 +- default_api.go | 168 ++- .../github.com/mitchellh/mapstructure/LICENSE | 21 + .../mitchellh/mapstructure/README.md | 46 + .../mitchellh/mapstructure/decode_hooks.go | 171 +++ .../mitchellh/mapstructure/error.go | 50 + .../mitchellh/mapstructure/mapstructure.go | 1043 +++++++++++++++++ vendor/vendor.json | 13 + 9 files changed, 1691 insertions(+), 22 deletions(-) create mode 100644 vendor/github.com/mitchellh/mapstructure/LICENSE create mode 100644 vendor/github.com/mitchellh/mapstructure/README.md create mode 100644 vendor/github.com/mitchellh/mapstructure/decode_hooks.go create mode 100644 vendor/github.com/mitchellh/mapstructure/error.go create mode 100644 vendor/github.com/mitchellh/mapstructure/mapstructure.go create mode 100644 vendor/vendor.json diff --git a/api_response.go b/api_response.go index dfcc721..9d1264e 100644 --- a/api_response.go +++ b/api_response.go @@ -7,6 +7,9 @@ package bitbucketv1 import ( "encoding/json" "net/http" + "strings" + + "github.com/mitchellh/mapstructure" ) // APIResponse contains generic data from API Response @@ -28,6 +31,191 @@ type APIResponse struct { Values map[string]interface{} } +type Project struct { + Key string `json:"key"` + ID int `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Public bool `json:"public"` + Type string `json:"type"` + Links struct { + Self []struct { + Href string `json:"href"` + } `json:"self"` + } `json:"links"` +} + +// Repository contains data from a BitBucket Repository +type Repository struct { + Slug string `json:"slug"` + ID int `json:"id"` + Name string `json:"name"` + ScmID string `json:"scmId"` + State string `json:"state"` + StatusMessage string `json:"statusMessage"` + Forkable bool `json:"forkable"` + Project Project `json:"project"` + Public bool `json:"public"` + Links struct { + Clone []struct { + Href string `json:"href"` + Name string `json:"name"` + } `json:"clone"` + Self []struct { + Href string `json:"href"` + } `json:"self"` + } `json:"links"` +} + +// SSHKey contains data from a SSHKey in the BitBucket Server +type SSHKey struct { + ID int `json:"id"` + Text string `json:"text"` + Label string `json:"label"` +} + +// Commit contains data from a commit in BitBucket +type Commit struct { + ID string `json:"id"` + DisplayID string `json:"displayId"` + Author struct { + Name string `json:"name"` + EmailAddress string `json:"emailAddress"` + } `json:"author"` + AuthorTimestamp int64 `json:"authorTimestamp"` + Committer struct { + Name string `json:"name"` + EmailAddress string `json:"emailAddress"` + } `json:"committer"` + CommitterTimestamp int64 `json:"committerTimestamp"` + Message string `json:"message"` + Parents []struct { + ID string `json:"id"` + DisplayID string `json:"displayId"` + } `json:"parents"` +} + +type Diff struct { + Diffs []struct { + Source struct { + Components []string `json:"components"` + Parent string `json:"parent"` + Name string `json:"name"` + Extension string `json:"extension"` + ToString string `json:"toString"` + } `json:"source"` + Destination struct { + Components []string `json:"components"` + Parent string `json:"parent"` + Name string `json:"name"` + Extension string `json:"extension"` + ToString string `json:"toString"` + } `json:"destination"` + Hunks []struct { + SourceLine int `json:"sourceLine"` + 
SourceSpan int `json:"sourceSpan"` + DestinationLine int `json:"destinationLine"` + DestinationSpan int `json:"destinationSpan"` + Segments []struct { + Type string `json:"type"` + Lines []struct { + Destination int `json:"destination"` + Source int `json:"source"` + Line string `json:"line"` + Truncated bool `json:"truncated"` + } `json:"lines"` + Truncated bool `json:"truncated"` + } `json:"segments"` + Truncated bool `json:"truncated"` + } `json:"hunks"` + Truncated bool `json:"truncated"` + ContextLines float64 `json:"contextLines"` + FromHash string `json:"fromHash"` + ToHash string `json:"toHash"` + WhiteSpace string `json:"whiteSpace"` + } `json:"diffs"` + Truncated bool `json:"truncated"` + ContextLines float64 `json:"contextLines"` + FromHash string `json:"fromHash"` + ToHash string `json:"toHash"` + WhiteSpace string `json:"whiteSpace"` +} + +// Tag contains git Tag information +type Tag struct { + ID string `json:"id"` + DisplayID string `json:"displayId"` + Type string `json:"type"` + LatestCommit string `json:"latestCommit"` + LatestChangeset string `json:"latestChangeset"` + Hash string `json:"hash"` +} + +// Branch contains git Branch information +type Branch struct { + ID string `json:"id"` + DisplayID string `json:"displayId"` + Type string `json:"type"` + LatestCommit string `json:"latestCommit"` + LatestChangeset string `json:"latestChangeset"` + IsDefault bool `json:"isDefault"` +} + +func (k *SSHKey) String() string { + parts := make([]string, 1, 2) + parts[0] = strings.TrimSpace(k.Text) + return strings.Join(parts, " ") +} + +// GetCommitsResponse casts Commits into structure +func GetCommitsResponse(r *APIResponse) ([]Commit, error) { + var m []Commit + err := mapstructure.Decode(r.Values["values"], &m) + return m, err +} + +// GetTagsResponse casts Tags into structure +func GetTagsResponse(r *APIResponse) ([]Tag, error) { + var m []Tag + err := mapstructure.Decode(r.Values["values"], &m) + return m, err +} + +// GetBranchesResponse casts Branches into structure +func GetBranchesResponse(r *APIResponse) ([]Branch, error) { + var m []Branch + err := mapstructure.Decode(r.Values["values"], &m) + return m, err +} + +// GetRepositoriesResponse casts Repositories into structure +func GetRepositoriesResponse(r *APIResponse) ([]Repository, error) { + var m []Repository + err := mapstructure.Decode(r.Values["values"], &m) + return m, err +} + +// GetRepositoryResponse casts a Repository into structure +func GetRepositoryResponse(r *APIResponse) (Repository, error) { + var m Repository + err := mapstructure.Decode(r.Values, &m) + return m, err +} + +// GetDiffResponse casts Diff into structure +func GetDiffResponse(r *APIResponse) (Diff, error) { + var m Diff + err := mapstructure.Decode(r.Values, &m) + return m, err +} + +// GetSSHKeysResponse casts SSHKeys into structure +func GetSSHKeysResponse(r *APIResponse) ([]SSHKey, error) { + var m []SSHKey + err := mapstructure.Decode(r.Values["values"], &m) + return m, err +} + // NewAPIResponse create new APIResponse from http.Response func NewAPIResponse(r *http.Response) *APIResponse { diff --git a/cmd/main.go b/cmd/main.go index 7e61449..1d06104 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -16,12 +16,19 @@ func main() { client := bitbucketv1.NewAPIClient( ctx, - bitbucketv1.NewConfiguration("http://amazonaws.com/rest"), + bitbucketv1.NewConfiguration("https://stash.domain.com/rest"), ) - response, err := client.DefaultApi.GetUsers(nil) + username := "george.fleury" + response, err := client.DefaultApi.GetSSHKeys(username) if err != nil
{ fmt.Printf("%s\n", err.Error()) } - fmt.Printf("%v", response) + keys, err := bitbucketv1.GetSSHKeysResponse(response) + + if err == nil { + for _, key := range keys { + fmt.Println(key.Label) + } + } } diff --git a/default_api.go b/default_api.go index 318ba4e..bb3ee10 100644 --- a/default_api.go +++ b/default_api.go @@ -6,10 +6,11 @@ package bitbucketv1 import ( "fmt" - "golang.org/x/net/context" "io/ioutil" "net/url" "strings" + + "golang.org/x/net/context" ) // Linger please @@ -2903,7 +2904,7 @@ Streams an archive of the repository's contents at the requested commit. If @param "path" (string) paths to include in the streamed archive; may be repeated to include multiple paths @param "prefix" (string) a prefix to apply to all entries in the streamed archive; if the supplied prefix does not end with a trailing <code>/</code>, one will be added automatically @return */ -func (a *DefaultApiService) GetArchive(localVarOptionals map[string]interface{}) (*APIResponse, error) { +func (a *DefaultApiService) GetArchive(project, repository string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -2913,6 +2914,8 @@ func (a *DefaultApiService) GetArchive(localVarOptionals map[string]interface{}) // create path and map variables localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/archive" + localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repository), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -3058,7 +3061,7 @@ Retrieve the branches matching the supplied <strong>filterText</strong& @param "filterText" (string) the text to match on @param "orderBy" (string) ordering of refs either ALPHABETICAL (by name) or MODIFICATION (last updated) @return */ -func (a *DefaultApiService) GetBranches(localVarOptionals map[string]interface{}) (*APIResponse, error) { +func (a *DefaultApiService) GetBranches(project, repository string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -3068,6 +3071,8 @@ func (a *DefaultApiService) GetBranches(localVarOptionals map[string]interface{} // create path and map variables localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/branches" + localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repository), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -3631,7 +3636,7 @@ Retrieve a page of commits from a given starting commit or \"between\" @param "until" (string) the commit ID (SHA1) or ref (inclusively) to retrieve commits before @param "withCounts" (bool) optionally include the total number of commits and total number of unique authors @return */ -func (a *DefaultApiService) GetCommits(localVarOptionals map[string]interface{}) (*APIResponse, error) { +func (a *DefaultApiService) GetCommits(project, repository string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -3640,7 +3645,7 @@ func (a *DefaultApiService)
GetCommits(localVarOptionals map[string]interface{}) ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/commits" + localVarPath := fmt.Sprintf("%s/api/1.0/projects/%s/repos/%s/commits", a.client.cfg.BasePath, project, repository) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -3689,6 +3694,9 @@ func (a *DefaultApiService) GetCommits(localVarOptionals map[string]interface{}) if localVarTempParam, localVarOk := localVarOptionals["withCounts"].(bool); localVarOk { localVarQueryParams.Add("withCounts", parameterToString(localVarTempParam, "")) } + if localVarTempParam, localVarOk := localVarOptionals["limit"].(int); localVarOk { + localVarQueryParams.Add("limit", parameterToString(localVarTempParam, "")) + } // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -6040,7 +6048,7 @@ Retrieve the repository matching the supplied <strong>projectKey</stron @param projectKey2 the parent project key @param repositorySlug the repository slug @return */ -func (a *DefaultApiService) GetRepository(projectKey string, projectKey2 string, repositorySlug string) (*APIResponse, error) { +func (a *DefaultApiService) GetRepository(projectKey string, repositorySlug string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -6051,7 +6059,6 @@ func (a *DefaultApiService) GetRepository(projectKey string, projectKey2 string, // create path and map variables localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}" localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", projectKey), -1) - localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", projectKey2), -1) localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repositorySlug), -1) localVarHeaderParams := make(map[string]string) @@ -6735,7 +6742,7 @@ Retrieve the tags matching the supplied <strong>filterText</strong> @param "filterText" (string) the text to match on @param "orderBy" (string) ordering of refs either ALPHABETICAL (by name) or MODIFICATION (last updated) @return */ -func (a *DefaultApiService) GetTags(localVarOptionals map[string]interface{}) (*APIResponse, error) { +func (a *DefaultApiService) GetTags(project, repository string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -6745,6 +6752,8 @@ func (a *DefaultApiService) GetTags(localVarOptionals map[string]interface{}) (* // create path and map variables localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/tags" + localVarPath = strings.Replace(localVarPath, "{"+"projectKey"+"}", fmt.Sprintf("%v", project), -1) + localVarPath = strings.Replace(localVarPath, "{"+"repositorySlug"+"}", fmt.Sprintf("%v", repository), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -6854,11 +6863,8 @@ func (a *DefaultApiService) GetTask(taskId int64) (*APIResponse, error) { return NewBitbucketAPIResponse(localVarHTTPResponse) } -/* DefaultApiService -Retrieve the user matching the supplied <strong>userSlug</strong>. <p> -* @param ctx context.Context for authentication, logging, tracing, etc. 
-@return */ -func (a *DefaultApiService) GetUser(ctx context.Context) (*APIResponse, error) { +/*GetSSHKeys retrieve ssh keys per user, param */ +func (a *DefaultApiService) GetSSHKeys(user string) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -6867,7 +6873,132 @@ func (a *DefaultApiService) GetUser(ctx context.Context) (*APIResponse, error) { ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/1.0/users/{userSlug}" + localVarPath := a.client.cfg.BasePath + "/ssh/1.0/keys" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if len(user) != 0 { + localVarQueryParams.Add("user", user) + } + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + r, err := a.client.prepareRequest(a.client.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(r) + if err != nil || localVarHTTPResponse == nil { + return NewBitbucketAPIResponse(localVarHTTPResponse) + } + defer localVarHTTPResponse.Body.Close() + if localVarHTTPResponse.StatusCode >= 300 { + bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + return NewAPIResponseWithError(localVarHTTPResponse, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) + } + + return NewBitbucketAPIResponse(localVarHTTPResponse) +} + +/*CreateSSHKey create ssh key for user, params user and text */ +func (a *DefaultApiService) CreateSSHKey(localVarOptionals map[string]interface{}) (*APIResponse, error) { + var ( + localVarHTTPMethod = strings.ToUpper("Post") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/ssh/1.0/keys" + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + + if err := typeCheckParameter(localVarOptionals["user"], "string", "user"); err != nil { + return nil, err + } + if err := typeCheckParameter(localVarOptionals["text"], "string", "text"); err != nil { + return nil, err + } + + if localVarTempParam, localVarOk := localVarOptionals["user"].(string); localVarOk { + localVarQueryParams.Add("user", parameterToString(localVarTempParam, "")) + } + if localVarTempParam, localVarOk := localVarOptionals["text"].(string); localVarOk { + localVarFormParams.Add("text", parameterToString(localVarTempParam, "")) + } + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + 
localVarHTTPHeaderAccepts := []string{} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + r, err := a.client.prepareRequest(a.client.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, localVarFileName, localVarFileBytes) + if err != nil { + return nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(r) + if err != nil || localVarHTTPResponse == nil { + return NewBitbucketAPIResponse(localVarHTTPResponse) + } + defer localVarHTTPResponse.Body.Close() + if localVarHTTPResponse.StatusCode >= 300 { + bodyBytes, _ := ioutil.ReadAll(localVarHTTPResponse.Body) + return NewAPIResponseWithError(localVarHTTPResponse, reportError("Status: %v, Body: %s", localVarHTTPResponse.Status, bodyBytes)) + } + + return NewBitbucketAPIResponse(localVarHTTPResponse) +} + +/* DefaultApiService +Retrieve the user matching the supplied <strong>userSlug</strong>. <p> +* @param username the user slug to retrieve +@return */ +func (a *DefaultApiService) GetUser(username string) (*APIResponse, error) { + var ( + localVarHTTPMethod = strings.ToUpper("Get") + localVarPostBody interface{} + localVarFileName string + localVarFileBytes []byte + ) + + // create path and map variables + localVarPath := a.client.cfg.BasePath + "/api/1.0/users/" + + localVarPath = fmt.Sprintf("%s%s", localVarPath, username) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -9731,7 +9862,7 @@ Gets the commits accessible from the {@code from} commit but not in the {@code t @param "to" (string) the target commit (can be a partial/full commit ID or qualified/unqualified ref name) @param "fromRepo" (string) an optional parameter specifying the source repository containing the source commit if that commit is not present in the current repository; the repository can be specified by either its ID <em>fromRepo=42</em> or by its project key plus its repo slug separated by a slash: <em>fromRepo=projectKey/repoSlug</em> @return */ -func (a *DefaultApiService) StreamCommits(localVarOptionals map[string]interface{}) (*APIResponse, error) { +func (a *DefaultApiService) StreamCommits(project, repository string, localVarOptionals map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -9740,7 +9871,7 @@ func (a *DefaultApiService) StreamCommits(localVarOptionals map[string]interface ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/compare/commits" + localVarPath := fmt.Sprintf("%s/api/1.0/projects/%s/repos/%s/compare/commits", a.client.cfg.BasePath, project, repository) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -10016,7 +10147,7 @@ Gets a diff of the changes available in the {@code from} commit but not in the { @param "contextLines" (int32) an optional number of context lines to include around each added or removed lines in the diff @param "whitespace" (string) an optional whitespace flag which can be set to <code>ignore-all</code> @return */ -func (a *DefaultApiService) StreamDiff_37(path string, localVarOptionals map[string]interface{}) (*APIResponse, error) { +func (a *DefaultApiService) StreamDiff_37(project, repository, path string, localVarOptionals
map[string]interface{}) (*APIResponse, error) { var ( localVarHTTPMethod = strings.ToUpper("Get") localVarPostBody interface{} @@ -10025,8 +10156,7 @@ func (a *DefaultApiService) StreamDiff_37(path string, localVarOptionals map[str ) // create path and map variables - localVarPath := a.client.cfg.BasePath + "/api/1.0/projects/{projectKey}/repos/{repositorySlug}/compare/diff{path}" - localVarPath = strings.Replace(localVarPath, "{"+"path"+"}", fmt.Sprintf("%v", path), -1) + localVarPath := fmt.Sprintf("%s/api/1.0/projects/%s/repos/%s/compare/diff%s", a.client.cfg.BasePath, project, repository, path) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} diff --git a/vendor/github.com/mitchellh/mapstructure/LICENSE b/vendor/github.com/mitchellh/mapstructure/LICENSE new file mode 100644 index 0000000..f9c841a --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 Mitchell Hashimoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/mitchellh/mapstructure/README.md b/vendor/github.com/mitchellh/mapstructure/README.md new file mode 100644 index 0000000..0018dc7 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/README.md @@ -0,0 +1,46 @@ +# mapstructure [![Godoc](https://godoc.org/github.com/mitchellh/mapstructure?status.svg)](https://godoc.org/github.com/mitchellh/mapstructure) + +mapstructure is a Go library for decoding generic map values to structures +and vice versa, while providing helpful error handling. + +This library is most useful when decoding values from some data stream (JSON, +Gob, etc.) where you don't _quite_ know the structure of the underlying data +until you read a part of it. You can therefore read a `map[string]interface{}` +and use this library to decode it into the proper underlying native Go +structure. + +## Installation + +Standard `go get`: + +``` +$ go get github.com/mitchellh/mapstructure +``` + +## Usage & Example + +For usage and examples see the [Godoc](http://godoc.org/github.com/mitchellh/mapstructure). + +The `Decode` function has examples associated with it there. + +## But Why?! + +Go offers fantastic standard libraries for decoding formats such as JSON. +The standard method is to have a struct pre-created, and populate that struct +from the bytes of the encoded format. This is great, but the problem is if +you have configuration or an encoding that changes slightly depending on +specific fields. 
For example, consider this JSON: + +```json +{ + "type": "person", + "name": "Mitchell" +} +``` + +Perhaps we can't populate a specific structure without first reading +the "type" field from the JSON. We could always do two passes over the +decoding of the JSON (reading the "type" first, and the rest later). +However, it is much simpler to just decode this into a `map[string]interface{}` +structure, read the "type" key, then use something like this library +to decode it into the proper structure. diff --git a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go new file mode 100644 index 0000000..2a72757 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go @@ -0,0 +1,171 @@ +package mapstructure + +import ( + "errors" + "reflect" + "strconv" + "strings" + "time" +) + +// typedDecodeHook takes a raw DecodeHookFunc (an interface{}) and turns +// it into the proper DecodeHookFunc type, such as DecodeHookFuncType. +func typedDecodeHook(h DecodeHookFunc) DecodeHookFunc { + // Create variables here so we can reference them with the reflect pkg + var f1 DecodeHookFuncType + var f2 DecodeHookFuncKind + + // Fill in the variables into this interface and the rest is done + // automatically using the reflect package. + potential := []interface{}{f1, f2} + + v := reflect.ValueOf(h) + vt := v.Type() + for _, raw := range potential { + pt := reflect.ValueOf(raw).Type() + if vt.ConvertibleTo(pt) { + return v.Convert(pt).Interface() + } + } + + return nil +} + +// DecodeHookExec executes the given decode hook. This should be used +// since it'll naturally degrade to the older backwards compatible DecodeHookFunc +// that took reflect.Kind instead of reflect.Type. +func DecodeHookExec( + raw DecodeHookFunc, + from reflect.Type, to reflect.Type, + data interface{}) (interface{}, error) { + switch f := typedDecodeHook(raw).(type) { + case DecodeHookFuncType: + return f(from, to, data) + case DecodeHookFuncKind: + return f(from.Kind(), to.Kind(), data) + default: + return nil, errors.New("invalid decode hook signature") + } +} + +// ComposeDecodeHookFunc creates a single DecodeHookFunc that +// automatically composes multiple DecodeHookFuncs. +// +// The composed funcs are called in order, with the result of the +// previous transformation. +func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + var err error + for _, f1 := range fs { + data, err = DecodeHookExec(f1, f, t, data) + if err != nil { + return nil, err + } + + // Modify the from kind to be correct with the new data + f = nil + if val := reflect.ValueOf(data); val.IsValid() { + f = val.Type() + } + } + + return data, nil + } +} + +// StringToSliceHookFunc returns a DecodeHookFunc that converts +// string to []string by splitting on the given sep. +func StringToSliceHookFunc(sep string) DecodeHookFunc { + return func( + f reflect.Kind, + t reflect.Kind, + data interface{}) (interface{}, error) { + if f != reflect.String || t != reflect.Slice { + return data, nil + } + + raw := data.(string) + if raw == "" { + return []string{}, nil + } + + return strings.Split(raw, sep), nil + } +} + +// StringToTimeDurationHookFunc returns a DecodeHookFunc that converts +// strings to time.Duration. 
+func StringToTimeDurationHookFunc() DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + if t != reflect.TypeOf(time.Duration(5)) { + return data, nil + } + + // Convert it by parsing + return time.ParseDuration(data.(string)) + } +} + +// StringToTimeHookFunc returns a DecodeHookFunc that converts +// strings to time.Time. +func StringToTimeHookFunc(layout string) DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + if t != reflect.TypeOf(time.Time{}) { + return data, nil + } + + // Convert it by parsing + return time.Parse(layout, data.(string)) + } +} + +// WeaklyTypedHook is a DecodeHookFunc which adds support for weak typing to +// the decoder. +// +// Note that this is significantly different from the WeaklyTypedInput option +// of the DecoderConfig. +func WeaklyTypedHook( + f reflect.Kind, + t reflect.Kind, + data interface{}) (interface{}, error) { + dataVal := reflect.ValueOf(data) + switch t { + case reflect.String: + switch f { + case reflect.Bool: + if dataVal.Bool() { + return "1", nil + } + return "0", nil + case reflect.Float32: + return strconv.FormatFloat(dataVal.Float(), 'f', -1, 64), nil + case reflect.Int: + return strconv.FormatInt(dataVal.Int(), 10), nil + case reflect.Slice: + dataType := dataVal.Type() + elemKind := dataType.Elem().Kind() + if elemKind == reflect.Uint8 { + return string(dataVal.Interface().([]uint8)), nil + } + case reflect.Uint: + return strconv.FormatUint(dataVal.Uint(), 10), nil + } + } + + return data, nil +} diff --git a/vendor/github.com/mitchellh/mapstructure/error.go b/vendor/github.com/mitchellh/mapstructure/error.go new file mode 100644 index 0000000..47a99e5 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/error.go @@ -0,0 +1,50 @@ +package mapstructure + +import ( + "errors" + "fmt" + "sort" + "strings" +) + +// Error implements the error interface and can represents multiple +// errors that occur in the course of a single decode. +type Error struct { + Errors []string +} + +func (e *Error) Error() string { + points := make([]string, len(e.Errors)) + for i, err := range e.Errors { + points[i] = fmt.Sprintf("* %s", err) + } + + sort.Strings(points) + return fmt.Sprintf( + "%d error(s) decoding:\n\n%s", + len(e.Errors), strings.Join(points, "\n")) +} + +// WrappedErrors implements the errwrap.Wrapper interface to make this +// return value more useful with the errwrap and go-multierror libraries. +func (e *Error) WrappedErrors() []error { + if e == nil { + return nil + } + + result := make([]error, len(e.Errors)) + for i, e := range e.Errors { + result[i] = errors.New(e) + } + + return result +} + +func appendErrors(errors []string, err error) []string { + switch e := err.(type) { + case *Error: + return append(errors, e.Errors...) + default: + return append(errors, e.Error()) + } +} diff --git a/vendor/github.com/mitchellh/mapstructure/mapstructure.go b/vendor/github.com/mitchellh/mapstructure/mapstructure.go new file mode 100644 index 0000000..aaf12a2 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/mapstructure.go @@ -0,0 +1,1043 @@ +// Package mapstructure exposes functionality to convert an arbitrary +// map[string]interface{} into a native Go structure. +// +// The Go structure can be arbitrarily complex, containing slices, +// other structs, etc. 
and the decoder will properly decode nested +// maps and so on into the proper structures in the native Go struct. +// See the examples to see what the decoder is capable of. +package mapstructure + +import ( + "encoding/json" + "errors" + "fmt" + "reflect" + "sort" + "strconv" + "strings" +) + +// DecodeHookFunc is the callback function that can be used for +// data transformations. See "DecodeHook" in the DecoderConfig +// struct. +// +// The type should be DecodeHookFuncType or DecodeHookFuncKind. +// Either is accepted. Types are a superset of Kinds (Types can return +// Kinds) and are generally a richer thing to use, but Kinds are simpler +// if you only need those. +// +// The reason DecodeHookFunc is multi-typed is for backwards compatibility: +// we started with Kinds and then realized Types were the better solution, +// but have a promise to not break backwards compat so we now support +// both. +type DecodeHookFunc interface{} + +// DecodeHookFuncType is a DecodeHookFunc which has complete information about +// the source and target types. +type DecodeHookFuncType func(reflect.Type, reflect.Type, interface{}) (interface{}, error) + +// DecodeHookFuncKind is a DecodeHookFunc which knows only the Kinds of the +// source and target types. +type DecodeHookFuncKind func(reflect.Kind, reflect.Kind, interface{}) (interface{}, error) + +// DecoderConfig is the configuration that is used to create a new decoder +// and allows customization of various aspects of decoding. +type DecoderConfig struct { + // DecodeHook, if set, will be called before any decoding and any + // type conversion (if WeaklyTypedInput is on). This lets you modify + // the values before they're set down onto the resulting struct. + // + // If an error is returned, the entire decode will fail with that + // error. + DecodeHook DecodeHookFunc + + // If ErrorUnused is true, then it is an error for there to exist + // keys in the original map that were unused in the decoding process + // (extra keys). + ErrorUnused bool + + // ZeroFields, if set to true, will zero fields before writing them. + // For example, a map will be emptied before decoded values are put in + // it. If this is false, a map will be merged. + ZeroFields bool + + // If WeaklyTypedInput is true, the decoder will make the following + // "weak" conversions: + // + // - bools to string (true = "1", false = "0") + // - numbers to string (base 10) + // - bools to int/uint (true = 1, false = 0) + // - strings to int/uint (base implied by prefix) + // - int to bool (true if value != 0) + // - string to bool (accepts: 1, t, T, TRUE, true, True, 0, f, F, + // FALSE, false, False. Anything else is an error) + // - empty array = empty map and vice versa + // - negative numbers to overflowed uint values (base 10) + // - slice of maps to a merged map + // - single values are converted to slices if required. Each + // element is weakly decoded. For example: "4" can become []int{4} + // if the target type is an int slice. + // + WeaklyTypedInput bool + + // Metadata is the struct that will contain extra metadata about + // the decoding. If this is nil, then no metadata will be tracked. + Metadata *Metadata + + // Result is a pointer to the struct that will contain the decoded + // value. + Result interface{} + + // The tag name that mapstructure reads for field names. 
This + // defaults to "mapstructure" + TagName string +} + +// A Decoder takes a raw interface value and turns it into structured +// data, keeping track of rich error information along the way in case +// anything goes wrong. Unlike the basic top-level Decode method, you can +// more finely control how the Decoder behaves using the DecoderConfig +// structure. The top-level Decode method is just a convenience that sets +// up the most basic Decoder. +type Decoder struct { + config *DecoderConfig +} + +// Metadata contains information about decoding a structure that +// is tedious or difficult to get otherwise. +type Metadata struct { + // Keys are the keys of the structure which were successfully decoded + Keys []string + + // Unused is a slice of keys that were found in the raw value but + // weren't decoded since there was no matching field in the result interface + Unused []string +} + +// Decode takes an input structure and uses reflection to translate it to +// the output structure. output must be a pointer to a map or struct. +func Decode(input interface{}, output interface{}) error { + config := &DecoderConfig{ + Metadata: nil, + Result: output, + } + + decoder, err := NewDecoder(config) + if err != nil { + return err + } + + return decoder.Decode(input) +} + +// WeakDecode is the same as Decode but is shorthand to enable +// WeaklyTypedInput. See DecoderConfig for more info. +func WeakDecode(input, output interface{}) error { + config := &DecoderConfig{ + Metadata: nil, + Result: output, + WeaklyTypedInput: true, + } + + decoder, err := NewDecoder(config) + if err != nil { + return err + } + + return decoder.Decode(input) +} + +// DecodeMetadata is the same as Decode, but is shorthand to +// enable metadata collection. See DecoderConfig for more info. +func DecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error { + config := &DecoderConfig{ + Metadata: metadata, + Result: output, + } + + decoder, err := NewDecoder(config) + if err != nil { + return err + } + + return decoder.Decode(input) +} + +// WeakDecodeMetadata is the same as Decode, but is shorthand to +// enable both WeaklyTypedInput and metadata collection. See +// DecoderConfig for more info. +func WeakDecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error { + config := &DecoderConfig{ + Metadata: metadata, + Result: output, + WeaklyTypedInput: true, + } + + decoder, err := NewDecoder(config) + if err != nil { + return err + } + + return decoder.Decode(input) +} + +// NewDecoder returns a new decoder for the given configuration. Once +// a decoder has been returned, the same configuration must not be used +// again. +func NewDecoder(config *DecoderConfig) (*Decoder, error) { + val := reflect.ValueOf(config.Result) + if val.Kind() != reflect.Ptr { + return nil, errors.New("result must be a pointer") + } + + val = val.Elem() + if !val.CanAddr() { + return nil, errors.New("result must be addressable (a pointer)") + } + + if config.Metadata != nil { + if config.Metadata.Keys == nil { + config.Metadata.Keys = make([]string, 0) + } + + if config.Metadata.Unused == nil { + config.Metadata.Unused = make([]string, 0) + } + } + + if config.TagName == "" { + config.TagName = "mapstructure" + } + + result := &Decoder{ + config: config, + } + + return result, nil +} + +// Decode decodes the given raw interface to the target pointer specified +// by the configuration. 
+func (d *Decoder) Decode(input interface{}) error { + return d.decode("", input, reflect.ValueOf(d.config.Result).Elem()) +} + +// Decodes an unknown data type into a specific reflection value. +func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error { + if input == nil { + // If the data is nil, then we don't set anything, unless ZeroFields is set + // to true. + if d.config.ZeroFields { + outVal.Set(reflect.Zero(outVal.Type())) + + if d.config.Metadata != nil && name != "" { + d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) + } + } + return nil + } + + inputVal := reflect.ValueOf(input) + if !inputVal.IsValid() { + // If the input value is invalid, then we just set the value + // to be the zero value. + outVal.Set(reflect.Zero(outVal.Type())) + if d.config.Metadata != nil && name != "" { + d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) + } + return nil + } + + if d.config.DecodeHook != nil { + // We have a DecodeHook, so let's pre-process the input. + var err error + input, err = DecodeHookExec( + d.config.DecodeHook, + inputVal.Type(), outVal.Type(), input) + if err != nil { + return fmt.Errorf("error decoding '%s': %s", name, err) + } + } + + var err error + inputKind := getKind(outVal) + switch inputKind { + case reflect.Bool: + err = d.decodeBool(name, input, outVal) + case reflect.Interface: + err = d.decodeBasic(name, input, outVal) + case reflect.String: + err = d.decodeString(name, input, outVal) + case reflect.Int: + err = d.decodeInt(name, input, outVal) + case reflect.Uint: + err = d.decodeUint(name, input, outVal) + case reflect.Float32: + err = d.decodeFloat(name, input, outVal) + case reflect.Struct: + err = d.decodeStruct(name, input, outVal) + case reflect.Map: + err = d.decodeMap(name, input, outVal) + case reflect.Ptr: + err = d.decodePtr(name, input, outVal) + case reflect.Slice: + err = d.decodeSlice(name, input, outVal) + case reflect.Array: + err = d.decodeArray(name, input, outVal) + case reflect.Func: + err = d.decodeFunc(name, input, outVal) + default: + // If we reached this point then we weren't able to decode it + return fmt.Errorf("%s: unsupported type: %s", name, inputKind) + } + + // If we reached here, then we successfully decoded SOMETHING, so + // mark the key as used if we're tracking metainput. + if d.config.Metadata != nil && name != "" { + d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) + } + + return err +} + +// This decodes a basic type (bool, int, string, etc.) and sets the +// value to "data" of that type. 
+func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) error { + if val.IsValid() && val.Elem().IsValid() { + return d.decode(name, data, val.Elem()) + } + dataVal := reflect.ValueOf(data) + if !dataVal.IsValid() { + dataVal = reflect.Zero(val.Type()) + } + + dataValType := dataVal.Type() + if !dataValType.AssignableTo(val.Type()) { + return fmt.Errorf( + "'%s' expected type '%s', got '%s'", + name, val.Type(), dataValType) + } + + val.Set(dataVal) + return nil +} + +func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.ValueOf(data) + dataKind := getKind(dataVal) + + converted := true + switch { + case dataKind == reflect.String: + val.SetString(dataVal.String()) + case dataKind == reflect.Bool && d.config.WeaklyTypedInput: + if dataVal.Bool() { + val.SetString("1") + } else { + val.SetString("0") + } + case dataKind == reflect.Int && d.config.WeaklyTypedInput: + val.SetString(strconv.FormatInt(dataVal.Int(), 10)) + case dataKind == reflect.Uint && d.config.WeaklyTypedInput: + val.SetString(strconv.FormatUint(dataVal.Uint(), 10)) + case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: + val.SetString(strconv.FormatFloat(dataVal.Float(), 'f', -1, 64)) + case dataKind == reflect.Slice && d.config.WeaklyTypedInput, + dataKind == reflect.Array && d.config.WeaklyTypedInput: + dataType := dataVal.Type() + elemKind := dataType.Elem().Kind() + switch elemKind { + case reflect.Uint8: + var uints []uint8 + if dataKind == reflect.Array { + uints = make([]uint8, dataVal.Len(), dataVal.Len()) + for i := range uints { + uints[i] = dataVal.Index(i).Interface().(uint8) + } + } else { + uints = dataVal.Interface().([]uint8) + } + val.SetString(string(uints)) + default: + converted = false + } + default: + converted = false + } + + if !converted { + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s'", + name, val.Type(), dataVal.Type()) + } + + return nil +} + +func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.ValueOf(data) + dataKind := getKind(dataVal) + dataType := dataVal.Type() + + switch { + case dataKind == reflect.Int: + val.SetInt(dataVal.Int()) + case dataKind == reflect.Uint: + val.SetInt(int64(dataVal.Uint())) + case dataKind == reflect.Float32: + val.SetInt(int64(dataVal.Float())) + case dataKind == reflect.Bool && d.config.WeaklyTypedInput: + if dataVal.Bool() { + val.SetInt(1) + } else { + val.SetInt(0) + } + case dataKind == reflect.String && d.config.WeaklyTypedInput: + i, err := strconv.ParseInt(dataVal.String(), 0, val.Type().Bits()) + if err == nil { + val.SetInt(i) + } else { + return fmt.Errorf("cannot parse '%s' as int: %s", name, err) + } + case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": + jn := data.(json.Number) + i, err := jn.Int64() + if err != nil { + return fmt.Errorf( + "error decoding json.Number into %s: %s", name, err) + } + val.SetInt(i) + default: + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s'", + name, val.Type(), dataVal.Type()) + } + + return nil +} + +func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.ValueOf(data) + dataKind := getKind(dataVal) + + switch { + case dataKind == reflect.Int: + i := dataVal.Int() + if i < 0 && !d.config.WeaklyTypedInput { + return fmt.Errorf("cannot parse '%s', %d overflows uint", + name, i) + } + val.SetUint(uint64(i)) + case dataKind == reflect.Uint: + 
val.SetUint(dataVal.Uint()) + case dataKind == reflect.Float32: + f := dataVal.Float() + if f < 0 && !d.config.WeaklyTypedInput { + return fmt.Errorf("cannot parse '%s', %f overflows uint", + name, f) + } + val.SetUint(uint64(f)) + case dataKind == reflect.Bool && d.config.WeaklyTypedInput: + if dataVal.Bool() { + val.SetUint(1) + } else { + val.SetUint(0) + } + case dataKind == reflect.String && d.config.WeaklyTypedInput: + i, err := strconv.ParseUint(dataVal.String(), 0, val.Type().Bits()) + if err == nil { + val.SetUint(i) + } else { + return fmt.Errorf("cannot parse '%s' as uint: %s", name, err) + } + default: + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s'", + name, val.Type(), dataVal.Type()) + } + + return nil +} + +func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.ValueOf(data) + dataKind := getKind(dataVal) + + switch { + case dataKind == reflect.Bool: + val.SetBool(dataVal.Bool()) + case dataKind == reflect.Int && d.config.WeaklyTypedInput: + val.SetBool(dataVal.Int() != 0) + case dataKind == reflect.Uint && d.config.WeaklyTypedInput: + val.SetBool(dataVal.Uint() != 0) + case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: + val.SetBool(dataVal.Float() != 0) + case dataKind == reflect.String && d.config.WeaklyTypedInput: + b, err := strconv.ParseBool(dataVal.String()) + if err == nil { + val.SetBool(b) + } else if dataVal.String() == "" { + val.SetBool(false) + } else { + return fmt.Errorf("cannot parse '%s' as bool: %s", name, err) + } + default: + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s'", + name, val.Type(), dataVal.Type()) + } + + return nil +} + +func (d *Decoder) decodeFloat(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.ValueOf(data) + dataKind := getKind(dataVal) + dataType := dataVal.Type() + + switch { + case dataKind == reflect.Int: + val.SetFloat(float64(dataVal.Int())) + case dataKind == reflect.Uint: + val.SetFloat(float64(dataVal.Uint())) + case dataKind == reflect.Float32: + val.SetFloat(dataVal.Float()) + case dataKind == reflect.Bool && d.config.WeaklyTypedInput: + if dataVal.Bool() { + val.SetFloat(1) + } else { + val.SetFloat(0) + } + case dataKind == reflect.String && d.config.WeaklyTypedInput: + f, err := strconv.ParseFloat(dataVal.String(), val.Type().Bits()) + if err == nil { + val.SetFloat(f) + } else { + return fmt.Errorf("cannot parse '%s' as float: %s", name, err) + } + case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": + jn := data.(json.Number) + i, err := jn.Float64() + if err != nil { + return fmt.Errorf( + "error decoding json.Number into %s: %s", name, err) + } + val.SetFloat(i) + default: + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s'", + name, val.Type(), dataVal.Type()) + } + + return nil +} + +func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) error { + valType := val.Type() + valKeyType := valType.Key() + valElemType := valType.Elem() + + // By default we overwrite keys in the current map + valMap := val + + // If the map is nil or we're purposely zeroing fields, make a new map + if valMap.IsNil() || d.config.ZeroFields { + // Make a new map to hold our result + mapType := reflect.MapOf(valKeyType, valElemType) + valMap = reflect.MakeMap(mapType) + } + + // Check input type and based on the input type jump to the proper func + dataVal := reflect.Indirect(reflect.ValueOf(data)) + switch dataVal.Kind() { 
+ case reflect.Map: + return d.decodeMapFromMap(name, dataVal, val, valMap) + + case reflect.Struct: + return d.decodeMapFromStruct(name, dataVal, val, valMap) + + case reflect.Array, reflect.Slice: + if d.config.WeaklyTypedInput { + return d.decodeMapFromSlice(name, dataVal, val, valMap) + } + + fallthrough + + default: + return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind()) + } +} + +func (d *Decoder) decodeMapFromSlice(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { + // Special case for BC reasons (covered by tests) + if dataVal.Len() == 0 { + val.Set(valMap) + return nil + } + + for i := 0; i < dataVal.Len(); i++ { + err := d.decode( + fmt.Sprintf("%s[%d]", name, i), + dataVal.Index(i).Interface(), val) + if err != nil { + return err + } + } + + return nil +} + +func (d *Decoder) decodeMapFromMap(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { + valType := val.Type() + valKeyType := valType.Key() + valElemType := valType.Elem() + + // Accumulate errors + errors := make([]string, 0) + + for _, k := range dataVal.MapKeys() { + fieldName := fmt.Sprintf("%s[%s]", name, k) + + // First decode the key into the proper type + currentKey := reflect.Indirect(reflect.New(valKeyType)) + if err := d.decode(fieldName, k.Interface(), currentKey); err != nil { + errors = appendErrors(errors, err) + continue + } + + // Next decode the data into the proper type + v := dataVal.MapIndex(k).Interface() + currentVal := reflect.Indirect(reflect.New(valElemType)) + if err := d.decode(fieldName, v, currentVal); err != nil { + errors = appendErrors(errors, err) + continue + } + + valMap.SetMapIndex(currentKey, currentVal) + } + + // Set the built up map to the value + val.Set(valMap) + + // If we had errors, return those + if len(errors) > 0 { + return &Error{errors} + } + + return nil +} + +func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { + typ := dataVal.Type() + for i := 0; i < typ.NumField(); i++ { + // Get the StructField first since this is a cheap operation. If the + // field is unexported, then ignore it. + f := typ.Field(i) + if f.PkgPath != "" { + continue + } + + // Next get the actual value of this field and verify it is assignable + // to the map value. + v := dataVal.Field(i) + if !v.Type().AssignableTo(valMap.Type().Elem()) { + return fmt.Errorf("cannot assign type '%s' to map value field of type '%s'", v.Type(), valMap.Type().Elem()) + } + + // Determine the name of the key in the map + keyName := f.Name + tagValue := f.Tag.Get(d.config.TagName) + tagValue = strings.SplitN(tagValue, ",", 2)[0] + if tagValue != "" { + if tagValue == "-" { + continue + } + + keyName = tagValue + } + + switch v.Kind() { + // this is an embedded struct, so handle it differently + case reflect.Struct: + x := reflect.New(v.Type()) + x.Elem().Set(v) + + vType := valMap.Type() + vKeyType := vType.Key() + vElemType := vType.Elem() + mType := reflect.MapOf(vKeyType, vElemType) + vMap := reflect.MakeMap(mType) + + err := d.decode(keyName, x.Interface(), vMap) + if err != nil { + return err + } + + valMap.SetMapIndex(reflect.ValueOf(keyName), vMap) + + default: + valMap.SetMapIndex(reflect.ValueOf(keyName), v) + } + } + + if val.CanAddr() { + val.Set(valMap) + } + + return nil +} + +func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) error { + // Create an element of the concrete (non pointer) type and decode + // into that. 
Then set the value of the pointer to this type. + valType := val.Type() + valElemType := valType.Elem() + + if val.CanSet() { + realVal := val + if realVal.IsNil() || d.config.ZeroFields { + realVal = reflect.New(valElemType) + } + + if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil { + return err + } + + val.Set(realVal) + } else { + if err := d.decode(name, data, reflect.Indirect(val)); err != nil { + return err + } + } + return nil +} + +func (d *Decoder) decodeFunc(name string, data interface{}, val reflect.Value) error { + // Create an element of the concrete (non pointer) type and decode + // into that. Then set the value of the pointer to this type. + dataVal := reflect.Indirect(reflect.ValueOf(data)) + if val.Type() != dataVal.Type() { + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s'", + name, val.Type(), dataVal.Type()) + } + val.Set(dataVal) + return nil +} + +func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + dataValKind := dataVal.Kind() + valType := val.Type() + valElemType := valType.Elem() + sliceType := reflect.SliceOf(valElemType) + + valSlice := val + if valSlice.IsNil() || d.config.ZeroFields { + // Check input type + if dataValKind != reflect.Array && dataValKind != reflect.Slice { + if d.config.WeaklyTypedInput { + switch { + // Empty maps turn into empty slices + case dataValKind == reflect.Map: + if dataVal.Len() == 0 { + val.Set(reflect.MakeSlice(sliceType, 0, 0)) + return nil + } + case dataValKind == reflect.String && valElemType.Kind() == reflect.Uint8: + return d.decodeSlice(name, []byte(dataVal.String()), val) + // All other types we try to convert to the slice type + // and "lift" it into it. i.e. a string becomes a string slice. + default: + // Just re-try this function with data as a slice. + return d.decodeSlice(name, []interface{}{data}, val) + } + } + return fmt.Errorf( + "'%s': source data must be an array or slice, got %s", name, dataValKind) + + } + + // Make a new slice to hold our result, same size as the original data. 
+ valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len()) + } + + // Accumulate any errors + errors := make([]string, 0) + + for i := 0; i < dataVal.Len(); i++ { + currentData := dataVal.Index(i).Interface() + for valSlice.Len() <= i { + valSlice = reflect.Append(valSlice, reflect.Zero(valElemType)) + } + currentField := valSlice.Index(i) + + fieldName := fmt.Sprintf("%s[%d]", name, i) + if err := d.decode(fieldName, currentData, currentField); err != nil { + errors = appendErrors(errors, err) + } + } + + // Finally, set the value to the slice we built up + val.Set(valSlice) + + // If there were errors, we return those + if len(errors) > 0 { + return &Error{errors} + } + + return nil +} + +func (d *Decoder) decodeArray(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + dataValKind := dataVal.Kind() + valType := val.Type() + valElemType := valType.Elem() + arrayType := reflect.ArrayOf(valType.Len(), valElemType) + + valArray := val + + if valArray.Interface() == reflect.Zero(valArray.Type()).Interface() || d.config.ZeroFields { + // Check input type + if dataValKind != reflect.Array && dataValKind != reflect.Slice { + if d.config.WeaklyTypedInput { + switch { + // Empty maps turn into empty arrays + case dataValKind == reflect.Map: + if dataVal.Len() == 0 { + val.Set(reflect.Zero(arrayType)) + return nil + } + + // All other types we try to convert to the array type + // and "lift" it into it. i.e. a string becomes a string array. + default: + // Just re-try this function with data as a slice. + return d.decodeArray(name, []interface{}{data}, val) + } + } + + return fmt.Errorf( + "'%s': source data must be an array or slice, got %s", name, dataValKind) + + } + if dataVal.Len() > arrayType.Len() { + return fmt.Errorf( + "'%s': expected source data to have length less or equal to %d, got %d", name, arrayType.Len(), dataVal.Len()) + + } + + // Make a new array to hold our result, same size as the original data. + valArray = reflect.New(arrayType).Elem() + } + + // Accumulate any errors + errors := make([]string, 0) + + for i := 0; i < dataVal.Len(); i++ { + currentData := dataVal.Index(i).Interface() + currentField := valArray.Index(i) + + fieldName := fmt.Sprintf("%s[%d]", name, i) + if err := d.decode(fieldName, currentData, currentField); err != nil { + errors = appendErrors(errors, err) + } + } + + // Finally, set the value to the array we built up + val.Set(valArray) + + // If there were errors, we return those + if len(errors) > 0 { + return &Error{errors} + } + + return nil +} + +func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + + // If the type of the value to write to and the data match directly, + // then we just set it directly instead of recursing into the structure. 
+ if dataVal.Type() == val.Type() { + val.Set(dataVal) + return nil + } + + dataValKind := dataVal.Kind() + if dataValKind != reflect.Map { + return fmt.Errorf("'%s' expected a map, got '%s'", name, dataValKind) + } + + dataValType := dataVal.Type() + if kind := dataValType.Key().Kind(); kind != reflect.String && kind != reflect.Interface { + return fmt.Errorf( + "'%s' needs a map with string keys, has '%s' keys", + name, dataValType.Key().Kind()) + } + + dataValKeys := make(map[reflect.Value]struct{}) + dataValKeysUnused := make(map[interface{}]struct{}) + for _, dataValKey := range dataVal.MapKeys() { + dataValKeys[dataValKey] = struct{}{} + dataValKeysUnused[dataValKey.Interface()] = struct{}{} + } + + errors := make([]string, 0) + + // This slice will keep track of all the structs we'll be decoding. + // There can be more than one struct if there are embedded structs + // that are squashed. + structs := make([]reflect.Value, 1, 5) + structs[0] = val + + // Compile the list of all the fields that we're going to be decoding + // from all the structs. + type field struct { + field reflect.StructField + val reflect.Value + } + fields := []field{} + for len(structs) > 0 { + structVal := structs[0] + structs = structs[1:] + + structType := structVal.Type() + + for i := 0; i < structType.NumField(); i++ { + fieldType := structType.Field(i) + fieldKind := fieldType.Type.Kind() + + // If "squash" is specified in the tag, we squash the field down. + squash := false + tagParts := strings.Split(fieldType.Tag.Get(d.config.TagName), ",") + for _, tag := range tagParts[1:] { + if tag == "squash" { + squash = true + break + } + } + + if squash { + if fieldKind != reflect.Struct { + errors = appendErrors(errors, + fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldKind)) + } else { + structs = append(structs, structVal.FieldByName(fieldType.Name)) + } + continue + } + + // Normal struct field, store it away + fields = append(fields, field{fieldType, structVal.Field(i)}) + } + } + + // for fieldType, field := range fields { + for _, f := range fields { + field, fieldValue := f.field, f.val + fieldName := field.Name + + tagValue := field.Tag.Get(d.config.TagName) + tagValue = strings.SplitN(tagValue, ",", 2)[0] + if tagValue != "" { + fieldName = tagValue + } + + rawMapKey := reflect.ValueOf(fieldName) + rawMapVal := dataVal.MapIndex(rawMapKey) + if !rawMapVal.IsValid() { + // Do a slower search by iterating over each key and + // doing case-insensitive search. + for dataValKey := range dataValKeys { + mK, ok := dataValKey.Interface().(string) + if !ok { + // Not a string key + continue + } + + if strings.EqualFold(mK, fieldName) { + rawMapKey = dataValKey + rawMapVal = dataVal.MapIndex(dataValKey) + break + } + } + + if !rawMapVal.IsValid() { + // There was no matching key in the map for the value in + // the struct. Just ignore. + continue + } + } + + // Delete the key we're using from the unused map so we stop tracking + delete(dataValKeysUnused, rawMapKey.Interface()) + + if !fieldValue.IsValid() { + // This should never happen + panic("field is not valid") + } + + // If we can't set the field, then it is unexported or something, + // and we just continue onwards. + if !fieldValue.CanSet() { + continue + } + + // If the name is empty string, then we're at the root, and we + // don't dot-join the fields. 
+ if name != "" { + fieldName = fmt.Sprintf("%s.%s", name, fieldName) + } + + if err := d.decode(fieldName, rawMapVal.Interface(), fieldValue); err != nil { + errors = appendErrors(errors, err) + } + } + + if d.config.ErrorUnused && len(dataValKeysUnused) > 0 { + keys := make([]string, 0, len(dataValKeysUnused)) + for rawKey := range dataValKeysUnused { + keys = append(keys, rawKey.(string)) + } + sort.Strings(keys) + + err := fmt.Errorf("'%s' has invalid keys: %s", name, strings.Join(keys, ", ")) + errors = appendErrors(errors, err) + } + + if len(errors) > 0 { + return &Error{errors} + } + + // Add the unused keys to the list of unused keys if we're tracking metadata + if d.config.Metadata != nil { + for rawKey := range dataValKeysUnused { + key := rawKey.(string) + if name != "" { + key = fmt.Sprintf("%s.%s", name, key) + } + + d.config.Metadata.Unused = append(d.config.Metadata.Unused, key) + } + } + + return nil +} + +func getKind(val reflect.Value) reflect.Kind { + kind := val.Kind() + + switch { + case kind >= reflect.Int && kind <= reflect.Int64: + return reflect.Int + case kind >= reflect.Uint && kind <= reflect.Uint64: + return reflect.Uint + case kind >= reflect.Float32 && kind <= reflect.Float64: + return reflect.Float32 + default: + return kind + } +} diff --git a/vendor/vendor.json b/vendor/vendor.json new file mode 100644 index 0000000..67bb80f --- /dev/null +++ b/vendor/vendor.json @@ -0,0 +1,13 @@ +{ + "comment": "", + "ignore": "test", + "package": [ + { + "checksumSHA1": "Yvzge1YQcD/wSDXLD5qNHqmI/0s=", + "path": "github.com/mitchellh/mapstructure", + "revision": "00c29f56e2386353d58c599509e8dc3801b0d716", + "revisionTime": "2018-02-20T23:01:11Z" + } + ], + "rootPath": "github.com/gfleury/go-bitbucket-v1" +}
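Usage note (a sketch, not part of the diff above): the repository-scoped signatures such as GetCommits(project, repository, opts) pair with the new typed decoders added in api_response.go. The client setup below mirrors cmd/main.go; the project key "PRJ", the repository slug "my-repo", the limit of 25 and the omitted authentication are placeholder assumptions.

package main

import (
	"fmt"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
	"golang.org/x/net/context"
)

func main() {
	// Client construction mirrors cmd/main.go; authentication is omitted here.
	client := bitbucketv1.NewAPIClient(
		context.Background(),
		bitbucketv1.NewConfiguration("https://stash.domain.com/rest"),
	)

	// GetCommits now takes the project key and repository slug explicitly;
	// "limit" is the optional paging parameter added in this patch.
	response, err := client.DefaultApi.GetCommits("PRJ", "my-repo",
		map[string]interface{}{"limit": 25})
	if err != nil {
		fmt.Printf("%s\n", err.Error())
		return
	}

	// GetCommitsResponse decodes response.Values["values"] into []Commit
	// via mapstructure.
	commits, err := bitbucketv1.GetCommitsResponse(response)
	if err == nil {
		for _, commit := range commits {
			fmt.Println(commit.DisplayID, commit.Message)
		}
	}
}

The same pattern applies to GetBranches/GetBranchesResponse and GetTags/GetTagsResponse, which take the same project and repository arguments.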
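A second sketch covers the new /ssh/1.0/keys endpoints: GetSSHKeys lists a user's keys and GetSSHKeysResponse decodes them, while CreateSSHKey posts a new key taken from the optional-parameter map. The *bitbucketv1.APIClient parameter type, the sample user name and the key text are assumptions for illustration; the client is assumed to be built as in the previous example.

package main

import (
	"fmt"

	bitbucketv1 "github.com/gfleury/go-bitbucket-v1"
)

// listAndAddSSHKeys exercises the new SSH key calls; it is meant to be
// invoked with the client created in the previous sketch.
func listAndAddSSHKeys(client *bitbucketv1.APIClient) {
	// GET /ssh/1.0/keys?user=<user>, then decode into []SSHKey.
	response, err := client.DefaultApi.GetSSHKeys("george.fleury")
	if err != nil {
		fmt.Printf("%s\n", err.Error())
		return
	}
	keys, err := bitbucketv1.GetSSHKeysResponse(response)
	if err == nil {
		for _, key := range keys {
			fmt.Println(key.Label)
		}
	}

	// POST /ssh/1.0/keys; CreateSSHKey reads "user" and "text" from the
	// optional-parameter map.
	if _, err := client.DefaultApi.CreateSSHKey(map[string]interface{}{
		"user": "george.fleury",
		"text": "ssh-rsa AAAA... user@host",
	}); err != nil {
		fmt.Printf("%s\n", err.Error())
	}
}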