From 86191e7c7cc6a2475e2a3377cf2e543520c5aab6 Mon Sep 17 00:00:00 2001
From: Peter Broadhurst
Date: Sun, 3 Dec 2023 00:20:03 -0500
Subject: [PATCH 01/19] Add JSON serialization of filters for building more complex queries via POST

Signed-off-by: Peter Broadhurst
---
 pkg/ffapi/filter.go                |   9 +
 pkg/ffapi/restfilter.go            |  13 -
 pkg/ffapi/restfilter_json.go       | 233 ++++++++++++++++
 pkg/ffapi/restfilter_json_test.go  | 409 +++++++++++++++++++++++++++++
 pkg/i18n/en_base_error_messages.go |   3 +
 5 files changed, 654 insertions(+), 13 deletions(-)
 create mode 100644 pkg/ffapi/restfilter_json.go
 create mode 100644 pkg/ffapi/restfilter_json_test.go

diff --git a/pkg/ffapi/filter.go b/pkg/ffapi/filter.go
index dc38c16..8fa2dbf 100644
--- a/pkg/ffapi/filter.go
+++ b/pkg/ffapi/filter.go
@@ -70,6 +70,7 @@ type MultiConditionFilter interface {
 	Filter
 	// Add adds filters to the condition
 	Condition(...Filter) MultiConditionFilter
+	Conditions() []Filter
 }
 
 type AndFilter interface{ MultiConditionFilter }
@@ -569,6 +570,10 @@ type andFilter struct {
 	baseFilter
 }
 
+func (fb *andFilter) Conditions() []Filter {
+	return fb.children
+}
+
 func (fb *andFilter) Condition(children ...Filter) MultiConditionFilter {
 	fb.children = append(fb.children, children...)
 	return fb
@@ -588,6 +593,10 @@ type orFilter struct {
 	baseFilter
 }
 
+func (fb *orFilter) Conditions() []Filter {
+	return fb.children
+}
+
 func (fb *orFilter) Condition(children ...Filter) MultiConditionFilter {
 	fb.children = append(fb.children, children...)
 	return fb
diff --git a/pkg/ffapi/restfilter.go b/pkg/ffapi/restfilter.go
index d51865a..dc85ee6 100644
--- a/pkg/ffapi/restfilter.go
+++ b/pkg/ffapi/restfilter.go
@@ -29,19 +29,6 @@ import (
 	"github.com/hyperledger/firefly-common/pkg/log"
 )
 
-type FilterResultsWithCount struct {
-	Count int64       `json:"count"`
-	Total *int64      `json:"total,omitempty"` // omitted if a count was not calculated (AlwaysPaginate enabled, and count not specified)
-	Items interface{} `json:"items"`
-}
-
-type filterModifiers struct {
-	negate          bool
-	caseInsensitive bool
-	emptyIsNull     bool
-	andCombine      bool
-}
-
 func (hs *HandlerFactory) getValues(values url.Values, key string) (results []string) {
 	for queryName, queryValues := range values {
 		// We choose to be case insensitive for our filters, so protocolID and protocolid can be used interchangeably
diff --git a/pkg/ffapi/restfilter_json.go b/pkg/ffapi/restfilter_json.go
new file mode 100644
index 0000000..7a8dafe
--- /dev/null
+++ b/pkg/ffapi/restfilter_json.go
@@ -0,0 +1,233 @@
+// Copyright © 2023 Kaleido, Inc.
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
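As an illustration of what the new restfilter_json.go below enables (a sketch, not code from the patch): a REST handler can accept a POSTed JSON body, unmarshal it into the QueryJSON type defined in this file, and turn it into an ordinary filter using whatever QueryFactory it already uses for URL-style query parameters. The helper name is hypothetical, and the usual context/encoding/json imports are assumed.

func buildFilterFromJSON(ctx context.Context, body []byte, queryFactory QueryFactory) (Filter, error) {
	// Parse the POSTed query document into the JSON filter structure
	var query QueryJSON
	if err := json.Unmarshal(body, &query); err != nil {
		return nil, err
	}
	// Apply skip/limit/sort and the AND/OR condition tree to a new filter
	return query.BuildFilter(ctx, queryFactory)
}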
+ +package ffapi + +import ( + "context" + "database/sql/driver" + "encoding/json" + "fmt" + "strconv" + + "github.com/hyperledger/firefly-common/pkg/i18n" +) + +var allMods = []string{"not", "caseInsensitive"} +var justCaseInsensitive = []string{"caseInsensitive"} + +type FilterResultsWithCount struct { + Count int64 `json:"count"` + Total *int64 `json:"total,omitempty"` // omitted if a count was not calculated (AlwaysPaginate enabled, and count not specified) + Items interface{} `json:"items"` +} + +type filterModifiers struct { + negate bool + caseInsensitive bool + emptyIsNull bool + andCombine bool +} + +type FilterJSONBase struct { + Not bool `ffstruct:"FilterJSON" json:"not,omitempty"` + CaseInsensitive bool `ffstruct:"FilterJSON" json:"caseInsensitive,omitempty"` + Field string `ffstruct:"FilterJSON" json:"field,omitempty"` +} + +type FilterJSONKeyValue struct { + FilterJSONBase + Value SimpleFilterValue `ffstruct:"FilterJSON" json:"value,omitempty"` +} + +type FilterJSONKeyValues struct { + FilterJSONBase + Values []SimpleFilterValue `ffstruct:"FilterJSON" json:"values,omitempty"` +} + +type FilterJSON struct { + Or []*FilterJSON `ffstruct:"FilterJSON" json:"or,omitempty"` + Equal []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"equal,omitempty"` + Contains []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"contains,omitempty"` + StartsWith []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"startsWith,omitempty"` + LessThan []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lessThan,omitempty"` + LessThanOrEqual []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lessThanOrEqual,omitempty"` + GreaterThan []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"greaterThan,omitempty"` + GreaterThanOrEqual []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"greaterThanOrEqual,omitempty"` + In []*FilterJSONKeyValues `ffstruct:"FilterJSON" json:"in,omitempty"` +} + +type QueryJSON struct { + FilterJSON + Skip *uint64 `ffstruct:"FilterJSON" json:"skip,omitempty"` + Limit *uint64 `ffstruct:"FilterJSON" json:"limit,omitempty"` + Sort []string `ffstruct:"FilterJSON" json:"sort,omitempty"` +} + +type SimpleFilterValue string + +func (js *SimpleFilterValue) UnmarshalJSON(b []byte) error { + var v interface{} + err := json.Unmarshal(b, &v) + if err != nil { + return err + } + switch vi := v.(type) { + case float64: + *js = (SimpleFilterValue)(strconv.FormatFloat(vi, 'f', -1, 64)) + return nil + case string: + *js = (SimpleFilterValue)(vi) + return nil + case bool: + *js = (SimpleFilterValue)(fmt.Sprintf("%t", vi)) + return nil + default: + return i18n.NewError(context.Background(), i18n.MsgJSONQueryValueUnsupported, string(b)) + } +} + +func (js SimpleFilterValue) String() string { + return (string)(js) +} + +func (jq *QueryJSON) BuildFilter(ctx context.Context, qf QueryFactory) (Filter, error) { + fb := qf.NewFilter(ctx) + if jq.Skip != nil { + fb = fb.Skip(*jq.Skip) + } + if jq.Limit != nil { + fb = fb.Limit(*jq.Limit) + } + for _, s := range jq.Sort { + fb = fb.Sort(s) + } + return jq.BuildSubFilter(ctx, fb, &jq.FilterJSON) +} + +func (jq *QueryJSON) addSimpleFilters(fb FilterBuilder, jsonFilter *FilterJSON, andFilter AndFilter) AndFilter { + for _, e := range jsonFilter.Equal { + if e.CaseInsensitive { + if e.Not { + andFilter = andFilter.Condition(fb.NIeq(e.Field, e.Value.String())) + } else { + andFilter = andFilter.Condition(fb.IEq(e.Field, e.Value.String())) + } + } else { + if e.Not { + andFilter = andFilter.Condition(fb.Neq(e.Field, e.Value.String())) + } else { + andFilter = 
andFilter.Condition(fb.Eq(e.Field, e.Value.String())) + } + } + } + for _, e := range jsonFilter.Contains { + if e.CaseInsensitive { + if e.Not { + andFilter = andFilter.Condition(fb.NotIContains(e.Field, e.Value.String())) + } else { + andFilter = andFilter.Condition(fb.IContains(e.Field, e.Value.String())) + } + } else { + if e.Not { + andFilter = andFilter.Condition(fb.NotContains(e.Field, e.Value.String())) + } else { + andFilter = andFilter.Condition(fb.Contains(e.Field, e.Value.String())) + } + } + } + for _, e := range jsonFilter.StartsWith { + if e.CaseInsensitive { + if e.Not { + andFilter = andFilter.Condition(fb.NotIStartsWith(e.Field, e.Value.String())) + } else { + andFilter = andFilter.Condition(fb.IStartsWith(e.Field, e.Value.String())) + } + } else { + if e.Not { + andFilter = andFilter.Condition(fb.NotStartsWith(e.Field, e.Value.String())) + } else { + andFilter = andFilter.Condition(fb.StartsWith(e.Field, e.Value.String())) + } + } + } + return andFilter +} + +func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonFilter *FilterJSON) (Filter, error) { + andFilter := jq.addSimpleFilters(fb, jsonFilter, fb.And()) + for _, e := range jsonFilter.LessThan { + if e.CaseInsensitive || e.Not { + return nil, i18n.NewError(ctx, i18n.MsgJSONQueryOpUnsupportedMod, "lessThan", allMods) + } + andFilter = andFilter.Condition(fb.Lt(e.Field, e.Value.String())) + } + for _, e := range jsonFilter.LessThanOrEqual { + if e.CaseInsensitive || e.Not { + return nil, i18n.NewError(ctx, i18n.MsgJSONQueryOpUnsupportedMod, "lessThanOrEqual", allMods) + } + andFilter = andFilter.Condition(fb.Lte(e.Field, e.Value.String())) + } + for _, e := range jsonFilter.GreaterThan { + if e.CaseInsensitive || e.Not { + return nil, i18n.NewError(ctx, i18n.MsgJSONQueryOpUnsupportedMod, "greaterThan", allMods) + } + andFilter = andFilter.Condition(fb.Gt(e.Field, e.Value.String())) + } + for _, e := range jsonFilter.GreaterThanOrEqual { + if e.CaseInsensitive || e.Not { + return nil, i18n.NewError(ctx, i18n.MsgJSONQueryOpUnsupportedMod, "greaterThanOrEqual", allMods) + } + andFilter = andFilter.Condition(fb.Gte(e.Field, e.Value.String())) + } + for _, e := range jsonFilter.In { + if e.CaseInsensitive { + return nil, i18n.NewError(ctx, i18n.MsgJSONQueryOpUnsupportedMod, "in", justCaseInsensitive) + } + if e.Not { + andFilter = andFilter.Condition(fb.NotIn(e.Field, toDriverValues(e.Values))) + } else { + andFilter = andFilter.Condition(fb.In(e.Field, toDriverValues(e.Values))) + } + } + if len(jsonFilter.Or) > 0 { + childFilter := fb.Or() + for _, child := range jsonFilter.Or { + subFilter, err := jq.BuildSubFilter(ctx, fb, child) + if err != nil { + return nil, err + } + childFilter.Condition(subFilter) + } + if len(childFilter.Conditions()) == 1 { + andFilter.Condition(childFilter.Conditions()[0]) + } else { + andFilter.Condition(childFilter) + } + } + if len(andFilter.Conditions()) == 1 { + return andFilter.Conditions()[0], nil + } + return andFilter, nil +} + +func toDriverValues(values []SimpleFilterValue) []driver.Value { + driverValues := make([]driver.Value, len(values)) + for i, v := range values { + driverValues[i] = v.String() + } + return driverValues +} diff --git a/pkg/ffapi/restfilter_json_test.go b/pkg/ffapi/restfilter_json_test.go new file mode 100644 index 0000000..5daa12a --- /dev/null +++ b/pkg/ffapi/restfilter_json_test.go @@ -0,0 +1,409 @@ +// Copyright © 2021 Kaleido, Inc. 
+// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ffapi + +import ( + "context" + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBuildQueryJSONNestedAndOr(t *testing.T) { + + var qf QueryJSON + err := json.Unmarshal([]byte(`{ + "skip": 5, + "limit": 10, + "sort": [ + "tag", + "-sequence" + ], + "equal": [ + { + "field": "tag", + "value": "a" + } + ], + "greaterThan": [ + { + "field": "sequence", + "value": 10 + } + ], + "or": [ + { + "equal": [ + { + "field": "masked", + "value": true + } + ], + "in": [ + { + "field": "tag", + "values": ["a","b","c"] + } + ] + }, + { + "equal": [ + { + "field": "masked", + "value": false + } + ] + } + ] + }`), &qf) + assert.NoError(t, err) + + filter, err := qf.BuildFilter(context.Background(), TestQueryFactory) + assert.NoError(t, err) + + fi, err := filter.Finalize() + assert.NoError(t, err) + + assert.Equal(t, "( tag == 'a' ) && ( sequence >> 10 ) && ( ( ( masked == true ) && ( tag IN ['a','b','c'] ) ) || ( masked == false ) ) sort=tag,-sequence skip=5 limit=10", fi.String()) +} + +func TestBuildQuerySingleNestedOr(t *testing.T) { + + var qf QueryJSON + err := json.Unmarshal([]byte(`{ + "or": [ + { + "equal": [ + { + "field": "tag", + "value": "a" + } + ] + } + ] + }`), &qf) + assert.NoError(t, err) + + filter, err := qf.BuildFilter(context.Background(), TestQueryFactory) + assert.NoError(t, err) + + fi, err := filter.Finalize() + assert.NoError(t, err) + + assert.Equal(t, "tag == 'a'", fi.String()) +} + +func TestBuildQueryJSONEqual(t *testing.T) { + + var qf QueryJSON + err := json.Unmarshal([]byte(`{ + "skip": 5, + "limit": 10, + "sort": [ + "tag", + "sequence" + ], + "equal": [ + { + "field": "created", + "value": 0 + }, + { + "not": true, + "field": "tag", + "value": "abc" + }, + { + "caseInsensitive": true, + "field": "tag", + "value": "ABC" + }, + { + "caseInsensitive": true, + "not": true, + "field": "tag", + "value": "abc" + } + ] + }`), &qf) + assert.NoError(t, err) + + filter, err := qf.BuildFilter(context.Background(), TestQueryFactory) + assert.NoError(t, err) + + fi, err := filter.Finalize() + assert.NoError(t, err) + + assert.Equal(t, "( created == 0 ) && ( tag != 'abc' ) && ( tag := 'ABC' ) && ( tag ;= 'abc' ) sort=tag,sequence skip=5 limit=10", fi.String()) +} + +func TestBuildQueryJSONContains(t *testing.T) { + + var qf QueryJSON + err := json.Unmarshal([]byte(`{ + "skip": 5, + "limit": 10, + "contains": [ + { + "field": "tag", + "value": 0 + }, + { + "not": true, + "field": "tag", + "value": "abc" + }, + { + "caseInsensitive": true, + "field": "tag", + "value": "ABC" + }, + { + "caseInsensitive": true, + "not": true, + "field": "tag", + "value": "abc" + } + ] + }`), &qf) + assert.NoError(t, err) + + filter, err := qf.BuildFilter(context.Background(), TestQueryFactory) + assert.NoError(t, err) + + fi, err := filter.Finalize() + assert.NoError(t, err) + + assert.Equal(t, "( tag %= '0' ) && ( tag !% 'abc' ) && ( tag :% 'ABC' ) && ( tag ;% 
'abc' ) skip=5 limit=10", fi.String()) +} + +func TestBuildQueryJSONStartsWith(t *testing.T) { + + var qf QueryJSON + err := json.Unmarshal([]byte(`{ + "skip": 5, + "limit": 10, + "startsWith": [ + { + "field": "tag", + "value": 0 + }, + { + "not": true, + "field": "tag", + "value": "abc" + }, + { + "caseInsensitive": true, + "field": "tag", + "value": "ABC" + }, + { + "caseInsensitive": true, + "not": true, + "field": "tag", + "value": true + } + ] + }`), &qf) + assert.NoError(t, err) + + filter, err := qf.BuildFilter(context.Background(), TestQueryFactory) + assert.NoError(t, err) + + fi, err := filter.Finalize() + assert.NoError(t, err) + + assert.Equal(t, "( tag ^= '0' ) && ( tag !^ 'abc' ) && ( tag :^ 'ABC' ) && ( tag ;^ 'true' ) skip=5 limit=10", fi.String()) +} + +func TestBuildQueryJSONGreaterThan(t *testing.T) { + + var qf QueryJSON + err := json.Unmarshal([]byte(`{ + "skip": 5, + "limit": 10, + "greaterThan": [ + { + "field": "sequence", + "value": 0 + } + ] + }`), &qf) + assert.NoError(t, err) + + filter, err := qf.BuildFilter(context.Background(), TestQueryFactory) + assert.NoError(t, err) + + fi, err := filter.Finalize() + assert.NoError(t, err) + + assert.Equal(t, "sequence >> 0 skip=5 limit=10", fi.String()) +} + +func TestBuildQueryJSONLessThan(t *testing.T) { + + var qf QueryJSON + err := json.Unmarshal([]byte(`{ + "skip": 5, + "limit": 10, + "lessThan": [ + { + "field": "sequence", + "value": "12345" + } + ] + }`), &qf) + assert.NoError(t, err) + + filter, err := qf.BuildFilter(context.Background(), TestQueryFactory) + assert.NoError(t, err) + + fi, err := filter.Finalize() + assert.NoError(t, err) + + assert.Equal(t, "sequence << 12345 skip=5 limit=10", fi.String()) +} + +func TestBuildQueryJSONGreaterThanOrEqual(t *testing.T) { + + var qf QueryJSON + err := json.Unmarshal([]byte(`{ + "skip": 5, + "limit": 10, + "greaterThanOrEqual": [ + { + "field": "sequence", + "value": 0 + } + ] + }`), &qf) + assert.NoError(t, err) + + filter, err := qf.BuildFilter(context.Background(), TestQueryFactory) + assert.NoError(t, err) + + fi, err := filter.Finalize() + assert.NoError(t, err) + + assert.Equal(t, "sequence >= 0 skip=5 limit=10", fi.String()) +} + +func TestBuildQueryJSONLessThanOrEqual(t *testing.T) { + + var qf QueryJSON + err := json.Unmarshal([]byte(`{ + "skip": 5, + "limit": 10, + "lessThanOrEqual": [ + { + "field": "sequence", + "value": "12345" + } + ] + }`), &qf) + assert.NoError(t, err) + + filter, err := qf.BuildFilter(context.Background(), TestQueryFactory) + assert.NoError(t, err) + + fi, err := filter.Finalize() + assert.NoError(t, err) + + assert.Equal(t, "sequence <= 12345 skip=5 limit=10", fi.String()) +} + +func TestBuildQueryJSONIn(t *testing.T) { + + var qf QueryJSON + err := json.Unmarshal([]byte(`{ + "skip": 5, + "limit": 10, + "in": [ + { + "field": "tag", + "values": ["a","b","c"] + }, + { + "not": true, + "field": "tag", + "values": ["x","y","z"] + } + ] + }`), &qf) + assert.NoError(t, err) + + filter, err := qf.BuildFilter(context.Background(), TestQueryFactory) + assert.NoError(t, err) + + fi, err := filter.Finalize() + assert.NoError(t, err) + + assert.Equal(t, "( tag IN ['a','b','c'] ) && ( tag NI ['x','y','z'] ) skip=5 limit=10", fi.String()) +} + +func TestBadModifiers(t *testing.T) { + + var qf1 QueryJSON + err := json.Unmarshal([]byte(`{"lessThan": [{"not": true}]}`), &qf1) + assert.NoError(t, err) + _, err = qf1.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00240", err) + + var qf2 QueryJSON + err = 
json.Unmarshal([]byte(`{"lessThanOrEqual": [{"not": true}]}`), &qf2) + assert.NoError(t, err) + _, err = qf2.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00240", err) + + var qf3 QueryJSON + err = json.Unmarshal([]byte(`{"greaterThan": [{"not": true}]}`), &qf3) + assert.NoError(t, err) + _, err = qf3.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00240", err) + + var qf4 QueryJSON + err = json.Unmarshal([]byte(`{"greaterThanOrEqual": [{"not": true}]}`), &qf4) + assert.NoError(t, err) + _, err = qf4.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00240", err) + + var qf5 QueryJSON + err = json.Unmarshal([]byte(`{"in": [{"caseInsensitive": true}]}`), &qf5) + assert.NoError(t, err) + _, err = qf5.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00240", err) + + var qf6 QueryJSON + err = json.Unmarshal([]byte(`{"or": [{"in": [{"caseInsensitive": true}]}] }`), &qf6) + assert.NoError(t, err) + _, err = qf6.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00240", err) + +} + +func TestStringableParseFail(t *testing.T) { + + var js SimpleFilterValue + err := js.UnmarshalJSON([]byte(`{!!! not json`)) + assert.Error(t, err) + + err = js.UnmarshalJSON([]byte(`{"this": "is an object"}`)) + assert.Error(t, err) + +} diff --git a/pkg/i18n/en_base_error_messages.go b/pkg/i18n/en_base_error_messages.go index 806140d..5aadff6 100644 --- a/pkg/i18n/en_base_error_messages.go +++ b/pkg/i18n/en_base_error_messages.go @@ -175,4 +175,7 @@ var ( MsgESConfigNotInitialized = ffe("FF00237", "Event stream manager configuration not initialized") MsgInvalidWebSocketURL = ffe("FF00238", "Invalid WebSocket URL (must have ws: or wss: proto, otherwise use httpUrl option): '%s'") MsgRequestYAMLInvalid = ffe("FF00239", "Unable to process input as YAML: %s", 400) + MsgJSONQueryOpUnsupportedMod = ffe("FF00240", "Operation '%s' does not support modifiers: %v", 400) + MsgJSONQueryValueUnsupported = ffe("FF00241", "Field value not supported (must be string, number, or boolean): %s", 400) + MsgJSONQuerySortUnsupported = ffe("FF00242", "Invalid 'order' for sort (must be 'asc', 'ascending', 'desc' or 'descending'): %s", 400) ) From 59fba55f4b37a23708a1d1cd1969eac8c9fd1bda Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Sun, 3 Dec 2023 00:55:02 -0500 Subject: [PATCH 02/19] Add FilterJSON field descriptions to common too Signed-off-by: Peter Broadhurst --- pkg/ffapi/restfilter_json_test.go | 6 ++++- pkg/i18n/en_base_field_descriptions.go | 37 ++++++++++++++++++++++++++ 2 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 pkg/i18n/en_base_field_descriptions.go diff --git a/pkg/ffapi/restfilter_json_test.go b/pkg/ffapi/restfilter_json_test.go index 5daa12a..e74f7b7 100644 --- a/pkg/ffapi/restfilter_json_test.go +++ b/pkg/ffapi/restfilter_json_test.go @@ -357,7 +357,7 @@ func TestBuildQueryJSONIn(t *testing.T) { assert.Equal(t, "( tag IN ['a','b','c'] ) && ( tag NI ['x','y','z'] ) skip=5 limit=10", fi.String()) } -func TestBadModifiers(t *testing.T) { +func TestBuildQueryJSONBadModifiers(t *testing.T) { var qf1 QueryJSON err := json.Unmarshal([]byte(`{"lessThan": [{"not": true}]}`), &qf1) @@ -407,3 +407,7 @@ func TestStringableParseFail(t *testing.T) { assert.Error(t, err) } + +func TestBuildQueryJSONDocumented(t *testing.T) { + CheckObjectDocumented(&QueryJSON{}) +} diff --git a/pkg/i18n/en_base_field_descriptions.go b/pkg/i18n/en_base_field_descriptions.go new file mode 100644 index 
0000000..49b1a97
--- /dev/null
+++ b/pkg/i18n/en_base_field_descriptions.go
@@ -0,0 +1,37 @@
+// Copyright © 2023 Kaleido, Inc.
+//
+// SPDX-License-Identifier: Apache-2.0
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package i18n
+
+var (
+	FilterJSONCaseInsensitive    = ffm("FilterJSON.caseInsensitive", "Configures whether the comparison is case sensitive - not supported for all operators")
+	FilterJSONNot                = ffm("FilterJSON.not", "Negates the comparison operation, so 'equal' becomes 'not equal' for example - not supported for all operators")
+	FilterJSONField              = ffm("FilterJSON.field", "Name of the field for the comparison operation")
+	FilterJSONValue              = ffm("FilterJSON.value", "A JSON simple value to use in the comparison - must be a string, number or boolean and be parsable for the type of the filter field")
+	FilterJSONValues             = ffm("FilterJSON.values", "Array of values to use in the comparison")
+	FilterJSONContains           = ffm("FilterJSON.contains", "Array of field + value combinations to apply as string-contains filters - all filters must match")
+	FilterJSONEqual              = ffm("FilterJSON.equal", "Array of field + value combinations to apply as equal filters - all must match")
+	FilterJSONStartsWith         = ffm("FilterJSON.startsWith", "Array of field + value combinations to apply as starts-with filters - all filters must match")
+	FilterJSONGreaterThan        = ffm("FilterJSON.greaterThan", "Array of field + value combinations to apply as greater-than filters - all filters must match")
+	FilterJSONGreaterThanOrEqual = ffm("FilterJSON.greaterThanOrEqual", "Array of field + value combinations to apply as greater-than-or-equal filters - all filters must match")
+	FilterJSONLessThan           = ffm("FilterJSON.lessThan", "Array of field + value combinations to apply as less-than filters - all filters must match")
+	FilterJSONLessThanOrEqual    = ffm("FilterJSON.lessThanOrEqual", "Array of field + value combinations to apply as less-than-or-equal filters - all filters must match")
+	FilterJSONIn                 = ffm("FilterJSON.in", "Array of field + values-array combinations to apply as 'in' filters (matching one of a set of values) - all filters must match")
+	FilterJSONLimit              = ffm("FilterJSON.limit", "Limit on the results to return")
+	FilterJSONSkip               = ffm("FilterJSON.skip", "Number of results to skip before returning entries, for skip+limit based pagination")
+	FilterJSONSort               = ffm("FilterJSON.sort", "Array of fields to sort by. A '-' prefix on a field requests that field is sorted in descending order")
+	FilterJSONOr                 = ffm("FilterJSON.or", "Array of sub-queries where any sub-query can match to return results (OR combined).
Note that within each sub-query all filters must match (AND combined)") +) From a4888f5a5140a0e6968ba44181a2e50eb9e0cd07 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Sun, 3 Dec 2023 11:58:06 -0500 Subject: [PATCH 03/19] Add count option Signed-off-by: Peter Broadhurst --- pkg/ffapi/restfilter_json.go | 4 ++++ pkg/ffapi/restfilter_json_test.go | 3 ++- pkg/i18n/en_base_field_descriptions.go | 1 + 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/pkg/ffapi/restfilter_json.go b/pkg/ffapi/restfilter_json.go index 7a8dafe..0a39f99 100644 --- a/pkg/ffapi/restfilter_json.go +++ b/pkg/ffapi/restfilter_json.go @@ -75,6 +75,7 @@ type QueryJSON struct { Skip *uint64 `ffstruct:"FilterJSON" json:"skip,omitempty"` Limit *uint64 `ffstruct:"FilterJSON" json:"limit,omitempty"` Sort []string `ffstruct:"FilterJSON" json:"sort,omitempty"` + Count *bool `ffstruct:"FilterJSON" json:"count,omitempty"` } type SimpleFilterValue string @@ -106,6 +107,9 @@ func (js SimpleFilterValue) String() string { func (jq *QueryJSON) BuildFilter(ctx context.Context, qf QueryFactory) (Filter, error) { fb := qf.NewFilter(ctx) + if jq.Count != nil { + fb = fb.Count(*jq.Count) + } if jq.Skip != nil { fb = fb.Skip(*jq.Skip) } diff --git a/pkg/ffapi/restfilter_json_test.go b/pkg/ffapi/restfilter_json_test.go index e74f7b7..74b08e2 100644 --- a/pkg/ffapi/restfilter_json_test.go +++ b/pkg/ffapi/restfilter_json_test.go @@ -114,6 +114,7 @@ func TestBuildQueryJSONEqual(t *testing.T) { err := json.Unmarshal([]byte(`{ "skip": 5, "limit": 10, + "count": true, "sort": [ "tag", "sequence" @@ -149,7 +150,7 @@ func TestBuildQueryJSONEqual(t *testing.T) { fi, err := filter.Finalize() assert.NoError(t, err) - assert.Equal(t, "( created == 0 ) && ( tag != 'abc' ) && ( tag := 'ABC' ) && ( tag ;= 'abc' ) sort=tag,sequence skip=5 limit=10", fi.String()) + assert.Equal(t, "( created == 0 ) && ( tag != 'abc' ) && ( tag := 'ABC' ) && ( tag ;= 'abc' ) sort=tag,sequence skip=5 limit=10 count=true", fi.String()) } func TestBuildQueryJSONContains(t *testing.T) { diff --git a/pkg/i18n/en_base_field_descriptions.go b/pkg/i18n/en_base_field_descriptions.go index 49b1a97..5e9a04d 100644 --- a/pkg/i18n/en_base_field_descriptions.go +++ b/pkg/i18n/en_base_field_descriptions.go @@ -33,5 +33,6 @@ var ( FilterJSONLimit = ffm("FilterJSON.limit", "Limit on the results to return") FilterJSONSkip = ffm("FilterJSON.skip", "Number of results to skip before returning entries, for skip+limit based pagination") FilterJSONSort = ffm("FilterJSON.sort", "Array of fields to sort by. A '-' prefix on a field requests that field is sorted in descending order") + FilterJSONCount = ffm("FilterJSON.count", "If true, the total number of entries that could be returned from the database will be calculated and returned as a 'total' (has a performance cost)") FilterJSONOr = ffm("FilterJSON.or", "Array of sub-queries where any sub-query can match to return results (OR combined). 
Note that within each sub-query all filters must match (AND combined)") ) From 80d217f9f292bf498c1dd502733c412d1b30406d Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Sun, 3 Dec 2023 13:23:12 -0500 Subject: [PATCH 04/19] Add accessor function to query factory Signed-off-by: Peter Broadhurst --- mocks/authmocks/plugin.go | 2 +- mocks/crudmocks/crud.go | 18 +++++++++++++++++- mocks/dbmigratemocks/driver.go | 2 +- mocks/httpservermocks/go_http_server.go | 2 +- mocks/wsservermocks/web_socket_channels.go | 2 +- mocks/wsservermocks/web_socket_server.go | 2 +- pkg/dbsql/crud.go | 5 +++++ pkg/dbsql/crud_test.go | 1 + 8 files changed, 28 insertions(+), 6 deletions(-) diff --git a/mocks/authmocks/plugin.go b/mocks/authmocks/plugin.go index 19a8b7c..0db1f59 100644 --- a/mocks/authmocks/plugin.go +++ b/mocks/authmocks/plugin.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.36.1. DO NOT EDIT. +// Code generated by mockery v2.37.1. DO NOT EDIT. package authmocks diff --git a/mocks/crudmocks/crud.go b/mocks/crudmocks/crud.go index 111508d..b0a8aa5 100644 --- a/mocks/crudmocks/crud.go +++ b/mocks/crudmocks/crud.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.36.1. DO NOT EDIT. +// Code generated by mockery v2.37.1. DO NOT EDIT. package crudmocks @@ -243,6 +243,22 @@ func (_m *CRUD[T]) GetMany(ctx context.Context, filter ffapi.Filter) ([]T, *ffap return r0, r1, r2 } +// GetQueryFactory provides a mock function with given fields: +func (_m *CRUD[T]) GetQueryFactory() ffapi.QueryFactory { + ret := _m.Called() + + var r0 ffapi.QueryFactory + if rf, ok := ret.Get(0).(func() ffapi.QueryFactory); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(ffapi.QueryFactory) + } + } + + return r0 +} + // GetSequenceForID provides a mock function with given fields: ctx, id func (_m *CRUD[T]) GetSequenceForID(ctx context.Context, id string) (int64, error) { ret := _m.Called(ctx, id) diff --git a/mocks/dbmigratemocks/driver.go b/mocks/dbmigratemocks/driver.go index 62c57f6..6294e79 100644 --- a/mocks/dbmigratemocks/driver.go +++ b/mocks/dbmigratemocks/driver.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.36.1. DO NOT EDIT. +// Code generated by mockery v2.37.1. DO NOT EDIT. package dbmigratemocks diff --git a/mocks/httpservermocks/go_http_server.go b/mocks/httpservermocks/go_http_server.go index e4f27ee..a2d2f70 100644 --- a/mocks/httpservermocks/go_http_server.go +++ b/mocks/httpservermocks/go_http_server.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.36.1. DO NOT EDIT. +// Code generated by mockery v2.37.1. DO NOT EDIT. package httpservermocks diff --git a/mocks/wsservermocks/web_socket_channels.go b/mocks/wsservermocks/web_socket_channels.go index b6eb6e6..ed657c4 100644 --- a/mocks/wsservermocks/web_socket_channels.go +++ b/mocks/wsservermocks/web_socket_channels.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.36.1. DO NOT EDIT. +// Code generated by mockery v2.37.1. DO NOT EDIT. package wsservermocks diff --git a/mocks/wsservermocks/web_socket_server.go b/mocks/wsservermocks/web_socket_server.go index 3ba21af..60fa863 100644 --- a/mocks/wsservermocks/web_socket_server.go +++ b/mocks/wsservermocks/web_socket_server.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.36.1. DO NOT EDIT. +// Code generated by mockery v2.37.1. DO NOT EDIT. 
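To illustrate why the new GetQueryFactory accessor is useful (a hypothetical sketch, not code from this patch): a service holding a dbsql.CRUD[T] collection can now feed the collection's own query factory straight into the JSON filter support added in patch 01, so callers do not need to know which fields the collection defines. The helper name is invented, and the usual context/encoding/json imports plus the ffapi and dbsql packages are assumed.

func getManyViaJSONQuery[T dbsql.Resource](ctx context.Context, c dbsql.CRUD[T], body []byte) ([]T, error) {
	// Parse the POSTed JSON query document
	var query ffapi.QueryJSON
	if err := json.Unmarshal(body, &query); err != nil {
		return nil, err
	}
	// Build a filter against the collection's own query factory
	filter, err := query.BuildFilter(ctx, c.GetQueryFactory())
	if err != nil {
		return nil, err
	}
	items, _, err := c.GetMany(ctx, filter) // filter-result detail ignored in this sketch
	return items, err
}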
package wsservermocks diff --git a/pkg/dbsql/crud.go b/pkg/dbsql/crud.go index 626aa2d..70fa59c 100644 --- a/pkg/dbsql/crud.go +++ b/pkg/dbsql/crud.go @@ -116,6 +116,7 @@ type CRUD[T Resource] interface { DeleteMany(ctx context.Context, filter ffapi.Filter, hooks ...PostCompletionHook) (err error) // no events NewFilterBuilder(ctx context.Context) ffapi.FilterBuilder NewUpdateBuilder(ctx context.Context) ffapi.UpdateBuilder + GetQueryFactory() ffapi.QueryFactory Scoped(scope sq.Eq) CRUD[T] // allows dynamic scoping to a collection } @@ -149,6 +150,10 @@ func (c *CrudBase[T]) Scoped(scope sq.Eq) CRUD[T] { return &cScoped } +func (c *CrudBase[T]) GetQueryFactory() ffapi.QueryFactory { + return c.QueryFactory +} + func (c *CrudBase[T]) NewFilterBuilder(ctx context.Context) ffapi.FilterBuilder { if c.QueryFactory == nil { return nil diff --git a/pkg/dbsql/crud_test.go b/pkg/dbsql/crud_test.go index e306177..6eff520 100644 --- a/pkg/dbsql/crud_test.go +++ b/pkg/dbsql/crud_test.go @@ -686,6 +686,7 @@ func TestQueryFactoryAccessors(t *testing.T) { assert.NotNil(t, tc1.NewUpdateBuilder(context.Background())) tc2 := newLinkableCollection(&db.Database, "ns1") + assert.Nil(t, tc2.GetQueryFactory()) assert.Nil(t, tc2.NewFilterBuilder(context.Background())) assert.Nil(t, tc2.NewUpdateBuilder(context.Background())) } From 39b646ba918b604127d6ac19912994af3eb6b00d Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Sun, 3 Dec 2023 14:41:11 -0500 Subject: [PATCH 05/19] Do case insensitive field matching Signed-off-by: Peter Broadhurst --- pkg/ffapi/restfilter_json.go | 87 +++++++++++++++++++++++-------- pkg/ffapi/restfilter_json_test.go | 63 +++++++++++++++++++--- 2 files changed, 123 insertions(+), 27 deletions(-) diff --git a/pkg/ffapi/restfilter_json.go b/pkg/ffapi/restfilter_json.go index 0a39f99..6a863f1 100644 --- a/pkg/ffapi/restfilter_json.go +++ b/pkg/ffapi/restfilter_json.go @@ -22,6 +22,7 @@ import ( "encoding/json" "fmt" "strconv" + "strings" "github.com/hyperledger/firefly-common/pkg/i18n" ) @@ -122,89 +123,133 @@ func (jq *QueryJSON) BuildFilter(ctx context.Context, qf QueryFactory) (Filter, return jq.BuildSubFilter(ctx, fb, &jq.FilterJSON) } -func (jq *QueryJSON) addSimpleFilters(fb FilterBuilder, jsonFilter *FilterJSON, andFilter AndFilter) AndFilter { +func validateFilterField(ctx context.Context, fb FilterBuilder, fieldAnyCase string) (string, error) { + for _, f := range fb.Fields() { + if strings.EqualFold(fieldAnyCase, f) { + return f, nil + } + } + return "", i18n.NewError(ctx, i18n.MsgInvalidFilterField, fieldAnyCase) +} + +func (jq *QueryJSON) addSimpleFilters(ctx context.Context, fb FilterBuilder, jsonFilter *FilterJSON, andFilter AndFilter) (AndFilter, error) { for _, e := range jsonFilter.Equal { + field, err := validateFilterField(ctx, fb, e.Field) + if err != nil { + return nil, err + } if e.CaseInsensitive { if e.Not { - andFilter = andFilter.Condition(fb.NIeq(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.NIeq(field, e.Value.String())) } else { - andFilter = andFilter.Condition(fb.IEq(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.IEq(field, e.Value.String())) } } else { if e.Not { - andFilter = andFilter.Condition(fb.Neq(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.Neq(field, e.Value.String())) } else { - andFilter = andFilter.Condition(fb.Eq(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.Eq(field, e.Value.String())) } } } for _, e := range jsonFilter.Contains { + field, err := 
validateFilterField(ctx, fb, e.Field) + if err != nil { + return nil, err + } if e.CaseInsensitive { if e.Not { - andFilter = andFilter.Condition(fb.NotIContains(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.NotIContains(field, e.Value.String())) } else { - andFilter = andFilter.Condition(fb.IContains(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.IContains(field, e.Value.String())) } } else { if e.Not { - andFilter = andFilter.Condition(fb.NotContains(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.NotContains(field, e.Value.String())) } else { - andFilter = andFilter.Condition(fb.Contains(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.Contains(field, e.Value.String())) } } } for _, e := range jsonFilter.StartsWith { + field, err := validateFilterField(ctx, fb, e.Field) + if err != nil { + return nil, err + } if e.CaseInsensitive { if e.Not { - andFilter = andFilter.Condition(fb.NotIStartsWith(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.NotIStartsWith(field, e.Value.String())) } else { - andFilter = andFilter.Condition(fb.IStartsWith(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.IStartsWith(field, e.Value.String())) } } else { if e.Not { - andFilter = andFilter.Condition(fb.NotStartsWith(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.NotStartsWith(field, e.Value.String())) } else { - andFilter = andFilter.Condition(fb.StartsWith(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.StartsWith(field, e.Value.String())) } } } - return andFilter + return andFilter, nil } func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonFilter *FilterJSON) (Filter, error) { - andFilter := jq.addSimpleFilters(fb, jsonFilter, fb.And()) + andFilter, err := jq.addSimpleFilters(ctx, fb, jsonFilter, fb.And()) + if err != nil { + return nil, err + } for _, e := range jsonFilter.LessThan { + field, err := validateFilterField(ctx, fb, e.Field) + if err != nil { + return nil, err + } if e.CaseInsensitive || e.Not { return nil, i18n.NewError(ctx, i18n.MsgJSONQueryOpUnsupportedMod, "lessThan", allMods) } - andFilter = andFilter.Condition(fb.Lt(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.Lt(field, e.Value.String())) } for _, e := range jsonFilter.LessThanOrEqual { + field, err := validateFilterField(ctx, fb, e.Field) + if err != nil { + return nil, err + } if e.CaseInsensitive || e.Not { return nil, i18n.NewError(ctx, i18n.MsgJSONQueryOpUnsupportedMod, "lessThanOrEqual", allMods) } - andFilter = andFilter.Condition(fb.Lte(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.Lte(field, e.Value.String())) } for _, e := range jsonFilter.GreaterThan { + field, err := validateFilterField(ctx, fb, e.Field) + if err != nil { + return nil, err + } if e.CaseInsensitive || e.Not { return nil, i18n.NewError(ctx, i18n.MsgJSONQueryOpUnsupportedMod, "greaterThan", allMods) } - andFilter = andFilter.Condition(fb.Gt(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.Gt(field, e.Value.String())) } for _, e := range jsonFilter.GreaterThanOrEqual { + field, err := validateFilterField(ctx, fb, e.Field) + if err != nil { + return nil, err + } if e.CaseInsensitive || e.Not { return nil, i18n.NewError(ctx, i18n.MsgJSONQueryOpUnsupportedMod, "greaterThanOrEqual", allMods) } - andFilter = andFilter.Condition(fb.Gte(e.Field, e.Value.String())) + andFilter = andFilter.Condition(fb.Gte(field, e.Value.String())) } for _, e := range 
jsonFilter.In { + field, err := validateFilterField(ctx, fb, e.Field) + if err != nil { + return nil, err + } if e.CaseInsensitive { return nil, i18n.NewError(ctx, i18n.MsgJSONQueryOpUnsupportedMod, "in", justCaseInsensitive) } if e.Not { - andFilter = andFilter.Condition(fb.NotIn(e.Field, toDriverValues(e.Values))) + andFilter = andFilter.Condition(fb.NotIn(field, toDriverValues(e.Values))) } else { - andFilter = andFilter.Condition(fb.In(e.Field, toDriverValues(e.Values))) + andFilter = andFilter.Condition(fb.In(field, toDriverValues(e.Values))) } } if len(jsonFilter.Or) > 0 { diff --git a/pkg/ffapi/restfilter_json_test.go b/pkg/ffapi/restfilter_json_test.go index 74b08e2..b9fe7ea 100644 --- a/pkg/ffapi/restfilter_json_test.go +++ b/pkg/ffapi/restfilter_json_test.go @@ -361,37 +361,37 @@ func TestBuildQueryJSONIn(t *testing.T) { func TestBuildQueryJSONBadModifiers(t *testing.T) { var qf1 QueryJSON - err := json.Unmarshal([]byte(`{"lessThan": [{"not": true}]}`), &qf1) + err := json.Unmarshal([]byte(`{"lessThan": [{"not": true, "field": "tag"}]}`), &qf1) assert.NoError(t, err) _, err = qf1.BuildFilter(context.Background(), TestQueryFactory) assert.Regexp(t, "FF00240", err) var qf2 QueryJSON - err = json.Unmarshal([]byte(`{"lessThanOrEqual": [{"not": true}]}`), &qf2) + err = json.Unmarshal([]byte(`{"lessThanOrEqual": [{"not": true, "field": "tag"}]}`), &qf2) assert.NoError(t, err) _, err = qf2.BuildFilter(context.Background(), TestQueryFactory) assert.Regexp(t, "FF00240", err) var qf3 QueryJSON - err = json.Unmarshal([]byte(`{"greaterThan": [{"not": true}]}`), &qf3) + err = json.Unmarshal([]byte(`{"greaterThan": [{"not": true, "field": "tag"}]}`), &qf3) assert.NoError(t, err) _, err = qf3.BuildFilter(context.Background(), TestQueryFactory) assert.Regexp(t, "FF00240", err) var qf4 QueryJSON - err = json.Unmarshal([]byte(`{"greaterThanOrEqual": [{"not": true}]}`), &qf4) + err = json.Unmarshal([]byte(`{"greaterThanOrEqual": [{"not": true, "field": "tag"}]}`), &qf4) assert.NoError(t, err) _, err = qf4.BuildFilter(context.Background(), TestQueryFactory) assert.Regexp(t, "FF00240", err) var qf5 QueryJSON - err = json.Unmarshal([]byte(`{"in": [{"caseInsensitive": true}]}`), &qf5) + err = json.Unmarshal([]byte(`{"in": [{"caseInsensitive": true, "field": "tag"}]}`), &qf5) assert.NoError(t, err) _, err = qf5.BuildFilter(context.Background(), TestQueryFactory) assert.Regexp(t, "FF00240", err) var qf6 QueryJSON - err = json.Unmarshal([]byte(`{"or": [{"in": [{"caseInsensitive": true}]}] }`), &qf6) + err = json.Unmarshal([]byte(`{"or": [{"in": [{"caseInsensitive": true, "field": "tag"}]}] }`), &qf6) assert.NoError(t, err) _, err = qf6.BuildFilter(context.Background(), TestQueryFactory) assert.Regexp(t, "FF00240", err) @@ -409,6 +409,57 @@ func TestStringableParseFail(t *testing.T) { } +func TestBuildQueryJSONBadFields(t *testing.T) { + + var qf1 QueryJSON + err := json.Unmarshal([]byte(`{"equal": [{"field": "wrong"}]}`), &qf1) + assert.NoError(t, err) + _, err = qf1.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00142", err) + + var qf2 QueryJSON + err = json.Unmarshal([]byte(`{"contains": [{"field": "wrong"}]}`), &qf2) + assert.NoError(t, err) + _, err = qf2.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00142", err) + + var qf3 QueryJSON + err = json.Unmarshal([]byte(`{"startsWith": [{"field": "wrong"}]}`), &qf3) + assert.NoError(t, err) + _, err = qf3.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00142", err) + + var qf4 
QueryJSON + err = json.Unmarshal([]byte(`{"lessThan": [{"field": "wrong"}]}`), &qf4) + assert.NoError(t, err) + _, err = qf4.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00142", err) + + var qf5 QueryJSON + err = json.Unmarshal([]byte(`{"lessThanOrEqual": [{"field": "wrong"}]}`), &qf5) + assert.NoError(t, err) + _, err = qf5.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00142", err) + + var qf6 QueryJSON + err = json.Unmarshal([]byte(`{"greaterThan": [{"field": "wrong"}]}`), &qf6) + assert.NoError(t, err) + _, err = qf6.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00142", err) + + var qf7 QueryJSON + err = json.Unmarshal([]byte(`{"greaterThanOrEqual": [{"field": "wrong"}]}`), &qf7) + assert.NoError(t, err) + _, err = qf7.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00142", err) + + var qf8 QueryJSON + err = json.Unmarshal([]byte(`{"in": [{"field": "wrong"}]}`), &qf8) + assert.NoError(t, err) + _, err = qf8.BuildFilter(context.Background(), TestQueryFactory) + assert.Regexp(t, "FF00142", err) +} + func TestBuildQueryJSONDocumented(t *testing.T) { CheckObjectDocumented(&QueryJSON{}) } From eb5720a3c3d69779db5970626729913a1729b5e9 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Sun, 3 Dec 2023 17:27:39 -0500 Subject: [PATCH 06/19] Reduce logging on debug to a single line per SQL, and add count on GetMany Signed-off-by: Peter Broadhurst --- pkg/dbsql/crud.go | 1 + pkg/dbsql/database.go | 40 ++++++++++++++++++++++++---------------- 2 files changed, 25 insertions(+), 16 deletions(-) diff --git a/pkg/dbsql/crud.go b/pkg/dbsql/crud.go index 70fa59c..64cfe76 100644 --- a/pkg/dbsql/crud.go +++ b/pkg/dbsql/crud.go @@ -697,6 +697,7 @@ func (c *CrudBase[T]) getManyScoped(ctx context.Context, tableFrom string, fi *f } instances = append(instances, inst) } + log.L(ctx).Debugf("SQL<- GetMany(%s): %d", c.Table, len(instances)) return instances, c.DB.QueryRes(ctx, c.Table, tx, fop, c.ReadQueryModifier, fi), err } diff --git a/pkg/dbsql/database.go b/pkg/dbsql/database.go index 926d744..69d751c 100644 --- a/pkg/dbsql/database.go +++ b/pkg/dbsql/database.go @@ -20,6 +20,7 @@ import ( "context" "database/sql" "fmt" + "time" sq "github.com/Masterminds/squirrel" "github.com/golang-migrate/migrate/v4" @@ -182,7 +183,8 @@ func (s *Database) BeginOrUseTx(ctx context.Context) (ctx1 context.Context, tx * l := log.L(ctx).WithField("dbtx", fftypes.ShortID()) ctx1 = log.WithLogger(ctx, l) - l.Debugf("SQL-> begin") + before := time.Now() + l.Tracef("SQL-> begin") sqlTX, err := s.db.Begin() if err != nil { return ctx1, nil, false, i18n.WrapError(ctx1, err, i18n.MsgDBBeginFailed) @@ -191,7 +193,7 @@ func (s *Database) BeginOrUseTx(ctx context.Context) (ctx1 context.Context, tx * sqlTX: sqlTX, } ctx1 = context.WithValue(ctx1, txContextKey{}, tx) - l.Debugf("SQL<- begin") + l.Debugf("SQL<- begin (%.2fms)", floatMillisSince(before)) return ctx1, tx, false, err } @@ -207,7 +209,7 @@ func (s *Database) QueryTx(ctx context.Context, table string, tx *TXWrapper, q s if err != nil { return nil, tx, i18n.WrapError(ctx, err, i18n.MsgDBQueryBuildFailed) } - l.Debugf(`SQL-> query %s`, table) + before := time.Now() l.Tracef(`SQL-> query: %s (args: %+v)`, sqlQuery, args) var rows *sql.Rows if tx != nil { @@ -219,7 +221,7 @@ func (s *Database) QueryTx(ctx context.Context, table string, tx *TXWrapper, q s l.Errorf(`SQL query failed: %s sql=[ %s ]`, err, sqlQuery) return nil, tx, i18n.WrapError(ctx, err, 
i18n.MsgDBQueryFailed) } - l.Debugf(`SQL<- query %s`, table) + l.Debugf(`SQL<- query %s (%.2fms)`, table, floatMillisSince(before)) return rows, tx, nil } @@ -246,7 +248,7 @@ func (s *Database) CountQuery(ctx context.Context, table string, tx *TXWrapper, if err != nil { return count, i18n.WrapError(ctx, err, i18n.MsgDBQueryBuildFailed) } - l.Debugf(`SQL-> count query %s`, table) + before := time.Now() l.Tracef(`SQL-> count query: %s (args: %+v)`, sqlQuery, args) var rows *sql.Rows if tx != nil { @@ -264,7 +266,7 @@ func (s *Database) CountQuery(ctx context.Context, table string, tx *TXWrapper, return count, i18n.WrapError(ctx, err, i18n.MsgDBReadErr, table) } } - l.Debugf(`SQL<- count query %s: %d`, table, count) + l.Debugf(`SQL<- count query %s: %d (%.2fms)`, table, count, floatMillisSince(before)) return count, nil } @@ -299,7 +301,7 @@ func (s *Database) InsertTxRows(ctx context.Context, table string, tx *TXWrapper if err != nil { return i18n.WrapError(ctx, err, i18n.MsgDBQueryBuildFailed) } - l.Debugf(`SQL-> insert %s`, table) + before := time.Now() l.Tracef(`SQL-> insert query: %s (args: %+v)`, sqlQuery, args) if useQuery { result, err := tx.sqlTX.QueryContext(ctx, sqlQuery, args...) @@ -332,7 +334,7 @@ func (s *Database) InsertTxRows(ctx context.Context, table string, tx *TXWrapper } sequences[0], _ = res.LastInsertId() } - l.Debugf(`SQL<- insert %s sequences=%v`, table, sequences) + l.Debugf(`SQL<- insert %s sequences=%v (%.2fms)`, table, sequences, floatMillisSince(before)) if postCommit != nil { tx.AddPostCommitHook(postCommit) @@ -346,7 +348,7 @@ func (s *Database) DeleteTx(ctx context.Context, table string, tx *TXWrapper, q if err != nil { return i18n.WrapError(ctx, err, i18n.MsgDBQueryBuildFailed) } - l.Debugf(`SQL-> delete %s`, table) + before := time.Now() l.Tracef(`SQL-> delete query: %s args: %+v`, sqlQuery, args) res, err := tx.sqlTX.ExecContext(ctx, sqlQuery, args...) if err != nil { @@ -354,7 +356,7 @@ func (s *Database) DeleteTx(ctx context.Context, table string, tx *TXWrapper, q return i18n.WrapError(ctx, err, i18n.MsgDBDeleteFailed) } ra, _ := res.RowsAffected() - l.Debugf(`SQL<- delete %s affected=%d`, table, ra) + l.Debugf(`SQL<- delete %s affected=%d (%.2fms)`, table, ra, floatMillisSince(before)) if ra < 1 { return fftypes.DeleteRecordNotFound } @@ -371,7 +373,7 @@ func (s *Database) UpdateTx(ctx context.Context, table string, tx *TXWrapper, q if err != nil { return -1, i18n.WrapError(ctx, err, i18n.MsgDBQueryBuildFailed) } - l.Debugf(`SQL-> update %s`, table) + before := time.Now() l.Tracef(`SQL-> update query: %s (args: %+v)`, sqlQuery, args) res, err := tx.sqlTX.ExecContext(ctx, sqlQuery, args...) 
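// Illustration (comments only, not part of this hunk): the change repeated across
// these database functions is the same timing pattern - capture a start time, keep
// the verbose pre-statement log at Trace, and emit a single Debug line on
// completion that includes the elapsed time, for example:
//
//	before := time.Now()
//	// ... execute the statement ...
//	l.Debugf(`SQL<- update %s affected=%d (%.2fms)`, table, ra, elapsedMillis(before))
//
// Here elapsedMillis is a hypothetical stand-in for the patch's helper; fractional
// milliseconds can be computed as float64(time.Since(before)) / float64(time.Millisecond).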
if err != nil { @@ -379,7 +381,7 @@ func (s *Database) UpdateTx(ctx context.Context, table string, tx *TXWrapper, q return -1, i18n.WrapError(ctx, err, i18n.MsgDBUpdateFailed) } ra, _ := res.RowsAffected() - l.Debugf(`SQL<- update %s affected=%d`, table, ra) + l.Debugf(`SQL<- update %s affected=%d (%.2fms)`, table, ra, floatMillisSince(before)) if postCommit != nil { tx.AddPostCommitHook(postCommit) @@ -392,13 +394,14 @@ func (s *Database) AcquireLockTx(ctx context.Context, lockName string, tx *TXWra if s.features.AcquireLock != nil { sqlQuery := s.features.AcquireLock(lockName) - l.Debugf(`SQL-> lock %s`, lockName) + before := time.Now() + l.Tracef(`SQL-> lock %s`, lockName) _, err := tx.sqlTX.ExecContext(ctx, sqlQuery) if err != nil { l.Errorf(`SQL lock failed: %s sql=[ %s ]`, err, sqlQuery) return i18n.WrapError(ctx, err, i18n.MsgDBLockFailed) } - l.Debugf(`SQL<- lock %s`, lockName) + l.Debugf(`SQL<- lock %s (%.2fms)`, lockName, floatMillisSince(before)) } return nil } @@ -436,13 +439,14 @@ func (s *Database) CommitTx(ctx context.Context, tx *TXWrapper, autoCommit bool) } } - l.Debugf(`SQL-> commit`) + before := time.Now() + l.Tracef(`SQL-> commit`) err := tx.sqlTX.Commit() if err != nil { l.Errorf(`SQL commit failed: %s`, err) return i18n.WrapError(ctx, err, i18n.MsgDBCommitFailed) } - l.Debugf(`SQL<- commit`) + l.Debugf(`SQL<- commit (%.2fms)`, floatMillisSince(before)) // Emit any post commit events (these aren't currently allowed to cause errors) for i, pce := range tx.postCommit { @@ -454,6 +458,10 @@ func (s *Database) CommitTx(ctx context.Context, tx *TXWrapper, autoCommit bool) return nil } +func floatMillisSince(t time.Time) float64 { + return (float64)(time.Since(t).Milliseconds()) / (float64)(time.Millisecond) +} + func (s *Database) DB() *sql.DB { return s.db } From a22e1d74c73544a27d55cdeaa728a54dba1ce988 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Mon, 4 Dec 2023 15:03:49 -0500 Subject: [PATCH 07/19] Fix incorrect import of FFTM into a cycle Signed-off-by: Peter Broadhurst --- go.mod | 4 ++-- go.sum | 8 +++----- pkg/eventstreams/activestream.go | 3 +-- 3 files changed, 6 insertions(+), 9 deletions(-) diff --git a/go.mod b/go.mod index 228e999..c85a876 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,6 @@ require ( github.com/google/uuid v1.3.0 github.com/gorilla/mux v1.8.0 github.com/gorilla/websocket v1.5.0 - github.com/hyperledger/firefly-transaction-manager v1.3.4 github.com/jarcoal/httpmock v1.2.0 github.com/karlseguin/ccache v2.0.3+incompatible github.com/mattn/go-sqlite3 v1.14.16 @@ -52,6 +51,7 @@ require ( github.com/karlseguin/expect v1.0.8 // indirect github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect + github.com/lib/pq v1.10.9 // indirect github.com/magiconair/properties v1.8.6 // indirect github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect @@ -60,7 +60,6 @@ require ( github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect - github.com/oklog/ulid/v2 v2.1.0 // indirect github.com/onsi/ginkgo v1.16.5 // indirect github.com/onsi/gomega v1.19.0 // indirect github.com/pelletier/go-toml v1.9.5 // indirect @@ -84,4 +83,5 @@ require ( gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect + gotest.tools/v3 v3.1.0 // indirect ) diff --git a/go.sum 
b/go.sum index 4a33036..d8f5808 100644 --- a/go.sum +++ b/go.sum @@ -188,8 +188,6 @@ github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/hyperledger/firefly-transaction-manager v1.3.4 h1:L3KNuyVdOpw+wgS44gUBs+5dh3vxL921h1rlKGZFz6s= -github.com/hyperledger/firefly-transaction-manager v1.3.4/go.mod h1:Bbp4hDoOFbu463pTfyFY/MPDWyTq89uAFk4OcJS7UXY= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc= @@ -229,6 +227,7 @@ github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtB github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk= github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw= github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw= +github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= @@ -261,8 +260,6 @@ github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRW github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= -github.com/oklog/ulid/v2 v2.1.0 h1:+9lhoxAP56we25tyYETBBY1YLA2SaoLvUFgrP2miPJU= -github.com/oklog/ulid/v2 v2.1.0/go.mod h1:rcEKHmBBKfef9DhnvX7y1HZBYxjXb0cP5ExxNsTT1QQ= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= @@ -271,7 +268,6 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.19.0 h1:4ieX6qQjPP/BfC3mpsAtIGGlxTWPeA3Inl/7DtXw1tw= github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= -github.com/pborman/getopt v0.0.0-20170112200414-7148bc3a4c30/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o= github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/pelletier/go-toml/v2 v2.0.5 h1:ipoSadvV8oGUjnUbMub59IDPPwfxF694nG/jwbMiyQg= @@ -330,6 +326,7 @@ github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA= github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY= github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= github.com/spf13/jwalterweatherman 
v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.14.0 h1:Rg7d3Lo706X9tHsJMUjdiwMpHB7W8WnSVOssIY+JElU= @@ -720,6 +717,7 @@ gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= gotest.tools/v3 v3.1.0 h1:rVV8Tcg/8jHUkPUorwjaMTtemIMVXfIPKiOqnhEhakk= +gotest.tools/v3 v3.1.0/go.mod h1:fHy7eyTmJFO5bQbUsEGQ1v4m2J3Jz9eWL54TP2/ZuYQ= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/pkg/eventstreams/activestream.go b/pkg/eventstreams/activestream.go index 40a386f..7a308c0 100644 --- a/pkg/eventstreams/activestream.go +++ b/pkg/eventstreams/activestream.go @@ -25,7 +25,6 @@ import ( "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" - "github.com/hyperledger/firefly-transaction-manager/pkg/apitypes" ) type eventStreamBatch[DataType any] struct { @@ -296,7 +295,7 @@ func (as *activeStream[CT, DT]) dispatchBatch(batch *eventStreamBatch[DT]) (err as.LastDispatchStatus = DispatchStatusBlocked log.L(as.ctx).Errorf("Batch failed short retry after %.2fs secs. 
ErrorHandling=%s BlockedRetryDelay=%.2fs ", time.Since(*as.LastDispatchTime.Time()).Seconds(), *as.spec.ErrorHandling, time.Duration(*as.spec.BlockedRetryDelay).Seconds()) - if *as.spec.ErrorHandling == apitypes.ErrorHandlingTypeSkip { + if *as.spec.ErrorHandling == ErrorHandlingTypeSkip { // Swallow the error now we have logged it as.LastDispatchStatus = DispatchStatusSkipped return nil From 8311546f7700236b55e718ed7a1625573cbae688 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Mon, 4 Dec 2023 22:17:29 -0500 Subject: [PATCH 08/19] Relax ID requirement in-line with CRUD package Signed-off-by: Peter Broadhurst --- examples/ffpubsub.go | 6 +- mocks/authmocks/plugin.go | 14 +++- mocks/crudmocks/crud.go | 86 ++++++++++++++++++- mocks/dbmigratemocks/driver.go | 34 +++++++- mocks/httpservermocks/go_http_server.go | 18 +++- mocks/wsservermocks/web_socket_channels.go | 6 +- mocks/wsservermocks/web_socket_server.go | 6 +- pkg/eventstreams/activestream.go | 10 +-- pkg/eventstreams/e2e_test.go | 28 ++++--- pkg/eventstreams/event.go | 4 +- pkg/eventstreams/eventstreams.go | 33 ++++++-- pkg/eventstreams/eventstreams_test.go | 16 ++-- pkg/eventstreams/manager.go | 65 ++++++++------- pkg/eventstreams/manager_test.go | 96 ++++++++-------------- pkg/eventstreams/persistence.go | 24 ++++-- pkg/eventstreams/webhooks_test.go | 10 +-- pkg/eventstreams/websockets_test.go | 4 +- 17 files changed, 308 insertions(+), 152 deletions(-) diff --git a/examples/ffpubsub.go b/examples/ffpubsub.go index 6e27f06..31a6be2 100644 --- a/examples/ffpubsub.go +++ b/examples/ffpubsub.go @@ -89,6 +89,10 @@ type inMemoryStream struct { newMessages sync.Cond } +func (ims *inMemoryStream) NewID() string { + return fftypes.NewUUID().String() +} + func (ims *inMemoryStream) Validate(_ context.Context, _ *pubSubConfig) error { return nil // no config defined in pubSubConfig to validate } @@ -157,7 +161,7 @@ func setup(ctx context.Context) (pubSubESManager, *inMemoryStream, func()) { u.Scheme = "ws" log.L(ctx).Infof("Running on: %s", u) - p := eventstreams.NewEventStreamPersistence[pubSubConfig](sql) + p := eventstreams.NewEventStreamPersistence[pubSubConfig](sql, dbsql.UUIDValidator) c := eventstreams.GenerateConfig(ctx) ims := &inMemoryStream{ messages: []string{}, diff --git a/mocks/authmocks/plugin.go b/mocks/authmocks/plugin.go index 0db1f59..7fbce24 100644 --- a/mocks/authmocks/plugin.go +++ b/mocks/authmocks/plugin.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.37.1. DO NOT EDIT. +// Code generated by mockery v2.38.0. DO NOT EDIT. 
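A quick sketch of what the relaxed ID requirement means for runtime implementations (hypothetical code, not from the patch): the new NewID() hook, together with the validator now passed to NewEventStreamPersistence, lets a runtime decide its own ID format rather than being forced to use UUIDs. The in-memory example above returns fftypes.NewUUID().String(); a runtime paired with a more permissive validator could just as well issue something like the following. Assumes "fmt" and "sync/atomic" imports.

// A hypothetical runtime that issues its own sequential stream IDs
type sequentialIDSource struct {
	counter atomic.Int64
}

func (s *sequentialIDSource) NewID() string {
	// Any unique string works once the configured ID validator allows it
	return fmt.Sprintf("stream-%08d", s.counter.Add(1))
}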
package authmocks @@ -21,6 +21,10 @@ type Plugin struct { func (_m *Plugin) Authorize(ctx context.Context, req *fftypes.AuthReq) error { ret := _m.Called(ctx, req) + if len(ret) == 0 { + panic("no return value specified for Authorize") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, *fftypes.AuthReq) error); ok { r0 = rf(ctx, req) @@ -35,6 +39,10 @@ func (_m *Plugin) Authorize(ctx context.Context, req *fftypes.AuthReq) error { func (_m *Plugin) Init(ctx context.Context, name string, _a2 config.Section) error { ret := _m.Called(ctx, name, _a2) + if len(ret) == 0 { + panic("no return value specified for Init") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, string, config.Section) error); ok { r0 = rf(ctx, name, _a2) @@ -54,6 +62,10 @@ func (_m *Plugin) InitConfig(_a0 config.Section) { func (_m *Plugin) Name() string { ret := _m.Called() + if len(ret) == 0 { + panic("no return value specified for Name") + } + var r0 string if rf, ok := ret.Get(0).(func() string); ok { r0 = rf() diff --git a/mocks/crudmocks/crud.go b/mocks/crudmocks/crud.go index b0a8aa5..65de706 100644 --- a/mocks/crudmocks/crud.go +++ b/mocks/crudmocks/crud.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.37.1. DO NOT EDIT. +// Code generated by mockery v2.38.0. DO NOT EDIT. package crudmocks @@ -22,6 +22,10 @@ type CRUD[T dbsql.Resource] struct { func (_m *CRUD[T]) Count(ctx context.Context, filter ffapi.Filter) (int64, error) { ret := _m.Called(ctx, filter) + if len(ret) == 0 { + panic("no return value specified for Count") + } + var r0 int64 var r1 error if rf, ok := ret.Get(0).(func(context.Context, ffapi.Filter) (int64, error)); ok { @@ -53,6 +57,10 @@ func (_m *CRUD[T]) Delete(ctx context.Context, id string, hooks ...dbsql.PostCom _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for Delete") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, string, ...dbsql.PostCompletionHook) error); ok { r0 = rf(ctx, id, hooks...) @@ -74,6 +82,10 @@ func (_m *CRUD[T]) DeleteMany(ctx context.Context, filter ffapi.Filter, hooks .. _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for DeleteMany") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, ffapi.Filter, ...dbsql.PostCompletionHook) error); ok { r0 = rf(ctx, filter, hooks...) @@ -95,6 +107,10 @@ func (_m *CRUD[T]) GetByID(ctx context.Context, id string, getOpts ...dbsql.GetO _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for GetByID") + } + var r0 T var r1 error if rf, ok := ret.Get(0).(func(context.Context, string, ...dbsql.GetOption) (T, error)); ok { @@ -126,6 +142,10 @@ func (_m *CRUD[T]) GetByName(ctx context.Context, name string, getOpts ...dbsql. _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for GetByName") + } + var r0 T var r1 error if rf, ok := ret.Get(0).(func(context.Context, string, ...dbsql.GetOption) (T, error)); ok { @@ -157,6 +177,10 @@ func (_m *CRUD[T]) GetByUUIDOrName(ctx context.Context, uuidOrName string, getOp _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for GetByUUIDOrName") + } + var r0 T var r1 error if rf, ok := ret.Get(0).(func(context.Context, string, ...dbsql.GetOption) (T, error)); ok { @@ -188,6 +212,10 @@ func (_m *CRUD[T]) GetFirst(ctx context.Context, filter ffapi.Filter, getOpts .. 
_ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for GetFirst") + } + var r0 T var r1 error if rf, ok := ret.Get(0).(func(context.Context, ffapi.Filter, ...dbsql.GetOption) (T, error)); ok { @@ -212,6 +240,10 @@ func (_m *CRUD[T]) GetFirst(ctx context.Context, filter ffapi.Filter, getOpts .. func (_m *CRUD[T]) GetMany(ctx context.Context, filter ffapi.Filter) ([]T, *ffapi.FilterResult, error) { ret := _m.Called(ctx, filter) + if len(ret) == 0 { + panic("no return value specified for GetMany") + } + var r0 []T var r1 *ffapi.FilterResult var r2 error @@ -247,6 +279,10 @@ func (_m *CRUD[T]) GetMany(ctx context.Context, filter ffapi.Filter) ([]T, *ffap func (_m *CRUD[T]) GetQueryFactory() ffapi.QueryFactory { ret := _m.Called() + if len(ret) == 0 { + panic("no return value specified for GetQueryFactory") + } + var r0 ffapi.QueryFactory if rf, ok := ret.Get(0).(func() ffapi.QueryFactory); ok { r0 = rf() @@ -263,6 +299,10 @@ func (_m *CRUD[T]) GetQueryFactory() ffapi.QueryFactory { func (_m *CRUD[T]) GetSequenceForID(ctx context.Context, id string) (int64, error) { ret := _m.Called(ctx, id) + if len(ret) == 0 { + panic("no return value specified for GetSequenceForID") + } + var r0 int64 var r1 error if rf, ok := ret.Get(0).(func(context.Context, string) (int64, error)); ok { @@ -294,6 +334,10 @@ func (_m *CRUD[T]) Insert(ctx context.Context, inst T, hooks ...dbsql.PostComple _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for Insert") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, T, ...dbsql.PostCompletionHook) error); ok { r0 = rf(ctx, inst, hooks...) @@ -315,6 +359,10 @@ func (_m *CRUD[T]) InsertMany(ctx context.Context, instances []T, allowPartialSu _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for InsertMany") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, []T, bool, ...dbsql.PostCompletionHook) error); ok { r0 = rf(ctx, instances, allowPartialSuccess, hooks...) @@ -329,6 +377,10 @@ func (_m *CRUD[T]) InsertMany(ctx context.Context, instances []T, allowPartialSu func (_m *CRUD[T]) ModifyQuery(modifier func(squirrel.SelectBuilder) squirrel.SelectBuilder) dbsql.CRUDQuery[T] { ret := _m.Called(modifier) + if len(ret) == 0 { + panic("no return value specified for ModifyQuery") + } + var r0 dbsql.CRUDQuery[T] if rf, ok := ret.Get(0).(func(func(squirrel.SelectBuilder) squirrel.SelectBuilder) dbsql.CRUDQuery[T]); ok { r0 = rf(modifier) @@ -345,6 +397,10 @@ func (_m *CRUD[T]) ModifyQuery(modifier func(squirrel.SelectBuilder) squirrel.Se func (_m *CRUD[T]) NewFilterBuilder(ctx context.Context) ffapi.FilterBuilder { ret := _m.Called(ctx) + if len(ret) == 0 { + panic("no return value specified for NewFilterBuilder") + } + var r0 ffapi.FilterBuilder if rf, ok := ret.Get(0).(func(context.Context) ffapi.FilterBuilder); ok { r0 = rf(ctx) @@ -361,6 +417,10 @@ func (_m *CRUD[T]) NewFilterBuilder(ctx context.Context) ffapi.FilterBuilder { func (_m *CRUD[T]) NewUpdateBuilder(ctx context.Context) ffapi.UpdateBuilder { ret := _m.Called(ctx) + if len(ret) == 0 { + panic("no return value specified for NewUpdateBuilder") + } + var r0 ffapi.UpdateBuilder if rf, ok := ret.Get(0).(func(context.Context) ffapi.UpdateBuilder); ok { r0 = rf(ctx) @@ -384,6 +444,10 @@ func (_m *CRUD[T]) Replace(ctx context.Context, inst T, hooks ...dbsql.PostCompl _ca = append(_ca, _va...) ret := _m.Called(_ca...) 
+ if len(ret) == 0 { + panic("no return value specified for Replace") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, T, ...dbsql.PostCompletionHook) error); ok { r0 = rf(ctx, inst, hooks...) @@ -398,6 +462,10 @@ func (_m *CRUD[T]) Replace(ctx context.Context, inst T, hooks ...dbsql.PostCompl func (_m *CRUD[T]) Scoped(scope squirrel.Eq) dbsql.CRUD[T] { ret := _m.Called(scope) + if len(ret) == 0 { + panic("no return value specified for Scoped") + } + var r0 dbsql.CRUD[T] if rf, ok := ret.Get(0).(func(squirrel.Eq) dbsql.CRUD[T]); ok { r0 = rf(scope) @@ -421,6 +489,10 @@ func (_m *CRUD[T]) Update(ctx context.Context, id string, update ffapi.Update, h _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for Update") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, string, ffapi.Update, ...dbsql.PostCompletionHook) error); ok { r0 = rf(ctx, id, update, hooks...) @@ -442,6 +514,10 @@ func (_m *CRUD[T]) UpdateMany(ctx context.Context, filter ffapi.Filter, update f _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for UpdateMany") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, ffapi.Filter, ffapi.Update, ...dbsql.PostCompletionHook) error); ok { r0 = rf(ctx, filter, update, hooks...) @@ -463,6 +539,10 @@ func (_m *CRUD[T]) UpdateSparse(ctx context.Context, sparseUpdate T, hooks ...db _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for UpdateSparse") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context, T, ...dbsql.PostCompletionHook) error); ok { r0 = rf(ctx, sparseUpdate, hooks...) @@ -484,6 +564,10 @@ func (_m *CRUD[T]) Upsert(ctx context.Context, inst T, optimization dbsql.Upsert _ca = append(_ca, _va...) ret := _m.Called(_ca...) + if len(ret) == 0 { + panic("no return value specified for Upsert") + } + var r0 bool var r1 error if rf, ok := ret.Get(0).(func(context.Context, T, dbsql.UpsertOptimization, ...dbsql.PostCompletionHook) (bool, error)); ok { diff --git a/mocks/dbmigratemocks/driver.go b/mocks/dbmigratemocks/driver.go index 6294e79..18b589e 100644 --- a/mocks/dbmigratemocks/driver.go +++ b/mocks/dbmigratemocks/driver.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.37.1. DO NOT EDIT. +// Code generated by mockery v2.38.0. DO NOT EDIT. 
package dbmigratemocks @@ -19,6 +19,10 @@ type Driver struct { func (_m *Driver) Close() error { ret := _m.Called() + if len(ret) == 0 { + panic("no return value specified for Close") + } + var r0 error if rf, ok := ret.Get(0).(func() error); ok { r0 = rf() @@ -33,6 +37,10 @@ func (_m *Driver) Close() error { func (_m *Driver) Drop() error { ret := _m.Called() + if len(ret) == 0 { + panic("no return value specified for Drop") + } + var r0 error if rf, ok := ret.Get(0).(func() error); ok { r0 = rf() @@ -47,6 +55,10 @@ func (_m *Driver) Drop() error { func (_m *Driver) Lock() error { ret := _m.Called() + if len(ret) == 0 { + panic("no return value specified for Lock") + } + var r0 error if rf, ok := ret.Get(0).(func() error); ok { r0 = rf() @@ -61,6 +73,10 @@ func (_m *Driver) Lock() error { func (_m *Driver) Open(url string) (database.Driver, error) { ret := _m.Called(url) + if len(ret) == 0 { + panic("no return value specified for Open") + } + var r0 database.Driver var r1 error if rf, ok := ret.Get(0).(func(string) (database.Driver, error)); ok { @@ -87,6 +103,10 @@ func (_m *Driver) Open(url string) (database.Driver, error) { func (_m *Driver) Run(migration io.Reader) error { ret := _m.Called(migration) + if len(ret) == 0 { + panic("no return value specified for Run") + } + var r0 error if rf, ok := ret.Get(0).(func(io.Reader) error); ok { r0 = rf(migration) @@ -101,6 +121,10 @@ func (_m *Driver) Run(migration io.Reader) error { func (_m *Driver) SetVersion(version int, dirty bool) error { ret := _m.Called(version, dirty) + if len(ret) == 0 { + panic("no return value specified for SetVersion") + } + var r0 error if rf, ok := ret.Get(0).(func(int, bool) error); ok { r0 = rf(version, dirty) @@ -115,6 +139,10 @@ func (_m *Driver) SetVersion(version int, dirty bool) error { func (_m *Driver) Unlock() error { ret := _m.Called() + if len(ret) == 0 { + panic("no return value specified for Unlock") + } + var r0 error if rf, ok := ret.Get(0).(func() error); ok { r0 = rf() @@ -129,6 +157,10 @@ func (_m *Driver) Unlock() error { func (_m *Driver) Version() (int, bool, error) { ret := _m.Called() + if len(ret) == 0 { + panic("no return value specified for Version") + } + var r0 int var r1 bool var r2 error diff --git a/mocks/httpservermocks/go_http_server.go b/mocks/httpservermocks/go_http_server.go index a2d2f70..7ab9c91 100644 --- a/mocks/httpservermocks/go_http_server.go +++ b/mocks/httpservermocks/go_http_server.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.37.1. DO NOT EDIT. +// Code generated by mockery v2.38.0. DO NOT EDIT. 
package httpservermocks @@ -19,6 +19,10 @@ type GoHTTPServer struct { func (_m *GoHTTPServer) Close() error { ret := _m.Called() + if len(ret) == 0 { + panic("no return value specified for Close") + } + var r0 error if rf, ok := ret.Get(0).(func() error); ok { r0 = rf() @@ -33,6 +37,10 @@ func (_m *GoHTTPServer) Close() error { func (_m *GoHTTPServer) Serve(l net.Listener) error { ret := _m.Called(l) + if len(ret) == 0 { + panic("no return value specified for Serve") + } + var r0 error if rf, ok := ret.Get(0).(func(net.Listener) error); ok { r0 = rf(l) @@ -47,6 +55,10 @@ func (_m *GoHTTPServer) Serve(l net.Listener) error { func (_m *GoHTTPServer) ServeTLS(l net.Listener, certFile string, keyFile string) error { ret := _m.Called(l, certFile, keyFile) + if len(ret) == 0 { + panic("no return value specified for ServeTLS") + } + var r0 error if rf, ok := ret.Get(0).(func(net.Listener, string, string) error); ok { r0 = rf(l, certFile, keyFile) @@ -61,6 +73,10 @@ func (_m *GoHTTPServer) ServeTLS(l net.Listener, certFile string, keyFile string func (_m *GoHTTPServer) Shutdown(ctx context.Context) error { ret := _m.Called(ctx) + if len(ret) == 0 { + panic("no return value specified for Shutdown") + } + var r0 error if rf, ok := ret.Get(0).(func(context.Context) error); ok { r0 = rf(ctx) diff --git a/mocks/wsservermocks/web_socket_channels.go b/mocks/wsservermocks/web_socket_channels.go index ed657c4..1123e65 100644 --- a/mocks/wsservermocks/web_socket_channels.go +++ b/mocks/wsservermocks/web_socket_channels.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.37.1. DO NOT EDIT. +// Code generated by mockery v2.38.0. DO NOT EDIT. package wsservermocks @@ -16,6 +16,10 @@ type WebSocketChannels struct { func (_m *WebSocketChannels) GetChannels(streamName string) (chan<- interface{}, chan<- interface{}, <-chan *wsserver.WebSocketCommandMessageOrError) { ret := _m.Called(streamName) + if len(ret) == 0 { + panic("no return value specified for GetChannels") + } + var r0 chan<- interface{} var r1 chan<- interface{} var r2 <-chan *wsserver.WebSocketCommandMessageOrError diff --git a/mocks/wsservermocks/web_socket_server.go b/mocks/wsservermocks/web_socket_server.go index 60fa863..cfcaece 100644 --- a/mocks/wsservermocks/web_socket_server.go +++ b/mocks/wsservermocks/web_socket_server.go @@ -1,4 +1,4 @@ -// Code generated by mockery v2.37.1. DO NOT EDIT. +// Code generated by mockery v2.38.0. DO NOT EDIT. 
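The mockery v2.38.0 regeneration above changes behaviour for unstubbed calls: a mocked method invoked without a configured return now panics with "no return value specified for ..." rather than silently returning zero values. A brief sketch of how a test stubs the return with testify's standard On/Return API; the test name is invented and the import path is assumed from the repo layout:

package example_test // illustrative sketch, not part of the patch

import (
	"testing"

	"github.com/hyperledger/firefly-common/mocks/httpservermocks"
	"github.com/stretchr/testify/assert"
)

func TestCloseMustBeStubbed(t *testing.T) {
	srv := &httpservermocks.GoHTTPServer{}
	// Without this stub, the regenerated mock panics:
	//   "no return value specified for Close"
	srv.On("Close").Return(nil)
	assert.NoError(t, srv.Close())
	srv.AssertExpectations(t)
}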
package wsservermocks @@ -23,6 +23,10 @@ func (_m *WebSocketServer) Close() { func (_m *WebSocketServer) GetChannels(streamName string) (chan<- interface{}, chan<- interface{}, <-chan *wsserver.WebSocketCommandMessageOrError) { ret := _m.Called(streamName) + if len(ret) == 0 { + panic("no return value specified for GetChannels") + } + var r0 chan<- interface{} var r1 chan<- interface{} var r2 <-chan *wsserver.WebSocketCommandMessageOrError diff --git a/pkg/eventstreams/activestream.go b/pkg/eventstreams/activestream.go index 7a308c0..d6dcd3e 100644 --- a/pkg/eventstreams/activestream.go +++ b/pkg/eventstreams/activestream.go @@ -91,8 +91,8 @@ func (as *activeStream[CT, DT]) runEventLoop() { func (as *activeStream[CT, DT]) loadCheckpoint() (sequencedID string, err error) { err = as.retry.Do(as.ctx, "load checkpoint", func(attempt int) (retry bool, err error) { - log.L(as.ctx).Debugf("Loading checkpoint: %s", as.spec.ID) - cp, err := as.persistence.Checkpoints().GetByID(as.ctx, as.spec.ID.String()) + log.L(as.ctx).Debugf("Loading checkpoint: %s", as.spec.GetID()) + cp, err := as.persistence.Checkpoints().GetByID(as.ctx, as.spec.GetID()) if err != nil { return true, err } @@ -242,9 +242,7 @@ func (as *activeStream[CT, DT]) checkpointRoutine() { } err := as.retry.Do(as.ctx, "checkpoint", func(attempt int) (retry bool, err error) { _, err = as.esm.persistence.Checkpoints().Upsert(as.ctx, &EventStreamCheckpoint{ - ResourceBase: dbsql.ResourceBase{ - ID: as.spec.ID, // the ID of the stream is the ID of the checkpoint - }, + ID: ptrTo(as.spec.GetID()), // the ID of the stream is the ID of the checkpoint SequenceID: &checkpointSequenceID, }, dbsql.UpsertOptimizationExisting) return true, err @@ -273,7 +271,7 @@ func (as *activeStream[CT, DT]) dispatchBatch(batch *eventStreamBatch[DT]) (err err := as.retry.Do(as.ctx, "action", func(_ int) (retry bool, err error) { err = as.action.AttemptDispatch(as.ctx, as.LastDispatchAttempts, &EventBatch[DT]{ Type: MessageTypeEventBatch, - StreamID: as.spec.ID, + StreamID: as.spec.GetID(), BatchNumber: batch.number, Events: batch.events, }) diff --git a/pkg/eventstreams/e2e_test.go b/pkg/eventstreams/e2e_test.go index 31aba27..56805d1 100644 --- a/pkg/eventstreams/e2e_test.go +++ b/pkg/eventstreams/e2e_test.go @@ -100,6 +100,10 @@ func (ts *testSource) Run(ctx context.Context, spec *EventStreamSpec[testESConfi } } +func (ts *testSource) NewID() string { + return fftypes.NewUUID().String() +} + // This test demonstrates the runtime function of the event stream module through a simple test, // using an SQLite database for CRUD operations on the persisted event streams, // and a fake stream of events. 
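Both the example pub/sub runtime and the test source above satisfy the new Runtime requirement by returning a UUID string from NewID(), but any stable string format works now that the package no longer assumes UUIDs. A sketch of an alternative counter-based scheme; the prefix and zero-padding are invented for illustration, and it is deliberately simplistic:

package example // illustrative sketch, not part of the patch

import "fmt"

// counterIDSource issues monotonic string IDs such as "es-000000000001".
type counterIDSource struct {
	lastID int64
}

// NewID satisfies the Runtime interface method added in this patch series.
func (c *counterIDSource) NewID() string {
	c.lastID++ // a real implementation would use sync/atomic or a mutex
	return fmt.Sprintf("es-%012d", c.lastID)
}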
@@ -234,23 +238,23 @@ func TestE2E_WebsocketDeliveryRestartReset(t *testing.T) { var ess *EventStreamWithStatus[testESConfig] for ess == nil || ess.Statistics == nil || ess.Statistics.Checkpoint == "" { time.Sleep(1 * time.Millisecond) - ess, err = mgr.GetStreamByID(ctx, es1.ID) + ess, err = mgr.GetStreamByID(ctx, es1.GetID()) assert.NoError(t, err) } // Restart and check we get called with the checkpoint - note we don't reconnect the // websocket or restart that - it remains "started" from the websocket protocol // perspective throughout - err = mgr.StopStream(ctx, es1.ID) + err = mgr.StopStream(ctx, es1.GetID()) assert.NoError(t, err) - err = mgr.StartStream(ctx, es1.ID) + err = mgr.StartStream(ctx, es1.GetID()) assert.NoError(t, err) wsReceiveAck(ctx, t, wsc, func(batch *EventBatch[testData]) {}) assert.Equal(t, "000000000091", ts.sequenceStartedWith) assert.Equal(t, 2, ts.startCount) // Reset it and check we get the reset - err = mgr.ResetStream(ctx, es1.ID, "first") + err = mgr.ResetStream(ctx, es1.GetID(), "first") assert.NoError(t, err) wsReceiveAck(ctx, t, wsc, func(batch *EventBatch[testData]) {}) assert.Equal(t, "first", ts.sequenceStartedWith) @@ -447,7 +451,7 @@ func TestE2E_CRUDLifecycle(t *testing.T) { assert.Equal(t, EventStreamStatusStopped, esList[0].Status) // Get the first by ID - es1c, err := mgr.GetStreamByID(ctx, es1.ID, dbsql.FailIfNotFound) + es1c, err := mgr.GetStreamByID(ctx, es1.GetID(), dbsql.FailIfNotFound) assert.NoError(t, err) assert.Equal(t, "stream1", *es1c.Name) assert.Equal(t, EventStreamStatusStarted, es1c.Status) @@ -459,22 +463,22 @@ func TestE2E_CRUDLifecycle(t *testing.T) { assert.False(t, created) // Start and re-stop, then delete the second event stream - err = mgr.StartStream(ctx, es2.ID) + err = mgr.StartStream(ctx, es2.GetID()) assert.NoError(t, err) - es2c, err := mgr.GetStreamByID(ctx, es2.ID, dbsql.FailIfNotFound) + es2c, err := mgr.GetStreamByID(ctx, es2.GetID(), dbsql.FailIfNotFound) assert.NoError(t, err) assert.Equal(t, EventStreamStatusStarted, es2c.Status) assert.Equal(t, "stream2a", *es2c.Name) - err = mgr.StopStream(ctx, es2.ID) + err = mgr.StopStream(ctx, es2.GetID()) assert.NoError(t, err) - es2c, err = mgr.GetStreamByID(ctx, es2.ID, dbsql.FailIfNotFound) + es2c, err = mgr.GetStreamByID(ctx, es2.GetID(), dbsql.FailIfNotFound) assert.NoError(t, err) assert.Equal(t, EventStreamStatusStopped, es2c.Status) - err = mgr.DeleteStream(ctx, es2.ID) + err = mgr.DeleteStream(ctx, es2.GetID()) assert.NoError(t, err) // Delete the first stream (which is running still) - err = mgr.DeleteStream(ctx, es1.ID) + err = mgr.DeleteStream(ctx, es1.GetID()) assert.NoError(t, err) // Check no streams left @@ -530,7 +534,7 @@ func setupE2ETest(t *testing.T, extraSetup ...func()) (context.Context, Persiste db, err := dbsql.NewSQLiteProvider(ctx, dbConf) assert.NoError(t, err) - p := NewEventStreamPersistence[testESConfig](db) + p := NewEventStreamPersistence[testESConfig](db, dbsql.UUIDValidator) p.EventStreams().Validate() p.Checkpoints().Validate() diff --git a/pkg/eventstreams/event.go b/pkg/eventstreams/event.go index 5853c3c..3f67f7a 100644 --- a/pkg/eventstreams/event.go +++ b/pkg/eventstreams/event.go @@ -18,15 +18,13 @@ package eventstreams import ( "encoding/json" - - "github.com/hyperledger/firefly-common/pkg/fftypes" ) const MessageTypeEventBatch = "event_batch" type EventBatch[DataType any] struct { Type string `json:"type"` // always MessageTypeEventBatch (for consistent WebSocket flow control) - StreamID *fftypes.UUID `json:"stream"` // the ID 
of the event stream for this event + StreamID string `json:"stream"` // the ID of the event stream for this event BatchNumber int64 `json:"batchNumber"` // should be provided back in the ack Events []*Event[DataType] `json:"events"` // an array of events allows efficient batch acknowledgment } diff --git a/pkg/eventstreams/eventstreams.go b/pkg/eventstreams/eventstreams.go index 854e575..b5a820b 100644 --- a/pkg/eventstreams/eventstreams.go +++ b/pkg/eventstreams/eventstreams.go @@ -24,7 +24,6 @@ import ( "regexp" "sync" - "github.com/hyperledger/firefly-common/pkg/dbsql" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" @@ -73,7 +72,9 @@ type DBSerializable interface { } type EventStreamSpec[CT any] struct { - dbsql.ResourceBase + ID *string `ffstruct:"eventstream" json:"id"` + Created *fftypes.FFTime `ffstruct:"eventstream" json:"created"` + Updated *fftypes.FFTime `ffstruct:"eventstream" json:"updated"` Name *string `ffstruct:"eventstream" json:"name,omitempty"` Status *EventStreamStatus `ffstruct:"eventstream" json:"status,omitempty"` Type *EventStreamType `ffstruct:"eventstream" json:"type,omitempty" ffenum:"estype"` @@ -94,7 +95,10 @@ type EventStreamSpec[CT any] struct { } func (esc *EventStreamSpec[CT]) GetID() string { - return esc.ID.String() + if esc.ID == nil { + return "" + } + return *esc.ID } func (esc *EventStreamSpec[CT]) SetCreated(t *fftypes.FFTime) { @@ -124,8 +128,25 @@ type EventStreamWithStatus[CT any] struct { } type EventStreamCheckpoint struct { - dbsql.ResourceBase - SequenceID *string `ffstruct:"EventStreamCheckpoint" json:"sequenceId,omitempty"` + ID *string `ffstruct:"EventStreamCheckpoint" json:"id"` + Created *fftypes.FFTime `ffstruct:"EventStreamCheckpoint" json:"created"` + Updated *fftypes.FFTime `ffstruct:"EventStreamCheckpoint" json:"updated"` + SequenceID *string `ffstruct:"EventStreamCheckpoint" json:"sequenceId,omitempty"` +} + +func (esc *EventStreamCheckpoint) GetID() string { + if esc.ID == nil { + return "" + } + return *esc.ID +} + +func (esc *EventStreamCheckpoint) SetCreated(t *fftypes.FFTime) { + esc.Created = t +} + +func (esc *EventStreamCheckpoint) SetUpdated(t *fftypes.FFTime) { + esc.Updated = t } type EventBatchDispatcher[DT any] interface { @@ -367,7 +388,7 @@ func (es *eventStream[CT, DT]) checkSetStatus(ctx context.Context, targetStatus func (es *eventStream[CT, DT]) persistStatus(ctx context.Context, targetStatus EventStreamStatus) error { fb := EventStreamFilters.NewUpdate(ctx) - return es.esm.persistence.EventStreams().Update(ctx, es.spec.ID.String(), fb.Set("status", targetStatus)) + return es.esm.persistence.EventStreams().Update(ctx, es.spec.GetID(), fb.Set("status", targetStatus)) } func (es *eventStream[CT, DT]) stopOrDelete(ctx context.Context, targetStatus EventStreamStatus) error { diff --git a/pkg/eventstreams/eventstreams_test.go b/pkg/eventstreams/eventstreams_test.go index 3be3409..bdefa6a 100644 --- a/pkg/eventstreams/eventstreams_test.go +++ b/pkg/eventstreams/eventstreams_test.go @@ -21,7 +21,6 @@ import ( "fmt" "testing" - "github.com/hyperledger/firefly-common/pkg/dbsql" "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/stretchr/testify/assert" @@ -34,9 +33,7 @@ func newTestEventStream(t *testing.T, extraSetup ...func(mdb *mockPersistence)) }) ctx, mgr, mes, done := newMockESManager(t, extraSetup...) 
es, err := mgr.initEventStream(ctx, &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo(t.Name()), Status: ptrTo(EventStreamStatusStopped), }) @@ -48,11 +45,9 @@ func newTestEventStream(t *testing.T, extraSetup ...func(mdb *mockPersistence)) func TestEventStreamFields(t *testing.T) { es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), } - assert.Equal(t, es.ID.String(), es.GetID()) + assert.Equal(t, es.GetID(), es.GetID()) t1 := fftypes.Now() es.SetCreated(t1) assert.Equal(t, t1, es.Created) @@ -293,3 +288,8 @@ func TestSuspendTimeout(t *testing.T) { assert.Regexp(t, "FF00229", err) } + +func TestGetIDNil(t *testing.T) { + assert.Empty(t, (&EventStreamSpec[testESConfig]{}).GetID()) + assert.Empty(t, (&EventStreamCheckpoint{}).GetID()) +} diff --git a/pkg/eventstreams/manager.go b/pkg/eventstreams/manager.go index 9db83b7..df58905 100644 --- a/pkg/eventstreams/manager.go +++ b/pkg/eventstreams/manager.go @@ -25,7 +25,6 @@ import ( "github.com/hyperledger/firefly-common/pkg/dbsql" "github.com/hyperledger/firefly-common/pkg/ffapi" "github.com/hyperledger/firefly-common/pkg/fftls" - "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" "github.com/hyperledger/firefly-common/pkg/wsserver" @@ -33,12 +32,12 @@ import ( type Manager[CT any] interface { UpsertStream(ctx context.Context, esSpec *EventStreamSpec[CT]) (bool, error) - GetStreamByID(ctx context.Context, id *fftypes.UUID, opts ...dbsql.GetOption) (*EventStreamWithStatus[CT], error) + GetStreamByID(ctx context.Context, id string, opts ...dbsql.GetOption) (*EventStreamWithStatus[CT], error) ListStreams(ctx context.Context, filter ffapi.Filter) ([]*EventStreamWithStatus[CT], *ffapi.FilterResult, error) - StopStream(ctx context.Context, id *fftypes.UUID) error - StartStream(ctx context.Context, id *fftypes.UUID) error - ResetStream(ctx context.Context, id *fftypes.UUID, sequenceID string) error - DeleteStream(ctx context.Context, id *fftypes.UUID) error + StopStream(ctx context.Context, id string) error + StartStream(ctx context.Context, id string) error + ResetStream(ctx context.Context, id string, sequenceID string) error + DeleteStream(ctx context.Context, id string) error Close(ctx context.Context) } @@ -53,6 +52,8 @@ type Deliver[DT any] func(events []*Event[DT]) SourceInstruction // Runtime is the required implementation extension for the EventStream common utility type Runtime[ConfigType any, DataType any] interface { + // Generate a new unique resource ID (such as a UUID) + NewID() string // Type specific config validation goes here Validate(ctx context.Context, config *ConfigType) error // The run function should execute in a loop detecting events until instructed to stop: @@ -71,7 +72,7 @@ type Runtime[ConfigType any, DataType any] interface { type esManager[CT any, DT any] struct { config Config mux sync.Mutex - streams map[fftypes.UUID]*eventStream[CT, DT] + streams map[string]*eventStream[CT, DT] tlsConfigs map[string]*tls.Config wsChannels wsserver.WebSocketChannels persistence Persistence[CT] @@ -102,7 +103,7 @@ func NewEventStreamManager[CT any, DT any](ctx context.Context, config *Config, runtime: source, persistence: p, wsChannels: wsChannels, - streams: map[fftypes.UUID]*eventStream[CT, DT]{}, + streams: map[string]*eventStream[CT, DT]{}, } 
if err = esm.initialize(ctx); err != nil { return nil, err @@ -111,22 +112,22 @@ func NewEventStreamManager[CT any, DT any](ctx context.Context, config *Config, } func (esm *esManager[CT, DT]) addStream(ctx context.Context, es *eventStream[CT, DT]) { - log.L(ctx).Infof("Adding stream '%s' [%s] (%s)", *es.spec.Name, es.spec.ID, es.Status(ctx).Status) + log.L(ctx).Infof("Adding stream '%s' [%s] (%s)", *es.spec.Name, es.spec.GetID(), es.Status(ctx).Status) esm.mux.Lock() defer esm.mux.Unlock() - esm.streams[*es.spec.ID] = es + esm.streams[es.spec.GetID()] = es } -func (esm *esManager[CT, DT]) getStream(id *fftypes.UUID) *eventStream[CT, DT] { +func (esm *esManager[CT, DT]) getStream(id string) *eventStream[CT, DT] { esm.mux.Lock() defer esm.mux.Unlock() - return esm.streams[*id] + return esm.streams[id] } -func (esm *esManager[CT, DT]) removeStream(id *fftypes.UUID) { +func (esm *esManager[CT, DT]) removeStream(id string) { esm.mux.Lock() defer esm.mux.Unlock() - delete(esm.streams, *id) + delete(esm.streams, id) } func (esm *esManager[CT, DT]) initialize(ctx context.Context) error { @@ -143,7 +144,7 @@ func (esm *esManager[CT, DT]) initialize(ctx context.Context) error { } for _, esSpec := range streams { if *esSpec.Status == EventStreamStatusDeleted { - if err := esm.persistence.EventStreams().Delete(ctx, esSpec.ID.String()); err != nil { + if err := esm.persistence.EventStreams().Delete(ctx, esSpec.GetID()); err != nil { return err } } else { @@ -161,10 +162,10 @@ func (esm *esManager[CT, DT]) initialize(ctx context.Context) error { func (esm *esManager[CT, DT]) UpsertStream(ctx context.Context, esSpec *EventStreamSpec[CT]) (bool, error) { var existing *eventStream[CT, DT] - if esSpec.ID == nil { - esSpec.ID = fftypes.NewUUID() + if esSpec.ID == nil || len(*esSpec.ID) == 0 { + esSpec.ID = ptrTo(esm.runtime.NewID()) } else { - existing = esm.getStream(esSpec.ID) + existing = esm.getStream(esSpec.GetID()) } // Only statuses that can be asserted externally are started/stopped @@ -207,7 +208,7 @@ func (esm *esManager[CT, DT]) reInit(ctx context.Context, esSpec *EventStreamSpe return nil } -func (esm *esManager[CT, DT]) DeleteStream(ctx context.Context, id *fftypes.UUID) error { +func (esm *esManager[CT, DT]) DeleteStream(ctx context.Context, id string) error { es := esm.getStream(id) if es == nil { return i18n.NewError(ctx, i18n.Msg404NoResult) @@ -216,14 +217,14 @@ func (esm *esManager[CT, DT]) DeleteStream(ctx context.Context, id *fftypes.UUID return err } // Now we can delete it fully from the DB - if err := esm.persistence.EventStreams().Delete(ctx, id.String()); err != nil { + if err := esm.persistence.EventStreams().Delete(ctx, id); err != nil { return err } esm.removeStream(id) return nil } -func (esm *esManager[CT, DT]) StopStream(ctx context.Context, id *fftypes.UUID) error { +func (esm *esManager[CT, DT]) StopStream(ctx context.Context, id string) error { es := esm.getStream(id) if es == nil { return i18n.NewError(ctx, i18n.Msg404NoResult) @@ -231,7 +232,7 @@ func (esm *esManager[CT, DT]) StopStream(ctx context.Context, id *fftypes.UUID) return es.stop(ctx) } -func (esm *esManager[CT, DT]) ResetStream(ctx context.Context, id *fftypes.UUID, sequenceID string) error { +func (esm *esManager[CT, DT]) ResetStream(ctx context.Context, id string, sequenceID string) error { es := esm.getStream(id) if es == nil { return i18n.NewError(ctx, i18n.Msg404NoResult) @@ -247,9 +248,7 @@ func (esm *esManager[CT, DT]) ResetStream(ctx context.Context, id *fftypes.UUID, // store the initial_sequence_id 
back to the object, and update our in-memory record es.spec.InitialSequenceID = &sequenceID if err := esm.persistence.EventStreams().UpdateSparse(ctx, &EventStreamSpec[CT]{ - ResourceBase: dbsql.ResourceBase{ - ID: id, - }, + ID: &id, InitialSequenceID: &sequenceID, }); err != nil { return err @@ -261,7 +260,7 @@ func (esm *esManager[CT, DT]) ResetStream(ctx context.Context, id *fftypes.UUID, return nil } -func (esm *esManager[CT, DT]) StartStream(ctx context.Context, id *fftypes.UUID) error { +func (esm *esManager[CT, DT]) StartStream(ctx context.Context, id string) error { es := esm.getStream(id) if es == nil { return i18n.NewError(ctx, i18n.Msg404NoResult) @@ -271,11 +270,11 @@ func (esm *esManager[CT, DT]) StartStream(ctx context.Context, id *fftypes.UUID) func (esm *esManager[CT, DT]) enrichGetStream(ctx context.Context, esSpec *EventStreamSpec[CT]) *EventStreamWithStatus[CT] { // Grab the live status - if es := esm.getStream(esSpec.ID); es != nil { + if es := esm.getStream(esSpec.GetID()); es != nil { return es.Status(ctx) } // Fallback to unknown status rather than failing - log.L(ctx).Errorf("No in-memory state for stream '%s'", esSpec.ID) + log.L(ctx).Errorf("No in-memory state for stream '%s'", esSpec.GetID()) return &EventStreamWithStatus[CT]{ EventStreamSpec: esSpec, Status: EventStreamStatusUnknown, @@ -295,8 +294,8 @@ func (esm *esManager[CT, DT]) ListStreams(ctx context.Context, filter ffapi.Filt return enriched, fr, err } -func (esm *esManager[CT, DT]) GetStreamByID(ctx context.Context, id *fftypes.UUID, opts ...dbsql.GetOption) (*EventStreamWithStatus[CT], error) { - esSpec, err := esm.persistence.EventStreams().GetByID(ctx, id.String(), opts...) +func (esm *esManager[CT, DT]) GetStreamByID(ctx context.Context, id string, opts ...dbsql.GetOption) (*EventStreamWithStatus[CT], error) { + esSpec, err := esm.persistence.EventStreams().GetByID(ctx, id, opts...) 
if err != nil { return nil, err } @@ -306,7 +305,11 @@ func (esm *esManager[CT, DT]) GetStreamByID(ctx context.Context, id *fftypes.UUI func (esm *esManager[CT, DT]) Close(ctx context.Context) { for _, es := range esm.streams { if err := es.suspend(ctx); err != nil { - log.L(ctx).Warnf("Failed to stop event stream %s: %s", es.spec.ID, err) + log.L(ctx).Warnf("Failed to stop event stream %s: %s", es.spec.GetID(), err) } } } + +func ptrTo[T any](v T) *T { + return &v +} diff --git a/pkg/eventstreams/manager_test.go b/pkg/eventstreams/manager_test.go index ed9cb0a..7be41bc 100644 --- a/pkg/eventstreams/manager_test.go +++ b/pkg/eventstreams/manager_test.go @@ -38,6 +38,10 @@ type mockEventSource struct { run func(ctx context.Context, es *EventStreamSpec[testESConfig], checkpointSequenceId string, deliver Deliver[testData]) error } +func (mes *mockEventSource) NewID() string { + return fftypes.NewUUID().String() +} + func (mes *mockEventSource) Run(ctx context.Context, es *EventStreamSpec[testESConfig], checkpointSequenceId string, deliver Deliver[testData]) error { return mes.run(ctx, es, checkpointSequenceId, deliver) } @@ -67,10 +71,6 @@ func (mp *mockPersistence) Checkpoints() dbsql.CRUD[*EventStreamCheckpoint] { } func (mp *mockPersistence) Close() {} -func ptrTo[T any](v T) *T { - return &v -} - func newMockESManager(t *testing.T, extraSetup ...func(mp *mockPersistence)) (context.Context, *esManager[testESConfig, testData], *mockEventSource, func()) { logrus.SetLevel(logrus.DebugLevel) @@ -155,9 +155,7 @@ func TestInitFail(t *testing.T) { func TestInitWithStreams(t *testing.T) { es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStarted), } @@ -178,9 +176,7 @@ func TestInitWithStreamsCleanupFail(t *testing.T) { ctx := context.Background() InitConfig(config.RootSection("ut")) es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusDeleted), } @@ -198,9 +194,7 @@ func TestInitWithStreamsInitFail(t *testing.T) { ctx := context.Background() InitConfig(config.RootSection("ut")) es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStarted), } @@ -215,9 +209,7 @@ func TestInitWithStreamsInitFail(t *testing.T) { func TestUpsertStreamDeleted(t *testing.T) { es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), } @@ -227,7 +219,7 @@ func TestUpsertStreamDeleted(t *testing.T) { }) defer done() - esm.getStream(es.ID).spec.Status = ptrTo(EventStreamStatusDeleted) + esm.getStream(es.GetID()).spec.Status = ptrTo(EventStreamStatusDeleted) _, err := esm.UpsertStream(ctx, es) assert.Regexp(t, "FF00236", err) @@ -235,9 +227,7 @@ func TestUpsertStreamDeleted(t *testing.T) { func TestUpsertStreamBadUpdate(t *testing.T) { es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), } @@ -256,9 +246,7 @@ func TestUpsertStreamBadUpdate(t *testing.T) { func 
TestUpsertStreamUpsertFail(t *testing.T) { es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), } @@ -276,9 +264,7 @@ func TestUpsertStreamUpsertFail(t *testing.T) { func TestUpsertReInitExistingFailTimeout(t *testing.T) { es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), } @@ -298,9 +284,7 @@ func TestUpsertReInitExistingFailTimeout(t *testing.T) { func TestUpsertReInitExistingFailInit(t *testing.T) { es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(fftypes.FFEnum("wrong")), } @@ -320,7 +304,7 @@ func TestDeleteStreamNotKnown(t *testing.T) { }) defer done() - err := esm.DeleteStream(ctx, fftypes.NewUUID()) + err := esm.DeleteStream(ctx, fftypes.NewUUID().String()) assert.Regexp(t, "FF00164", err) } @@ -331,7 +315,7 @@ func TestResetStreamNotKnown(t *testing.T) { }) defer done() - err := esm.ResetStream(ctx, fftypes.NewUUID(), "") + err := esm.ResetStream(ctx, fftypes.NewUUID().String(), "") assert.Regexp(t, "FF00164", err) } @@ -342,7 +326,7 @@ func TestStopStreamNotKnown(t *testing.T) { }) defer done() - err := esm.StopStream(ctx, fftypes.NewUUID()) + err := esm.StopStream(ctx, fftypes.NewUUID().String()) assert.Regexp(t, "FF00164", err) } @@ -353,7 +337,7 @@ func TestStartStreamNotKnown(t *testing.T) { }) defer done() - err := esm.StartStream(ctx, fftypes.NewUUID()) + err := esm.StartStream(ctx, fftypes.NewUUID().String()) assert.Regexp(t, "FF00164", err) } @@ -365,9 +349,7 @@ func TestEnrichStreamNotKnown(t *testing.T) { defer done() es := esm.enrichGetStream(ctx, &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), }) assert.NotNil(t, es) assert.Equal(t, EventStreamStatusUnknown, es.Status) @@ -376,9 +358,7 @@ func TestEnrichStreamNotKnown(t *testing.T) { func TestDeleteStreamFail(t *testing.T) { es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), } @@ -389,16 +369,14 @@ func TestDeleteStreamFail(t *testing.T) { }) defer done() - err := esm.DeleteStream(ctx, es.ID) + err := esm.DeleteStream(ctx, es.GetID()) assert.Regexp(t, "pop", err) } func TestDeleteStreamFailDelete(t *testing.T) { es := &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), } @@ -410,7 +388,7 @@ func TestDeleteStreamFailDelete(t *testing.T) { }) defer done() - err := esm.DeleteStream(ctx, es.ID) + err := esm.DeleteStream(ctx, es.GetID()) assert.Regexp(t, "pop", err) } @@ -425,15 +403,13 @@ func TestResetStreamStopFailTimeout(t *testing.T) { activeState: &activeStream[testESConfig, testData]{}, stopping: make(chan struct{}), spec: &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), }, } esm.addStream(ctx, existing) - err := 
esm.ResetStream(ctx, existing.spec.ID, "") + err := esm.ResetStream(ctx, existing.spec.GetID(), "") assert.Regexp(t, "FF00229", err) } @@ -447,15 +423,13 @@ func TestResetStreamStopFailDeleteCheckpoint(t *testing.T) { existing := &eventStream[testESConfig, testData]{ spec: &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), }, } esm.addStream(ctx, existing) - err := esm.ResetStream(ctx, existing.spec.ID, "") + err := esm.ResetStream(ctx, existing.spec.GetID(), "") assert.Regexp(t, "pop", err) } @@ -470,15 +444,13 @@ func TestResetStreamStopFailUpdateSequence(t *testing.T) { existing := &eventStream[testESConfig, testData]{ spec: &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), }, } esm.addStream(ctx, existing) - err := esm.ResetStream(ctx, existing.spec.ID, "12345") + err := esm.ResetStream(ctx, existing.spec.GetID(), "12345") assert.Regexp(t, "pop", err) } @@ -493,15 +465,13 @@ func TestResetStreamNoOp(t *testing.T) { existing := &eventStream[testESConfig, testData]{ spec: &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), }, } esm.addStream(ctx, existing) - err := esm.ResetStream(ctx, existing.spec.ID, "12345") + err := esm.ResetStream(ctx, existing.spec.GetID(), "12345") assert.NoError(t, err) } @@ -525,7 +495,7 @@ func TestGetStreamByIDFail(t *testing.T) { }) defer done() - _, err := esm.GetStreamByID(ctx, fftypes.NewUUID()) + _, err := esm.GetStreamByID(ctx, fftypes.NewUUID().String()) assert.Regexp(t, "pop", err) } @@ -538,9 +508,7 @@ func TestCloseSuspendFail(t *testing.T) { existing := &eventStream[testESConfig, testData]{ spec: &EventStreamSpec[testESConfig]{ - ResourceBase: dbsql.ResourceBase{ - ID: fftypes.NewUUID(), - }, + ID: ptrTo(fftypes.NewUUID().String()), Name: ptrTo("stream1"), Status: ptrTo(EventStreamStatusStopped), }, diff --git a/pkg/eventstreams/persistence.go b/pkg/eventstreams/persistence.go index 6701f81..c5ba409 100644 --- a/pkg/eventstreams/persistence.go +++ b/pkg/eventstreams/persistence.go @@ -17,6 +17,8 @@ package eventstreams import ( + "context" + sq "github.com/Masterminds/squirrel" "github.com/hyperledger/firefly-common/pkg/dbsql" "github.com/hyperledger/firefly-common/pkg/ffapi" @@ -29,7 +31,7 @@ type Persistence[CT any] interface { } var EventStreamFilters = &ffapi.QueryFields{ - "id": &ffapi.UUIDField{}, + "id": &ffapi.StringField{}, "created": &ffapi.TimeField{}, "updated": &ffapi.TimeField{}, "name": &ffapi.StringField{}, @@ -39,18 +41,24 @@ var EventStreamFilters = &ffapi.QueryFields{ } var CheckpointFilters = &ffapi.QueryFields{ - "id": &ffapi.UUIDField{}, + "id": &ffapi.StringField{}, "created": &ffapi.TimeField{}, "updated": &ffapi.TimeField{}, "sequenceid": &ffapi.StringField{}, } -func NewEventStreamPersistence[CT any](db *dbsql.Database) Persistence[CT] { - return &esPersistence[CT]{db: db} +type IDValidator func(ctx context.Context, idStr string) error + +func NewEventStreamPersistence[CT any](db *dbsql.Database, idValidator IDValidator) Persistence[CT] { + return &esPersistence[CT]{ + db: db, + idValidator: idValidator, + } } type esPersistence[CT any] struct { - db *dbsql.Database + db 
*dbsql.Database + idValidator IDValidator } func (p *esPersistence[CT]) EventStreams() dbsql.CRUD[*EventStreamSpec[CT]] { @@ -84,7 +92,7 @@ func (p *esPersistence[CT]) EventStreams() dbsql.CRUD[*EventStreamSpec[CT]] { EventHandler: nil, // set below NameField: "name", QueryFactory: EventStreamFilters, - IDValidator: dbsql.UUIDValidator, + IDValidator: p.idValidator, GetFieldPtr: func(inst *EventStreamSpec[CT], col string) interface{} { switch col { case dbsql.ColumnID: @@ -141,8 +149,8 @@ func (p *esPersistence[CT]) Checkpoints() dbsql.CRUD[*EventStreamCheckpoint] { NilValue: func() *EventStreamCheckpoint { return nil }, NewInstance: func() *EventStreamCheckpoint { return &EventStreamCheckpoint{} }, ScopedFilter: func() sq.Eq { return sq.Eq{} }, - EventHandler: nil, // set below - IDValidator: dbsql.UUIDValidator, + EventHandler: nil, // set below + IDValidator: p.idValidator, // checkpoints share the ID of the eventstream GetFieldPtr: func(inst *EventStreamCheckpoint, col string) interface{} { switch col { case dbsql.ColumnID: diff --git a/pkg/eventstreams/webhooks_test.go b/pkg/eventstreams/webhooks_test.go index 0fed989..10270f3 100644 --- a/pkg/eventstreams/webhooks_test.go +++ b/pkg/eventstreams/webhooks_test.go @@ -72,7 +72,7 @@ func TestWebhooksBadHost(t *testing.T) { wh := newTestWebhooks(t, &WebhookConfig{URL: &u}) err := wh.AttemptDispatch(context.Background(), 0, &EventBatch[testData]{ - StreamID: fftypes.NewUUID(), + StreamID: fftypes.NewUUID().String(), BatchNumber: 1, Events: []*Event[testData]{ {Data: &testData{Field1: 12345}}, @@ -88,7 +88,7 @@ func TestWebhooksPrivateBlocked(t *testing.T) { }) err := wh.AttemptDispatch(context.Background(), 0, &EventBatch[testData]{ - StreamID: fftypes.NewUUID(), + StreamID: fftypes.NewUUID().String(), BatchNumber: 1, Events: []*Event[testData]{ {Data: &testData{Field1: 12345}}, @@ -120,7 +120,7 @@ func TestWebhooksCustomHeaders403(t *testing.T) { done := make(chan struct{}) go func() { err := wh.AttemptDispatch(context.Background(), 0, &EventBatch[testData]{ - StreamID: fftypes.NewUUID(), + StreamID: fftypes.NewUUID().String(), BatchNumber: 1, Events: []*Event[testData]{ {Data: &testData{Field1: 12345}}, @@ -143,7 +143,7 @@ func TestWebhooksCustomHeadersConnectFail(t *testing.T) { done := make(chan struct{}) go func() { err := wh.AttemptDispatch(context.Background(), 0, &EventBatch[testData]{ - StreamID: fftypes.NewUUID(), + StreamID: fftypes.NewUUID().String(), BatchNumber: 1, Events: []*Event[testData]{ {Data: &testData{Field1: 12345}}, @@ -179,7 +179,7 @@ func TestWebhooksTLS(t *testing.T) { done := make(chan struct{}) go func() { err := wh.AttemptDispatch(context.Background(), 0, &EventBatch[testData]{ - StreamID: fftypes.NewUUID(), + StreamID: fftypes.NewUUID().String(), BatchNumber: 1, Events: []*Event[testData]{ {Data: &testData{Field1: 12345}}, diff --git a/pkg/eventstreams/websockets_test.go b/pkg/eventstreams/websockets_test.go index c5999bf..deb291e 100644 --- a/pkg/eventstreams/websockets_test.go +++ b/pkg/eventstreams/websockets_test.go @@ -55,7 +55,7 @@ func TestWSAttemptIgnoreWrongAcks(t *testing.T) { }, "ut_stream") err := wsa.AttemptDispatch(context.Background(), 0, &EventBatch[testData]{ - StreamID: fftypes.NewUUID(), + StreamID: fftypes.NewUUID().String(), BatchNumber: 1, Events: []*Event[testData]{ {Data: &testData{Field1: 12345}}, @@ -81,7 +81,7 @@ func TestWSattemptDispatchExitPushingEvent(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) cancel() err := wsa.AttemptDispatch(ctx, 0, 
&EventBatch[testData]{ - StreamID: fftypes.NewUUID(), + StreamID: fftypes.NewUUID().String(), BatchNumber: 1, Events: []*Event[testData]{ {Data: &testData{Field1: 12345}}, From d69d7eeecd994af5a87186af59544327a4ba6fea Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Mon, 4 Dec 2023 22:53:02 -0500 Subject: [PATCH 09/19] Add checkpoint filters, and avoid supressing errors logs for conflicts Signed-off-by: Peter Broadhurst --- pkg/dbsql/database.go | 7 +------ pkg/eventstreams/persistence.go | 3 ++- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/pkg/dbsql/database.go b/pkg/dbsql/database.go index 69d751c..cb2512f 100644 --- a/pkg/dbsql/database.go +++ b/pkg/dbsql/database.go @@ -29,7 +29,6 @@ import ( "github.com/hyperledger/firefly-common/pkg/fftypes" "github.com/hyperledger/firefly-common/pkg/i18n" "github.com/hyperledger/firefly-common/pkg/log" - "github.com/sirupsen/logrus" // Import migrate file source _ "github.com/golang-migrate/migrate/v4/source/file" @@ -316,11 +315,7 @@ func (s *Database) InsertTxRows(ctx context.Context, table string, tx *TXWrapper result.Close() } if err != nil { - level := logrus.DebugLevel - if !requestConflictEmptyResult { - level = logrus.ErrorLevel - } - l.Logf(level, `SQL insert failed (conflictEmptyRequested=%t): %s sql=[ %s ]: %s`, requestConflictEmptyResult, err, sqlQuery, err) + l.Errorf(`SQL insert failed (conflictEmptyRequested=%t): %s sql=[ %s ]: %s`, requestConflictEmptyResult, err, sqlQuery, err) return i18n.WrapError(ctx, err, i18n.MsgDBInsertFailed) } } else { diff --git a/pkg/eventstreams/persistence.go b/pkg/eventstreams/persistence.go index c5ba409..66f8639 100644 --- a/pkg/eventstreams/persistence.go +++ b/pkg/eventstreams/persistence.go @@ -149,7 +149,8 @@ func (p *esPersistence[CT]) Checkpoints() dbsql.CRUD[*EventStreamCheckpoint] { NilValue: func() *EventStreamCheckpoint { return nil }, NewInstance: func() *EventStreamCheckpoint { return &EventStreamCheckpoint{} }, ScopedFilter: func() sq.Eq { return sq.Eq{} }, - EventHandler: nil, // set below + EventHandler: nil, // set below + QueryFactory: CheckpointFilters, IDValidator: p.idValidator, // checkpoints share the ID of the eventstream GetFieldPtr: func(inst *EventStreamCheckpoint, col string) interface{} { switch col { From e10fbf1bcf7d8522faadbfc1d6ad783f1d52e0ce Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Wed, 6 Dec 2023 00:04:40 -0500 Subject: [PATCH 10/19] Provide acces to GetTXFromContext (as well as BeginOrUseTx) Signed-off-by: Peter Broadhurst --- pkg/dbsql/database.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pkg/dbsql/database.go b/pkg/dbsql/database.go index cb2512f..1f0d2f2 100644 --- a/pkg/dbsql/database.go +++ b/pkg/dbsql/database.go @@ -121,7 +121,7 @@ func (s *Database) SequenceColumn() string { } func (s *Database) RunAsGroup(ctx context.Context, fn func(ctx context.Context) error) error { - if tx := getTXFromContext(ctx); tx != nil { + if tx := GetTXFromContext(ctx); tx != nil { // transaction already exists - just continue using it return fn(ctx) } @@ -160,7 +160,7 @@ func (s *Database) applyDBMigrations(ctx context.Context, config config.Section, return nil } -func getTXFromContext(ctx context.Context) *TXWrapper { +func GetTXFromContext(ctx context.Context) *TXWrapper { ctxKey := txContextKey{} txi := ctx.Value(ctxKey) if txi != nil { @@ -173,7 +173,7 @@ func getTXFromContext(ctx context.Context) *TXWrapper { func (s *Database) BeginOrUseTx(ctx context.Context) (ctx1 context.Context, tx *TXWrapper, 
autoCommit bool, err error) { - tx = getTXFromContext(ctx) + tx = GetTXFromContext(ctx) if tx != nil { // There is s transaction on the context already. // return existing with auto-commit flag, to prevent early commit @@ -200,7 +200,7 @@ func (s *Database) QueryTx(ctx context.Context, table string, tx *TXWrapper, q s if tx == nil { // If there is a transaction in the context, we should use it to provide consistency // in the read operations (read after insert for example). - tx = getTXFromContext(ctx) + tx = GetTXFromContext(ctx) } l := log.L(ctx) @@ -234,7 +234,7 @@ func (s *Database) CountQuery(ctx context.Context, table string, tx *TXWrapper, if tx == nil { // If there is a transaction in the context, we should use it to provide consistency // in the read operations (read after insert for example). - tx = getTXFromContext(ctx) + tx = GetTXFromContext(ctx) } if countExpr == "" { countExpr = "*" From fbd6c482c14d9ae37a9f073dd2717c33f76eb298 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Wed, 6 Dec 2023 14:59:29 -0500 Subject: [PATCH 11/19] Make it so that trace level logging can see the values of strings Signed-off-by: Peter Broadhurst --- pkg/dbsql/crud.go | 18 ++++++- pkg/dbsql/crud_test.go | 47 ++++++++++++------- .../000001_create_crudables_table.up.sql | 4 +- 3 files changed, 50 insertions(+), 19 deletions(-) diff --git a/pkg/dbsql/crud.go b/pkg/dbsql/crud.go index 64cfe76..f871a5b 100644 --- a/pkg/dbsql/crud.go +++ b/pkg/dbsql/crud.go @@ -295,7 +295,23 @@ func (c *CrudBase[T]) updateFromInstance(ctx context.Context, tx *TXWrapper, ins func (c *CrudBase[T]) getFieldValue(inst T, col string) interface{} { // Validate() will have checked this is safe for microservices (as long as they use that at build time in their UTs) - return reflect.ValueOf(c.GetFieldPtr(inst, col)).Elem().Interface() + val := reflect.ValueOf(c.GetFieldPtr(inst, col)).Elem().Interface() + // Primarily for debugging, we de-reference simple pointer type in fields + switch vt := val.(type) { + case *string: + if vt != nil { + val = *vt + } + case *int64: + if vt != nil { + val = *vt + } + case *bool: + if vt != nil { + val = *vt + } + } + return val } func (c *CrudBase[T]) setInsertTimestamps(inst T) { diff --git a/pkg/dbsql/crud_test.go b/pkg/dbsql/crud_test.go index 6eff520..dd8b566 100644 --- a/pkg/dbsql/crud_test.go +++ b/pkg/dbsql/crud_test.go @@ -41,6 +41,8 @@ type TestCRUDable struct { Field1 *string `json:"f1"` Field2 *fftypes.FFBigInt `json:"f2"` Field3 *fftypes.JSONAny `json:"f3"` + Field4 *int64 `json:"f4"` + Field5 *bool `json:"f5"` } var CRUDableQueryFactory = &ffapi.QueryFields{ @@ -52,6 +54,9 @@ var CRUDableQueryFactory = &ffapi.QueryFields{ "f1": &ffapi.StringField{}, "f2": &ffapi.BigIntField{}, "f3": &ffapi.JSONField{}, + "f4": &ffapi.Int64Field{}, + "f5": &ffapi.JSONField{}, + "f6": &ffapi.BoolField{}, } // TestHistory shows a simple object: @@ -170,6 +175,8 @@ func newCRUDCollection(db *Database, ns string) *TestCRUD { "field1", "field2", "field3", + "field4", + "field5", }, FilterFieldMap: map[string]string{ "f1": "field1", @@ -201,6 +208,10 @@ func newCRUDCollection(db *Database, ns string) *TestCRUD { return &inst.Field2 case "field3": return &inst.Field3 + case "field4": + return &inst.Field4 + case "field5": + return &inst.Field5 } return nil }, @@ -323,8 +334,8 @@ func checkEqualExceptTimes(t *testing.T, o1, o2 TestCRUDable) { assert.JSONEq(t, string(j1), string(j2)) } -func strPtr(s string) *string { - return &s +func ptrTo[T any](v T) *T { + return &v } func TestCRUDWithDBEnd2End(t 
*testing.T) { @@ -338,11 +349,13 @@ func TestCRUDWithDBEnd2End(t *testing.T) { ResourceBase: ResourceBase{ ID: fftypes.NewUUID(), }, - Name: strPtr("bob"), - NS: strPtr("ns1"), - Field1: strPtr("hello1"), + Name: ptrTo("bob"), + NS: ptrTo("ns1"), + Field1: ptrTo("hello1"), Field2: fftypes.NewFFBigInt(12345), Field3: fftypes.JSONAnyPtr(`{"some":"stuff"}`), + Field4: ptrTo(int64(12345)), + Field5: ptrTo(true), } collection := newCRUDCollection(sql.db, "ns1") @@ -392,7 +405,7 @@ func TestCRUDWithDBEnd2End(t *testing.T) { checkEqualExceptTimes(t, *c1, *c1copy) // Upsert the existing row optimized - c1copy.Field1 = strPtr("hello again - 1") + c1copy.Field1 = ptrTo("hello again - 1") created, err := iCrud.Upsert(ctx, c1copy, UpsertOptimizationExisting) assert.NoError(t, err) assert.False(t, created) @@ -404,7 +417,7 @@ func TestCRUDWithDBEnd2End(t *testing.T) { collection.events = nil // Upsert the existing row un-optimized - c1copy.Field1 = strPtr("hello again - 2") + c1copy.Field1 = ptrTo("hello again - 2") created, err = iCrud.Upsert(ctx, c1copy, UpsertOptimizationNew, collection.postCommit) assert.NoError(t, err) assert.False(t, created) @@ -413,7 +426,7 @@ func TestCRUDWithDBEnd2End(t *testing.T) { checkEqualExceptTimes(t, *c1copy, *c1copy2) // Explicitly replace it - c1copy.Field1 = strPtr("hello again - 3") + c1copy.Field1 = ptrTo("hello again - 3") err = iCrud.Replace(ctx, c1copy, collection.postCommit) assert.NoError(t, err) c1copy3, err := iCrud.GetByID(ctx, c1.ID.String()) @@ -421,7 +434,7 @@ func TestCRUDWithDBEnd2End(t *testing.T) { checkEqualExceptTimes(t, *c1copy, *c1copy3) // Explicitly update it - c1copy.Field1 = strPtr("hello again - 4") + c1copy.Field1 = ptrTo("hello again - 4") err = iCrud.Update(ctx, c1copy.ID.String(), CRUDableQueryFactory.NewUpdate(ctx).Set( "f1", *c1copy.Field1, ), collection.postCommit) @@ -431,12 +444,12 @@ func TestCRUDWithDBEnd2End(t *testing.T) { checkEqualExceptTimes(t, *c1copy, *c1copy4) // Use simple PATCH semantics to updated it - c1copy.Field1 = strPtr("hello again - 5") + c1copy.Field1 = ptrTo("hello again - 5") sparseUpdate := &TestCRUDable{ ResourceBase: ResourceBase{ ID: c1copy.ID, }, - Field1: strPtr("hello again - 5"), + Field1: ptrTo("hello again - 5"), } err = iCrud.UpdateSparse(ctx, sparseUpdate, collection.postCommit) assert.NoError(t, err) @@ -447,8 +460,8 @@ func TestCRUDWithDBEnd2End(t *testing.T) { // Cannot replace something that doesn't exist c2 := *c1 c2.ID = fftypes.NewUUID() - c2.NS = strPtr("ns1") - c2.Field1 = strPtr("bonjour") + c2.NS = ptrTo("ns1") + c2.Field1 = ptrTo("bonjour") err = iCrud.Replace(ctx, &c2, collection.postCommit) assert.Regexp(t, "FF00205", err) @@ -487,8 +500,8 @@ func TestCRUDWithDBEnd2End(t *testing.T) { ResourceBase: ResourceBase{ ID: fftypes.NewUUID(), }, - NS: strPtr("ns1"), - Field1: strPtr(fmt.Sprintf("crudable[%.5d]", i)), + NS: ptrTo("ns1"), + Field1: ptrTo(fmt.Sprintf("crudable[%.5d]", i)), Field2: fftypes.NewFFBigInt(919191), } } @@ -641,8 +654,8 @@ func TestLeftJOINExample(t *testing.T) { ResourceBase: ResourceBase{ ID: fftypes.NewUUID(), }, - NS: strPtr("ns1"), - Field1: strPtr("linked1"), + NS: ptrTo("ns1"), + Field1: ptrTo("linked1"), Field2: fftypes.NewFFBigInt(11111), Field3: fftypes.JSONAnyPtr(`{"linked":1}`), } diff --git a/test/dbmigrations/000001_create_crudables_table.up.sql b/test/dbmigrations/000001_create_crudables_table.up.sql index 7a4bc75..738dba2 100644 --- a/test/dbmigrations/000001_create_crudables_table.up.sql +++ b/test/dbmigrations/000001_create_crudables_table.up.sql @@ 
-7,6 +7,8 @@ CREATE TABLE crudables ( name TEXT, field1 TEXT, field2 VARCHAR(65), - field3 TEXT + field3 TEXT, + field4 BIGINT, + field5 BOOLEAN ); CREATE UNIQUE INDEX crudables_id ON crudables(ns, id); From b4aadf8b2f504d5f3d0c5ffcd0566cd77de36ab4 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Wed, 6 Dec 2023 23:13:43 -0500 Subject: [PATCH 12/19] Further tidy-up of logging for duplicate resolution case Signed-off-by: Peter Broadhurst --- pkg/dbsql/database.go | 8 +++++++- pkg/dbsql/database_test.go | 14 ++++++++++++++ pkg/dbsql/mock_provider.go | 2 ++ 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/pkg/dbsql/database.go b/pkg/dbsql/database.go index 1f0d2f2..e74c242 100644 --- a/pkg/dbsql/database.go +++ b/pkg/dbsql/database.go @@ -303,11 +303,13 @@ func (s *Database) InsertTxRows(ctx context.Context, table string, tx *TXWrapper before := time.Now() l.Tracef(`SQL-> insert query: %s (args: %+v)`, sqlQuery, args) if useQuery { + noInsert := false result, err := tx.sqlTX.QueryContext(ctx, sqlQuery, args...) for i := 0; i < len(sequences) && err == nil; i++ { if result.Next() { err = result.Scan(&sequences[i]) } else { + noInsert = true err = i18n.NewError(ctx, i18n.MsgDBNoSequence, i+1) } } @@ -315,7 +317,11 @@ func (s *Database) InsertTxRows(ctx context.Context, table string, tx *TXWrapper result.Close() } if err != nil { - l.Errorf(`SQL insert failed (conflictEmptyRequested=%t): %s sql=[ %s ]: %s`, requestConflictEmptyResult, err, sqlQuery, err) + if requestConflictEmptyResult && noInsert { + l.Infof(`SQL insert returning partial result: %s`, err) + } else { + l.Errorf(`SQL insert failed (conflictEmptyRequested=%t) sql=[ %s ]: %s`, requestConflictEmptyResult, sqlQuery, err) + } return i18n.WrapError(ctx, err, i18n.MsgDBInsertFailed) } } else { diff --git a/pkg/dbsql/database_test.go b/pkg/dbsql/database_test.go index 34711a3..84c0e41 100644 --- a/pkg/dbsql/database_test.go +++ b/pkg/dbsql/database_test.go @@ -164,6 +164,20 @@ func TestInsertTxPostgreSQLReturnedSyntax(t *testing.T) { assert.Equal(t, int64(12345), sequence) } +func TestInsertFailMultipleReturnPartialResult(t *testing.T) { + mp := NewMockProvider() + mp.MultiRowInsert = true + s, mdb := mp.UTInit() + mdb.ExpectBegin() + mdb.ExpectQuery("INSERT.*").WillReturnRows(sqlmock.NewRows([]string{s.SequenceColumn()})) + ctx, tx, _, err := s.BeginOrUseTx(context.Background()) + assert.NoError(t, err) + s.FakePSQLInsert = true + sb := sq.Insert("table").Columns("col1").Values(("val1")) + _, err = s.InsertTxExt(ctx, "table1", tx, sb, nil, true) + assert.Regexp(t, "FF00177", err) +} + func TestInsertTxPostgreSQLReturnedSyntaxFail(t *testing.T) { s, mdb := NewMockProvider().UTInit() mdb.ExpectBegin() diff --git a/pkg/dbsql/mock_provider.go b/pkg/dbsql/mock_provider.go index 13fdf3e..b01c75a 100644 --- a/pkg/dbsql/mock_provider.go +++ b/pkg/dbsql/mock_provider.go @@ -45,6 +45,7 @@ type MockProviderConfig struct { OpenError error GetMigrationDriverError error IndividualSort bool + MultiRowInsert bool } func NewMockProvider() *MockProvider { @@ -85,6 +86,7 @@ func (mp *MockProvider) Features() SQLFeatures { features.AcquireLock = func(lockName string) string { return fmt.Sprintf(``, lockName) } + features.MultiRowInsert = mp.MultiRowInsert return features } From f2e5774bfee4b6129873790b09ba12e6baf5cd79 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Thu, 14 Dec 2023 13:56:17 -0500 Subject: [PATCH 13/19] Allow override of default sort Signed-off-by: Peter Broadhurst --- pkg/dbsql/crud.go | 11 ++++++++--- 
pkg/dbsql/crud_test.go | 4 ++++ 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/pkg/dbsql/crud.go b/pkg/dbsql/crud.go index f871a5b..c6e82a0 100644 --- a/pkg/dbsql/crud.go +++ b/pkg/dbsql/crud.go @@ -130,6 +130,7 @@ type CrudBase[T Resource] struct { ImmutableColumns []string NameField string // If supporting name semantics QueryFactory ffapi.QueryFactory // Must be set when name is set + DefaultSort func() []interface{} // optionally override the default sort - array of *ffapi.SortField or string IDValidator func(ctx context.Context, idStr string) error // if IDs must conform to a pattern, such as a UUID (prebuilt UUIDValidator provided for that) NilValue func() T // nil value typed to T @@ -688,10 +689,14 @@ func (c *CrudBase[T]) GetByUUIDOrName(ctx context.Context, uuidOrName string, ge } func (c *CrudBase[T]) getManyScoped(ctx context.Context, tableFrom string, fi *ffapi.FilterInfo, cols, readCols []string, preconditions []sq.Sqlizer) (instances []T, fr *ffapi.FilterResult, err error) { + defaultSort := []interface{}{ + &ffapi.SortField{Field: c.DB.sequenceColumn, Descending: true}, + } + if c.DefaultSort != nil { + defaultSort = c.DefaultSort() + } query, fop, fi, err := c.DB.filterSelectFinalized(ctx, c.ReadTableAlias, sq.Select(readCols...).From(tableFrom), fi, c.FilterFieldMap, - []interface{}{ - &ffapi.SortField{Field: c.DB.sequenceColumn, Descending: true}, - }, preconditions...) + defaultSort, preconditions...) if err != nil { return nil, nil, err } diff --git a/pkg/dbsql/crud_test.go b/pkg/dbsql/crud_test.go index dd8b566..bfc71e7 100644 --- a/pkg/dbsql/crud_test.go +++ b/pkg/dbsql/crud_test.go @@ -291,6 +291,10 @@ func newLinkableCollection(db *Database, ns string) *CrudBase[*TestLinkable] { ReadQueryModifier: func(query sq.SelectBuilder) sq.SelectBuilder { return query.LeftJoin("crudables AS c ON c.id = l.crud_id") }, + DefaultSort: func() []interface{} { + // Return an empty list + return []interface{}{} + }, NilValue: func() *TestLinkable { return nil }, NewInstance: func() *TestLinkable { return &TestLinkable{} }, ScopedFilter: func() squirrel.Eq { return sq.Eq{"l.ns": ns} }, From 159a1b752ae72c9340407cd41f3d7b40b7c84d53 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Thu, 14 Dec 2023 19:43:07 -0500 Subject: [PATCH 14/19] Allow access to Mux to install custom handlers Signed-off-by: Peter Broadhurst --- pkg/ffapi/apiserver.go | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/pkg/ffapi/apiserver.go b/pkg/ffapi/apiserver.go index 436e1d1..2d43f02 100644 --- a/pkg/ffapi/apiserver.go +++ b/pkg/ffapi/apiserver.go @@ -44,6 +44,7 @@ const APIServerMetricsSubSystemName = "api_server_rest" type APIServer interface { Serve(ctx context.Context) error Started() <-chan struct{} + MuxRouter(ctx context.Context) *mux.Router APIPublicURL() string // valid to call after server is successfully started } @@ -62,6 +63,7 @@ type apiServer[T any] struct { metricsEnabled bool metricsPath string metricsPublicURL string + mux *mux.Router APIServerOptions[T] } @@ -119,6 +121,14 @@ func NewAPIServer[T any](ctx context.Context, options APIServerOptions[T]) APISe return as } +// Can be called before Serve, but MUST use the background context if so +func (as *apiServer[T]) MuxRouter(ctx context.Context) *mux.Router { + if as.mux == nil { + as.mux = as.createMuxRouter(ctx) + } + return as.mux +} + // Serve is the main entry point for the API Server func (as *apiServer[T]) Serve(ctx context.Context) (err error) { started := false @@ -132,7 +142,7 @@ func 
(as *apiServer[T]) Serve(ctx context.Context) (err error) { httpErrChan := make(chan error) metricsErrChan := make(chan error) - apiHTTPServer, err := httpserver.NewHTTPServer(ctx, "api", as.createMuxRouter(ctx), httpErrChan, as.APIConfig, as.CORSConfig, &httpserver.ServerOptions{ + apiHTTPServer, err := httpserver.NewHTTPServer(ctx, "api", as.MuxRouter(ctx), httpErrChan, as.APIConfig, as.CORSConfig, &httpserver.ServerOptions{ MaximumRequestTimeout: as.requestMaxTimeout, }) if err != nil { From a295317c30bfa527c4bb5005a3f8e7e074bccf4e Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Sun, 17 Dec 2023 16:13:13 -0500 Subject: [PATCH 15/19] Fix up the comment Signed-off-by: Peter Broadhurst --- pkg/eventstreams/event.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pkg/eventstreams/event.go b/pkg/eventstreams/event.go index 3f67f7a..d160e0b 100644 --- a/pkg/eventstreams/event.go +++ b/pkg/eventstreams/event.go @@ -31,7 +31,10 @@ type EventBatch[DataType any] struct { type Event[DataType any] struct { EventCommon - Data *DataType `json:"-"` // can be anything to deliver for the event - must be JSON marshalable, and should not define topic or sequence. Will be flattened into the struct + // Data can be anything to deliver for the event - must be JSON marshalable. + // Will be flattened into the struct. + // Can define topic and/or sequenceId, but these will be overridden with EventCommon strings in the JSON serialization. + Data *DataType `json:"-"` } type EventCommon struct { From a97784a881e1707c8754212ff1bfab33192bee1b Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Thu, 21 Dec 2023 14:05:27 -0500 Subject: [PATCH 16/19] Add shortcuts eq,neq,lt,nlt,gt,ngt,lte,nlte,gte,ngte Signed-off-by: Peter Broadhurst --- pkg/ffapi/restfilter_json.go | 42 ++++++++++++++++++++++---- pkg/ffapi/restfilter_json_test.go | 14 ++++++++- pkg/i18n/en_base_field_descriptions.go | 11 +++++++ 3 files changed, 60 insertions(+), 7 deletions(-) diff --git a/pkg/ffapi/restfilter_json.go b/pkg/ffapi/restfilter_json.go index 6a863f1..ba3f46e 100644 --- a/pkg/ffapi/restfilter_json.go +++ b/pkg/ffapi/restfilter_json.go @@ -62,13 +62,24 @@ type FilterJSONKeyValues struct { type FilterJSON struct { Or []*FilterJSON `ffstruct:"FilterJSON" json:"or,omitempty"` Equal []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"equal,omitempty"` + Eq []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"eq,omitempty"` // short name + NEq []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"neq,omitempty"` // negated short name Contains []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"contains,omitempty"` StartsWith []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"startsWith,omitempty"` LessThan []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lessThan,omitempty"` + LT []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lt,omitempty"` // short name + NLT []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"nlt,omitempty"` // negated short name LessThanOrEqual []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lessThanOrEqual,omitempty"` + LTE []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lte,omitempty"` // short name + NLTE []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"nlte,omitempty"` // negated short name GreaterThan []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"greaterThan,omitempty"` + GT []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"gt,omitempty"` // short name + NGT []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"ngt,omitempty"` // negated short name GreaterThanOrEqual 
[]*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"greaterThanOrEqual,omitempty"` + GTE []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"gte,omitempty"` // short name + NGTE []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"ngte,omitempty"` // negated short name In []*FilterJSONKeyValues `ffstruct:"FilterJSON" json:"in,omitempty"` + NIn []*FilterJSONKeyValues `ffstruct:"FilterJSON" json:"nin,omitempty"` // negated short name } type QueryJSON struct { @@ -133,7 +144,7 @@ func validateFilterField(ctx context.Context, fb FilterBuilder, fieldAnyCase str } func (jq *QueryJSON) addSimpleFilters(ctx context.Context, fb FilterBuilder, jsonFilter *FilterJSON, andFilter AndFilter) (AndFilter, error) { - for _, e := range jsonFilter.Equal { + for _, e := range joinShortNames(jsonFilter.Equal, jsonFilter.Eq, jsonFilter.NEq) { field, err := validateFilterField(ctx, fb, e.Field) if err != nil { return nil, err @@ -193,12 +204,31 @@ func (jq *QueryJSON) addSimpleFilters(ctx context.Context, fb FilterBuilder, jso return andFilter, nil } +func joinShortNames(long, short, negated []*FilterJSONKeyValue) []*FilterJSONKeyValue { + res := make([]*FilterJSONKeyValue, len(long)+len(short)+len(negated)) + copy(res, long) + copy(res[len(long):], short) + negs := res[len(short)+len(long):] + copy(negs, negated) + for _, n := range negs { + n.Not = true + } + return res +} + +func joinInAndNin(in, nin []*FilterJSONKeyValues) []*FilterJSONKeyValues { + res := make([]*FilterJSONKeyValues, len(in)+len(nin)) + copy(res, in) + copy(res[len(in):], nin) + return res +} + func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonFilter *FilterJSON) (Filter, error) { andFilter, err := jq.addSimpleFilters(ctx, fb, jsonFilter, fb.And()) if err != nil { return nil, err } - for _, e := range jsonFilter.LessThan { + for _, e := range joinShortNames(jsonFilter.LessThan, jsonFilter.LT, jsonFilter.NLT) { field, err := validateFilterField(ctx, fb, e.Field) if err != nil { return nil, err @@ -208,7 +238,7 @@ func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonF } andFilter = andFilter.Condition(fb.Lt(field, e.Value.String())) } - for _, e := range jsonFilter.LessThanOrEqual { + for _, e := range joinShortNames(jsonFilter.LessThanOrEqual, jsonFilter.LTE, jsonFilter.NLTE) { field, err := validateFilterField(ctx, fb, e.Field) if err != nil { return nil, err @@ -218,7 +248,7 @@ func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonF } andFilter = andFilter.Condition(fb.Lte(field, e.Value.String())) } - for _, e := range jsonFilter.GreaterThan { + for _, e := range joinShortNames(jsonFilter.GreaterThan, jsonFilter.GT, jsonFilter.NGT) { field, err := validateFilterField(ctx, fb, e.Field) if err != nil { return nil, err @@ -228,7 +258,7 @@ func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonF } andFilter = andFilter.Condition(fb.Gt(field, e.Value.String())) } - for _, e := range jsonFilter.GreaterThanOrEqual { + for _, e := range joinShortNames(jsonFilter.GreaterThanOrEqual, jsonFilter.GTE, jsonFilter.NGTE) { field, err := validateFilterField(ctx, fb, e.Field) if err != nil { return nil, err @@ -238,7 +268,7 @@ func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonF } andFilter = andFilter.Condition(fb.Gte(field, e.Value.String())) } - for _, e := range jsonFilter.In { + for _, e := range joinInAndNin(jsonFilter.In, jsonFilter.NIn) { field, err := validateFilterField(ctx, fb, e.Field) if err != nil { return nil, err 
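For illustration only, and not part of this patch set: a minimal sketch of how a client of the package might exercise the new short-name aliases over the POST query API. It assumes the exported QueryFields, StringField, Int64Field and BoolField helpers already provided by the ffapi filter code; the field names ("tag", "sequence", "masked") are invented for the example. The n-prefixed forms ("neq" here) force the not modifier onto every condition they carry, matching the joinShortNames helper above.

package main

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/hyperledger/firefly-common/pkg/ffapi"
)

// Assumed field set for illustration only - real applications define their own QueryFields
var exampleQueryFactory = &ffapi.QueryFields{
	"tag":      &ffapi.StringField{},
	"sequence": &ffapi.Int64Field{},
	"masked":   &ffapi.BoolField{},
}

func main() {
	var q ffapi.QueryJSON
	// "eq", "neq" and "gt" are the new short names - "neq" also negates each condition it contains
	err := json.Unmarshal([]byte(`{
		"eq":   [{"field": "masked",   "value": true}],
		"neq":  [{"field": "tag",      "value": "a"}],
		"gt":   [{"field": "sequence", "value": 100}],
		"sort": ["-sequence"],
		"limit": 10
	}`), &q)
	if err != nil {
		panic(err)
	}
	filter, err := q.BuildFilter(context.Background(), exampleQueryFactory)
	if err != nil {
		panic(err)
	}
	fi, err := filter.Finalize()
	if err != nil {
		panic(err)
	}
	// Prints roughly: ( masked == true ) && ( tag != 'a' ) && ( sequence >> 100 ) sort=-sequence limit=10
	fmt.Println(fi.String())
}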
diff --git a/pkg/ffapi/restfilter_json_test.go b/pkg/ffapi/restfilter_json_test.go index b9fe7ea..0675192 100644 --- a/pkg/ffapi/restfilter_json_test.go +++ b/pkg/ffapi/restfilter_json_test.go @@ -40,6 +40,18 @@ func TestBuildQueryJSONNestedAndOr(t *testing.T) { "value": "a" } ], + "eq": [ + { + "field": "masked", + "value": true + } + ], + "neq": [ + { + "field": "sequence", + "value": 999 + } + ], "greaterThan": [ { "field": "sequence", @@ -79,7 +91,7 @@ func TestBuildQueryJSONNestedAndOr(t *testing.T) { fi, err := filter.Finalize() assert.NoError(t, err) - assert.Equal(t, "( tag == 'a' ) && ( sequence >> 10 ) && ( ( ( masked == true ) && ( tag IN ['a','b','c'] ) ) || ( masked == false ) ) sort=tag,-sequence skip=5 limit=10", fi.String()) + assert.Equal(t, "( tag == 'a' ) && ( masked == true ) && ( sequence != 999 ) && ( sequence >> 10 ) && ( ( ( masked == true ) && ( tag IN ['a','b','c'] ) ) || ( masked == false ) ) sort=tag,-sequence skip=5 limit=10", fi.String()) } func TestBuildQuerySingleNestedOr(t *testing.T) { diff --git a/pkg/i18n/en_base_field_descriptions.go b/pkg/i18n/en_base_field_descriptions.go index 5e9a04d..dcbf673 100644 --- a/pkg/i18n/en_base_field_descriptions.go +++ b/pkg/i18n/en_base_field_descriptions.go @@ -24,12 +24,23 @@ var ( FilterJSONValues = ffm("FilterJSON.values", "Array of values to use in the comparison") FilterJSONContains = ffm("FilterJSON.contains", "Array of field + value combinations to apply as string-contains filters - all filters must match") FilterJSONEqual = ffm("FilterJSON.equal", "Array of field + value combinations to apply as equal filters - all must match") + FilterJSONEq = ffm("FilterJSON.eq", "Shortname for equal") + FilterJSONNEq = ffm("FilterJSON.neq", "Shortcut for equal with all conditions negated (the not property of all children is overridden)") FilterJSONStartsWith = ffm("FilterJSON.startsWith", "Array of field + value combinations to apply as starts-with filters - all filters must match") FilterJSONGreaterThan = ffm("FilterJSON.greaterThan", "Array of field + value combinations to apply as greater-than filters - all filters must match") + FilterJSONGT = ffm("FilterJSON.gt", "Short name for greaterThan") + FilterJSONNGT = ffm("FilterJSON.ngt", "Shortcut for greaterThan with all conditions negated (the not property of all children is overridden)") FilterJSONGreaterThanOrEqual = ffm("FilterJSON.greaterThanOrEqual", "Array of field + value combinations to apply as greater-than filters - all filters must match") + FilterJSONGTE = ffm("FilterJSON.gte", "Short name for greaterThanOrEqual") + FilterJSONNGTE = ffm("FilterJSON.ngte", "Shortcut for greaterThanOrEqual with all conditions negated (the not property of all children is overridden)") FilterJSONLessThan = ffm("FilterJSON.lessThan", "Array of field + value combinations to apply as less-than-or-equal filters - all filters must match") + FilterJSONLT = ffm("FilterJSON.lt", "Short name for lessThan") + FilterJSONNLT = ffm("FilterJSON.nlt", "Shortcut for lessThan with all conditions negated (the not property of all children is overridden)") FilterJSONLessThanOrEqual = ffm("FilterJSON.lessThanOrEqual", "Array of field + value combinations to apply as less-than-or-equal filters - all filters must match") + FilterJSONLTE = ffm("FilterJSON.lte", "Short name for lessThanOrEqual") + FilterJSONNLTE = ffm("FilterJSON.nlte", "Shortcut for lessThanOrEqual with all conditions negated (the not property of all children is overridden)") FilterJSONIn = ffm("FilterJSON.in", "Array of field + 
values-array combinations to apply as 'in' filters (matching one of a set of values) - all filters must match") + FilterJSONNIn = ffm("FilterJSON.nin", "Shortcut for in with all conditions negated (the not property of all children is overridden)") FilterJSONLimit = ffm("FilterJSON.limit", "Limit on the results to return") FilterJSONSkip = ffm("FilterJSON.skip", "Number of results to skip before returning entries, for skip+limit based pagination") FilterJSONSort = ffm("FilterJSON.sort", "Array of fields to sort by. A '-' prefix on a field requests that field is sorted in descending order") From 1fae02c3e63f20129346e58edc97a771f44e3df1 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Thu, 21 Dec 2023 14:07:35 -0500 Subject: [PATCH 17/19] Rename Conditions() to avoid confusion Signed-off-by: Peter Broadhurst --- pkg/ffapi/filter.go | 6 +++--- pkg/ffapi/restfilter_json.go | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/pkg/ffapi/filter.go b/pkg/ffapi/filter.go index 8fa2dbf..d37050e 100644 --- a/pkg/ffapi/filter.go +++ b/pkg/ffapi/filter.go @@ -70,7 +70,7 @@ type MultiConditionFilter interface { Filter // Add adds filters to the condition Condition(...Filter) MultiConditionFilter - Conditions() []Filter + GetConditions() []Filter } type AndFilter interface{ MultiConditionFilter } @@ -570,7 +570,7 @@ type andFilter struct { baseFilter } -func (fb *andFilter) Conditions() []Filter { +func (fb *andFilter) GetConditions() []Filter { return fb.children } @@ -593,7 +593,7 @@ type orFilter struct { baseFilter } -func (fb *orFilter) Conditions() []Filter { +func (fb *orFilter) GetConditions() []Filter { return fb.children } diff --git a/pkg/ffapi/restfilter_json.go b/pkg/ffapi/restfilter_json.go index ba3f46e..5f5f2f2 100644 --- a/pkg/ffapi/restfilter_json.go +++ b/pkg/ffapi/restfilter_json.go @@ -291,14 +291,14 @@ func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonF } childFilter.Condition(subFilter) } - if len(childFilter.Conditions()) == 1 { - andFilter.Condition(childFilter.Conditions()[0]) + if len(childFilter.GetConditions()) == 1 { + andFilter.Condition(childFilter.GetConditions()[0]) } else { andFilter.Condition(childFilter) } } - if len(andFilter.Conditions()) == 1 { - return andFilter.Conditions()[0], nil + if len(andFilter.GetConditions()) == 1 { + return andFilter.GetConditions()[0], nil } return andFilter, nil } From fa4b399ae31ab956292491dc0146728491445eb6 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Thu, 21 Dec 2023 14:19:37 -0500 Subject: [PATCH 18/19] Fixed missing test and negation Signed-off-by: Peter Broadhurst --- pkg/ffapi/restfilter_json.go | 6 +++++- pkg/ffapi/restfilter_json_test.go | 12 +++++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/pkg/ffapi/restfilter_json.go b/pkg/ffapi/restfilter_json.go index 5f5f2f2..f3d214e 100644 --- a/pkg/ffapi/restfilter_json.go +++ b/pkg/ffapi/restfilter_json.go @@ -219,7 +219,11 @@ func joinShortNames(long, short, negated []*FilterJSONKeyValue) []*FilterJSONKey func joinInAndNin(in, nin []*FilterJSONKeyValues) []*FilterJSONKeyValues { res := make([]*FilterJSONKeyValues, len(in)+len(nin)) copy(res, in) - copy(res[len(in):], nin) + negs := res[len(in):] + copy(negs, nin) + for _, n := range negs { + n.Not = true + } return res } diff --git a/pkg/ffapi/restfilter_json_test.go b/pkg/ffapi/restfilter_json_test.go index 0675192..4989657 100644 --- a/pkg/ffapi/restfilter_json_test.go +++ b/pkg/ffapi/restfilter_json_test.go @@ -71,6 +71,16 @@ func 
TestBuildQueryJSONNestedAndOr(t *testing.T) { "field": "tag", "values": ["a","b","c"] } + ], + "nin": [ + { + "field": "tag", + "values": ["x","y"] + }, + { + "field": "tag", + "values": ["z"] + } ] }, { @@ -91,7 +101,7 @@ func TestBuildQueryJSONNestedAndOr(t *testing.T) { fi, err := filter.Finalize() assert.NoError(t, err) - assert.Equal(t, "( tag == 'a' ) && ( masked == true ) && ( sequence != 999 ) && ( sequence >> 10 ) && ( ( ( masked == true ) && ( tag IN ['a','b','c'] ) ) || ( masked == false ) ) sort=tag,-sequence skip=5 limit=10", fi.String()) + assert.Equal(t, "( tag == 'a' ) && ( masked == true ) && ( sequence != 999 ) && ( sequence >> 10 ) && ( ( ( masked == true ) && ( tag IN ['a','b','c'] ) && ( tag NI ['x','y'] ) && ( tag NI ['z'] ) ) || ( masked == false ) ) sort=tag,-sequence skip=5 limit=10", fi.String()) } func TestBuildQuerySingleNestedOr(t *testing.T) { From 9f0d22ce42418c476f113d94bf87c883e40ff0d5 Mon Sep 17 00:00:00 2001 From: Peter Broadhurst Date: Thu, 21 Dec 2023 14:24:22 -0500 Subject: [PATCH 19/19] Remove the unnecessary negation Signed-off-by: Peter Broadhurst --- pkg/ffapi/restfilter_json.go | 20 ++++++++------------ pkg/i18n/en_base_field_descriptions.go | 4 ---- 2 files changed, 8 insertions(+), 16 deletions(-) diff --git a/pkg/ffapi/restfilter_json.go b/pkg/ffapi/restfilter_json.go index f3d214e..02b0fff 100644 --- a/pkg/ffapi/restfilter_json.go +++ b/pkg/ffapi/restfilter_json.go @@ -67,17 +67,13 @@ type FilterJSON struct { Contains []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"contains,omitempty"` StartsWith []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"startsWith,omitempty"` LessThan []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lessThan,omitempty"` - LT []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lt,omitempty"` // short name - NLT []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"nlt,omitempty"` // negated short name + LT []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lt,omitempty"` // short name LessThanOrEqual []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lessThanOrEqual,omitempty"` - LTE []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lte,omitempty"` // short name - NLTE []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"nlte,omitempty"` // negated short name + LTE []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"lte,omitempty"` // short name GreaterThan []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"greaterThan,omitempty"` - GT []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"gt,omitempty"` // short name - NGT []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"ngt,omitempty"` // negated short name + GT []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"gt,omitempty"` // short name GreaterThanOrEqual []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"greaterThanOrEqual,omitempty"` - GTE []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"gte,omitempty"` // short name - NGTE []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"ngte,omitempty"` // negated short name + GTE []*FilterJSONKeyValue `ffstruct:"FilterJSON" json:"gte,omitempty"` // short name In []*FilterJSONKeyValues `ffstruct:"FilterJSON" json:"in,omitempty"` NIn []*FilterJSONKeyValues `ffstruct:"FilterJSON" json:"nin,omitempty"` // negated short name } @@ -232,7 +228,7 @@ func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonF if err != nil { return nil, err } - for _, e := range joinShortNames(jsonFilter.LessThan, jsonFilter.LT, jsonFilter.NLT) { + for _, e := range joinShortNames(jsonFilter.LessThan, jsonFilter.LT, nil) { 
field, err := validateFilterField(ctx, fb, e.Field) if err != nil { return nil, err @@ -242,7 +238,7 @@ func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonF } andFilter = andFilter.Condition(fb.Lt(field, e.Value.String())) } - for _, e := range joinShortNames(jsonFilter.LessThanOrEqual, jsonFilter.LTE, jsonFilter.NLTE) { + for _, e := range joinShortNames(jsonFilter.LessThanOrEqual, jsonFilter.LTE, nil) { field, err := validateFilterField(ctx, fb, e.Field) if err != nil { return nil, err @@ -252,7 +248,7 @@ func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonF } andFilter = andFilter.Condition(fb.Lte(field, e.Value.String())) } - for _, e := range joinShortNames(jsonFilter.GreaterThan, jsonFilter.GT, jsonFilter.NGT) { + for _, e := range joinShortNames(jsonFilter.GreaterThan, jsonFilter.GT, nil) { field, err := validateFilterField(ctx, fb, e.Field) if err != nil { return nil, err @@ -262,7 +258,7 @@ func (jq *QueryJSON) BuildSubFilter(ctx context.Context, fb FilterBuilder, jsonF } andFilter = andFilter.Condition(fb.Gt(field, e.Value.String())) } - for _, e := range joinShortNames(jsonFilter.GreaterThanOrEqual, jsonFilter.GTE, jsonFilter.NGTE) { + for _, e := range joinShortNames(jsonFilter.GreaterThanOrEqual, jsonFilter.GTE, nil) { field, err := validateFilterField(ctx, fb, e.Field) if err != nil { return nil, err diff --git a/pkg/i18n/en_base_field_descriptions.go b/pkg/i18n/en_base_field_descriptions.go index dcbf673..862be88 100644 --- a/pkg/i18n/en_base_field_descriptions.go +++ b/pkg/i18n/en_base_field_descriptions.go @@ -29,16 +29,12 @@ var ( FilterJSONStartsWith = ffm("FilterJSON.startsWith", "Array of field + value combinations to apply as starts-with filters - all filters must match") FilterJSONGreaterThan = ffm("FilterJSON.greaterThan", "Array of field + value combinations to apply as greater-than filters - all filters must match") FilterJSONGT = ffm("FilterJSON.gt", "Short name for greaterThan") - FilterJSONNGT = ffm("FilterJSON.ngt", "Shortcut for greaterThan with all conditions negated (the not property of all children is overridden)") FilterJSONGreaterThanOrEqual = ffm("FilterJSON.greaterThanOrEqual", "Array of field + value combinations to apply as greater-than filters - all filters must match") FilterJSONGTE = ffm("FilterJSON.gte", "Short name for greaterThanOrEqual") - FilterJSONNGTE = ffm("FilterJSON.ngte", "Shortcut for greaterThanOrEqual with all conditions negated (the not property of all children is overridden)") FilterJSONLessThan = ffm("FilterJSON.lessThan", "Array of field + value combinations to apply as less-than-or-equal filters - all filters must match") FilterJSONLT = ffm("FilterJSON.lt", "Short name for lessThan") - FilterJSONNLT = ffm("FilterJSON.nlt", "Shortcut for lessThan with all conditions negated (the not property of all children is overridden)") FilterJSONLessThanOrEqual = ffm("FilterJSON.lessThanOrEqual", "Array of field + value combinations to apply as less-than-or-equal filters - all filters must match") FilterJSONLTE = ffm("FilterJSON.lte", "Short name for lessThanOrEqual") - FilterJSONNLTE = ffm("FilterJSON.nlte", "Shortcut for lessThanOrEqual with all conditions negated (the not property of all children is overridden)") FilterJSONIn = ffm("FilterJSON.in", "Array of field + values-array combinations to apply as 'in' filters (matching one of a set of values) - all filters must match") FilterJSONNIn = ffm("FilterJSON.nin", "Shortcut for in with all conditions negated (the not property of all 
children is overridden)") FilterJSONLimit = ffm("FilterJSON.limit", "Limit on the results to return")