diff --git a/app/keepers/keepers.go b/app/keepers/keepers.go index 57aad919b..2bb7dca57 100644 --- a/app/keepers/keepers.go +++ b/app/keepers/keepers.go @@ -394,6 +394,7 @@ func (a *AppKeepers) InitKeepers( ) a.StreamerKeeper = *streamermodulekeeper.NewKeeper( + appCodec, a.keys[streamermoduletypes.StoreKey], a.GetSubspace(streamermoduletypes.ModuleName), a.BankKeeper, @@ -568,6 +569,7 @@ func (a *AppKeepers) SetupHooks() { a.TxFeesKeeper.Hooks(), a.DelayedAckKeeper.GetEpochHooks(), a.DymNSKeeper.GetEpochHooks(), + a.RollappKeeper.GetEpochHooks(), ), ) diff --git a/app/keepers/modules.go b/app/keepers/modules.go index e4e8c9159..c2d3fe71b 100644 --- a/app/keepers/modules.go +++ b/app/keepers/modules.go @@ -361,12 +361,12 @@ var InitGenesis = []string{ rollappmoduletypes.ModuleName, sequencertypes.ModuleName, sponsorshiptypes.ModuleName, - streamermoduletypes.ModuleName, denommetadatamoduletypes.ModuleName, // must after `x/bank` to trigger hooks delayedacktypes.ModuleName, eibcmoduletypes.ModuleName, dymnstypes.ModuleName, epochstypes.ModuleName, + streamermoduletypes.ModuleName, // must be after x/epochs to fill epoch pointers lockuptypes.ModuleName, gammtypes.ModuleName, poolmanagertypes.ModuleName, diff --git a/app/upgrades/v4/upgrade.go b/app/upgrades/v4/upgrade.go index c8a14893f..94647dec8 100644 --- a/app/upgrades/v4/upgrade.go +++ b/app/upgrades/v4/upgrade.go @@ -3,6 +3,7 @@ package v4 import ( "github.com/cometbft/cometbft/crypto" "github.com/cosmos/cosmos-sdk/baseapp" + epochskeeper "github.com/osmosis-labs/osmosis/v15/x/epochs/keeper" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/cosmos-sdk/types/module" @@ -35,6 +36,8 @@ import ( rollapptypes "github.com/dymensionxyz/dymension/v3/x/rollapp/types" sequencerkeeper "github.com/dymensionxyz/dymension/v3/x/sequencer/keeper" sequencertypes "github.com/dymensionxyz/dymension/v3/x/sequencer/types" + streamerkeeper "github.com/dymensionxyz/dymension/v3/x/streamer/keeper" + streamertypes 
"github.com/dymensionxyz/dymension/v3/x/streamer/types" ) // CreateUpgradeHandler creates an SDK upgrade handler for v4 @@ -58,6 +61,9 @@ func CreateUpgradeHandler( migrateSequencers(ctx, keepers.SequencerKeeper) migrateRollappLightClients(ctx, keepers.RollappKeeper, keepers.LightClientKeeper, keepers.IBCKeeper.ChannelKeeper) + if err := migrateStreamer(ctx, keepers.StreamerKeeper, keepers.EpochsKeeper); err != nil { + return nil, err + } migrateIncentivesParams(ctx, keepers.IncentivesKeeper) // TODO: create rollapp gauges for each existing rollapp (https://github.com/dymensionxyz/dymension/issues/1005) @@ -176,6 +182,17 @@ func migrateRollappLightClients(ctx sdk.Context, rollappkeeper *rollappkeeper.Ke } } +// migrateStreamer creates epoch pointers for all epoch infos. +func migrateStreamer(ctx sdk.Context, sk streamerkeeper.Keeper, ek *epochskeeper.Keeper) error { + for _, epoch := range ek.AllEpochInfos(ctx) { + err := sk.SaveEpochPointer(ctx, streamertypes.NewEpochPointer(epoch.Identifier, epoch.Duration)) + if err != nil { + return err + } + } + return nil +} + func migrateIncentivesParams(ctx sdk.Context, ik *incentiveskeeper.Keeper) { params := ik.GetParams(ctx) defaultParams := incentivestypes.DefaultParams() diff --git a/app/upgrades/v4/upgrade_test.go b/app/upgrades/v4/upgrade_test.go index b377672b9..dfb1e14e1 100644 --- a/app/upgrades/v4/upgrade_test.go +++ b/app/upgrades/v4/upgrade_test.go @@ -21,6 +21,7 @@ import ( "github.com/dymensionxyz/dymension/v3/testutil/sample" rollapptypes "github.com/dymensionxyz/dymension/v3/x/rollapp/types" sequencertypes "github.com/dymensionxyz/dymension/v3/x/sequencer/types" + streamertypes "github.com/dymensionxyz/dymension/v3/x/streamer/types" ) // UpgradeTestSuite defines the structure for the upgrade test suite @@ -120,6 +121,8 @@ func (s *UpgradeTestSuite) TestUpgrade() { return } + s.validateStreamerMigration() + // TODO: check for rollapp gauges creation return @@ -228,6 +231,21 @@ func (s *UpgradeTestSuite) 
validateSequencersMigration(numSeq int) error { return nil } +func (s *UpgradeTestSuite) validateStreamerMigration() { + epochInfos := s.App.EpochsKeeper.AllEpochInfos(s.Ctx) + + pointers, err := s.App.StreamerKeeper.GetAllEpochPointers(s.Ctx) + s.Require().NoError(err) + + var expected []streamertypes.EpochPointer + for _, info := range epochInfos { + expected = append(expected, streamertypes.NewEpochPointer(info.Identifier, info.Duration)) + } + + // Equal also checks the order of pointers + s.Require().Equal(expected, pointers) +} + func (s *UpgradeTestSuite) seedAndStoreRollapps(numRollapps int) { for _, rollapp := range s.seedRollapps(numRollapps) { s.App.RollappKeeper.SetRollapp(s.Ctx, rollapp) diff --git a/docs/static/openapi.yml b/docs/static/openapi.yml index a3cc0da74..2fa38ceac 100644 --- a/docs/static/openapi.yml +++ b/docs/static/openapi.yml @@ -3513,8 +3513,9 @@ paths: launched: type: boolean description: >- - // launched is a boolean that indicates that the Rollapp has - been launched and the immutable fields are no longer updatable. + launched is a boolean that indicates that the Rollapp has + been launched and the immutable fields are no longer + updatable. 
liveness_event_height: type: string format: int64 @@ -3644,9 +3645,9 @@ paths: description: type: string title: description is the description of the App - image: + image_url: type: string - title: image is the relative path to the App image + title: image_url is the URL to the App's image url: type: string title: url is the URL to the App's website @@ -3854,8 +3855,8 @@ paths: title: >- The time (num hub blocks) a sequencer can be down after which he will be jailed rather than slashed - app_creation_cost: - title: app_creation_cost is the cost for registering the App + app_registration_fee: + title: app_registration_fee is the fee for registering an App type: object properties: denom: @@ -4075,8 +4076,9 @@ paths: launched: type: boolean description: >- - // launched is a boolean that indicates that the Rollapp has - been launched and the immutable fields are no longer updatable. + launched is a boolean that indicates that the + Rollapp has been launched and the immutable fields + are no longer updatable. liveness_event_height: type: string format: int64 @@ -4210,9 +4212,9 @@ paths: description: type: string title: description is the description of the App - image: + image_url: type: string - title: image is the relative path to the App image + title: image_url is the URL to the App's image url: type: string title: url is the URL to the App's website @@ -4503,7 +4505,8 @@ paths: type: boolean description: >- launched is a boolean that indicates that the Rollapp has - been launched and the immutable fields are no longer updatable. + been launched and the immutable fields are no longer + updatable. 
liveness_event_height: type: string format: int64 @@ -4633,9 +4636,9 @@ paths: description: type: string title: description is the description of the App - image: + image_url: type: string - title: image is the relative path to the App image + title: image_url is the URL to the App's image url: type: string title: url is the URL to the App's website @@ -5309,19 +5312,388 @@ paths: } tags: - Query - /dymensionxyz/dymension/sequencer/proposer/{rollappId}: + /dymensionxyz/dymension/sequencer/proposers: get: - summary: Queries the current proposer by rollappId. - operationId: GetProposerByRollapp + summary: Queries a list of proposers. + operationId: Proposers responses: '200': description: A successful response. schema: type: object properties: - proposerAddr: - type: string - description: Response type for the GetProposerByRollapp RPC method. + proposers: + type: array + items: + type: object + properties: + address: + type: string + description: >- + address is the bech32-encoded address of the sequencer + account which is the account that the message was sent + from. + dymintPubKey: + type: object + properties: + type_url: + type: string + description: >- + A URL/resource name that uniquely identifies the + type of the serialized + + protocol buffer message. This string must contain at + least + + one "/" character. The last segment of the URL's + path must represent + + the fully qualified name of the type (as in + + `path/google.protobuf.Duration`). The name should be + in a canonical form + + (e.g., leading "." is not accepted). + + + In practice, teams usually precompile into the + binary all types that they + + expect it to use in the context of Any. However, for + URLs which use the + + scheme `http`, `https`, or no scheme, one can + optionally set up a type + + server that maps type URLs to message definitions as + follows: + + + * If no scheme is provided, `https` is assumed. 
+ + * An HTTP GET on the URL must yield a + [google.protobuf.Type][] + value in binary format, or produce an error. + * Applications are allowed to cache lookup results + based on the + URL, or have them precompiled into a binary to avoid any + lookup. Therefore, binary compatibility needs to be preserved + on changes to types. (Use versioned type names to manage + breaking changes.) + + Note: this functionality is not currently available + in the official + + protobuf release, and it is not used for type URLs + beginning with + + type.googleapis.com. + + + Schemes other than `http`, `https` (or the empty + scheme) might be + + used with implementation specific semantics. + value: + type: string + format: byte + description: >- + Must be a valid serialized protocol buffer of the + above specified type. + description: >- + `Any` contains an arbitrary serialized protocol buffer + message along with a + + URL that describes the type of the serialized message. + + + Protobuf library provides support to pack/unpack Any + values in the form + + of utility functions or additional generated methods of + the Any type. + + + Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + + Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... 
+ } + + The pack methods provided by protobuf library will by + default use + + 'type.googleapis.com/full.type.name' as the type URL and + the unpack + + methods only use the fully qualified type name after the + last '/' + + in the type URL, for example "foo.bar.com/x/y.z" will + yield type + + name "y.z". + + + JSON + + + The JSON representation of an `Any` value uses the + regular + + representation of the deserialized, embedded message, + with an + + additional field `@type` which contains the type URL. + Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + + If the embedded message type is well-known and has a + custom JSON + + representation, that representation will be embedded + adding a field + + `value` which holds the custom JSON in addition to the + `@type` + + field. Example (for message + [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } + rollappId: + type: string + description: >- + rollappId defines the rollapp to which the sequencer + belongs. + metadata: + description: >- + metadata defines the extra information for the + sequencer. + type: object + properties: + moniker: + type: string + description: >- + moniker defines a human-readable name for the + sequencer. + details: + type: string + description: details define other optional details. 
+ p2p_seeds: + type: array + items: + type: string + title: bootstrap nodes list + rpcs: + type: array + items: + type: string + title: RPCs list + evm_rpcs: + type: array + items: + type: string + title: evm RPCs list + rest_api_urls: + type: array + items: + type: string + title: REST API URLs + explorer_url: + type: string + title: block explorer URL + genesis_urls: + type: array + items: + type: string + title: genesis URLs + contact_details: + title: contact details + type: object + properties: + website: + type: string + title: website URL + telegram: + type: string + title: telegram link + x: + type: string + title: twitter link + extra_data: + type: string + format: byte + title: json dump the sequencer can add (limited by size) + snapshots: + type: array + items: + type: object + properties: + snapshot_url: + type: string + title: the snapshot url + height: + type: string + format: uint64 + title: The snapshot height + checksum: + type: string + title: sha-256 checksum value for the snapshot file + title: snapshots of the sequencer + gas_price: + type: string + title: gas_price defines the value for each gas unit + jailed: + type: boolean + description: >- + jailed defined whether the sequencer has been jailed + from bonded status or not. + proposer: + type: boolean + status: + description: >- + status is the sequencer status + (bonded/unbonding/unbonded). + type: string + enum: + - OPERATING_STATUS_UNBONDED + - OPERATING_STATUS_UNBONDING + - OPERATING_STATUS_BONDED + default: OPERATING_STATUS_UNBONDED + title: >- + OperatingStatus defines the operating status of a + sequencer + tokens: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an + amount. + + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. + description: >- + tokens define the delegated tokens (incl. + self-delegation). 
+ unbond_request_height: + type: string + format: int64 + description: >- + unbond_request_height stores the height at which this + sequencer has + + requested to unbond. + unbond_time: + type: string + format: date-time + description: >- + unbond_time defines the time when the sequencer will + complete unbonding. + notice_period_time: + type: string + format: date-time + title: >- + notice_period_time defines the time when the sequencer + will finish it's notice period if started + description: >- + Sequencer defines a sequencer identified by its' address + (sequencerAddress). + + The sequencer could be attached to only one rollapp + (rollappId). + pagination: + type: object + properties: + next_key: + type: string + format: byte + description: |- + next_key is the key to be passed to PageRequest.key to + query the next page most efficiently. It will be empty if + there are no more results. + total: + type: string + format: uint64 + title: >- + total is total number of results available if + PageRequest.count_total + + was set, its value is undefined otherwise + description: >- + PageResponse is to be embedded in gRPC response messages where + the + + corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } + description: Response type for the Proposers RPC method. default: description: An unexpected error response. schema: @@ -5517,393 +5889,77 @@ paths: "value": "1.212s" } parameters: - - name: rollappId - in: path - required: true + - name: pagination.key + description: |- + key is a value returned in PageResponse.next_key to begin + querying the next page most efficiently. Only one of offset or key + should be set. + in: query + required: false + type: string + format: byte + - name: pagination.offset + description: >- + offset is a numeric offset that can be used when key is unavailable. + + It is less efficient than using key. Only one of offset or key + should + + be set. 
+ in: query + required: false + type: string + format: uint64 + - name: pagination.limit + description: >- + limit is the total number of results to be returned in the result + page. + + If left empty it will default to a value to be set by each app. + in: query + required: false type: string + format: uint64 + - name: pagination.count_total + description: >- + count_total is set to true to indicate that the result set should + include + + a count of the total number of items available for pagination in + UIs. + + count_total is only respected when offset is used. It is ignored + when key + + is set. + in: query + required: false + type: boolean + - name: pagination.reverse + description: >- + reverse is set to true if results are to be returned in the + descending order. + + + Since: cosmos-sdk 0.43 + in: query + required: false + type: boolean tags: - Query - /dymensionxyz/dymension/sequencer/sequencer: + /dymensionxyz/dymension/sequencer/proposers/{rollappId}: get: - summary: Queries a list of Sequencer items. - operationId: Sequencers + summary: Queries the current proposer by rollappId. + operationId: GetProposerByRollapp responses: '200': description: A successful response. schema: type: object properties: - sequencers: - type: array - items: - type: object - properties: - address: - type: string - description: >- - address is the bech32-encoded address of the sequencer - account which is the account that the message was sent - from. - dymintPubKey: - type: object - properties: - type_url: - type: string - description: >- - A URL/resource name that uniquely identifies the - type of the serialized - - protocol buffer message. This string must contain at - least - - one "/" character. The last segment of the URL's - path must represent - - the fully qualified name of the type (as in - - `path/google.protobuf.Duration`). The name should be - in a canonical form - - (e.g., leading "." is not accepted). 
- - - In practice, teams usually precompile into the - binary all types that they - - expect it to use in the context of Any. However, for - URLs which use the - - scheme `http`, `https`, or no scheme, one can - optionally set up a type - - server that maps type URLs to message definitions as - follows: - - - * If no scheme is provided, `https` is assumed. - - * An HTTP GET on the URL must yield a - [google.protobuf.Type][] - value in binary format, or produce an error. - * Applications are allowed to cache lookup results - based on the - URL, or have them precompiled into a binary to avoid any - lookup. Therefore, binary compatibility needs to be preserved - on changes to types. (Use versioned type names to manage - breaking changes.) - - Note: this functionality is not currently available - in the official - - protobuf release, and it is not used for type URLs - beginning with - - type.googleapis.com. - - - Schemes other than `http`, `https` (or the empty - scheme) might be - - used with implementation specific semantics. - value: - type: string - format: byte - description: >- - Must be a valid serialized protocol buffer of the - above specified type. - description: >- - `Any` contains an arbitrary serialized protocol buffer - message along with a - - URL that describes the type of the serialized message. - - - Protobuf library provides support to pack/unpack Any - values in the form - - of utility functions or additional generated methods of - the Any type. - - - Example 1: Pack and unpack a message in C++. - - Foo foo = ...; - Any any; - any.PackFrom(foo); - ... - if (any.UnpackTo(&foo)) { - ... - } - - Example 2: Pack and unpack a message in Java. - - Foo foo = ...; - Any any = Any.pack(foo); - ... - if (any.is(Foo.class)) { - foo = any.unpack(Foo.class); - } - // or ... - if (any.isSameTypeAs(Foo.getDefaultInstance())) { - foo = any.unpack(Foo.getDefaultInstance()); - } - - Example 3: Pack and unpack a message in Python. - - foo = Foo(...) 
- any = Any() - any.Pack(foo) - ... - if any.Is(Foo.DESCRIPTOR): - any.Unpack(foo) - ... - - Example 4: Pack and unpack a message in Go - - foo := &pb.Foo{...} - any, err := anypb.New(foo) - if err != nil { - ... - } - ... - foo := &pb.Foo{} - if err := any.UnmarshalTo(foo); err != nil { - ... - } - - The pack methods provided by protobuf library will by - default use - - 'type.googleapis.com/full.type.name' as the type URL and - the unpack - - methods only use the fully qualified type name after the - last '/' - - in the type URL, for example "foo.bar.com/x/y.z" will - yield type - - name "y.z". - - - JSON - - - The JSON representation of an `Any` value uses the - regular - - representation of the deserialized, embedded message, - with an - - additional field `@type` which contains the type URL. - Example: - - package google.profile; - message Person { - string first_name = 1; - string last_name = 2; - } - - { - "@type": "type.googleapis.com/google.profile.Person", - "firstName": , - "lastName": - } - - If the embedded message type is well-known and has a - custom JSON - - representation, that representation will be embedded - adding a field - - `value` which holds the custom JSON in addition to the - `@type` - - field. Example (for message - [google.protobuf.Duration][]): - - { - "@type": "type.googleapis.com/google.protobuf.Duration", - "value": "1.212s" - } - rollappId: - type: string - description: >- - rollappId defines the rollapp to which the sequencer - belongs. - metadata: - description: >- - metadata defines the extra information for the - sequencer. - type: object - properties: - moniker: - type: string - description: >- - moniker defines a human-readable name for the - sequencer. - details: - type: string - description: details define other optional details. 
- p2p_seeds: - type: array - items: - type: string - title: bootstrap nodes list - rpcs: - type: array - items: - type: string - title: RPCs list - evm_rpcs: - type: array - items: - type: string - title: evm RPCs list - rest_api_urls: - type: array - items: - type: string - title: REST API URLs - explorer_url: - type: string - title: block explorer URL - genesis_urls: - type: array - items: - type: string - title: genesis URLs - contact_details: - title: contact details - type: object - properties: - website: - type: string - title: website URL - telegram: - type: string - title: telegram link - x: - type: string - title: twitter link - extra_data: - type: string - format: byte - title: json dump the sequencer can add (limited by size) - snapshots: - type: array - items: - type: object - properties: - snapshot_url: - type: string - title: the snapshot url - height: - type: string - format: uint64 - title: The snapshot height - checksum: - type: string - title: sha-256 checksum value for the snapshot file - title: snapshots of the sequencer - gas_price: - type: string - title: gas_price defines the value for each gas unit - jailed: - type: boolean - description: >- - jailed defined whether the sequencer has been jailed - from bonded status or not. - proposer: - type: boolean - status: - description: >- - status is the sequencer status - (bonded/unbonding/unbonded). - type: string - enum: - - OPERATING_STATUS_UNBONDED - - OPERATING_STATUS_UNBONDING - - OPERATING_STATUS_BONDED - default: OPERATING_STATUS_UNBONDED - title: >- - OperatingStatus defines the operating status of a - sequencer - tokens: - type: array - items: - type: object - properties: - denom: - type: string - amount: - type: string - description: >- - Coin defines a token with a denomination and an - amount. - - - NOTE: The amount field is an Int which implements the - custom method - - signatures required by gogoproto. - description: >- - tokens define the delegated tokens (incl. - self-delegation). 
- unbond_request_height: - type: string - format: int64 - description: >- - unbond_request_height stores the height at which this - sequencer has - - requested to unbond. - unbond_time: - type: string - format: date-time - description: >- - unbond_time defines the time when the sequencer will - complete unbonding. - notice_period_time: - type: string - format: date-time - title: >- - notice_period_time defines the time when the sequencer - will finish it's notice period if started - description: >- - Sequencer defines a sequencer identified by its' address - (sequencerAddress). - - The sequencer could be attached to only one rollapp - (rollappId). - pagination: - type: object - properties: - next_key: - type: string - format: byte - description: |- - next_key is the key to be passed to PageRequest.key to - query the next page most efficiently. It will be empty if - there are no more results. - total: - type: string - format: uint64 - title: >- - total is total number of results available if - PageRequest.count_total - - was set, its value is undefined otherwise - description: >- - PageResponse is to be embedded in gRPC response messages where - the - - corresponding request message has used PageRequest. - - message SomeResponse { - repeated Bar results = 1; - PageResponse page = 2; - } + proposerAddr: + type: string + description: Response type for the GetProposerByRollapp RPC method. default: description: An unexpected error response. schema: @@ -6099,62 +6155,10 @@ paths: "value": "1.212s" } parameters: - - name: pagination.key - description: |- - key is a value returned in PageResponse.next_key to begin - querying the next page most efficiently. Only one of offset or key - should be set. - in: query - required: false - type: string - format: byte - - name: pagination.offset - description: >- - offset is a numeric offset that can be used when key is unavailable. - - It is less efficient than using key. Only one of offset or key - should - - be set. 
- in: query - required: false - type: string - format: uint64 - - name: pagination.limit - description: >- - limit is the total number of results to be returned in the result - page. - - If left empty it will default to a value to be set by each app. - in: query - required: false + - name: rollappId + in: path + required: true type: string - format: uint64 - - name: pagination.count_total - description: >- - count_total is set to true to indicate that the result set should - include - - a count of the total number of items available for pagination in - UIs. - - count_total is only respected when offset is used. It is ignored - when key - - is set. - in: query - required: false - type: boolean - - name: pagination.reverse - description: >- - reverse is set to true if results are to be returned in the - descending order. - - - Since: cosmos-sdk 0.43 - in: query - required: false - type: boolean tags: - Query /dymensionxyz/dymension/sequencer/sequencer/{sequencerAddress}: @@ -6704,6 +6708,640 @@ paths: type: string tags: - Query + /dymensionxyz/dymension/sequencer/sequencers: + get: + summary: Queries a list of Sequencer items. + operationId: Sequencers + responses: + '200': + description: A successful response. + schema: + type: object + properties: + sequencers: + type: array + items: + type: object + properties: + address: + type: string + description: >- + address is the bech32-encoded address of the sequencer + account which is the account that the message was sent + from. + dymintPubKey: + type: object + properties: + type_url: + type: string + description: >- + A URL/resource name that uniquely identifies the + type of the serialized + + protocol buffer message. This string must contain at + least + + one "/" character. The last segment of the URL's + path must represent + + the fully qualified name of the type (as in + + `path/google.protobuf.Duration`). The name should be + in a canonical form + + (e.g., leading "." is not accepted). 
+ + + In practice, teams usually precompile into the + binary all types that they + + expect it to use in the context of Any. However, for + URLs which use the + + scheme `http`, `https`, or no scheme, one can + optionally set up a type + + server that maps type URLs to message definitions as + follows: + + + * If no scheme is provided, `https` is assumed. + + * An HTTP GET on the URL must yield a + [google.protobuf.Type][] + value in binary format, or produce an error. + * Applications are allowed to cache lookup results + based on the + URL, or have them precompiled into a binary to avoid any + lookup. Therefore, binary compatibility needs to be preserved + on changes to types. (Use versioned type names to manage + breaking changes.) + + Note: this functionality is not currently available + in the official + + protobuf release, and it is not used for type URLs + beginning with + + type.googleapis.com. + + + Schemes other than `http`, `https` (or the empty + scheme) might be + + used with implementation specific semantics. + value: + type: string + format: byte + description: >- + Must be a valid serialized protocol buffer of the + above specified type. + description: >- + `Any` contains an arbitrary serialized protocol buffer + message along with a + + URL that describes the type of the serialized message. + + + Protobuf library provides support to pack/unpack Any + values in the form + + of utility functions or additional generated methods of + the Any type. + + + Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + + Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) 
+ any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + + The pack methods provided by protobuf library will by + default use + + 'type.googleapis.com/full.type.name' as the type URL and + the unpack + + methods only use the fully qualified type name after the + last '/' + + in the type URL, for example "foo.bar.com/x/y.z" will + yield type + + name "y.z". + + + JSON + + + The JSON representation of an `Any` value uses the + regular + + representation of the deserialized, embedded message, + with an + + additional field `@type` which contains the type URL. + Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + + If the embedded message type is well-known and has a + custom JSON + + representation, that representation will be embedded + adding a field + + `value` which holds the custom JSON in addition to the + `@type` + + field. Example (for message + [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } + rollappId: + type: string + description: >- + rollappId defines the rollapp to which the sequencer + belongs. + metadata: + description: >- + metadata defines the extra information for the + sequencer. + type: object + properties: + moniker: + type: string + description: >- + moniker defines a human-readable name for the + sequencer. + details: + type: string + description: details define other optional details. 
+ p2p_seeds: + type: array + items: + type: string + title: bootstrap nodes list + rpcs: + type: array + items: + type: string + title: RPCs list + evm_rpcs: + type: array + items: + type: string + title: evm RPCs list + rest_api_urls: + type: array + items: + type: string + title: REST API URLs + explorer_url: + type: string + title: block explorer URL + genesis_urls: + type: array + items: + type: string + title: genesis URLs + contact_details: + title: contact details + type: object + properties: + website: + type: string + title: website URL + telegram: + type: string + title: telegram link + x: + type: string + title: twitter link + extra_data: + type: string + format: byte + title: json dump the sequencer can add (limited by size) + snapshots: + type: array + items: + type: object + properties: + snapshot_url: + type: string + title: the snapshot url + height: + type: string + format: uint64 + title: The snapshot height + checksum: + type: string + title: sha-256 checksum value for the snapshot file + title: snapshots of the sequencer + gas_price: + type: string + title: gas_price defines the value for each gas unit + jailed: + type: boolean + description: >- + jailed defined whether the sequencer has been jailed + from bonded status or not. + proposer: + type: boolean + status: + description: >- + status is the sequencer status + (bonded/unbonding/unbonded). + type: string + enum: + - OPERATING_STATUS_UNBONDED + - OPERATING_STATUS_UNBONDING + - OPERATING_STATUS_BONDED + default: OPERATING_STATUS_UNBONDED + title: >- + OperatingStatus defines the operating status of a + sequencer + tokens: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an + amount. + + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. + description: >- + tokens define the delegated tokens (incl. + self-delegation). 
+ unbond_request_height: + type: string + format: int64 + description: >- + unbond_request_height stores the height at which this + sequencer has + + requested to unbond. + unbond_time: + type: string + format: date-time + description: >- + unbond_time defines the time when the sequencer will + complete unbonding. + notice_period_time: + type: string + format: date-time + title: >- + notice_period_time defines the time when the sequencer + will finish it's notice period if started + description: >- + Sequencer defines a sequencer identified by its' address + (sequencerAddress). + + The sequencer could be attached to only one rollapp + (rollappId). + pagination: + type: object + properties: + next_key: + type: string + format: byte + description: |- + next_key is the key to be passed to PageRequest.key to + query the next page most efficiently. It will be empty if + there are no more results. + total: + type: string + format: uint64 + title: >- + total is total number of results available if + PageRequest.count_total + + was set, its value is undefined otherwise + description: >- + PageResponse is to be embedded in gRPC response messages where + the + + corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } + default: + description: An unexpected error response. + schema: + type: object + properties: + error: + type: string + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + type_url: + type: string + description: >- + A URL/resource name that uniquely identifies the type of + the serialized + + protocol buffer message. This string must contain at + least + + one "/" character. The last segment of the URL's path + must represent + + the fully qualified name of the type (as in + + `path/google.protobuf.Duration`). The name should be in + a canonical form + + (e.g., leading "." is not accepted). 
+ + + In practice, teams usually precompile into the binary + all types that they + + expect it to use in the context of Any. However, for + URLs which use the + + scheme `http`, `https`, or no scheme, one can optionally + set up a type + + server that maps type URLs to message definitions as + follows: + + + * If no scheme is provided, `https` is assumed. + + * An HTTP GET on the URL must yield a + [google.protobuf.Type][] + value in binary format, or produce an error. + * Applications are allowed to cache lookup results based + on the + URL, or have them precompiled into a binary to avoid any + lookup. Therefore, binary compatibility needs to be preserved + on changes to types. (Use versioned type names to manage + breaking changes.) + + Note: this functionality is not currently available in + the official + + protobuf release, and it is not used for type URLs + beginning with + + type.googleapis.com. + + + Schemes other than `http`, `https` (or the empty scheme) + might be + + used with implementation specific semantics. + value: + type: string + format: byte + description: >- + Must be a valid serialized protocol buffer of the above + specified type. + description: >- + `Any` contains an arbitrary serialized protocol buffer + message along with a + + URL that describes the type of the serialized message. + + + Protobuf library provides support to pack/unpack Any values + in the form + + of utility functions or additional generated methods of the + Any type. + + + Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + + Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) 
+ any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + + The pack methods provided by protobuf library will by + default use + + 'type.googleapis.com/full.type.name' as the type URL and the + unpack + + methods only use the fully qualified type name after the + last '/' + + in the type URL, for example "foo.bar.com/x/y.z" will yield + type + + name "y.z". + + + JSON + + + The JSON representation of an `Any` value uses the regular + + representation of the deserialized, embedded message, with + an + + additional field `@type` which contains the type URL. + Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + + If the embedded message type is well-known and has a custom + JSON + + representation, that representation will be embedded adding + a field + + `value` which holds the custom JSON in addition to the + `@type` + + field. Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } + parameters: + - name: pagination.key + description: |- + key is a value returned in PageResponse.next_key to begin + querying the next page most efficiently. Only one of offset or key + should be set. + in: query + required: false + type: string + format: byte + - name: pagination.offset + description: >- + offset is a numeric offset that can be used when key is unavailable. + + It is less efficient than using key. Only one of offset or key + should + + be set. 
+ in: query + required: false + type: string + format: uint64 + - name: pagination.limit + description: >- + limit is the total number of results to be returned in the result + page. + + If left empty it will default to a value to be set by each app. + in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + description: >- + count_total is set to true to indicate that the result set should + include + + a count of the total number of items available for pagination in + UIs. + + count_total is only respected when offset is used. It is ignored + when key + + is set. + in: query + required: false + type: boolean + - name: pagination.reverse + description: >- + reverse is set to true if results are to be returned in the + descending order. + + + Since: cosmos-sdk 0.43 + in: query + required: false + type: boolean + tags: + - Query /dymensionxyz/dymension/sequencer/sequencers_by_rollapp/{rollappId}: get: summary: Queries a SequencersByRollapp by rollappId. @@ -7931,6 +8569,27 @@ paths: description: >- Sponsored indicates if the stream is based on the sponsorship distribution. + epoch_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an + amount. + + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. + description: >- + EpochCoins are coins that need to be distributed in this + epoch. description: >- Stream is an object that stores and distributes yields to recipients who @@ -8050,209 +8709,480 @@ paths: type: boolean tags: - Query - /dymensionxyz/dymension/streamer/module_to_distribute_coins: - get: - summary: ModuleToDistributeCoins returns coins that are going to be distributed - operationId: StreamerModuleToDistributeCoins - responses: - '200': - description: A successful response. 
- schema: - type: object - properties: - coins: - type: array - items: - type: object - properties: - denom: - type: string - amount: - type: string - description: >- - Coin defines a token with a denomination and an amount. - - - NOTE: The amount field is an Int which implements the custom - method - - signatures required by gogoproto. - title: Coins that have yet to be distributed - default: - description: An unexpected error response. - schema: - type: object - properties: - error: - type: string - code: - type: integer - format: int32 - message: - type: string - details: - type: array - items: - type: object - properties: - type_url: - type: string - value: - type: string - format: byte - tags: - - Query - /dymensionxyz/dymension/streamer/stream_by_id/{id}: - get: - summary: StreamByID returns streams by their respective ID - operationId: StreamByID - responses: - '200': - description: A successful response. - schema: - type: object - properties: - stream: - type: object - properties: - id: - type: string - format: uint64 - title: id is the unique ID of a Stream - distribute_to: - description: distribute_to is the distr_info. - type: object - properties: - total_weight: - type: string - records: - type: array - items: - type: object - properties: - gauge_id: - type: string - format: uint64 - weight: - type: string - coins: - type: array - items: - type: object - properties: - denom: - type: string - amount: - type: string - description: >- - Coin defines a token with a denomination and an amount. - - - NOTE: The amount field is an Int which implements the - custom method - - signatures required by gogoproto. 
- title: >- - coins is the total amount of coins that have been in the - stream - - Can distribute multiple coin denoms - start_time: - type: string - format: date-time - title: start_time is the distribution start time - distr_epoch_identifier: - type: string - title: >- - distr_epoch_identifier is what epoch type di-stribution - will be triggered by - - (day, week, etc.) - num_epochs_paid_over: - type: string - format: uint64 - title: >- - num_epochs_paid_over is the number of total epochs - distribution will be - - completed over - filled_epochs: - type: string - format: uint64 - title: >- - filled_epochs is the number of epochs distribution has - been completed on - - already - distributed_coins: - type: array - items: - type: object - properties: - denom: - type: string - amount: - type: string - description: >- - Coin defines a token with a denomination and an amount. - - - NOTE: The amount field is an Int which implements the - custom method - - signatures required by gogoproto. - title: >- - distributed_coins are coins that have been distributed - already - sponsored: - type: boolean - description: >- - Sponsored indicates if the stream is based on the - sponsorship distribution. - description: >- - Stream is an object that stores and distributes yields to - recipients who - - satisfy certain conditions. Currently streams support - conditions around the - - duration for which a given denom is locked. - title: Stream that corresponds to provided gague ID - default: - description: An unexpected error response. 
- schema: - type: object - properties: - error: - type: string - code: - type: integer - format: int32 - message: - type: string - details: - type: array - items: - type: object - properties: - type_url: - type: string - value: - type: string - format: byte - parameters: - - name: id - description: Gague ID being queried - in: path - required: true - type: string - format: uint64 - tags: - - Query - /dymensionxyz/dymension/streamer/streams: + /dymensionxyz/dymension/streamer/module_to_distribute_coins: + get: + summary: ModuleToDistributeCoins returns coins that are going to be distributed + operationId: StreamerModuleToDistributeCoins + responses: + '200': + description: A successful response. + schema: + type: object + properties: + coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an amount. + + + NOTE: The amount field is an Int which implements the custom + method + + signatures required by gogoproto. + title: Coins that have yet to be distributed + default: + description: An unexpected error response. + schema: + type: object + properties: + error: + type: string + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + type_url: + type: string + value: + type: string + format: byte + tags: + - Query + /dymensionxyz/dymension/streamer/stream_by_id/{id}: + get: + summary: StreamByID returns streams by their respective ID + operationId: StreamByID + responses: + '200': + description: A successful response. + schema: + type: object + properties: + stream: + type: object + properties: + id: + type: string + format: uint64 + title: id is the unique ID of a Stream + distribute_to: + description: distribute_to is the distr_info. 
+ type: object + properties: + total_weight: + type: string + records: + type: array + items: + type: object + properties: + gauge_id: + type: string + format: uint64 + weight: + type: string + coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an amount. + + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. + title: >- + coins is the total amount of coins that have been in the + stream + + Can distribute multiple coin denoms + start_time: + type: string + format: date-time + title: start_time is the distribution start time + distr_epoch_identifier: + type: string + title: >- + distr_epoch_identifier is what epoch type di-stribution + will be triggered by + + (day, week, etc.) + num_epochs_paid_over: + type: string + format: uint64 + title: >- + num_epochs_paid_over is the number of total epochs + distribution will be + + completed over + filled_epochs: + type: string + format: uint64 + title: >- + filled_epochs is the number of epochs distribution has + been completed on + + already + distributed_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an amount. + + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. + title: >- + distributed_coins are coins that have been distributed + already + sponsored: + type: boolean + description: >- + Sponsored indicates if the stream is based on the + sponsorship distribution. + epoch_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an amount. 
+ + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. + description: >- + EpochCoins are coins that need to be distributed in this + epoch. + description: >- + Stream is an object that stores and distributes yields to + recipients who + + satisfy certain conditions. Currently streams support + conditions around the + + duration for which a given denom is locked. + title: Stream that corresponds to provided gague ID + default: + description: An unexpected error response. + schema: + type: object + properties: + error: + type: string + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + type_url: + type: string + value: + type: string + format: byte + parameters: + - name: id + description: Gague ID being queried + in: path + required: true + type: string + format: uint64 + tags: + - Query + /dymensionxyz/dymension/streamer/streams: + get: + summary: Streams returns both upcoming and active streams + operationId: Streams + responses: + '200': + description: A successful response. + schema: + type: object + properties: + data: + type: array + items: + type: object + properties: + id: + type: string + format: uint64 + title: id is the unique ID of a Stream + distribute_to: + description: distribute_to is the distr_info. + type: object + properties: + total_weight: + type: string + records: + type: array + items: + type: object + properties: + gauge_id: + type: string + format: uint64 + weight: + type: string + coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an + amount. + + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. 
+ title: >- + coins is the total amount of coins that have been in the + stream + + Can distribute multiple coin denoms + start_time: + type: string + format: date-time + title: start_time is the distribution start time + distr_epoch_identifier: + type: string + title: >- + distr_epoch_identifier is what epoch type di-stribution + will be triggered by + + (day, week, etc.) + num_epochs_paid_over: + type: string + format: uint64 + title: >- + num_epochs_paid_over is the number of total epochs + distribution will be + + completed over + filled_epochs: + type: string + format: uint64 + title: >- + filled_epochs is the number of epochs distribution has + been completed on + + already + distributed_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an + amount. + + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. + title: >- + distributed_coins are coins that have been distributed + already + sponsored: + type: boolean + description: >- + Sponsored indicates if the stream is based on the + sponsorship distribution. + epoch_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an + amount. + + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. + description: >- + EpochCoins are coins that need to be distributed in this + epoch. + description: >- + Stream is an object that stores and distributes yields to + recipients who + + satisfy certain conditions. Currently streams support + conditions around the + + duration for which a given denom is locked. 
+ title: Upcoming and active streams + pagination: + title: Pagination defines pagination for the response + type: object + properties: + next_key: + type: string + format: byte + description: |- + next_key is the key to be passed to PageRequest.key to + query the next page most efficiently. It will be empty if + there are no more results. + total: + type: string + format: uint64 + title: >- + total is total number of results available if + PageRequest.count_total + + was set, its value is undefined otherwise + description: >- + PageResponse is to be embedded in gRPC response messages where + the + + corresponding request message has used PageRequest. + + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } + default: + description: An unexpected error response. + schema: + type: object + properties: + error: + type: string + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + type_url: + type: string + value: + type: string + format: byte + parameters: + - name: pagination.key + description: |- + key is a value returned in PageResponse.next_key to begin + querying the next page most efficiently. Only one of offset or key + should be set. + in: query + required: false + type: string + format: byte + - name: pagination.offset + description: >- + offset is a numeric offset that can be used when key is unavailable. + + It is less efficient than using key. Only one of offset or key + should + + be set. + in: query + required: false + type: string + format: uint64 + - name: pagination.limit + description: >- + limit is the total number of results to be returned in the result + page. + + If left empty it will default to a value to be set by each app. 
+ in: query + required: false + type: string + format: uint64 + - name: pagination.count_total + description: >- + count_total is set to true to indicate that the result set should + include + + a count of the total number of items available for pagination in + UIs. + + count_total is only respected when offset is used. It is ignored + when key + + is set. + in: query + required: false + type: boolean + - name: pagination.reverse + description: >- + reverse is set to true if results are to be returned in the + descending order. + + + Since: cosmos-sdk 0.43 + in: query + required: false + type: boolean + tags: + - Query + /dymensionxyz/dymension/streamer/upcoming_streams: get: - summary: Streams returns both upcoming and active streams - operationId: Streams + summary: Returns scheduled streams that have not yet occurred + operationId: UpcomingStreams responses: '200': description: A successful response. @@ -8360,161 +9290,7 @@ paths: description: >- Sponsored indicates if the stream is based on the sponsorship distribution. - description: >- - Stream is an object that stores and distributes yields to - recipients who - - satisfy certain conditions. Currently streams support - conditions around the - - duration for which a given denom is locked. - title: Upcoming and active streams - pagination: - title: Pagination defines pagination for the response - type: object - properties: - next_key: - type: string - format: byte - description: |- - next_key is the key to be passed to PageRequest.key to - query the next page most efficiently. It will be empty if - there are no more results. - total: - type: string - format: uint64 - title: >- - total is total number of results available if - PageRequest.count_total - - was set, its value is undefined otherwise - description: >- - PageResponse is to be embedded in gRPC response messages where - the - - corresponding request message has used PageRequest. 
- - message SomeResponse { - repeated Bar results = 1; - PageResponse page = 2; - } - default: - description: An unexpected error response. - schema: - type: object - properties: - error: - type: string - code: - type: integer - format: int32 - message: - type: string - details: - type: array - items: - type: object - properties: - type_url: - type: string - value: - type: string - format: byte - parameters: - - name: pagination.key - description: |- - key is a value returned in PageResponse.next_key to begin - querying the next page most efficiently. Only one of offset or key - should be set. - in: query - required: false - type: string - format: byte - - name: pagination.offset - description: >- - offset is a numeric offset that can be used when key is unavailable. - - It is less efficient than using key. Only one of offset or key - should - - be set. - in: query - required: false - type: string - format: uint64 - - name: pagination.limit - description: >- - limit is the total number of results to be returned in the result - page. - - If left empty it will default to a value to be set by each app. - in: query - required: false - type: string - format: uint64 - - name: pagination.count_total - description: >- - count_total is set to true to indicate that the result set should - include - - a count of the total number of items available for pagination in - UIs. - - count_total is only respected when offset is used. It is ignored - when key - - is set. - in: query - required: false - type: boolean - - name: pagination.reverse - description: >- - reverse is set to true if results are to be returned in the - descending order. - - - Since: cosmos-sdk 0.43 - in: query - required: false - type: boolean - tags: - - Query - /dymensionxyz/dymension/streamer/upcoming_streams: - get: - summary: Returns scheduled streams that have not yet occurred - operationId: UpcomingStreams - responses: - '200': - description: A successful response. 
- schema: - type: object - properties: - data: - type: array - items: - type: object - properties: - id: - type: string - format: uint64 - title: id is the unique ID of a Stream - distribute_to: - description: distribute_to is the distr_info. - type: object - properties: - total_weight: - type: string - records: - type: array - items: - type: object - properties: - gauge_id: - type: string - format: uint64 - weight: - type: string - coins: + epoch_coins: type: array items: type: object @@ -8532,64 +9308,9 @@ paths: custom method signatures required by gogoproto. - title: >- - coins is the total amount of coins that have been in the - stream - - Can distribute multiple coin denoms - start_time: - type: string - format: date-time - title: start_time is the distribution start time - distr_epoch_identifier: - type: string - title: >- - distr_epoch_identifier is what epoch type di-stribution - will be triggered by - - (day, week, etc.) - num_epochs_paid_over: - type: string - format: uint64 - title: >- - num_epochs_paid_over is the number of total epochs - distribution will be - - completed over - filled_epochs: - type: string - format: uint64 - title: >- - filled_epochs is the number of epochs distribution has - been completed on - - already - distributed_coins: - type: array - items: - type: object - properties: - denom: - type: string - amount: - type: string - description: >- - Coin defines a token with a denomination and an - amount. - - - NOTE: The amount field is an Int which implements the - custom method - - signatures required by gogoproto. - title: >- - distributed_coins are coins that have been distributed - already - sponsored: - type: boolean description: >- - Sponsored indicates if the stream is based on the - sponsorship distribution. + EpochCoins are coins that need to be distributed in this + epoch. 
description: >- Stream is an object that stores and distributes yields to recipients who @@ -17764,12 +18485,7 @@ paths: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: >- - distribute_to is where the gauge rewards are distributed - to. - - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -17821,6 +18537,11 @@ paths: depending on the LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -18040,12 +18761,7 @@ paths: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: >- - distribute_to is where the gauge rewards are distributed - to. - - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -18097,6 +18813,11 @@ paths: depending on the LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -18319,12 +19040,7 @@ paths: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: >- - distribute_to is where the gauge rewards are distributed - to. - - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -18376,6 +19092,11 @@ paths: depending on the LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -18514,12 +19235,7 @@ paths: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: >- - distribute_to is where the gauge rewards are distributed - to. - - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -18571,6 +19287,11 @@ paths: depending on the LockQueryType. 
+ rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -18851,44 +19572,245 @@ paths: format: byte tags: - Query - /dymensionxyz/dymension/incentives/v1beta1/rewards_est/{owner}: + /dymensionxyz/dymension/incentives/v1beta1/params: get: - summary: >- - RewardsEst returns an estimate of the rewards from now until a specified - - time in the future The querier either provides an address or a set of - locks + operationId: IncentivesParams + responses: + '200': + description: A successful response. + schema: + type: object + properties: + params: + title: Params defines the set of incentive parameters + type: object + properties: + distr_epoch_identifier: + type: string + title: >- + distr_epoch_identifier is what epoch type distribution + will be triggered by - for which they want to find the associated rewards - operationId: RewardsEst + (day, week, etc.) + default: + description: An unexpected error response. + schema: + type: object + properties: + error: + type: string + code: + type: integer + format: int32 + message: + type: string + details: + type: array + items: + type: object + properties: + type_url: + type: string + value: + type: string + format: byte + tags: + - Query + /dymensionxyz/dymension/incentives/v1beta1/rollapp_gauges: + get: + summary: RollappGauges returns both upcoming and active rollapp gauges + operationId: RollappGauges responses: '200': description: A successful response. schema: type: object properties: - coins: + data: type: array items: type: object properties: - denom: + id: type: string - amount: + format: uint64 + title: id is the unique ID of a Gauge + is_perpetual: + type: boolean + description: >- + is_perpetual is a flag to show if it's a perpetual or + non-perpetual gauge + + Non-perpetual gauges distribute their tokens equally per + epoch while the + + gauge is in the active period. 
Perpetual gauges + distribute all their tokens + + at a single time and only distribute their tokens again + once the gauge is + + refilled, Intended for use with incentives that get + refilled daily. + asset: + type: object + properties: + lock_query_type: + title: >- + LockQueryType is a type of lock query, + ByLockDuration | ByLockTime + type: string + enum: + - ByDuration + - ByTime + default: ByDuration + description: >- + LockQueryType defines the type of the lock query + that can + + either be by duration or start time of the lock. + denom: + type: string + title: >- + Denom represents the token denomination we are + looking to lock up + duration: + type: string + description: >- + Duration is used to query locks with longer duration + than the specified + + duration. Duration field must not be nil when the + lock query type is + + `ByLockDuration`. + timestamp: + type: string + format: date-time + description: >- + Timestamp is used by locks started before the + specified duration. + + Timestamp field must not be nil when the lock query + type is `ByLockTime`. + + Querying locks with timestamp is currently not + implemented. + description: >- + QueryCondition is a struct used for querying locks upon + different conditions. + + Duration field and timestamp fields could be optional, + depending on the + + LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string + coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an + amount. + + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. 
+ title: >- + coins is the total amount of coins that have been in the + gauge + + Can distribute multiple coin denoms + start_time: type: string + format: date-time + title: start_time is the distribution start time + num_epochs_paid_over: + type: string + format: uint64 + title: >- + num_epochs_paid_over is the number of total epochs + distribution will be + + completed over + filled_epochs: + type: string + format: uint64 + title: >- + filled_epochs is the number of epochs distribution has + been completed on + + already + distributed_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an + amount. + + + NOTE: The amount field is an Int which implements the + custom method + + signatures required by gogoproto. + title: >- + distributed_coins are coins that have been distributed + already description: >- - Coin defines a token with a denomination and an amount. + Gauge is an object that stores and distributes yields to + recipients who + satisfy certain conditions. Currently gauges support + conditions around the - NOTE: The amount field is an Int which implements the custom - method + duration for which a given denom is locked. + title: Upcoming and active gauges + pagination: + title: Pagination defines pagination for the response + type: object + properties: + next_key: + type: string + format: byte + description: |- + next_key is the key to be passed to PageRequest.key to + query the next page most efficiently. It will be empty if + there are no more results. + total: + type: string + format: uint64 + title: >- + total is total number of results available if + PageRequest.count_total - signatures required by gogoproto. 
- title: >- - Estimated coin rewards that will be recieved at provided - address + was set, its value is undefined otherwise + description: >- + PageResponse is to be embedded in gRPC response messages where + the + + corresponding request message has used PageRequest. - from specified locks between current time and end epoch + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } default: description: An unexpected error response. schema: @@ -18912,28 +19834,62 @@ paths: type: string format: byte parameters: - - name: owner - description: Address that is being queried for future estimated rewards - in: path - required: true + - name: pagination.key + description: |- + key is a value returned in PageResponse.next_key to begin + querying the next page most efficiently. Only one of offset or key + should be set. + in: query + required: false type: string - - name: lock_ids - description: Lock IDs included in future reward estimation. + format: byte + - name: pagination.offset + description: >- + offset is a numeric offset that can be used when key is unavailable. + + It is less efficient than using key. Only one of offset or key + should + + be set. in: query required: false - type: array - items: - type: string - format: uint64 - collectionFormat: multi - - name: end_epoch - description: |- - Upper time limit of reward estimation - Lower limit is current epoch. + type: string + format: uint64 + - name: pagination.limit + description: >- + limit is the total number of results to be returned in the result + page. + + If left empty it will default to a value to be set by each app. in: query required: false type: string - format: int64 + format: uint64 + - name: pagination.count_total + description: >- + count_total is set to true to indicate that the result set should + include + + a count of the total number of items available for pagination in + UIs. + + count_total is only respected when offset is used. It is ignored + when key + + is set. 
+ in: query + required: false + type: boolean + - name: pagination.reverse + description: >- + reverse is set to true if results are to be returned in the + descending order. + + + Since: cosmos-sdk 0.43 + in: query + required: false + type: boolean tags: - Query /dymensionxyz/dymension/incentives/v1beta1/upcoming_gauges: @@ -18972,12 +19928,7 @@ paths: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: >- - distribute_to is where the gauge rewards are distributed - to. - - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -19029,6 +19980,11 @@ paths: depending on the LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -19252,12 +20208,7 @@ paths: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: >- - distribute_to is where the gauge rewards are distributed - to. - - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -19309,6 +20260,11 @@ paths: depending on the LockQueryType. 
+ rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -61275,9 +62231,9 @@ definitions: description: type: string title: description is the description of the App - image: + image_url: type: string - title: image is the relative path to the App image + title: image_url is the URL to the App's image url: type: string title: url is the URL to the App's website @@ -61391,8 +62347,8 @@ definitions: title: >- The time (num hub blocks) a sequencer can be down after which he will be jailed rather than slashed - app_creation_cost: - title: app_creation_cost is the cost for registering the App + app_registration_fee: + title: app_registration_fee is the fee for registering an App type: object properties: denom: @@ -61563,7 +62519,8 @@ definitions: type: boolean description: >- launched is a boolean that indicates that the Rollapp has - been launched and the immutable fields are no longer updatable. + been launched and the immutable fields are no longer + updatable. liveness_event_height: type: string format: int64 @@ -61689,9 +62646,9 @@ definitions: description: type: string title: description is the description of the App - image: + image_url: type: string - title: image is the relative path to the App image + title: image_url is the URL to the App's image url: type: string title: url is the URL to the App's website @@ -61917,8 +62874,8 @@ definitions: launched: type: boolean description: >- - // launched is a boolean that indicates that the Rollapp has been - launched and the immutable fields are no longer updatable. + launched is a boolean that indicates that the Rollapp has been + launched and the immutable fields are no longer updatable. 
liveness_event_height: type: string format: int64 @@ -62038,9 +62995,9 @@ definitions: description: type: string title: description is the description of the App - image: + image_url: type: string - title: image is the relative path to the App image + title: image_url is the URL to the App's image url: type: string title: url is the URL to the App's website @@ -62180,8 +63137,8 @@ definitions: title: >- The time (num hub blocks) a sequencer can be down after which he will be jailed rather than slashed - app_creation_cost: - title: app_creation_cost is the cost for registering the App + app_registration_fee: + title: app_registration_fee is the fee for registering an App type: object properties: denom: @@ -62339,8 +63296,8 @@ definitions: launched: type: boolean description: >- - launched is a boolean that indicates that the Rollapp has - been launched and the immutable fields are no longer updatable. + launched is a boolean that indicates that the Rollapp has been + launched and the immutable fields are no longer updatable. liveness_event_height: type: string format: int64 @@ -63362,10 +64319,374 @@ definitions: (sequencerAddress). The sequencer could be attached to only one rollapp (rollappId). - dymensionxyz.dymension.sequencer.QueryGetSequencersByRollappResponse: + dymensionxyz.dymension.sequencer.QueryGetSequencersByRollappResponse: + type: object + properties: + sequencers: + type: array + items: + type: object + properties: + address: + type: string + description: >- + address is the bech32-encoded address of the sequencer account + which is the account that the message was sent from. + dymintPubKey: + type: object + properties: + type_url: + type: string + description: >- + A URL/resource name that uniquely identifies the type of the + serialized + + protocol buffer message. This string must contain at least + + one "/" character. 
The last segment of the URL's path must + represent + + the fully qualified name of the type (as in + + `path/google.protobuf.Duration`). The name should be in a + canonical form + + (e.g., leading "." is not accepted). + + + In practice, teams usually precompile into the binary all + types that they + + expect it to use in the context of Any. However, for URLs + which use the + + scheme `http`, `https`, or no scheme, one can optionally set + up a type + + server that maps type URLs to message definitions as + follows: + + + * If no scheme is provided, `https` is assumed. + + * An HTTP GET on the URL must yield a + [google.protobuf.Type][] + value in binary format, or produce an error. + * Applications are allowed to cache lookup results based on + the + URL, or have them precompiled into a binary to avoid any + lookup. Therefore, binary compatibility needs to be preserved + on changes to types. (Use versioned type names to manage + breaking changes.) + + Note: this functionality is not currently available in the + official + + protobuf release, and it is not used for type URLs beginning + with + + type.googleapis.com. + + + Schemes other than `http`, `https` (or the empty scheme) + might be + + used with implementation specific semantics. + value: + type: string + format: byte + description: >- + Must be a valid serialized protocol buffer of the above + specified type. + description: >- + `Any` contains an arbitrary serialized protocol buffer message + along with a + + URL that describes the type of the serialized message. + + + Protobuf library provides support to pack/unpack Any values in + the form + + of utility functions or additional generated methods of the Any + type. + + + Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + + Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... 
+ if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + + The pack methods provided by protobuf library will by default + use + + 'type.googleapis.com/full.type.name' as the type URL and the + unpack + + methods only use the fully qualified type name after the last + '/' + + in the type URL, for example "foo.bar.com/x/y.z" will yield type + + name "y.z". + + + JSON + + + The JSON representation of an `Any` value uses the regular + + representation of the deserialized, embedded message, with an + + additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + + If the embedded message type is well-known and has a custom JSON + + representation, that representation will be embedded adding a + field + + `value` which holds the custom JSON in addition to the `@type` + + field. Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } + rollappId: + type: string + description: rollappId defines the rollapp to which the sequencer belongs. + metadata: + description: metadata defines the extra information for the sequencer. + type: object + properties: + moniker: + type: string + description: moniker defines a human-readable name for the sequencer. 
+ details: + type: string + description: details define other optional details. + p2p_seeds: + type: array + items: + type: string + title: bootstrap nodes list + rpcs: + type: array + items: + type: string + title: RPCs list + evm_rpcs: + type: array + items: + type: string + title: evm RPCs list + rest_api_urls: + type: array + items: + type: string + title: REST API URLs + explorer_url: + type: string + title: block explorer URL + genesis_urls: + type: array + items: + type: string + title: genesis URLs + contact_details: + title: contact details + type: object + properties: + website: + type: string + title: website URL + telegram: + type: string + title: telegram link + x: + type: string + title: twitter link + extra_data: + type: string + format: byte + title: json dump the sequencer can add (limited by size) + snapshots: + type: array + items: + type: object + properties: + snapshot_url: + type: string + title: the snapshot url + height: + type: string + format: uint64 + title: The snapshot height + checksum: + type: string + title: sha-256 checksum value for the snapshot file + title: snapshots of the sequencer + gas_price: + type: string + title: gas_price defines the value for each gas unit + jailed: + type: boolean + description: >- + jailed defined whether the sequencer has been jailed from bonded + status or not. + proposer: + type: boolean + status: + description: status is the sequencer status (bonded/unbonding/unbonded). + type: string + enum: + - OPERATING_STATUS_UNBONDED + - OPERATING_STATUS_UNBONDING + - OPERATING_STATUS_BONDED + default: OPERATING_STATUS_UNBONDED + title: OperatingStatus defines the operating status of a sequencer + tokens: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an amount. + + + NOTE: The amount field is an Int which implements the custom + method + + signatures required by gogoproto. 
+ description: tokens define the delegated tokens (incl. self-delegation). + unbond_request_height: + type: string + format: int64 + description: >- + unbond_request_height stores the height at which this sequencer + has + + requested to unbond. + unbond_time: + type: string + format: date-time + description: >- + unbond_time defines the time when the sequencer will complete + unbonding. + notice_period_time: + type: string + format: date-time + title: >- + notice_period_time defines the time when the sequencer will + finish it's notice period if started + description: >- + Sequencer defines a sequencer identified by its' address + (sequencerAddress). + + The sequencer could be attached to only one rollapp (rollappId). + dymensionxyz.dymension.sequencer.QueryParamsResponse: type: object properties: - sequencers: + params: + description: params holds all the parameters of this module. + type: object + properties: + min_bond: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an amount. + + + NOTE: The amount field is an Int which implements the custom + method + + signatures required by gogoproto. + unbonding_time: + type: string + description: unbonding_time is the time duration of unbonding. + notice_period: + type: string + description: >- + notice_period is the time duration of notice period. + + notice period is the duration between the unbond request and the + actual + + unbonding starting. the proposer is still bonded during this + period. + liveness_slash_multiplier: + type: string + description: >- + LivenessSlashMultiplier multiplies with the tokens of the slashed + sequencer to compute the burn amount. + description: QueryParamsResponse is response type for the Query/Params RPC method. 
+ dymensionxyz.dymension.sequencer.QueryProposersResponse: + type: object + properties: + proposers: type: array items: type: object @@ -63685,47 +65006,33 @@ definitions: (sequencerAddress). The sequencer could be attached to only one rollapp (rollappId). - dymensionxyz.dymension.sequencer.QueryParamsResponse: - type: object - properties: - params: - description: params holds all the parameters of this module. + pagination: type: object properties: - min_bond: - type: object - properties: - denom: - type: string - amount: - type: string - description: >- - Coin defines a token with a denomination and an amount. - - - NOTE: The amount field is an Int which implements the custom - method - - signatures required by gogoproto. - unbonding_time: + next_key: type: string - description: unbonding_time is the time duration of unbonding. - notice_period: + format: byte + description: |- + next_key is the key to be passed to PageRequest.key to + query the next page most efficiently. It will be empty if + there are no more results. + total: type: string - description: >- - notice_period is the time duration of notice period. + format: uint64 + title: >- + total is total number of results available if + PageRequest.count_total - notice period is the duration between the unbond request and the - actual + was set, its value is undefined otherwise + description: |- + PageResponse is to be embedded in gRPC response messages where the + corresponding request message has used PageRequest. - unbonding starting. the proposer is still bonded during this - period. - liveness_slash_multiplier: - type: string - description: >- - LivenessSlashMultiplier multiplies with the tokens of the slashed - sequencer to compute the burn amount. - description: QueryParamsResponse is response type for the Query/Params RPC method. + message SomeResponse { + repeated Bar results = 1; + PageResponse page = 2; + } + description: Response type for the Proposers RPC method. 
dymensionxyz.dymension.sequencer.QuerySequencersResponse: type: object properties: @@ -64563,6 +65870,24 @@ definitions: description: >- Sponsored indicates if the stream is based on the sponsorship distribution. + epoch_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an amount. + + + NOTE: The amount field is an Int which implements the custom + method + + signatures required by gogoproto. + description: EpochCoins are coins that need to be distributed in this epoch. description: >- Stream is an object that stores and distributes yields to recipients who @@ -64727,6 +66052,21 @@ definitions: description: >- Sponsored indicates if the stream is based on the sponsorship distribution. + epoch_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: |- + Coin defines a token with a denomination and an amount. + + NOTE: The amount field is an Int which implements the custom method + signatures required by gogoproto. + description: EpochCoins are coins that need to be distributed in this epoch. description: >- Stream is an object that stores and distributes yields to recipients who @@ -64830,6 +66170,24 @@ definitions: description: >- Sponsored indicates if the stream is based on the sponsorship distribution. + epoch_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an amount. + + + NOTE: The amount field is an Int which implements the custom + method + + signatures required by gogoproto. + description: EpochCoins are coins that need to be distributed in this epoch. 
description: >- Stream is an object that stores and distributes yields to recipients who @@ -64937,6 +66295,24 @@ definitions: description: >- Sponsored indicates if the stream is based on the sponsorship distribution. + epoch_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an amount. + + + NOTE: The amount field is an Int which implements the custom + method + + signatures required by gogoproto. + description: EpochCoins are coins that need to be distributed in this epoch. description: >- Stream is an object that stores and distributes yields to recipients who @@ -65071,6 +66447,24 @@ definitions: description: >- Sponsored indicates if the stream is based on the sponsorship distribution. + epoch_coins: + type: array + items: + type: object + properties: + denom: + type: string + amount: + type: string + description: >- + Coin defines a token with a denomination and an amount. + + + NOTE: The amount field is an Int which implements the custom + method + + signatures required by gogoproto. + description: EpochCoins are coins that need to be distributed in this epoch. description: >- Stream is an object that stores and distributes yields to recipients who @@ -67436,10 +68830,7 @@ definitions: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: |- - distribute_to is where the gauge rewards are distributed to. - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -67488,6 +68879,11 @@ definitions: on the LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -67611,10 +69007,7 @@ definitions: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: |- - distribute_to is where the gauge rewards are distributed to. 
- This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -67663,6 +69056,11 @@ definitions: on the LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -67780,10 +69178,7 @@ definitions: is refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: |- - distribute_to is where the gauge rewards are distributed to. - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -67827,6 +69222,11 @@ definitions: the LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -67910,10 +69310,7 @@ definitions: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: |- - distribute_to is where the gauge rewards are distributed to. - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -67962,6 +69359,11 @@ definitions: on the LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -68058,10 +69460,7 @@ definitions: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: |- - distribute_to is where the gauge rewards are distributed to. - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -68110,6 +69509,11 @@ definitions: on the LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -68222,6 +69626,31 @@ definitions: NOTE: The amount field is an Int which implements the custom method signatures required by gogoproto. 
title: Coins that have yet to be distributed + dymensionxyz.dymension.incentives.Params: + type: object + properties: + distr_epoch_identifier: + type: string + title: >- + distr_epoch_identifier is what epoch type distribution will be + triggered by + + (day, week, etc.) + title: Params holds parameters for the incentives module + dymensionxyz.dymension.incentives.ParamsResponse: + type: object + properties: + params: + title: Params defines the set of incentive parameters + type: object + properties: + distr_epoch_identifier: + type: string + title: >- + distr_epoch_identifier is what epoch type distribution will be + triggered by + + (day, week, etc.) dymensionxyz.dymension.incentives.QueryLockableDurationsResponse: type: object properties: @@ -68232,26 +69661,11 @@ definitions: title: >- Time durations that users can lock coins for in order to recieve rewards - dymensionxyz.dymension.incentives.RewardsEstResponse: + dymensionxyz.dymension.incentives.RollappGauge: type: object properties: - coins: - type: array - items: - type: object - properties: - denom: - type: string - amount: - type: string - description: |- - Coin defines a token with a denomination and an amount. - - NOTE: The amount field is an Int which implements the custom method - signatures required by gogoproto. - title: |- - Estimated coin rewards that will be recieved at provided address - from specified locks between current time and end epoch + rollapp_id: + type: string dymensionxyz.dymension.incentives.UpcomingGaugesPerDenomResponse: type: object properties: @@ -68281,10 +69695,7 @@ definitions: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: |- - distribute_to is where the gauge rewards are distributed to. - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -68333,6 +69744,11 @@ definitions: on the LockQueryType. 
+ rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: @@ -68456,10 +69872,7 @@ definitions: refilled, Intended for use with incentives that get refilled daily. - distribute_to: - title: |- - distribute_to is where the gauge rewards are distributed to. - This is queried via lock duration or by timestamp + asset: type: object properties: lock_query_type: @@ -68508,6 +69921,11 @@ definitions: on the LockQueryType. + rollapp: + type: object + properties: + rollapp_id: + type: string coins: type: array items: diff --git a/proto/dymensionxyz/dymension/rollapp/app.proto b/proto/dymensionxyz/dymension/rollapp/app.proto index f4e479a70..0c7533f5c 100644 --- a/proto/dymensionxyz/dymension/rollapp/app.proto +++ b/proto/dymensionxyz/dymension/rollapp/app.proto @@ -4,16 +4,18 @@ package dymensionxyz.dymension.rollapp; option go_package = "github.com/dymensionxyz/dymension/v3/x/rollapp/types"; message App { + // id is the unique App's id in the Rollapp + uint64 id = 1; // name is the unique App's name - string name = 1; + string name = 2; // rollapp_id is the id of the Rollapp the App belongs to - string rollapp_id = 2; + string rollapp_id = 3; // description is the description of the App - string description = 3; + string description = 4; // image_url is the URL to the App's image - string image_url = 4; + string image_url = 5; // url is the URL to the App's website - string url = 5; + string url = 6; // order is the order of the App in the Rollapp - int32 order = 6; + int32 order = 7; } diff --git a/proto/dymensionxyz/dymension/rollapp/params.proto b/proto/dymensionxyz/dymension/rollapp/params.proto index 0e13262b7..e2d2899e7 100644 --- a/proto/dymensionxyz/dymension/rollapp/params.proto +++ b/proto/dymensionxyz/dymension/rollapp/params.proto @@ -9,7 +9,7 @@ import "cosmos/base/v1beta1/coin.proto"; // Params defines the parameters for the module. 
message Params { option (gogoproto.goproto_stringer) = false; - + // dispute_period_in_blocks the number of blocks it takes // to change a status of a state from received to finalized. // during that period, any user could submit fraud proof @@ -29,4 +29,7 @@ message Params { (gogoproto.nullable) = false, (gogoproto.moretags) = "yaml:\"app_registration_fee\"" ]; + // state_info_deletion_epoch_identifier is used to control the interval at which the state info records will be deleted. + string state_info_deletion_epoch_identifier = 8 + [ (gogoproto.moretags) = "yaml:\"state_info_deletion_epoch_identifier\"" ]; } diff --git a/proto/dymensionxyz/dymension/rollapp/state_info.proto b/proto/dymensionxyz/dymension/rollapp/state_info.proto index ccf9257df..1794e2224 100644 --- a/proto/dymensionxyz/dymension/rollapp/state_info.proto +++ b/proto/dymensionxyz/dymension/rollapp/state_info.proto @@ -4,6 +4,7 @@ package dymensionxyz.dymension.rollapp; option go_package = "github.com/dymensionxyz/dymension/v3/x/rollapp/types"; import "gogoproto/gogo.proto"; +import "google/protobuf/timestamp.proto"; import "dymensionxyz/dymension/rollapp/block_descriptor.proto"; import "dymensionxyz/dymension/common/status.proto"; @@ -46,6 +47,12 @@ message StateInfo { // BDs is a list of block description objects (one per block) // the list must be ordered by height, starting from startHeight to startHeight+numBlocks-1 BlockDescriptors BDs = 9 [(gogoproto.nullable) = false]; + // created_at is the timestamp at which the StateInfo was created + google.protobuf.Timestamp created_at = 10 [ + (gogoproto.stdtime) = true, + (gogoproto.nullable) = false, + (gogoproto.moretags) = "yaml:\"created_at\"" + ]; } // StateInfoSummary is a compact representation of StateInfo diff --git a/proto/dymensionxyz/dymension/rollapp/tx.proto b/proto/dymensionxyz/dymension/rollapp/tx.proto index 0ba19592a..00633ef4b 100644 --- a/proto/dymensionxyz/dymension/rollapp/tx.proto +++ 
b/proto/dymensionxyz/dymension/rollapp/tx.proto @@ -133,18 +133,20 @@ message MsgUpdateApp { option (cosmos.msg.v1.signer) = "creator"; // creator is the bech32-encoded address of the app owner string creator = 1; + // id is the unique App's id in the Rollapp + uint64 id = 2; // name is the unique App's name (immutable) - string name = 2; + string name = 3; // rollapp_id is the id of the Rollapp the App belongs to - string rollapp_id = 3; + string rollapp_id = 4; // description is the description of the App - string description = 4; + string description = 5; // image is the url to the App image - string image = 5; + string image = 6; // url is the URL to the App's website - string url = 6; + string url = 7; // order is the order of the App in the Rollapp - int32 order = 7; + int32 order = 8; } message MsgUpdateAppResponse { @@ -155,8 +157,8 @@ message MsgRemoveApp { option (cosmos.msg.v1.signer) = "creator"; // creator is the bech32-encoded address of the app owner string creator = 1; - // name is the unique App's name - string name = 2; + // id is the unique App's id in the Rollapp + uint64 id = 2; // rollapp_id is the id of the Rollapp the App belongs to string rollapp_id = 3; } diff --git a/proto/dymensionxyz/dymension/sequencer/query.proto b/proto/dymensionxyz/dymension/sequencer/query.proto index 7cb15d4bf..67657eeb1 100644 --- a/proto/dymensionxyz/dymension/sequencer/query.proto +++ b/proto/dymensionxyz/dymension/sequencer/query.proto @@ -26,7 +26,7 @@ service Query { // Queries a list of Sequencer items. rpc Sequencers(QuerySequencersRequest) returns (QuerySequencersResponse) { option (google.api.http).get = - "/dymensionxyz/dymension/sequencer/sequencer"; + "/dymensionxyz/dymension/sequencer/sequencers"; } // Queries a SequencersByRollapp by rollappId. 
@@ -39,15 +39,15 @@ service Query { // Queries a SequencersByRollappByStatus rpc SequencersByRollappByStatus(QueryGetSequencersByRollappByStatusRequest) returns (QueryGetSequencersByRollappByStatusResponse) { - option (google.api.http).get = "/dymensionxyz/dymension/sequencer/" - "sequencers_by_rollapp/{rollappId}/{status}"; + option (google.api.http).get = + "/dymensionxyz/dymension/sequencer/sequencers_by_rollapp/{rollappId}/{status}"; } // Queries the current proposer by rollappId. rpc GetProposerByRollapp(QueryGetProposerByRollappRequest) returns (QueryGetProposerByRollappResponse) { option (google.api.http).get = - "/dymensionxyz/dymension/sequencer/proposer/{rollappId}"; + "/dymensionxyz/dymension/sequencer/proposers/{rollappId}"; } // Queries the next proposer by rollappId. @@ -56,6 +56,12 @@ service Query { option (google.api.http).get = "/dymensionxyz/dymension/sequencer/next_proposer/{rollappId}"; } + + // Queries a list of proposers. + rpc Proposers(QueryProposersRequest) returns (QueryProposersResponse) { + option (google.api.http).get = + "/dymensionxyz/dymension/sequencer/proposers"; + } } // QueryParamsRequest is request type for the Query/Params RPC method. @@ -114,3 +120,14 @@ message QueryGetNextProposerByRollappResponse { // rotationInProgress is true if the proposer rotation is in progress. bool rotationInProgress = 2; } + +// Request type for the Proposers RPC method. +message QueryProposersRequest { + cosmos.base.query.v1beta1.PageRequest pagination = 1; +} + +// Response type for the Proposers RPC method. 
+message QueryProposersResponse { + repeated Sequencer proposers = 1 [ (gogoproto.nullable) = false ]; + cosmos.base.query.v1beta1.PageResponse pagination = 2; +} \ No newline at end of file diff --git a/proto/dymensionxyz/dymension/streamer/events.proto b/proto/dymensionxyz/dymension/streamer/events.proto new file mode 100644 index 000000000..1f85df031 --- /dev/null +++ b/proto/dymensionxyz/dymension/streamer/events.proto @@ -0,0 +1,31 @@ +syntax = "proto3"; +package dymensionxyz.dymension.streamer; + +import "gogoproto/gogo.proto"; +import "cosmos/base/v1beta1/coin.proto"; +import "dymensionxyz/dymension/streamer/streamer.proto"; + +option go_package = "github.com/dymensionxyz/dymension/v3/x/streamer/types"; + +message EventEndBlock { + uint64 iterations = 1; + uint64 max_iterations = 2; + // Distributed is the total amount of coins that have been distributed + repeated cosmos.base.v1beta1.Coin distributed = 3 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; +} + +message EventEpochEnd { + uint64 iterations = 1; + // Distributed is the total amount of coins that have been distributed + repeated cosmos.base.v1beta1.Coin distributed = 2 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; +} + +message EventEpochStart { + uint64 active_streams_num = 1; +} \ No newline at end of file diff --git a/proto/dymensionxyz/dymension/streamer/genesis.proto b/proto/dymensionxyz/dymension/streamer/genesis.proto index 46793e9c7..1e043a9a9 100644 --- a/proto/dymensionxyz/dymension/streamer/genesis.proto +++ b/proto/dymensionxyz/dymension/streamer/genesis.proto @@ -5,6 +5,7 @@ import "gogoproto/gogo.proto"; import "google/protobuf/duration.proto"; import "dymensionxyz/dymension/streamer/params.proto"; import "dymensionxyz/dymension/streamer/stream.proto"; +import "dymensionxyz/dymension/streamer/streamer.proto"; option go_package = 
"github.com/dymensionxyz/dymension/v3/x/streamer/types"; @@ -19,4 +20,6 @@ message GenesisState { // last_stream_id is what the stream number will increment from when creating // the next stream after genesis uint64 last_stream_id = 3; + // EpochPointers are pointers to the last rewarded gauges + repeated EpochPointer epoch_pointers = 4 [ (gogoproto.nullable) = false ]; } diff --git a/proto/dymensionxyz/dymension/streamer/params.proto b/proto/dymensionxyz/dymension/streamer/params.proto index df4913a5d..150e66d62 100644 --- a/proto/dymensionxyz/dymension/streamer/params.proto +++ b/proto/dymensionxyz/dymension/streamer/params.proto @@ -9,4 +9,7 @@ option go_package = "github.com/dymensionxyz/dymension/v3/x/streamer/types"; // Params holds parameters for the streamer module message Params { + // MaxIterationPerBlock defines the maximum number of gauges that could be processed in a single block. + // This param is used during the pagination process. + uint64 max_iterations_per_block = 1; } diff --git a/proto/dymensionxyz/dymension/streamer/stream.proto b/proto/dymensionxyz/dymension/streamer/stream.proto index 7ca1e8533..10811461f 100644 --- a/proto/dymensionxyz/dymension/streamer/stream.proto +++ b/proto/dymensionxyz/dymension/streamer/stream.proto @@ -53,4 +53,10 @@ message Stream { // Sponsored indicates if the stream is based on the sponsorship distribution. bool sponsored = 9; + + // EpochCoins are coins that need to be distributed in this epoch. 
+ repeated cosmos.base.v1beta1.Coin epoch_coins = 10 [ + (gogoproto.nullable) = false, + (gogoproto.castrepeated) = "github.com/cosmos/cosmos-sdk/types.Coins" + ]; } \ No newline at end of file diff --git a/proto/dymensionxyz/dymension/streamer/streamer.proto b/proto/dymensionxyz/dymension/streamer/streamer.proto new file mode 100644 index 000000000..8d9d404b1 --- /dev/null +++ b/proto/dymensionxyz/dymension/streamer/streamer.proto @@ -0,0 +1,25 @@ +syntax = "proto3"; +package dymensionxyz.dymension.streamer; + +import "gogoproto/gogo.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "github.com/dymensionxyz/dymension/v3/x/streamer/types"; + +// EpochPointer is a special object used for the streamer pagination. It helps iterate over +// streams with the specified epoch identifier within one epoch. Additionally, holds coins +// that must be distributed in this epoch. +message EpochPointer { + // StreamID is the ID of a stream. + uint64 stream_id = 1; + // GaugeID is the ID of a gauge. + uint64 gauge_id = 2; + // EpochIdentifier is a unique reference to this particular timer. + string epoch_identifier = 3; + // EpochDuration is the time in between epoch ticks. It is stored in order to have + // an ability to sort the EpochPointer slice. 
+ google.protobuf.Duration epoch_duration = 4 [ + (gogoproto.nullable) = false, + (gogoproto.stdduration) = true + ]; +} \ No newline at end of file diff --git a/testutil/keeper/streamer.go b/testutil/keeper/streamer.go index a95d139fe..08996e406 100644 --- a/testutil/keeper/streamer.go +++ b/testutil/keeper/streamer.go @@ -38,6 +38,7 @@ func StreamerKeeper(t testing.TB) (*keeper.Keeper, sdk.Context) { "StreamerParams", ) k := keeper.NewKeeper( + cdc, storeKey, paramsSubspace, nil, diff --git a/utils/pagination/paginate.go b/utils/pagination/paginate.go new file mode 100644 index 000000000..5a2df451b --- /dev/null +++ b/utils/pagination/paginate.go @@ -0,0 +1,29 @@ +package pagination + +type Iterator[T any] interface { + Next() + Value() T + Valid() bool +} + +type Stop bool + +const ( + Break Stop = true + Continue Stop = false +) + +// Paginate is a function that paginates over an iterator. The callback is executed for each iteration and if it +// returns true, the pagination stops. The function returns the amount of iterations before stopping. 
+func Paginate[T any](iter Iterator[T], perPage uint64, cb func(T) Stop) uint64 { + iterations := uint64(0) + for ; iterations < perPage && iter.Valid(); iter.Next() { + iterations++ + + stop := cb(iter.Value()) + if stop { + break + } + } + return iterations +} diff --git a/utils/pagination/paginate_test.go b/utils/pagination/paginate_test.go new file mode 100644 index 000000000..6e842c3b2 --- /dev/null +++ b/utils/pagination/paginate_test.go @@ -0,0 +1,83 @@ +package pagination_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/dymensionxyz/dymension/v3/utils/pagination" +) + +type testIterator struct { + data []int + index int +} + +func newTestIterator(data []int) *testIterator { + return &testIterator{data: data, index: 0} +} + +func (t *testIterator) Next() { + t.index++ +} + +func (t *testIterator) Value() int { + return t.data[t.index] +} + +func (t *testIterator) Valid() bool { + return t.index < len(t.data) +} + +func TestPaginate(t *testing.T) { + testCases := []struct { + name string + iterator pagination.Iterator[int] + perPage uint64 + stopValue int + expected uint64 + }{ + { + name: "Empty iterator", + iterator: newTestIterator([]int{}), + perPage: 5, + stopValue: -1, + expected: 0, + }, + { + name: "Non-Empty iterator less than perPage", + iterator: newTestIterator([]int{1, 2, 3}), + perPage: 10, + stopValue: -1, + expected: 3, + }, + { + name: "Non-empty iterator greater than perPage", + iterator: newTestIterator([]int{1, 2, 3, 4, 5, 6, 7}), + perPage: 5, + stopValue: -1, + expected: 5, + }, + { + name: "Zero perPage", + iterator: newTestIterator([]int{1, 2, 3, 4, 5, 6, 7}), + perPage: 0, + stopValue: 6, + expected: 0, + }, + { + name: "Non-Empty iterator with stop condition", + iterator: newTestIterator([]int{1, 2, 3, 4, 5, 6, 7}), + perPage: 10, + stopValue: 3, + expected: 3, + }, + } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + result := pagination.Paginate(tc.iterator, 
tc.perPage, func(i int) pagination.Stop { return i == tc.stopValue }) + require.Equal(t, tc.expected, result) + }) + } +} diff --git a/x/denommetadata/ibc_middleware.go b/x/denommetadata/ibc_middleware.go index 1746678db..b9276c1dc 100644 --- a/x/denommetadata/ibc_middleware.go +++ b/x/denommetadata/ibc_middleware.go @@ -217,12 +217,20 @@ func (m *ICS4Wrapper) SendPacket( // At the first match, we assume that the rollapp already contains the metadata. // It would be technically possible to have a race condition where the denom metadata is added to the rollapp // from another packet before this packet is acknowledged. - if Contains(rollapp.RegisteredDenoms, packet.Denom) { + // The value of `packet.Denom` here can be one of two things: + // 1. Base denom (e.g. "adym") for the native token of the hub, and + // 2. IBC trace (e.g. "transfer/channel-1/arax") for a third party token. + // We need to handle both cases: + // 1. We use the value of `packet.Denom` as the baseDenom + // 2. We parse the IBC denom trace into IBC denom hash and prepend it with "ibc/" to get the baseDenom + baseDenom := transfertypes.ParseDenomTrace(packet.Denom).IBCDenom() + + if Contains(rollapp.RegisteredDenoms, baseDenom) { return m.ICS4Wrapper.SendPacket(ctx, chanCap, sourcePort, sourceChannel, timeoutHeight, timeoutTimestamp, data) } // get the denom metadata from the bank keeper, if it doesn't exist, move on to the next middleware in the chain - denomMetadata, ok := m.bankKeeper.GetDenomMetaData(ctx, packet.Denom) + denomMetadata, ok := m.bankKeeper.GetDenomMetaData(ctx, baseDenom) if !ok { return m.ICS4Wrapper.SendPacket(ctx, chanCap, sourcePort, sourceChannel, timeoutHeight, timeoutTimestamp, data) } diff --git a/x/incentives/README.md b/x/incentives/README.md index 988522e72..ccfe2620a 100644 --- a/x/incentives/README.md +++ b/x/incentives/README.md @@ -12,7 +12,7 @@ There are two kinds of `gauges`, perpetual and non-perpetual ones. 
- Non perpetual ones get removed from active queue after the distribution period finish but perpetual ones persist. - For non perpetual ones, they distribute the tokens equally per epoch during the `gauge` is in the active period. -For perpetual ones, it distribute all the tokens at a single time. Those gauges needs to be filled externally. +For perpetual ones, it distributes all the tokens at a single time. Those gauges need to be filled externally. ## Contents @@ -230,7 +230,7 @@ The incentives module contains the following parameters: | -------------------- | ------ | -------- | | DistrEpochIdentifier | string | "weekly" | -Note: DistrEpochIdentifier is a epoch identifier, and module distribute +Note: DistrEpochIdentifier is an epoch identifier, and the module distributes rewards at the end of epochs. As `epochs` module is handling multiple epochs, the identifier is required to check if distribution should be done at `AfterEpochEnd` hook diff --git a/x/lightclient/README.md b/x/lightclient/README.md new file mode 100644 index 000000000..6e04db386 --- /dev/null +++ b/x/lightclient/README.md @@ -0,0 +1,53 @@ +# Light client + +This module implements the 'canonical light client' concept. Each established Rollapp has an associated canonical light client, which allows safe IBC light clients to be created and operated permissionlessly. + +# Operator Info + +## Help! My IBC channel isn't working! + +This is a help section for operators - people running rollapps against the Hub in any environment (prod, testnet, local or e2e tests). + +If you cannot create a useable IBC channel between your rollapp and the Hub, it may be due to the light client for your rollapp on the Hub not being marked as canonical by the Hub state. + +### An overview of the protocol + +The Hub will allow the creation of new light clients by anyone.
The first light client to match the state infos sent by the sequencer to the Hub is marked 'canonical', which will allow it to be used to create IBC channels which use EIBC. That means it's very important that your light client be marked canonical before it is used to create a channel, which in turn requires the state update to have arrived on the Hub from the sequencer for a height which is *at least* the Rollapp height that the light client was created from. + +To summarize, the order of steps is + +1. Create light clients +2. Wait for another state update to arrive on the Hub from the Rollapp sequencer +3. Create IBC transfer channel + +The Dymension relayer supports this flow out of the box. + +Moreover, it is important to create the light client for the Rollapp on the Hub with the right parameters. The correct parameters can be seen with `dymd q lightclient expected`, and relevant parameters are the trust level, trusting period, unbonding period and max clock drift. The Dymension relayer ensures these parameters have the correct values. If in doubt, compare the output of `dymd q ibc client state 07-tendermint-x` for your light client with the expected values from the Hub. + +When combined, this flow implies a few relationships between parameters + +``` +dymint max idle time < trusting period < rollapp x/sequencers unbonding period = hub x/sequencer unbonding period +``` + +and additionally, before creating the channel it is wise to also set `dymint max batch time` to a small value, since step (2) in the procedure above requires a state update.
+ + +### Operator checklist + +- [ ] Using latest compatible relayer from https://github.com/dymensionxyz/go-relayer (main-dym) branch +- [ ] Rollapp x/sequencers unbonding period is equal to the Hub x/sequencer unbonding period +- [ ] Dymint idle time is less than trusting period +- [ ] Dymint batch time is short when creating the client and channel +- [ ] Client params equal to expected values (may need to pass `--max-clock-drift` to relayer) + +### Additional tips + +#### Verifying the result + +Check if the light client is canonical with `dymd q lightclient light-client $ROLLAPP_CHAIN_ID`. + +#### Small trusting period + +Try the relayer `--time-threshold` [flag](https://github.com/cosmos/relayer/blob/main/docs/advanced_usage.md#auto-update-light-client) to make sure the light client does not expire. + diff --git a/x/lightclient/ante/ibc_msg_update_client.go b/x/lightclient/ante/ibc_msg_update_client.go index 5caf22e73..f70d7481a 100644 --- a/x/lightclient/ante/ibc_msg_update_client.go +++ b/x/lightclient/ante/ibc_msg_update_client.go @@ -1,10 +1,12 @@ package ante import ( + errorsmod "cosmossdk.io/errors" sdk "github.com/cosmos/cosmos-sdk/types" ibcclienttypes "github.com/cosmos/ibc-go/v7/modules/core/02-client/types" ibctm "github.com/cosmos/ibc-go/v7/modules/light-clients/07-tendermint" "github.com/dymensionxyz/dymension/v3/x/lightclient/types" + "github.com/dymensionxyz/gerr-cosmos/gerrc" ) func (i IBCMessagesDecorator) HandleMsgUpdateClient(ctx sdk.Context, msg *ibcclienttypes.MsgUpdateClient) error { @@ -23,10 +25,15 @@ func (i IBCMessagesDecorator) HandleMsgUpdateClient(ctx sdk.Context, msg *ibccli if canonicalClient != msg.ClientId { return nil // The client is not a rollapp's canonical client. Continue with default behaviour. 
} + clientMessage, err := ibcclienttypes.UnpackClientMessage(msg.ClientMessage) if err != nil { return nil } + _, ok = clientMessage.(*ibctm.Misbehaviour) + if ok { + return errorsmod.Wrap(gerrc.ErrFailedPrecondition, "misbehavior evidence is disabled for canonical clients") + } header, ok := clientMessage.(*ibctm.Header) if !ok { return nil diff --git a/x/lightclient/ante/ibc_msg_update_client_test.go b/x/lightclient/ante/ibc_msg_update_client_test.go index c17e3e67c..227ab6a63 100644 --- a/x/lightclient/ante/ibc_msg_update_client_test.go +++ b/x/lightclient/ante/ibc_msg_update_client_test.go @@ -292,6 +292,42 @@ func TestHandleMsgUpdateClient(t *testing.T) { require.NoError(t, err) }, }, + { + name: "Client is not a known canonical client of a rollapp", + prepare: func(ctx sdk.Context, k keeper.Keeper) testInput { + return testInput{ + msg: &ibcclienttypes.MsgUpdateClient{ + ClientId: "canon-client-id", + }, + } + }, + assert: func(ctx sdk.Context, k keeper.Keeper, err error) { + require.NoError(t, err) + }, + }, + { + name: "SubmitMisbehavior for a canonical chain", + prepare: func(ctx sdk.Context, k keeper.Keeper) testInput { + k.SetCanonicalClient(ctx, "rollapp-has-canon-client", "canon-client-id") + m := &ibctm.Misbehaviour{} + mAny, _ := ibcclienttypes.PackClientMessage(m) + + return testInput{ + msg: &ibcclienttypes.MsgUpdateClient{ + ClientId: "canon-client-id", + ClientMessage: mAny, + }, + rollapps: map[string]rollapptypes.Rollapp{ + "rollapp-has-canon-client": { + RollappId: "rollapp-has-canon-client", + }, + }, + } + }, + assert: func(ctx sdk.Context, k keeper.Keeper, err error) { + require.Error(t, err) + }, + }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { diff --git a/x/lockup/README.md b/x/lockup/README.md index e62fa9266..11d1b03be 100644 --- a/x/lockup/README.md +++ b/x/lockup/README.md @@ -4,7 +4,7 @@ Lockup module provides an interface for users to lock tokens (also known as bonding) into the module to get incentives. 
-After tokens have been added to a specific pool and turned into LP shares through the GAMM module, users can then lock these LP shares with a specific duration in order to begin earing rewards. +After tokens have been added to a specific pool and turned into LP shares through the GAMM module, users can then lock these LP shares with a specific duration in order to begin earning rewards. To unlock these LP shares, users must trigger the unlock timer and wait for the unlock period that was set initially to be completed. After the unlock period is over, users can turn LP shares back into their respective share of tokens. @@ -55,11 +55,11 @@ After the first day passes, they will only receive rewards for the 1 day and 1 w ### Locked coins management Locked coins are all stored in module account for `lockup` module which -is called `LockPool`. When user lock coins within `lockup` module, it's +is called `LockPool`. When user locks coins within `lockup` module, it's moved from user account to `LockPool` and a record (`PeriodLock` struct) is created. -Once the period is over, user can withdraw it at anytime from +Once the period is over, user can withdraw it at any time from `LockPool`. User can withdraw by PeriodLock ID or withdraw all `UnlockableCoins` at a time. @@ -102,7 +102,7 @@ references. (`b_prefix_key`) 3. `{KeyPrefixDenomLockDuration}{Denom}{Duration}` 4. `{KeyPrefixAccountDenomLockDuration}{Owner}{Denom}{Duration}` -If the lock is unlocking, it also stores the below referneces. +If the lock is unlocking, it also stores the below references. 1. `{KeyPrefixLockTimestamp}{LockEndTime}` 2. `{KeyPrefixAccountLockTimestamp}{Owner}{LockEndTime}` @@ -116,7 +116,7 @@ For end time keys, they are converted to sortable string by using stores accumulation store for efficient rewards distribution mechanism. For reference management, `addLockRefByKey` function is used a lot. Here -key is the prefix key to be used for iteration. 
It is combination of two +key is the prefix key to be used for iteration. It is a combination of two prefix keys.(`{a_prefix_key}{b_prefix_key}`) ``` {.go} @@ -397,7 +397,7 @@ Lockup admin keeper provides god privilege functions to remove tokens from locks and create new locks. ```go -// AdminKeeper defines a god priviledge keeper functions to remove tokens from locks and create new locks +// AdminKeeper defines a god privilege keeper functions to remove tokens from locks and create new locks // For the governance system of token pools, we want a "ragequit" feature // So governance changes will take 1 week to go into effect // During that time, people can choose to "ragequit" which means they would leave the original pool @@ -410,14 +410,14 @@ type AdminKeeper interface { // this unlock previous lockID and create a new lock with newCoins with same duration and endtime Relock(sdk.Context, lockID uint64, newCoins sdk.Coins) error - // this unlock without time check with an admin priviledge + // this unlock without time check with an admin privilege BreakLock(sdk.Context, lockID uint64) error } ``` ## Hooks -In this section we describe the "hooks" that `lockup` module provide for +In this section we describe the "hooks" that `lockup` module provides for other modules. 
### Tokens Locked diff --git a/x/rollapp/client/cli/tx_remove_app.go b/x/rollapp/client/cli/tx_remove_app.go index 2d297de35..01b84f2d8 100644 --- a/x/rollapp/client/cli/tx_remove_app.go +++ b/x/rollapp/client/cli/tx_remove_app.go @@ -15,18 +15,23 @@ var _ = strconv.Itoa(0) func CmdRemoveApp() *cobra.Command { cmd := &cobra.Command{ - Use: "remove-app [name] [rollapp-id]", + Use: "remove-app [app-id] [rollapp-id]", Short: "Remove an app", - Args: cobra.ExactArgs(6), + Args: cobra.ExactArgs(2), RunE: func(cmd *cobra.Command, args []string) (err error) { clientCtx, err := client.GetClientTxContext(cmd) if err != nil { return err } + appID, err := strconv.ParseUint(args[0], 10, 64) + if err != nil { + return err + } + msg := types.NewMsgRemoveApp( clientCtx.GetFromAddress().String(), - args[0], + appID, args[1], ) diff --git a/x/rollapp/client/cli/tx_update_app.go b/x/rollapp/client/cli/tx_update_app.go index b1fedbac9..6a359591b 100644 --- a/x/rollapp/client/cli/tx_update_app.go +++ b/x/rollapp/client/cli/tx_update_app.go @@ -13,22 +13,27 @@ import ( func CmdUpdateApp() *cobra.Command { cmd := &cobra.Command{ - Use: "update-app [name] [rollapp-id] [description] [logo] [url] [order]", + Use: "update-app [id] [name] [rollapp-id] [description] [logo] [url] [order]", Short: "Update an app", - Example: "dymd tx app update-app 'app1' 'rollapp_1234-1' 1 'A description' '/logos/apps/app1.jpeg' 'https://app1.com/'", - Args: cobra.MinimumNArgs(1), + Example: "dymd tx rollapp update-app 1 'app1' 'rollapp_1234-1' 'A description' '/logos/apps/app1.jpeg' 'https://app1.com/' 3", + Args: cobra.MinimumNArgs(6), RunE: func(cmd *cobra.Command, args []string) (err error) { var ( - name = args[0] - rollappId = args[1] - description = args[2] - logo = args[3] - url = args[4] + name = args[1] + rollappId = args[2] + description = args[3] + logo = args[4] + url = args[5] order int64 = -1 ) - if len(args) == 6 { - order, err = strconv.ParseInt(args[5], 10, 32) + id, err := 
strconv.ParseUint(args[0], 10, 64) + if err != nil { + return err + } + + if len(args) == 7 { + order, err = strconv.ParseInt(args[6], 10, 32) if err != nil { return err } @@ -41,6 +46,7 @@ func CmdUpdateApp() *cobra.Command { msg := types.NewMsgUpdateApp( clientCtx.GetFromAddress().String(), + id, name, rollappId, description, diff --git a/x/rollapp/keeper/app.go b/x/rollapp/keeper/app.go index 7bce359e5..461352405 100644 --- a/x/rollapp/keeper/app.go +++ b/x/rollapp/keeper/app.go @@ -23,9 +23,9 @@ func (k Keeper) DeleteApp(ctx sdk.Context, app types.App) { store.Delete(key) } -func (k Keeper) GetApp(ctx sdk.Context, name, rollappId string) (val types.App, found bool) { +func (k Keeper) GetApp(ctx sdk.Context, id uint64, rollappId string) (val types.App, found bool) { store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.AppKeyPrefix)) - key := types.AppKey(types.App{Name: name, RollappId: rollappId}) + key := types.AppKey(types.App{Id: id, RollappId: rollappId}) b := store.Get(key) if b == nil { return val, false @@ -55,3 +55,22 @@ func (k Keeper) GetRollappApps(ctx sdk.Context, rollappId string) (list []*types return list } + +// GenerateNextAppID increments and returns the next available App ID for a specific Rollapp. 
+func (k Keeper) GenerateNextAppID(ctx sdk.Context, rollappID string) uint64 { + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.AppSequenceKeyPrefix)) + sequenceKey := types.AppSequenceKey(rollappID) + + bz := store.Get(sequenceKey) + var seq uint64 + if bz == nil { + seq = 0 + } else { + seq = sdk.BigEndianToUint64(bz) + } + + seq++ + store.Set(sequenceKey, sdk.Uint64ToBigEndian(seq)) + + return seq +} diff --git a/x/rollapp/keeper/hooks.go b/x/rollapp/keeper/hooks.go new file mode 100644 index 000000000..2d91cd5fb --- /dev/null +++ b/x/rollapp/keeper/hooks.go @@ -0,0 +1,38 @@ +package keeper + +import ( + sdk "github.com/cosmos/cosmos-sdk/types" + epochstypes "github.com/osmosis-labs/osmosis/v15/x/epochs/types" +) + +var _ epochstypes.EpochHooks = epochHooks{} + +type epochHooks struct { + Keeper +} + +func (k Keeper) GetEpochHooks() epochstypes.EpochHooks { + return epochHooks{ + Keeper: k, + } +} + +// AfterEpochEnd is the epoch end hook. +// We want to clean up all the state info records that are older than the sequencer unbonding time. +func (e epochHooks) AfterEpochEnd(ctx sdk.Context, epochIdentifier string, _ int64) error { + if epochIdentifier != e.StateInfoDeletionEpochIdentifier(ctx) { + return nil + } + + currentTimestamp := ctx.BlockTime() + // for the time being, we can assume that the sequencer unbonding time will not change, therefore + // we can assume that the number of resulting deletable state updates will remain constant + seqUnbondingTime := e.sequencerKeeper.UnbondingTime(ctx) + endTimestamp := currentTimestamp.Add(-seqUnbondingTime) + + e.DeleteStateInfoUntilTimestamp(ctx, endTimestamp) + return nil +} + +// BeforeEpochStart is the epoch start hook. 
+func (e epochHooks) BeforeEpochStart(sdk.Context, string, int64) error { return nil } diff --git a/x/rollapp/keeper/latest_finalized_state_index.go b/x/rollapp/keeper/latest_finalized_state_index.go index e3a637f2e..18444fe99 100644 --- a/x/rollapp/keeper/latest_finalized_state_index.go +++ b/x/rollapp/keeper/latest_finalized_state_index.go @@ -3,6 +3,7 @@ package keeper import ( "github.com/cosmos/cosmos-sdk/store/prefix" sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/dymensionxyz/dymension/v3/x/rollapp/types" ) @@ -46,7 +47,7 @@ func (k Keeper) RemoveLatestFinalizedStateIndex( )) } -// GetAllLatestFinalizedStateIndex returns all latestFinalizedStateIndex +// GetAllLatestFinalizedStateIndex returns latestFinalizedStateIndex for all rollapps func (k Keeper) GetAllLatestFinalizedStateIndex(ctx sdk.Context) (list []types.StateInfoIndex) { store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.LatestFinalizedStateIndexKeyPrefix)) iterator := sdk.KVStorePrefixIterator(store, []byte{}) diff --git a/x/rollapp/keeper/latest_state_info_index.go b/x/rollapp/keeper/latest_state_info_index.go index 8df8a7827..bf1f0ab3b 100644 --- a/x/rollapp/keeper/latest_state_info_index.go +++ b/x/rollapp/keeper/latest_state_info_index.go @@ -3,6 +3,7 @@ package keeper import ( "github.com/cosmos/cosmos-sdk/store/prefix" sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/dymensionxyz/dymension/v3/x/rollapp/types" ) @@ -46,7 +47,7 @@ func (k Keeper) RemoveLatestStateInfoIndex( )) } -// GetAllLatestStateInfoIndex returns all latestStateInfoIndex +// GetAllLatestStateInfoIndex returns latestStateInfoIndex for all rollapps func (k Keeper) GetAllLatestStateInfoIndex(ctx sdk.Context) (list []types.StateInfoIndex) { store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.LatestStateInfoIndexKeyPrefix)) iterator := sdk.KVStorePrefixIterator(store, []byte{}) diff --git a/x/rollapp/keeper/liveness_test.go b/x/rollapp/keeper/liveness_test.go index 
21deb60ce..9b4887b22 100644 --- a/x/rollapp/keeper/liveness_test.go +++ b/x/rollapp/keeper/liveness_test.go @@ -5,6 +5,7 @@ import ( "fmt" "slices" "testing" + "time" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/dymensionxyz/sdk-utils/utils/urand" @@ -147,6 +148,10 @@ type livenessMockSequencerKeeper struct { jails map[string]int } +func (l livenessMockSequencerKeeper) UnbondingTime(sdk.Context) (res time.Duration) { + return time.Minute +} + func newLivenessMockSequencerKeeper() livenessMockSequencerKeeper { return livenessMockSequencerKeeper{ make(map[string]int), diff --git a/x/rollapp/keeper/msg_server_app.go b/x/rollapp/keeper/msg_server_app.go index 475a0d036..c38abace1 100644 --- a/x/rollapp/keeper/msg_server_app.go +++ b/x/rollapp/keeper/msg_server_app.go @@ -35,6 +35,8 @@ func (k msgServer) AddApp(goCtx context.Context, msg *types.MsgAddApp) (*types.M } app := msg.GetApp() + app.Id = k.GenerateNextAppID(ctx, app.RollappId) + k.SetApp(ctx, app) if err := ctx.EventManager().EmitTypedEvent(app.GetAddedEvent()); err != nil { @@ -83,9 +85,10 @@ func (k msgServer) RemoveApp(goCtx context.Context, msg *types.MsgRemoveApp) (*t } func (k msgServer) checkInputs(ctx sdk.Context, msg appMsg) error { - rollapp, foundRollapp := k.GetRollapp(ctx, msg.GetRollappId()) + app := msg.GetApp() + rollapp, foundRollapp := k.GetRollapp(ctx, app.GetRollappId()) if !foundRollapp { - return gerrc.ErrNotFound.Wrapf("rollappId: %s", msg.GetRollappId()) + return gerrc.ErrNotFound.Wrapf("rollappId: %s", app.GetRollappId()) } // check if the sender is the owner of the app @@ -93,26 +96,43 @@ func (k msgServer) checkInputs(ctx sdk.Context, msg appMsg) error { return gerrc.ErrPermissionDenied.Wrap("not the owner of the RollApp") } - // check if the app already exists - _, foundApp := k.GetApp(ctx, msg.GetName(), msg.GetRollappId()) switch msg.(type) { - case *types.MsgAddApp: - if foundApp { - return gerrc.ErrAlreadyExists.Wrap("app already exists") - } - case *types.MsgUpdateApp, 
*types.MsgRemoveApp: - if !foundApp { + case *types.MsgRemoveApp, *types.MsgUpdateApp: + if idExists := k.appIDExists(ctx, app); !idExists { return gerrc.ErrNotFound.Wrap("app not found") } } + switch msg.(type) { + case *types.MsgAddApp, *types.MsgUpdateApp: + apps := k.GetRollappApps(ctx, app.GetRollappId()) + if nameExists := k.appNameExists(apps, app); nameExists { + return gerrc.ErrAlreadyExists.Wrap("app name already exists") + } + } return nil } +func (k msgServer) appNameExists(apps []*types.App, app types.App) bool { + for _, a := range apps { + // does name already exist: + // - id=0 means it is a new app + // - skip if the id is the same as the app being checked + if (app.GetId() == 0 || a.Id != app.GetId()) && a.Name == app.GetName() { + return true + } + } + return false +} + +func (k msgServer) appIDExists(ctx sdk.Context, app types.App) bool { + _, foundApp := k.GetApp(ctx, app.GetId(), app.GetRollappId()) + return foundApp +} + type appMsg interface { - GetName() string - GetRollappId() string GetCreator() string + GetApp() types.App } var _ types.MsgServer = msgServer{} diff --git a/x/rollapp/keeper/msg_server_app_test.go b/x/rollapp/keeper/msg_server_app_test.go index 27196ce22..6d8ef8073 100644 --- a/x/rollapp/keeper/msg_server_app_test.go +++ b/x/rollapp/keeper/msg_server_app_test.go @@ -42,6 +42,7 @@ func (suite *RollappTestSuite) createRollappWithApp() types.RollappSummary { suite.Require().Nil(err) appExpect := types.App{ + Id: 1, Name: req.GetName(), RollappId: req.GetRollappId(), Description: req.GetDescription(), @@ -50,7 +51,7 @@ func (suite *RollappTestSuite) createRollappWithApp() types.RollappSummary { Order: req.GetOrder(), } - app, ok := suite.App.RollappKeeper.GetApp(suite.Ctx, req.Name, res.RollappId) + app, ok := suite.App.RollappKeeper.GetApp(suite.Ctx, 1, res.RollappId) suite.Require().True(ok) suite.Require().EqualValues(&appExpect, &app) suite.Require().Len(queryResponse.Apps, 1) @@ -224,9 +225,18 @@ func (suite 
*RollappTestSuite) Test_msgServer_AddApp() { } for _, msg := range tt.msgs { - _, err := suite.msgServer.AddApp(goCtx, msg) + err := func() error { + err := msg.ValidateBasic() + if err != nil { + return err + } + _, err = suite.msgServer.AddApp(goCtx, msg) + return err + }() if tt.wantErr != nil { suite.Require().ErrorContains(err, tt.wantErr.Error()) + } else { + suite.Require().NoError(err) } } @@ -240,13 +250,21 @@ func (suite *RollappTestSuite) Test_msgServer_AddApp() { suite.Require().NoError(err) suite.Require().Len(rollapp.Apps, len(tt.msgs)) + // check if the apps are ordered correctly slices.SortFunc(tt.msgs, func(a, b *types.MsgAddApp) int { return cmp.Compare(a.Order, b.Order) }) - for i, app := range rollapp.Apps { suite.Require().Equal(tt.msgs[i].Order, app.Order) } + + // check if the app ids are sequenced correctly + slices.SortFunc(rollapp.Apps, func(a, b *types.App) int { + return int(a.Id) - int(b.Id) + }) + for i, app := range rollapp.Apps { + suite.Require().Equal(uint64(i+1), app.Id) + } }) } } @@ -264,6 +282,7 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { name: "success: update existing app", msgs: []*types.MsgUpdateApp{ { + Id: 1, Creator: alice, Name: "app1", RollappId: rollappID, @@ -275,6 +294,7 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { }, malleate: func() { suite.App.RollappKeeper.SetApp(suite.Ctx, types.App{ + Id: 1, Name: "app1", RollappId: rollappID, Order: 1, @@ -284,6 +304,7 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { name: "fail: update non-existent app", msgs: []*types.MsgUpdateApp{ { + Id: 1, Creator: alice, Name: "non_existent_app", RollappId: rollappID, @@ -294,10 +315,26 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { }, }, wantErr: gerrc.ErrNotFound, + }, { + name: "fail: update app with ID 0", + msgs: []*types.MsgUpdateApp{ + { + Id: 0, + Creator: alice, + Name: "app1", + RollappId: rollappID, + Description: "Updated description", + Image: 
"http://example.com/updated_image", + Url: "http://example.com/updated_app", + Order: 2, + }, + }, + wantErr: types.ErrInvalidAppID, }, { name: "fail: update app with different creator", msgs: []*types.MsgUpdateApp{ { + Id: 1, Creator: bob, Name: "app1", RollappId: rollappID, @@ -309,6 +346,7 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { }, malleate: func() { suite.App.RollappKeeper.SetApp(suite.Ctx, types.App{ + Id: 1, Name: "app1", RollappId: rollappID, Order: 1, @@ -319,6 +357,7 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { name: "success: update multiple apps", msgs: []*types.MsgUpdateApp{ { + Id: 1, Creator: alice, Name: "app1", RollappId: rollappID, @@ -327,6 +366,7 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { Url: "http://example.com/updated_app1", Order: 3, }, { + Id: 2, Creator: alice, Name: "app2", RollappId: rollappID, @@ -338,11 +378,13 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { }, malleate: func() { suite.App.RollappKeeper.SetApp(suite.Ctx, types.App{ + Id: 1, Name: "app1", RollappId: rollappID, Order: 2, }) suite.App.RollappKeeper.SetApp(suite.Ctx, types.App{ + Id: 2, Name: "app2", RollappId: rollappID, Order: 1, @@ -352,6 +394,7 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { name: "fail: update app with different rollapp", msgs: []*types.MsgUpdateApp{ { + Id: 1, Creator: alice, Name: "app1", RollappId: urand.RollappID(), @@ -361,6 +404,16 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { Order: 1, }, }, + malleate: func() { + otherRollappID := urand.RollappID() + suite.createRollappWithIDAndCreator(otherRollappID, alice) + suite.App.RollappKeeper.SetApp(suite.Ctx, types.App{ + Id: 1, + Name: "app1", + RollappId: otherRollappID, + Order: 1, + }) + }, wantErr: types.ErrNotFound, }, } @@ -377,9 +430,18 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { } for _, msg := range tt.msgs { - _, err := suite.msgServer.UpdateApp(goCtx, msg) + err := 
func() error { + err := msg.ValidateBasic() + if err != nil { + return err + } + _, err = suite.msgServer.UpdateApp(goCtx, msg) + return err + }() if tt.wantErr != nil { suite.Require().ErrorContains(err, tt.wantErr.Error()) + } else { + suite.Require().NoError(err) } } @@ -398,10 +460,10 @@ func (suite *RollappTestSuite) Test_msgServer_UpdateApp() { }) for i, app := range rollapp.Apps { - suite.Require().Equal(tt.msgs[i].Order, app.Order) - suite.Require().Equal(tt.msgs[i].Description, app.Description) - suite.Require().Equal(tt.msgs[i].Image, app.ImageUrl) - suite.Require().Equal(tt.msgs[i].Url, app.Url) + suite.Assert().Equal(tt.msgs[i].Order, app.Order) + suite.Assert().Equal(tt.msgs[i].Description, app.Description) + suite.Assert().Equal(tt.msgs[i].Image, app.ImageUrl) + suite.Assert().Equal(tt.msgs[i].Url, app.Url) } }) } @@ -421,12 +483,13 @@ func (suite *RollappTestSuite) Test_msgServer_RemoveApp() { msgs: []*types.MsgRemoveApp{ { Creator: alice, - Name: "app1", + Id: 1, RollappId: rollappID, }, }, malleate: func() { suite.App.RollappKeeper.SetApp(suite.Ctx, types.App{ + Id: 1, Name: "app1", RollappId: rollappID, }) @@ -436,7 +499,7 @@ func (suite *RollappTestSuite) Test_msgServer_RemoveApp() { msgs: []*types.MsgRemoveApp{ { Creator: alice, - Name: "non_existent_app", + Id: 144, RollappId: rollappID, }, }, @@ -446,12 +509,13 @@ func (suite *RollappTestSuite) Test_msgServer_RemoveApp() { msgs: []*types.MsgRemoveApp{ { Creator: bob, - Name: "app1", + Id: 1, RollappId: rollappID, }, }, malleate: func() { suite.App.RollappKeeper.SetApp(suite.Ctx, types.App{ + Id: 1, Name: "app1", RollappId: rollappID, }) @@ -462,12 +526,13 @@ func (suite *RollappTestSuite) Test_msgServer_RemoveApp() { msgs: []*types.MsgRemoveApp{ { Creator: alice, - Name: "app1", + Id: 1, RollappId: urand.RollappID(), }, }, malleate: func() { suite.App.RollappKeeper.SetApp(suite.Ctx, types.App{ + Id: 1, Name: "app1", RollappId: rollappID, }) @@ -478,20 +543,22 @@ func (suite *RollappTestSuite) 
Test_msgServer_RemoveApp() { msgs: []*types.MsgRemoveApp{ { Creator: alice, - Name: "app1", + Id: 1, RollappId: rollappID, }, { Creator: alice, - Name: "app2", + Id: 2, RollappId: rollappID, }, }, malleate: func() { suite.App.RollappKeeper.SetApp(suite.Ctx, types.App{ + Id: 1, Name: "app1", RollappId: rollappID, }) suite.App.RollappKeeper.SetApp(suite.Ctx, types.App{ + Id: 2, Name: "app2", RollappId: rollappID, }) @@ -517,9 +584,18 @@ func (suite *RollappTestSuite) Test_msgServer_RemoveApp() { createdAppsCount := len(rollapp.Apps) for _, msg := range tt.msgs { - _, err := suite.msgServer.RemoveApp(goCtx, msg) + err := func() error { + err := msg.ValidateBasic() + if err != nil { + return err + } + _, err = suite.msgServer.RemoveApp(goCtx, msg) + return err + }() if tt.wantErr != nil { suite.Require().ErrorContains(err, tt.wantErr.Error()) + } else { + suite.Require().NoError(err) } } @@ -536,7 +612,7 @@ func (suite *RollappTestSuite) Test_msgServer_RemoveApp() { suite.Require().Len(rollapp.Apps, expectAppsCount) for _, msg := range tt.msgs { - _, found := suite.App.RollappKeeper.GetApp(suite.Ctx, msg.Name, msg.RollappId) + _, found := suite.App.RollappKeeper.GetApp(suite.Ctx, msg.GetId(), msg.RollappId) suite.Require().False(found) } }) diff --git a/x/rollapp/keeper/msg_server_update_state.go b/x/rollapp/keeper/msg_server_update_state.go index 1bfab7dd3..d3ea818a4 100644 --- a/x/rollapp/keeper/msg_server_update_state.go +++ b/x/rollapp/keeper/msg_server_update_state.go @@ -76,7 +76,18 @@ func (k msgServer) UpdateState(goCtx context.Context, msg *types.MsgUpdateState) }) creationHeight := uint64(ctx.BlockHeight()) - stateInfo := types.NewStateInfo(msg.RollappId, newIndex, msg.Creator, msg.StartHeight, msg.NumBlocks, msg.DAPath, creationHeight, msg.BDs) + blockTime := ctx.BlockTime() + stateInfo := types.NewStateInfo( + msg.RollappId, + newIndex, + msg.Creator, + msg.StartHeight, + msg.NumBlocks, + msg.DAPath, + creationHeight, + msg.BDs, + blockTime, + ) // Write 
new state information to the store indexed by k.SetStateInfo(ctx, *stateInfo) diff --git a/x/rollapp/keeper/params.go b/x/rollapp/keeper/params.go index eaca0da56..312325334 100644 --- a/x/rollapp/keeper/params.go +++ b/x/rollapp/keeper/params.go @@ -14,6 +14,7 @@ func (k Keeper) GetParams(ctx sdk.Context) types.Params { k.LivenessSlashInterval(ctx), k.LivenessJailBlocks(ctx), k.AppRegistrationFee(ctx), + k.StateInfoDeletionEpochIdentifier(ctx), ) } @@ -48,3 +49,8 @@ func (k Keeper) AppRegistrationFee(ctx sdk.Context) (res sdk.Coin) { k.paramstore.Get(ctx, types.KeyAppRegistrationFee, &res) return } + +func (k Keeper) StateInfoDeletionEpochIdentifier(ctx sdk.Context) (res string) { + k.paramstore.Get(ctx, types.KeyStateInfoDeletionEpochIdentifier, &res) + return +} diff --git a/x/rollapp/keeper/state_info.go b/x/rollapp/keeper/state_info.go index c2abda15a..754a6dd03 100644 --- a/x/rollapp/keeper/state_info.go +++ b/x/rollapp/keeper/state_info.go @@ -2,9 +2,11 @@ package keeper import ( "fmt" + "time" "github.com/cosmos/cosmos-sdk/store/prefix" sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/dymensionxyz/dymension/v3/x/rollapp/types" ) @@ -15,6 +17,12 @@ func (k Keeper) SetStateInfo(ctx sdk.Context, stateInfo types.StateInfo) { store.Set(types.StateInfoKey( stateInfo.StateInfoIndex, ), b) + + // store a key prefixed with the creation timestamp + storeTS := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.TimestampedStateInfoKeyPrefix)) + storeTS.Set(types.StateInfoTimestampKey( + stateInfo, + ), []byte{}) } // GetStateInfo returns a stateInfo from its index @@ -85,3 +93,53 @@ func (k Keeper) GetAllStateInfo(ctx sdk.Context) (list []types.StateInfo) { return } + +// DeleteStateInfoUntilTimestamp deletes all stateInfo until the given timestamp +func (k Keeper) DeleteStateInfoUntilTimestamp(ctx sdk.Context, endTimestampExcl time.Time) { + store := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.StateInfoKeyPrefix)) + storeTS := 
prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.TimestampedStateInfoKeyPrefix)) + + // Note that for the active sequencer, the latest state info index will not be within the range of + // the state updates to be deleted, as it will be more recent. + // For a sequencer that is inactive for 21 days or more, it will be within range, furthermore + // the latest state info index and the latest finalized state info index will be the same. + latestIndexes := k.GetAllLatestStateInfoIndex(ctx) + skipStateInfoIndexes := make(map[string]uint64, len(latestIndexes)) + for _, index := range latestIndexes { + skipStateInfoIndexes[index.RollappId] = index.Index + } + + k.IterateStateInfoWithTimestamp(storeTS, endTimestampExcl.UnixMicro(), func(keyTS []byte) bool { + key := types.StateInfoIndexKeyFromTimestampKey(keyTS) + // skip latest stateInfo and latest finalized stateInfo + stateInfoIndex := types.StateInfoIndexFromKey(key) + index := skipStateInfoIndexes[stateInfoIndex.RollappId] + if index == stateInfoIndex.Index { + return false + } + + store.Delete(key) + storeTS.Delete(keyTS) + return false + }) +} + +// IterateStateInfoWithTimestamp iterates over stateInfo until timestamp +func (k Keeper) IterateStateInfoWithTimestamp(store prefix.Store, endTimestampUNIX int64, fn func(key []byte) (stop bool)) { + endKey := types.StateInfoTimestampKeyPrefix(endTimestampUNIX) + iterator := store.ReverseIterator(nil, endKey) + + defer iterator.Close() // nolint: errcheck + + for ; iterator.Valid(); iterator.Next() { + if fn(iterator.Key()) { + break + } + } +} + +// HasStateInfoTimestampKey checks if the stateInfo has a timestamp key - used for testing +func (k Keeper) HasStateInfoTimestampKey(ctx sdk.Context, stateInfo types.StateInfo) bool { + storeTS := prefix.NewStore(ctx.KVStore(k.storeKey), types.KeyPrefix(types.TimestampedStateInfoKeyPrefix)) + return storeTS.Has(types.StateInfoTimestampKey(stateInfo)) +} diff --git a/x/rollapp/keeper/state_info_test.go 
b/x/rollapp/keeper/state_info_test.go index ab01490ee..99123fffa 100644 --- a/x/rollapp/keeper/state_info_test.go +++ b/x/rollapp/keeper/state_info_test.go @@ -3,13 +3,16 @@ package keeper_test import ( "strconv" "testing" + "time" sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + keepertest "github.com/dymensionxyz/dymension/v3/testutil/keeper" "github.com/dymensionxyz/dymension/v3/testutil/nullify" "github.com/dymensionxyz/dymension/v3/x/rollapp/keeper" "github.com/dymensionxyz/dymension/v3/x/rollapp/types" - "github.com/stretchr/testify/require" ) // Prevent strconv unused error @@ -38,11 +41,11 @@ func createNStateInfo(keeper *keeper.Keeper, ctx sdk.Context, n int) ([]types.St } func TestStateInfoGet(t *testing.T) { - keeper, ctx := keepertest.RollappKeeper(t) - items, _ := createNStateInfo(keeper, ctx, 10) + k, ctx := keepertest.RollappKeeper(t) + items, _ := createNStateInfo(k, ctx, 10) for _, item := range items { item := item - rst, found := keeper.GetStateInfo(ctx, + rst, found := k.GetStateInfo(ctx, item.StateInfoIndex.RollappId, item.StateInfoIndex.Index, ) @@ -55,14 +58,14 @@ func TestStateInfoGet(t *testing.T) { } func TestStateInfoRemove(t *testing.T) { - keeper, ctx := keepertest.RollappKeeper(t) - items, _ := createNStateInfo(keeper, ctx, 10) + k, ctx := keepertest.RollappKeeper(t) + items, _ := createNStateInfo(k, ctx, 10) for _, item := range items { - keeper.RemoveStateInfo(ctx, + k.RemoveStateInfo(ctx, item.StateInfoIndex.RollappId, item.StateInfoIndex.Index, ) - _, found := keeper.GetStateInfo(ctx, + _, found := k.GetStateInfo(ctx, item.StateInfoIndex.RollappId, item.StateInfoIndex.Index, ) @@ -71,10 +74,76 @@ func TestStateInfoRemove(t *testing.T) { } func TestStateInfoGetAll(t *testing.T) { - keeper, ctx := keepertest.RollappKeeper(t) - items, _ := createNStateInfo(keeper, ctx, 10) + k, ctx := keepertest.RollappKeeper(t) + items, _ := createNStateInfo(k, ctx, 10) 
require.ElementsMatch(t, nullify.Fill(items), - nullify.Fill(keeper.GetAllStateInfo(ctx)), + nullify.Fill(k.GetAllStateInfo(ctx)), + ) +} + +func TestKeeper_DeleteStateInfoUntilTimestamp(t *testing.T) { + k, ctx := keepertest.RollappKeeper(t) + + ts1 := time.Date(2020, time.May, 1, 10, 22, 0, 0, time.UTC) + ts2 := ts1.Add(9 * time.Second) + ts3 := ts2.Add(11 * time.Second) + ts4 := ts3.Add(13 * time.Second) + + items := []types.StateInfo{ + {CreatedAt: ts1}, + {CreatedAt: ts2}, + {CreatedAt: ts3}, + {CreatedAt: ts4}, + } + for i := range items { + items[i].StateInfoIndex.RollappId = strconv.Itoa(i + 1) + items[i].StateInfoIndex.Index = 1 + uint64(i) + + k.SetStateInfo(ctx, items[i]) + } + + lastItem := items[len(items)-1] + latestStateInfoIndex := types.StateInfoIndex{ + RollappId: lastItem.StateInfoIndex.RollappId, + Index: lastItem.StateInfoIndex.Index, + } + k.SetLatestStateInfoIndex(ctx, latestStateInfoIndex) + + // delete all before ts3: only ts3 and ts4 should be found + k.DeleteStateInfoUntilTimestamp(ctx, ts2.Add(time.Second)) + + for _, item := range items { + _, found := k.GetStateInfo(ctx, + item.StateInfoIndex.RollappId, + item.StateInfoIndex.Index, + ) + + foundTSKey := k.HasStateInfoTimestampKey(ctx, item) + + if item.CreatedAt.After(ts2) { + assert.True(t, found) + assert.True(t, foundTSKey) + continue + } + assert.Falsef(t, found, "item %v", item) + assert.False(t, foundTSKey) + } + + // delete all: only ts4 should be found, as it's the latest and has an index + k.DeleteStateInfoUntilTimestamp(ctx, ts4.Add(time.Second)) + + info3 := items[2] + _, found := k.GetStateInfo(ctx, + info3.StateInfoIndex.RollappId, + info3.StateInfoIndex.Index, + ) + assert.False(t, found) + + info4 := items[3] + _, found = k.GetStateInfo(ctx, + info4.StateInfoIndex.RollappId, + info4.StateInfoIndex.Index, ) + assert.True(t, found) } diff --git a/x/rollapp/types/app.go b/x/rollapp/types/app.go index 7c4df30eb..68930d93b 100644 --- a/x/rollapp/types/app.go +++ 
b/x/rollapp/types/app.go @@ -4,8 +4,9 @@ import ( errorsmod "cosmossdk.io/errors" ) -func NewApp(name, rollappId, description, image, url string, order int32) App { +func NewApp(id uint64, name, rollappId, description, image, url string, order int32) App { return App{ + Id: id, Name: name, RollappId: rollappId, Description: description, diff --git a/x/rollapp/types/app.pb.go b/x/rollapp/types/app.pb.go index 19751ff71..7db4ce808 100644 --- a/x/rollapp/types/app.pb.go +++ b/x/rollapp/types/app.pb.go @@ -23,18 +23,20 @@ var _ = math.Inf const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package type App struct { + // id is the unique App's id in the Rollapp + Id uint64 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"` // name is the unique App's name - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` // rollapp_id is the id of the Rollapp the App belongs to - RollappId string `protobuf:"bytes,2,opt,name=rollapp_id,json=rollappId,proto3" json:"rollapp_id,omitempty"` + RollappId string `protobuf:"bytes,3,opt,name=rollapp_id,json=rollappId,proto3" json:"rollapp_id,omitempty"` // description is the description of the App - Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` // image_url is the URL to the App's image - ImageUrl string `protobuf:"bytes,4,opt,name=image_url,json=imageUrl,proto3" json:"image_url,omitempty"` + ImageUrl string `protobuf:"bytes,5,opt,name=image_url,json=imageUrl,proto3" json:"image_url,omitempty"` // url is the URL to the App's website - Url string `protobuf:"bytes,5,opt,name=url,proto3" json:"url,omitempty"` + Url string `protobuf:"bytes,6,opt,name=url,proto3" json:"url,omitempty"` // order is the order of the App in the Rollapp - Order int32 
`protobuf:"varint,6,opt,name=order,proto3" json:"order,omitempty"` + Order int32 `protobuf:"varint,7,opt,name=order,proto3" json:"order,omitempty"` } func (m *App) Reset() { *m = App{} } @@ -70,6 +72,13 @@ func (m *App) XXX_DiscardUnknown() { var xxx_messageInfo_App proto.InternalMessageInfo +func (m *App) GetId() uint64 { + if m != nil { + return m.Id + } + return 0 +} + func (m *App) GetName() string { if m != nil { return m.Name @@ -121,22 +130,23 @@ func init() { } var fileDescriptor_8f01e4af248858b2 = []byte{ - // 243 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x48, 0xa9, 0xcc, 0x4d, - 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0xab, 0xa8, 0xac, 0xd2, 0x87, 0x73, 0xf4, 0x8b, 0xf2, 0x73, 0x72, - 0x12, 0x0b, 0x0a, 0xf4, 0x13, 0x0b, 0x0a, 0xf4, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, 0xe4, 0x90, - 0x55, 0xea, 0xc1, 0x39, 0x7a, 0x50, 0x95, 0x4a, 0xf3, 0x19, 0xb9, 0x98, 0x1d, 0x0b, 0x0a, 0x84, - 0x84, 0xb8, 0x58, 0xf2, 0x12, 0x73, 0x53, 0x25, 0x18, 0x15, 0x18, 0x35, 0x38, 0x83, 0xc0, 0x6c, - 0x21, 0x59, 0x2e, 0x2e, 0xa8, 0xb2, 0xf8, 0xcc, 0x14, 0x09, 0x26, 0xb0, 0x0c, 0x27, 0x54, 0xc4, - 0x33, 0x45, 0x48, 0x81, 0x8b, 0x3b, 0x25, 0xb5, 0x38, 0xb9, 0x28, 0xb3, 0xa0, 0x24, 0x33, 0x3f, - 0x4f, 0x82, 0x19, 0x2c, 0x8f, 0x2c, 0x24, 0x24, 0xcd, 0xc5, 0x99, 0x99, 0x9b, 0x98, 0x9e, 0x1a, - 0x5f, 0x5a, 0x94, 0x23, 0xc1, 0x02, 0x96, 0xe7, 0x00, 0x0b, 0x84, 0x16, 0xe5, 0x08, 0x09, 0x70, - 0x31, 0x83, 0x84, 0x59, 0xc1, 0xc2, 0x20, 0xa6, 0x90, 0x08, 0x17, 0x6b, 0x7e, 0x51, 0x4a, 0x6a, - 0x91, 0x04, 0x9b, 0x02, 0xa3, 0x06, 0x6b, 0x10, 0x84, 0xe3, 0xe4, 0x77, 0xe2, 0x91, 0x1c, 0xe3, - 0x85, 0x47, 0x72, 0x8c, 0x0f, 0x1e, 0xc9, 0x31, 0x4e, 0x78, 0x2c, 0xc7, 0x70, 0xe1, 0xb1, 0x1c, - 0xc3, 0x8d, 0xc7, 0x72, 0x0c, 0x51, 0x26, 0xe9, 0x99, 0x25, 0x19, 0xa5, 0x49, 0x7a, 0xc9, 0xf9, - 0xb9, 0xfa, 0x38, 0x02, 0xa4, 0xcc, 0x58, 0xbf, 0x02, 0x1e, 0x2a, 0x25, 0x95, 0x05, 0xa9, 0xc5, - 0x49, 0x6c, 0xe0, 0x80, 0x31, 0x06, 0x04, 0x00, 0x00, 
0xff, 0xff, 0x6e, 0xfe, 0xed, 0x45, 0x44, + // 259 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x90, 0xbd, 0x4a, 0xc4, 0x40, + 0x14, 0x85, 0x33, 0xf9, 0x59, 0xcd, 0x15, 0x44, 0x06, 0x8b, 0x01, 0x71, 0x08, 0x56, 0xa9, 0x32, + 0xc5, 0xfa, 0x02, 0xda, 0xd9, 0x58, 0x04, 0x6c, 0x6c, 0x96, 0xec, 0xce, 0xb0, 0x0e, 0x24, 0x99, + 0xcb, 0x24, 0x2b, 0x1b, 0x9f, 0xc2, 0x37, 0xf1, 0x35, 0x2c, 0xb7, 0xb4, 0x94, 0xe4, 0x45, 0x64, + 0xc7, 0x10, 0xd2, 0x6c, 0x77, 0xcf, 0x77, 0xce, 0x6d, 0x3e, 0x48, 0x65, 0x57, 0xa9, 0xba, 0xd1, + 0xa6, 0xde, 0x77, 0x1f, 0x62, 0x0a, 0xc2, 0x9a, 0xb2, 0x2c, 0x10, 0x45, 0x81, 0x98, 0xa1, 0x35, + 0xad, 0xa1, 0x7c, 0xbe, 0xcc, 0xa6, 0x90, 0x8d, 0xcb, 0xbb, 0x2f, 0x02, 0xc1, 0x03, 0x22, 0xbd, + 0x04, 0x5f, 0x4b, 0x46, 0x12, 0x92, 0x86, 0xb9, 0xaf, 0x25, 0xa5, 0x10, 0xd6, 0x45, 0xa5, 0x98, + 0x9f, 0x90, 0x34, 0xce, 0xdd, 0x4d, 0x6f, 0x01, 0xc6, 0xb7, 0x95, 0x96, 0x2c, 0x70, 0x4d, 0x3c, + 0x92, 0x27, 0x49, 0x13, 0xb8, 0x90, 0xaa, 0xd9, 0x58, 0x8d, 0xad, 0x36, 0x35, 0x0b, 0x5d, 0x3f, + 0x47, 0xf4, 0x06, 0x62, 0x5d, 0x15, 0x5b, 0xb5, 0xda, 0xd9, 0x92, 0x45, 0xae, 0x3f, 0x77, 0xe0, + 0xc5, 0x96, 0xf4, 0x0a, 0x82, 0x23, 0x5e, 0x38, 0x7c, 0x3c, 0xe9, 0x35, 0x44, 0xc6, 0x4a, 0x65, + 0xd9, 0x59, 0x42, 0xd2, 0x28, 0xff, 0x0f, 0x8f, 0xcf, 0xdf, 0x3d, 0x27, 0x87, 0x9e, 0x93, 0xdf, + 0x9e, 0x93, 0xcf, 0x81, 0x7b, 0x87, 0x81, 0x7b, 0x3f, 0x03, 0xf7, 0x5e, 0xef, 0xb7, 0xba, 0x7d, + 0xdb, 0xad, 0xb3, 0x8d, 0xa9, 0xc4, 0x09, 0x41, 0xef, 0x4b, 0xb1, 0x9f, 0x2c, 0xb5, 0x1d, 0xaa, + 0x66, 0xbd, 0x70, 0xa2, 0x96, 0x7f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x50, 0xa1, 0x34, 0x60, 0x54, 0x01, 0x00, 0x00, } @@ -163,42 +173,47 @@ func (m *App) MarshalToSizedBuffer(dAtA []byte) (int, error) { if m.Order != 0 { i = encodeVarintApp(dAtA, i, uint64(m.Order)) i-- - dAtA[i] = 0x30 + dAtA[i] = 0x38 } if len(m.Url) > 0 { i -= len(m.Url) copy(dAtA[i:], m.Url) i = encodeVarintApp(dAtA, i, uint64(len(m.Url))) i-- - dAtA[i] = 0x2a + 
dAtA[i] = 0x32 } if len(m.ImageUrl) > 0 { i -= len(m.ImageUrl) copy(dAtA[i:], m.ImageUrl) i = encodeVarintApp(dAtA, i, uint64(len(m.ImageUrl))) i-- - dAtA[i] = 0x22 + dAtA[i] = 0x2a } if len(m.Description) > 0 { i -= len(m.Description) copy(dAtA[i:], m.Description) i = encodeVarintApp(dAtA, i, uint64(len(m.Description))) i-- - dAtA[i] = 0x1a + dAtA[i] = 0x22 } if len(m.RollappId) > 0 { i -= len(m.RollappId) copy(dAtA[i:], m.RollappId) i = encodeVarintApp(dAtA, i, uint64(len(m.RollappId))) i-- - dAtA[i] = 0x12 + dAtA[i] = 0x1a } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintApp(dAtA, i, uint64(len(m.Name))) i-- - dAtA[i] = 0xa + dAtA[i] = 0x12 + } + if m.Id != 0 { + i = encodeVarintApp(dAtA, i, uint64(m.Id)) + i-- + dAtA[i] = 0x8 } return len(dAtA) - i, nil } @@ -220,6 +235,9 @@ func (m *App) Size() (n int) { } var l int _ = l + if m.Id != 0 { + n += 1 + sovApp(uint64(m.Id)) + } l = len(m.Name) if l > 0 { n += 1 + l + sovApp(uint64(l)) @@ -282,6 +300,25 @@ func (m *App) Unmarshal(dAtA []byte) error { } switch fieldNum { case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Id", wireType) + } + m.Id = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowApp + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Id |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } @@ -313,7 +350,7 @@ func (m *App) Unmarshal(dAtA []byte) error { } m.Name = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 2: + case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field RollappId", wireType) } @@ -345,7 +382,7 @@ func (m *App) Unmarshal(dAtA []byte) error { } m.RollappId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 3: + case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field 
Description", wireType) } @@ -377,7 +414,7 @@ func (m *App) Unmarshal(dAtA []byte) error { } m.Description = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 4: + case 5: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field ImageUrl", wireType) } @@ -409,7 +446,7 @@ func (m *App) Unmarshal(dAtA []byte) error { } m.ImageUrl = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 5: + case 6: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Url", wireType) } @@ -441,7 +478,7 @@ func (m *App) Unmarshal(dAtA []byte) error { } m.Url = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 6: + case 7: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field Order", wireType) } diff --git a/x/rollapp/types/errors.go b/x/rollapp/types/errors.go index ff1ee0f96..773208b86 100644 --- a/x/rollapp/types/errors.go +++ b/x/rollapp/types/errors.go @@ -40,6 +40,7 @@ var ( ErrSameOwner = errorsmod.Wrap(gerrc.ErrInvalidArgument, "same owner") ErrInvalidRequest = errorsmod.Wrap(gerrc.ErrInvalidArgument, "invalid request") ErrInvalidVMType = errorsmod.Wrap(gerrc.ErrInvalidArgument, "invalid vm type") + ErrInvalidAppID = errorsmod.Wrap(gerrc.ErrInvalidArgument, "app id") ErrInvalidAppName = errorsmod.Wrap(gerrc.ErrInvalidArgument, "app name") ErrInvalidAppImage = errorsmod.Wrap(gerrc.ErrInvalidArgument, "app image path") ErrInvalidBlockDescriptorTimestamp = errorsmod.Wrap(gerrc.ErrInvalidArgument, "invalid block descriptor timestamp") diff --git a/x/rollapp/types/events.go b/x/rollapp/types/events.go index fad8b67b0..6c3480ed5 100644 --- a/x/rollapp/types/events.go +++ b/x/rollapp/types/events.go @@ -10,6 +10,7 @@ const ( AttributeKeyNumBlocks = "num_blocks" AttributeKeyDAPath = "da_path" AttributeKeyStatus = "status" + AttributeKeyCreatedAt = "created_at" // EventTypeFraud is emitted when a fraud evidence is submitted EventTypeFraud = "fraud_proposal" diff --git a/x/rollapp/types/expected_keepers.go 
b/x/rollapp/types/expected_keepers.go index 4d6cdeba1..fad3a041b 100644 --- a/x/rollapp/types/expected_keepers.go +++ b/x/rollapp/types/expected_keepers.go @@ -1,6 +1,8 @@ package types import ( + "time" + sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/ibc-go/v7/modules/core/exported" ) @@ -17,6 +19,7 @@ type ChannelKeeper interface { type SequencerKeeper interface { SlashLiveness(ctx sdk.Context, rollappID string) error JailLiveness(ctx sdk.Context, rollappID string) error + UnbondingTime(ctx sdk.Context) (res time.Duration) } // BankKeeper defines the expected interface needed to retrieve account balances. diff --git a/x/rollapp/types/key_app.go b/x/rollapp/types/key_app.go index 91ac977af..b0b316a87 100644 --- a/x/rollapp/types/key_app.go +++ b/x/rollapp/types/key_app.go @@ -8,7 +8,8 @@ var _ binary.ByteOrder const ( // AppKeyPrefix is the prefix to retrieve all App - AppKeyPrefix = "App/value/" + AppKeyPrefix = "App/value/" + AppSequenceKeyPrefix = "App/sequence/" ) // AppKey returns the store key to retrieve an App from the index fields @@ -18,8 +19,9 @@ func AppKey(app App) []byte { rollappIDBytes := []byte(app.RollappId) key = append(key, rollappIDBytes...) key = append(key, []byte("/")...) - appNameBytes := []byte(app.Name) - key = append(key, appNameBytes...) + idBytes := make([]byte, 8) + binary.BigEndian.PutUint64(idBytes, app.Id) + key = append(key, idBytes...) return key } @@ -27,3 +29,7 @@ func AppKey(app App) []byte { func RollappAppKeyPrefix(rollappId string) []byte { return append([]byte(rollappId), []byte("/")...) 
} + +func AppSequenceKey(rollappId string) []byte { + return []byte(rollappId) +} diff --git a/x/rollapp/types/key_state_info.go b/x/rollapp/types/key_state_info.go index 1b92f13ad..ef3fba926 100644 --- a/x/rollapp/types/key_state_info.go +++ b/x/rollapp/types/key_state_info.go @@ -10,7 +10,8 @@ var _ binary.ByteOrder const ( // StateInfoKeyPrefix is the prefix to retrieve all StateInfo - StateInfoKeyPrefix = "StateInfo/value/" + StateInfoKeyPrefix = "StateInfo/value/" + StateInfoIndexKeyPartLength = 8 + 1 + 1 // BigEndian + "/" + "/" ) // StateInfoKey returns the store key to retrieve a StateInfo from the index fields @@ -29,3 +30,21 @@ func StateInfoKey( return key } + +// StateInfoIndexFromKey returns the StateInfoIndex from a store key. +// The value of StateInfoIndexKeyPartLength will always be shorter than the key itself, +// because the key contains the rollappId and the BigEndian representation of the index, +// which is always 8 bytes long. +func StateInfoIndexFromKey(key []byte) StateInfoIndex { + l := len(key) + rollappId := string(key[:l-StateInfoIndexKeyPartLength]) + return StateInfoIndex{ + RollappId: rollappId, + Index: sdk.BigEndianToUint64(key[len(rollappId)+1 : l-1]), + } +} + +// StateInfoIndexKeyFromTimestampKey returns the StateInfoIndex key from a timestamp key by removing the timestamp prefix. 
+func StateInfoIndexKeyFromTimestampKey(keyTS []byte) []byte { + return keyTS[TimestampPrefixLen:] // remove the timestamp prefix +} diff --git a/x/rollapp/types/key_state_info_test.go b/x/rollapp/types/key_state_info_test.go new file mode 100644 index 000000000..b0b70c081 --- /dev/null +++ b/x/rollapp/types/key_state_info_test.go @@ -0,0 +1,42 @@ +package types + +import ( + "math/rand" + "reflect" + "testing" + + "github.com/dymensionxyz/sdk-utils/utils/urand" +) + +// nolint: gosec +func TestStateInfoIndexFromKey(t *testing.T) { + index := StateInfoIndex{ + RollappId: urand.RollappID(), + Index: rand.Uint64(), + } + + type args struct { + key []byte + } + + tests := []struct { + name string + args args + want StateInfoIndex + }{ + { + name: "Test StateInfoIndexFromKey", + args: args{ + key: StateInfoKey(index), + }, + want: index, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := StateInfoIndexFromKey(tt.args.key); !reflect.DeepEqual(got, tt.want) { + t.Errorf("StateInfoIndexFromKey() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/x/rollapp/types/key_timestamped_state_info.go b/x/rollapp/types/key_timestamped_state_info.go new file mode 100644 index 000000000..b6508107a --- /dev/null +++ b/x/rollapp/types/key_timestamped_state_info.go @@ -0,0 +1,35 @@ +package types + +import ( + "encoding/binary" + "fmt" +) + +var _ binary.ByteOrder + +const ( + // TimestampedStateInfoKeyPrefix is the prefix to retrieve all StateInfo with the timestamp prefix + TimestampedStateInfoKeyPrefix = "TimestampedStateInfoKeyPrefix/value/" + // TimestampPrefixLen is the length of the timestamp prefix: len(fmt.Sprint(time.Time{}.UnixMicro())) + 1 + TimestampPrefixLen = 17 +) + +// StateInfoTimestampKeyPrefix returns the store key prefix to range over all state infos with the timestamp prefix +func StateInfoTimestampKeyPrefix(timestampUNIX int64) []byte { + return []byte(fmt.Sprint(timestampUNIX)) +} + +// StateInfoTimestampKey returns the 
store key to retrieve state infos using the timestamp prefix +func StateInfoTimestampKey( + stateInfo StateInfo, +) []byte { + var key []byte + + timestampPrefix := StateInfoTimestampKeyPrefix(stateInfo.CreatedAt.UnixMicro()) + stateInfoKey := StateInfoKey(stateInfo.StateInfoIndex) + key = append(key, timestampPrefix...) + key = append(key, []byte("/")...) + key = append(key, stateInfoKey...) + + return key +} diff --git a/x/rollapp/types/message_add_app.go b/x/rollapp/types/message_add_app.go index 9922b10f5..2d05a8342 100644 --- a/x/rollapp/types/message_add_app.go +++ b/x/rollapp/types/message_add_app.go @@ -44,6 +44,7 @@ func (msg *MsgAddApp) GetSignBytes() []byte { func (msg *MsgAddApp) GetApp() App { return NewApp( + 0, msg.Name, msg.RollappId, msg.Description, diff --git a/x/rollapp/types/message_remove_app.go b/x/rollapp/types/message_remove_app.go index 74352cfbc..1d9bc8207 100644 --- a/x/rollapp/types/message_remove_app.go +++ b/x/rollapp/types/message_remove_app.go @@ -9,10 +9,10 @@ const TypeMsgRemoveApp = "remove_app" var _ sdk.Msg = &MsgRemoveApp{} -func NewMsgRemoveApp(creator, name, rollappId string) *MsgRemoveApp { +func NewMsgRemoveApp(creator string, id uint64, rollappId string) *MsgRemoveApp { return &MsgRemoveApp{ Creator: creator, - Name: name, + Id: id, RollappId: rollappId, } } @@ -40,7 +40,8 @@ func (msg *MsgRemoveApp) GetSignBytes() []byte { func (msg *MsgRemoveApp) GetApp() App { return NewApp( - msg.Name, + msg.Id, + "", msg.RollappId, "", "", @@ -55,8 +56,8 @@ func (msg *MsgRemoveApp) ValidateBasic() error { return errorsmod.Wrap(ErrInvalidCreatorAddress, err.Error()) } - if len(msg.Name) == 0 { - return errorsmod.Wrap(ErrInvalidAppName, "App name cannot be empty") + if msg.Id == 0 { + return errorsmod.Wrap(ErrInvalidAppID, "App id cannot be zero") } if len(msg.RollappId) == 0 { diff --git a/x/rollapp/types/message_update_app.go b/x/rollapp/types/message_update_app.go index 29821da74..a98d36bbe 100644 --- 
a/x/rollapp/types/message_update_app.go +++ b/x/rollapp/types/message_update_app.go @@ -9,8 +9,9 @@ const TypeMsgUpdateApp = "update_app" var _ sdk.Msg = &MsgUpdateApp{} -func NewMsgUpdateApp(creator, name, rollappId, description, image, url string, order int32) *MsgUpdateApp { +func NewMsgUpdateApp(creator string, id uint64, name, rollappId, description, image, url string, order int32) *MsgUpdateApp { return &MsgUpdateApp{ + Id: id, Creator: creator, Name: name, RollappId: rollappId, @@ -44,6 +45,7 @@ func (msg *MsgUpdateApp) GetSignBytes() []byte { func (msg *MsgUpdateApp) GetApp() App { return NewApp( + msg.Id, msg.Name, msg.RollappId, msg.Description, @@ -59,6 +61,10 @@ func (msg *MsgUpdateApp) ValidateBasic() error { return errorsmod.Wrap(ErrInvalidCreatorAddress, err.Error()) } + if msg.Id == 0 { + return errorsmod.Wrap(ErrInvalidAppID, "App id cannot be zero") + } + app := msg.GetApp() if err = app.ValidateBasic(); err != nil { return err diff --git a/x/rollapp/types/params.go b/x/rollapp/types/params.go index adc59ba32..3d804322a 100644 --- a/x/rollapp/types/params.go +++ b/x/rollapp/types/params.go @@ -9,6 +9,7 @@ import ( sdk "github.com/cosmos/cosmos-sdk/types" paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" "github.com/dymensionxyz/sdk-utils/utils/uparam" + "github.com/osmosis-labs/osmosis/v15/x/epochs/types" "gopkg.in/yaml.v2" "github.com/dymensionxyz/dymension/v3/app/params" @@ -25,11 +26,14 @@ var ( KeyLivenessJailBlocks = []byte("LivenessJailBlocks") // KeyAppRegistrationFee defines the key to store the cost of the app - KeyAppRegistrationFee = []byte("KeyAppRegistrationFee") + KeyAppRegistrationFee = []byte("AppRegistrationFee") // DYM is 1dym DYM = sdk.NewIntFromBigInt(new(big.Int).Exp(big.NewInt(10), big.NewInt(18), nil)) DefaultAppRegistrationFee = sdk.NewCoin(params.BaseDenom, DYM) + + // KeyStateInfoDeletionEpochIdentifier defines the key to store the epoch identifier + KeyStateInfoDeletionEpochIdentifier = 
[]byte("StateInfoDeletionEpochIdentifier") ) const ( @@ -40,6 +44,7 @@ const ( DefaultLivenessSlashBlocks = uint64(7200) // 12 hours at 1 block per 6 seconds DefaultLivenessSlashInterval = uint64(3600) // 1 hour at 1 block per 6 seconds DefaultLivenessJailBlocks = uint64(28800) // 48 hours at 1 block per 6 seconds + defaultEpochIdentifier = "hour" ) // ParamKeyTable the param key table for launch module @@ -54,13 +59,15 @@ func NewParams( livenessSlashInterval uint64, livenessJailBlocks uint64, appRegistrationFee sdk.Coin, + epochIdentifier string, ) Params { return Params{ - DisputePeriodInBlocks: disputePeriodInBlocks, - LivenessSlashBlocks: livenessSlashBlocks, - LivenessSlashInterval: livenessSlashInterval, - LivenessJailBlocks: livenessJailBlocks, - AppRegistrationFee: appRegistrationFee, + DisputePeriodInBlocks: disputePeriodInBlocks, + LivenessSlashBlocks: livenessSlashBlocks, + LivenessSlashInterval: livenessSlashInterval, + LivenessJailBlocks: livenessJailBlocks, + AppRegistrationFee: appRegistrationFee, + StateInfoDeletionEpochIdentifier: epochIdentifier, } } @@ -71,6 +78,7 @@ func DefaultParams() Params { DefaultLivenessSlashInterval, DefaultLivenessJailBlocks, DefaultAppRegistrationFee, + defaultEpochIdentifier, ) } @@ -82,6 +90,7 @@ func (p *Params) ParamSetPairs() paramtypes.ParamSetPairs { paramtypes.NewParamSetPair(KeyLivenessSlashInterval, &p.LivenessSlashInterval, validateLivenessSlashInterval), paramtypes.NewParamSetPair(KeyLivenessJailBlocks, &p.LivenessJailBlocks, validateLivenessJailBlocks), paramtypes.NewParamSetPair(KeyAppRegistrationFee, &p.AppRegistrationFee, validateAppRegistrationFee), + paramtypes.NewParamSetPair(KeyStateInfoDeletionEpochIdentifier, &p.StateInfoDeletionEpochIdentifier, types.ValidateEpochIdentifierInterface), } } diff --git a/x/rollapp/types/params.pb.go b/x/rollapp/types/params.pb.go index e53d22453..544b7efef 100644 --- a/x/rollapp/types/params.pb.go +++ b/x/rollapp/types/params.pb.go @@ -38,6 +38,8 @@ type Params 
struct { LivenessJailBlocks uint64 `protobuf:"varint,6,opt,name=liveness_jail_blocks,json=livenessJailBlocks,proto3" json:"liveness_jail_blocks,omitempty" yaml:"liveness_jail_blocks"` // app_registration_fee is the fee for registering an App AppRegistrationFee types.Coin `protobuf:"bytes,7,opt,name=app_registration_fee,json=appRegistrationFee,proto3" json:"app_registration_fee" yaml:"app_registration_fee"` + // state_info_deletion_epoch_identifier is used to control the interval at which the state info records will be deleted. + StateInfoDeletionEpochIdentifier string `protobuf:"bytes,8,opt,name=state_info_deletion_epoch_identifier,json=stateInfoDeletionEpochIdentifier,proto3" json:"state_info_deletion_epoch_identifier,omitempty" yaml:"state_info_deletion_epoch_identifier"` } func (m *Params) Reset() { *m = Params{} } @@ -107,6 +109,13 @@ func (m *Params) GetAppRegistrationFee() types.Coin { return types.Coin{} } +func (m *Params) GetStateInfoDeletionEpochIdentifier() string { + if m != nil { + return m.StateInfoDeletionEpochIdentifier + } + return "" +} + func init() { proto.RegisterType((*Params)(nil), "dymensionxyz.dymension.rollapp.Params") } @@ -116,34 +125,38 @@ func init() { } var fileDescriptor_75a44aa904ae1ba5 = []byte{ - // 427 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0x4f, 0x6f, 0xd3, 0x30, - 0x18, 0x87, 0x13, 0x1a, 0xca, 0x14, 0x2e, 0x53, 0x68, 0x45, 0x19, 0xc8, 0xae, 0xbc, 0xcb, 0x24, - 0x24, 0x5b, 0x63, 0x9c, 0x76, 0x2c, 0x12, 0xd2, 0x7a, 0x40, 0x23, 0x70, 0x9a, 0x90, 0x22, 0xa7, - 0x35, 0x9d, 0xc1, 0xb1, 0xad, 0xd8, 0xab, 0x16, 0x3e, 0x05, 0x47, 0x8e, 0x7c, 0x9c, 0x1d, 0x77, - 0xe4, 0x14, 0xa1, 0xf6, 0x03, 0x20, 0xe5, 0x13, 0xa0, 0x3a, 0x7f, 0x28, 0x53, 0x77, 0xb3, 0x7f, - 0xef, 0xe3, 0xc7, 0xaf, 0xf4, 0xbe, 0xe1, 0xcb, 0x79, 0x91, 0x31, 0x69, 0xb8, 0x92, 0xd7, 0xc5, - 0x37, 0xd2, 0x5d, 0x48, 0xae, 0x84, 0xa0, 0x5a, 0x13, 0x4d, 0x73, 0x9a, 0x19, 0xac, 0x73, 0x65, - 0x55, 0x04, 
0xb6, 0x61, 0xdc, 0x5d, 0x70, 0x03, 0x1f, 0x0c, 0x16, 0x6a, 0xa1, 0x1c, 0x4a, 0x36, - 0xa7, 0xfa, 0xd5, 0x01, 0x98, 0x29, 0x93, 0x29, 0x43, 0x52, 0x6a, 0x18, 0x59, 0x1e, 0xa7, 0xcc, - 0xd2, 0x63, 0x32, 0x53, 0x5c, 0xd6, 0x75, 0xf4, 0xa7, 0x17, 0xf6, 0xcf, 0xdd, 0x37, 0xd1, 0xa7, - 0x70, 0x34, 0xe7, 0x46, 0x5f, 0x59, 0x96, 0x68, 0x96, 0x73, 0x35, 0x4f, 0xb8, 0x4c, 0x52, 0xa1, - 0x66, 0x5f, 0xcd, 0xc8, 0x1f, 0xfb, 0x47, 0xc1, 0xe4, 0xb0, 0x2a, 0x21, 0x2c, 0x68, 0x26, 0x4e, - 0xd1, 0x7d, 0x24, 0x8a, 0x87, 0x4d, 0xe9, 0xdc, 0x55, 0xce, 0xe4, 0xc4, 0xe5, 0xd1, 0xc7, 0x70, - 0x28, 0xf8, 0x92, 0x49, 0x66, 0x4c, 0x62, 0x04, 0x35, 0x97, 0xad, 0x3a, 0x70, 0xea, 0x71, 0x55, - 0xc2, 0x17, 0xb5, 0x7a, 0x27, 0x86, 0xe2, 0x27, 0x6d, 0xfe, 0x61, 0x13, 0x37, 0xd6, 0x8b, 0xf0, - 0xe9, 0x1d, 0x9c, 0x4b, 0xcb, 0xf2, 0x25, 0x15, 0xa3, 0x87, 0xce, 0x8b, 0xaa, 0x12, 0x82, 0x9d, - 0xde, 0x16, 0x44, 0xf1, 0xf0, 0x3f, 0xf3, 0x59, 0x93, 0x47, 0xef, 0xc3, 0x41, 0xf7, 0xe4, 0x0b, - 0xe5, 0xa2, 0x6d, 0xb8, 0xef, 0xc4, 0xb0, 0x2a, 0xe1, 0xf3, 0x3b, 0xe2, 0x2d, 0x0a, 0xc5, 0x51, - 0x1b, 0x4f, 0x29, 0x17, 0x4d, 0xbb, 0x3a, 0x1c, 0x50, 0xad, 0x93, 0x9c, 0x2d, 0xb8, 0xb1, 0x39, - 0xb5, 0x5c, 0xc9, 0xe4, 0x33, 0x63, 0xa3, 0x47, 0x63, 0xff, 0xe8, 0xf1, 0xab, 0x67, 0xb8, 0x1e, - 0x16, 0xde, 0x0c, 0x0b, 0x37, 0xc3, 0xc2, 0x6f, 0x14, 0x97, 0x93, 0xc3, 0x9b, 0x12, 0x7a, 0xff, - 0x7e, 0xdc, 0x25, 0x41, 0x71, 0x44, 0xb5, 0x8e, 0xb7, 0xd2, 0xb7, 0x8c, 0x9d, 0x06, 0x3f, 0x7e, - 0x42, 0x6f, 0x1a, 0xec, 0x3d, 0xd8, 0xef, 0x4d, 0x83, 0xbd, 0xde, 0x7e, 0x30, 0x79, 0x77, 0xb3, - 0x02, 0xfe, 0xed, 0x0a, 0xf8, 0xbf, 0x57, 0xc0, 0xff, 0xbe, 0x06, 0xde, 0xed, 0x1a, 0x78, 0xbf, - 0xd6, 0xc0, 0xbb, 0x78, 0xbd, 0xe0, 0xf6, 0xf2, 0x2a, 0xc5, 0x33, 0x95, 0x91, 0x7b, 0x36, 0x73, - 0x79, 0x42, 0xae, 0xbb, 0xf5, 0xb4, 0x85, 0x66, 0x26, 0xed, 0xbb, 0x45, 0x3a, 0xf9, 0x1b, 0x00, - 0x00, 0xff, 0xff, 0xd4, 0xcf, 0x4a, 0xc3, 0xcd, 0x02, 0x00, 0x00, + // 486 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0xcd, 0x6e, 0xd3, 0x40, + 0x10, 0xc7, 0x63, 0x6a, 0x42, 0x30, 0x97, 0xca, 0x24, 0x22, 0x14, 0x64, 0x47, 0x2e, 0x87, 0x48, + 0x95, 0xbc, 0x2a, 0xe5, 0xd4, 0x63, 0xf8, 0x90, 0x92, 0x03, 0x2a, 0x86, 0x53, 0x85, 0x64, 0xad, + 0xed, 0x49, 0xb2, 0xb0, 0xde, 0x5d, 0x79, 0xb7, 0x51, 0xc3, 0x85, 0x57, 0xe0, 0xc8, 0x91, 0xc7, + 0xe9, 0xb1, 0x47, 0x4e, 0x16, 0x4a, 0x5e, 0x00, 0xf9, 0x09, 0x50, 0xd6, 0x1f, 0x84, 0x2a, 0x95, + 0xb8, 0x79, 0xff, 0xf3, 0x9b, 0xdf, 0xac, 0x34, 0x5e, 0xeb, 0x28, 0x59, 0xa6, 0xc0, 0x24, 0xe1, + 0xec, 0x72, 0xf9, 0x05, 0x35, 0x07, 0x94, 0x71, 0x4a, 0xb1, 0x10, 0x48, 0xe0, 0x0c, 0xa7, 0xd2, + 0x17, 0x19, 0x57, 0xdc, 0x76, 0xb6, 0x61, 0xbf, 0x39, 0xf8, 0x15, 0x7c, 0xd0, 0x9d, 0xf1, 0x19, + 0xd7, 0x28, 0xda, 0x7c, 0x95, 0x5d, 0x07, 0x4e, 0xcc, 0x65, 0xca, 0x25, 0x8a, 0xb0, 0x04, 0xb4, + 0x38, 0x8e, 0x40, 0xe1, 0x63, 0x14, 0x73, 0xc2, 0xca, 0xba, 0xf7, 0xdb, 0xb4, 0xda, 0x67, 0x7a, + 0x8c, 0xfd, 0xd1, 0xea, 0x27, 0x44, 0x8a, 0x0b, 0x05, 0xa1, 0x80, 0x8c, 0xf0, 0x24, 0x24, 0x2c, + 0x8c, 0x28, 0x8f, 0x3f, 0xcb, 0xbe, 0x31, 0x30, 0x86, 0xe6, 0xe8, 0xb0, 0xc8, 0x5d, 0x77, 0x89, + 0x53, 0x7a, 0xea, 0xdd, 0x46, 0x7a, 0x41, 0xaf, 0x2a, 0x9d, 0xe9, 0xca, 0x98, 0x8d, 0x74, 0x6e, + 0x7f, 0xb0, 0x7a, 0x94, 0x2c, 0x80, 0x81, 0x94, 0xa1, 0xa4, 0x58, 0xce, 0x6b, 0xb5, 0xa9, 0xd5, + 0x83, 0x22, 0x77, 0x9f, 0x96, 0xea, 0x9d, 0x98, 0x17, 0x3c, 0xac, 0xf3, 0xf7, 0x9b, 0xb8, 0xb2, + 0x9e, 0x5b, 0x8f, 0x6e, 0xe0, 0x84, 0x29, 0xc8, 0x16, 0x98, 0xf6, 0xef, 0x6a, 0xaf, 0x57, 0xe4, + 0xae, 0xb3, 0xd3, 0x5b, 0x83, 0x5e, 0xd0, 0xfb, 0xc7, 0x3c, 0xae, 0x72, 0xfb, 0x9d, 0xd5, 0x6d, + 0x5a, 0x3e, 0x61, 0x42, 0xeb, 0x0b, 0xb7, 0xb5, 0xd8, 0x2d, 0x72, 0xf7, 0xc9, 0x0d, 0xf1, 0x16, + 0xe5, 0x05, 0x76, 0x1d, 0x4f, 0x30, 0xa1, 0xd5, 0x75, 0x85, 0xd5, 0xc5, 0x42, 0x84, 0x19, 0xcc, + 0x88, 0x54, 0x19, 0x56, 0x84, 0xb3, 0x70, 0x0a, 0xd0, 0xbf, 0x37, 0x30, 0x86, 0x0f, 0x9e, 0x3f, + 0xf6, 0xcb, 0x65, 0xf9, 0x9b, 0x65, 0xf9, 0xd5, 0xb2, 0xfc, 0x97, 
0x9c, 0xb0, 0xd1, 0xe1, 0x55, + 0xee, 0xb6, 0xfe, 0x4e, 0xdc, 0x25, 0xf1, 0x02, 0x1b, 0x0b, 0x11, 0x6c, 0xa5, 0x6f, 0x00, 0xec, + 0xaf, 0xd6, 0x33, 0xa9, 0xb0, 0x82, 0x90, 0xb0, 0x29, 0x0f, 0x13, 0xa0, 0xa0, 0x79, 0x10, 0x3c, + 0x9e, 0x87, 0x24, 0x01, 0xa6, 0xc8, 0x94, 0x40, 0xd6, 0xef, 0x0c, 0x8c, 0xe1, 0xfd, 0x11, 0x2a, + 0x72, 0xf7, 0xa8, 0x1c, 0xf1, 0x3f, 0x5d, 0x5e, 0x30, 0xd0, 0xd8, 0x98, 0x4d, 0xf9, 0xab, 0x0a, + 0x7a, 0xbd, 0x61, 0xc6, 0x0d, 0x72, 0x6a, 0x7e, 0xff, 0xe1, 0xb6, 0x26, 0x66, 0xe7, 0xce, 0xfe, + 0xde, 0xc4, 0xec, 0xec, 0xed, 0x9b, 0xa3, 0xb7, 0x57, 0x2b, 0xc7, 0xb8, 0x5e, 0x39, 0xc6, 0xaf, + 0x95, 0x63, 0x7c, 0x5b, 0x3b, 0xad, 0xeb, 0xb5, 0xd3, 0xfa, 0xb9, 0x76, 0x5a, 0xe7, 0x2f, 0x66, + 0x44, 0xcd, 0x2f, 0x22, 0x3f, 0xe6, 0x29, 0xba, 0xe5, 0x69, 0x2c, 0x4e, 0xd0, 0x65, 0xf3, 0x3e, + 0xd4, 0x52, 0x80, 0x8c, 0xda, 0xfa, 0x4f, 0x3e, 0xf9, 0x13, 0x00, 0x00, 0xff, 0xff, 0xa9, 0xd2, + 0xe3, 0x18, 0x4e, 0x03, 0x00, 0x00, } func (m *Params) Marshal() (dAtA []byte, err error) { @@ -166,6 +179,13 @@ func (m *Params) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if len(m.StateInfoDeletionEpochIdentifier) > 0 { + i -= len(m.StateInfoDeletionEpochIdentifier) + copy(dAtA[i:], m.StateInfoDeletionEpochIdentifier) + i = encodeVarintParams(dAtA, i, uint64(len(m.StateInfoDeletionEpochIdentifier))) + i-- + dAtA[i] = 0x42 + } { size, err := m.AppRegistrationFee.MarshalToSizedBuffer(dAtA[:i]) if err != nil { @@ -230,6 +250,10 @@ func (m *Params) Size() (n int) { } l = m.AppRegistrationFee.Size() n += 1 + l + sovParams(uint64(l)) + l = len(m.StateInfoDeletionEpochIdentifier) + if l > 0 { + n += 1 + l + sovParams(uint64(l)) + } return n } @@ -377,6 +401,38 @@ func (m *Params) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 8: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field StateInfoDeletionEpochIdentifier", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if 
shift >= 64 { + return ErrIntOverflowParams + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthParams + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthParams + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.StateInfoDeletionEpochIdentifier = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipParams(dAtA[iNdEx:]) diff --git a/x/rollapp/types/state_info.go b/x/rollapp/types/state_info.go index a90bf3cce..bad73ec89 100644 --- a/x/rollapp/types/state_info.go +++ b/x/rollapp/types/state_info.go @@ -2,13 +2,24 @@ package types import ( "strconv" + "time" sdk "github.com/cosmos/cosmos-sdk/types" common "github.com/dymensionxyz/dymension/v3/x/common/types" ) -func NewStateInfo(rollappId string, newIndex uint64, creator string, startHeight uint64, numBlocks uint64, daPath string, height uint64, BDs BlockDescriptors) *StateInfo { +func NewStateInfo( + rollappId string, + newIndex uint64, + creator string, + startHeight uint64, + numBlocks uint64, + daPath string, + height uint64, + BDs BlockDescriptors, + createdAt time.Time, +) *StateInfo { stateInfoIndex := StateInfoIndex{RollappId: rollappId, Index: newIndex} status := common.Status_PENDING return &StateInfo{ @@ -20,6 +31,7 @@ func NewStateInfo(rollappId string, newIndex uint64, creator string, startHeight CreationHeight: height, Status: status, BDs: BDs, + CreatedAt: createdAt, } } @@ -59,6 +71,7 @@ func (s *StateInfo) GetEvents() []sdk.Attribute { sdk.NewAttribute(AttributeKeyNumBlocks, strconv.FormatUint(s.NumBlocks, 10)), sdk.NewAttribute(AttributeKeyDAPath, s.DAPath), sdk.NewAttribute(AttributeKeyStatus, s.Status.String()), + sdk.NewAttribute(AttributeKeyCreatedAt, s.CreatedAt.Format(time.RFC3339)), } return eventAttributes } diff --git 
a/x/rollapp/types/state_info.pb.go b/x/rollapp/types/state_info.pb.go index 9bb8adc59..6c357435a 100644 --- a/x/rollapp/types/state_info.pb.go +++ b/x/rollapp/types/state_info.pb.go @@ -7,16 +7,20 @@ import ( fmt "fmt" _ "github.com/cosmos/gogoproto/gogoproto" proto "github.com/cosmos/gogoproto/proto" + github_com_cosmos_gogoproto_types "github.com/cosmos/gogoproto/types" types "github.com/dymensionxyz/dymension/v3/x/common/types" + _ "google.golang.org/protobuf/types/known/timestamppb" io "io" math "math" math_bits "math/bits" + time "time" ) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal var _ = fmt.Errorf var _ = math.Inf +var _ = time.Kitchen // This is a compile-time assertion to ensure that this generated file // is compatible with the proto package it is being compiled against. @@ -106,6 +110,8 @@ type StateInfo struct { // BDs is a list of block description objects (one per block) // the list must be ordered by height, starting from startHeight to startHeight+numBlocks-1 BDs BlockDescriptors `protobuf:"bytes,9,opt,name=BDs,proto3" json:"BDs"` + // created_at is the timestamp at which the StateInfo was created + CreatedAt time.Time `protobuf:"bytes,10,opt,name=created_at,json=createdAt,proto3,stdtime" json:"created_at" yaml:"created_at"` } func (m *StateInfo) Reset() { *m = StateInfo{} } @@ -197,6 +203,13 @@ func (m *StateInfo) GetBDs() BlockDescriptors { return BlockDescriptors{} } +func (m *StateInfo) GetCreatedAt() time.Time { + if m != nil { + return m.CreatedAt + } + return time.Time{} +} + // StateInfoSummary is a compact representation of StateInfo type StateInfoSummary struct { // stateInfoIndex defines what rollapp the state belongs to @@ -330,38 +343,42 @@ func init() { } var fileDescriptor_750f3a9f16533ec4 = []byte{ - // 481 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x94, 0x4f, 0x6b, 0x13, 0x41, - 0x18, 0xc6, 0x33, 0xd9, 0x34, 0xed, 
0x4e, 0x21, 0xd4, 0xa1, 0xc8, 0x52, 0x64, 0x5c, 0x02, 0x4a, - 0xf0, 0xb0, 0x2b, 0xad, 0x1e, 0x3d, 0x18, 0x82, 0x34, 0x1e, 0x44, 0xb7, 0x9e, 0x44, 0x28, 0x9b, - 0xcd, 0x24, 0x19, 0xcc, 0xce, 0xac, 0xf3, 0x47, 0x92, 0x7e, 0x0a, 0x4f, 0x7e, 0x0a, 0x3f, 0x48, - 0x8f, 0xbd, 0xe9, 0x49, 0x24, 0xf9, 0x22, 0x32, 0xb3, 0xcb, 0x6e, 0x9b, 0x26, 0x16, 0x02, 0xbd, - 0xed, 0xfb, 0xee, 0xfb, 0x3c, 0xfb, 0xcc, 0xef, 0x65, 0x16, 0x86, 0xc3, 0x79, 0x4a, 0x98, 0xa4, - 0x9c, 0xcd, 0xe6, 0x17, 0x55, 0x11, 0x0a, 0x3e, 0x9d, 0xc6, 0x59, 0x16, 0x4a, 0x15, 0x2b, 0x72, - 0x4e, 0xd9, 0x88, 0x07, 0x99, 0xe0, 0x8a, 0x23, 0x7c, 0x5d, 0x10, 0x94, 0x45, 0x50, 0x08, 0x8e, - 0x0e, 0xc7, 0x7c, 0xcc, 0xed, 0x68, 0x68, 0x9e, 0x72, 0xd5, 0xd1, 0xcb, 0x3b, 0x3e, 0x33, 0x98, - 0xf2, 0xe4, 0xcb, 0xf9, 0x90, 0xc8, 0x44, 0xd0, 0x4c, 0x71, 0x51, 0xc8, 0x9e, 0x6d, 0x90, 0x25, - 0x3c, 0x4d, 0x39, 0xb3, 0xe1, 0xb4, 0xcc, 0x67, 0xdb, 0x3d, 0xd8, 0x3a, 0x33, 0x61, 0xfb, 0x6c, - 0xc4, 0xfb, 0x6c, 0x48, 0x66, 0xe8, 0x11, 0x74, 0x0b, 0xff, 0xfe, 0xd0, 0x03, 0x3e, 0xe8, 0xb8, - 0x51, 0xd5, 0x40, 0x87, 0x70, 0x87, 0x9a, 0x31, 0xaf, 0xee, 0x83, 0x4e, 0x23, 0xca, 0x8b, 0xf6, - 0x0f, 0x07, 0xba, 0xa5, 0x0d, 0xfa, 0x0c, 0x5b, 0xf2, 0x86, 0xa7, 0xb5, 0xd9, 0x3f, 0x0e, 0x82, - 0xff, 0x53, 0x08, 0x6e, 0x26, 0xe9, 0x36, 0x2e, 0xff, 0x3c, 0xae, 0x45, 0x2b, 0x5e, 0x26, 0x9f, - 0x24, 0x5f, 0x35, 0x61, 0x09, 0x11, 0x36, 0x85, 0x1b, 0x55, 0x0d, 0xe4, 0xc3, 0x7d, 0xa9, 0x62, - 0xa1, 0x4e, 0x09, 0x1d, 0x4f, 0x94, 0xe7, 0xd8, 0x94, 0xd7, 0x5b, 0x46, 0xcf, 0x74, 0xda, 0x35, - 0xe8, 0xa4, 0xd7, 0xb0, 0xef, 0xab, 0x06, 0x7a, 0x08, 0x9b, 0xbd, 0xd7, 0xef, 0x63, 0x35, 0xf1, - 0x76, 0xac, 0x75, 0x51, 0xa1, 0xa7, 0xb0, 0x95, 0x08, 0x12, 0x2b, 0xca, 0x59, 0x61, 0xbd, 0x6b, - 0xa5, 0x2b, 0x5d, 0xf4, 0x0a, 0x36, 0x73, 0xbe, 0xde, 0x9e, 0x0f, 0x3a, 0xad, 0xe3, 0x27, 0x9b, - 0xce, 0x9c, 0x2f, 0xc3, 0x1e, 0x59, 0xcb, 0xa8, 0x10, 0xa1, 0x53, 0xe8, 0x74, 0x7b, 0xd2, 0x73, - 0x2d, 0xaf, 0xe7, 0x77, 0xf1, 0xb2, 0x99, 0x7b, 0xe5, 0xfa, 0x65, 0x41, 
0xcc, 0x58, 0xbc, 0x6d, - 0xec, 0x35, 0x0f, 0x76, 0xdb, 0xbf, 0x00, 0x3c, 0x28, 0xa9, 0x9e, 0xe9, 0x34, 0x8d, 0xc5, 0xfc, - 0x9e, 0xf7, 0x53, 0x11, 0xa8, 0x6f, 0x43, 0xe0, 0x36, 0x68, 0x67, 0x1d, 0xe8, 0xf6, 0x4f, 0x00, - 0xb1, 0x3d, 0x7f, 0x5e, 0x7f, 0xe4, 0x6f, 0x28, 0x8b, 0xa7, 0xf4, 0xc2, 0xce, 0x7c, 0xd0, 0x44, - 0x93, 0x35, 0x56, 0x60, 0xed, 0xce, 0x06, 0xf0, 0xc1, 0x68, 0x55, 0xec, 0xd5, 0x7d, 0x67, 0x6b, - 0x24, 0xb7, 0xed, 0xba, 0xef, 0x2e, 0x17, 0x18, 0x5c, 0x2d, 0x30, 0xf8, 0xbb, 0xc0, 0xe0, 0xfb, - 0x12, 0xd7, 0xae, 0x96, 0xb8, 0xf6, 0x7b, 0x89, 0x6b, 0x9f, 0x5e, 0x8c, 0xa9, 0x9a, 0xe8, 0x81, - 0xc1, 0xb1, 0xe9, 0xb7, 0xf2, 0xed, 0x24, 0x9c, 0x95, 0x97, 0x5e, 0xcd, 0x33, 0x22, 0x07, 0x4d, - 0x7b, 0x7d, 0x4f, 0xfe, 0x05, 0x00, 0x00, 0xff, 0xff, 0xe8, 0xd5, 0xe4, 0x88, 0x8a, 0x04, 0x00, - 0x00, + // 549 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x54, 0x41, 0x6a, 0xdb, 0x40, + 0x14, 0xf5, 0xd8, 0x8e, 0x13, 0x4d, 0xc0, 0x24, 0x43, 0x28, 0xc2, 0xb4, 0xb2, 0x11, 0xb4, 0x98, + 0x2e, 0xa4, 0x92, 0xb4, 0x9b, 0x42, 0x17, 0x31, 0xa6, 0xc4, 0x5d, 0x94, 0x56, 0xc9, 0xa2, 0x94, + 0x82, 0x19, 0x5b, 0x63, 0x79, 0xa8, 0x34, 0xa3, 0x6a, 0x46, 0xc5, 0xca, 0x29, 0x72, 0x90, 0x1e, + 0x24, 0xcb, 0xec, 0xda, 0x55, 0x5a, 0xec, 0x0b, 0x94, 0x9e, 0xa0, 0x68, 0xa4, 0x48, 0x89, 0x63, + 0x37, 0x10, 0xe8, 0x4e, 0xff, 0xeb, 0xbf, 0xc7, 0xfb, 0xef, 0x7d, 0x06, 0xda, 0x6e, 0x12, 0x10, + 0x26, 0x28, 0x67, 0xb3, 0xe4, 0xb4, 0x2c, 0xec, 0x88, 0xfb, 0x3e, 0x0e, 0x43, 0x5b, 0x48, 0x2c, + 0xc9, 0x90, 0xb2, 0x09, 0xb7, 0xc2, 0x88, 0x4b, 0x8e, 0x8c, 0xeb, 0x00, 0xab, 0x28, 0xac, 0x1c, + 0xd0, 0xda, 0xf3, 0xb8, 0xc7, 0xd5, 0xa8, 0x9d, 0x7e, 0x65, 0xa8, 0x56, 0xdb, 0xe3, 0xdc, 0xf3, + 0x89, 0xad, 0xaa, 0x51, 0x3c, 0xb1, 0x25, 0x0d, 0x88, 0x90, 0x38, 0x08, 0xf3, 0x81, 0x17, 0x77, + 0xe8, 0x18, 0xf9, 0x7c, 0xfc, 0x79, 0xe8, 0x12, 0x31, 0x8e, 0x68, 0x28, 0x79, 0x94, 0xc3, 0x9e, + 0xae, 0x81, 0x8d, 0x79, 0x10, 0x70, 0xa6, 0xd4, 0xc7, 
0x22, 0x9b, 0x35, 0xfb, 0xb0, 0x79, 0x9c, + 0x6e, 0x33, 0x60, 0x13, 0x3e, 0x60, 0x2e, 0x99, 0xa1, 0x87, 0x50, 0xcb, 0xf9, 0x07, 0xae, 0x0e, + 0x3a, 0xa0, 0xab, 0x39, 0x65, 0x03, 0xed, 0xc1, 0x0d, 0x9a, 0x8e, 0xe9, 0xd5, 0x0e, 0xe8, 0xd6, + 0x9d, 0xac, 0x30, 0x7f, 0xd7, 0xa0, 0x56, 0xd0, 0xa0, 0x4f, 0xb0, 0x29, 0x6e, 0x70, 0x2a, 0x9a, + 0xed, 0x7d, 0xcb, 0xfa, 0xb7, 0x4d, 0xd6, 0x4d, 0x25, 0xbd, 0xfa, 0xf9, 0x65, 0xbb, 0xe2, 0x2c, + 0x71, 0xa5, 0xfa, 0x04, 0xf9, 0x12, 0x13, 0x36, 0x26, 0x91, 0x52, 0xa1, 0x39, 0x65, 0x03, 0x75, + 0xe0, 0xb6, 0x90, 0x38, 0x92, 0x47, 0x84, 0x7a, 0x53, 0xa9, 0xd7, 0x94, 0xca, 0xeb, 0xad, 0x14, + 0xcf, 0xe2, 0xa0, 0x97, 0x5a, 0x27, 0xf4, 0xba, 0xfa, 0x5f, 0x36, 0xd0, 0x03, 0xd8, 0xe8, 0x1f, + 0xbe, 0xc3, 0x72, 0xaa, 0x6f, 0x28, 0xea, 0xbc, 0x42, 0x4f, 0x60, 0x73, 0x1c, 0x11, 0x2c, 0x29, + 0x67, 0x39, 0xf5, 0xa6, 0x82, 0x2e, 0x75, 0xd1, 0x2b, 0xd8, 0xc8, 0xfc, 0xd5, 0xb7, 0x3a, 0xa0, + 0xdb, 0xdc, 0x7f, 0xbc, 0x6e, 0xe7, 0x2c, 0x0c, 0xb5, 0x72, 0x2c, 0x9c, 0x1c, 0x84, 0x8e, 0x60, + 0xad, 0xd7, 0x17, 0xba, 0xa6, 0xfc, 0x7a, 0x76, 0x97, 0x5f, 0x4a, 0x73, 0xbf, 0x88, 0x5f, 0xe4, + 0x8e, 0xa5, 0x14, 0xe8, 0x03, 0x84, 0x4a, 0x1a, 0x71, 0x87, 0x58, 0xea, 0x50, 0x11, 0xb6, 0xac, + 0xec, 0xe2, 0xac, 0xab, 0x8b, 0xb3, 0x4e, 0xae, 0x2e, 0xae, 0xf7, 0x28, 0x85, 0xfe, 0xb9, 0x6c, + 0xef, 0x26, 0x38, 0xf0, 0x5f, 0x9a, 0x25, 0xd6, 0x3c, 0xfb, 0xd9, 0x06, 0x8e, 0x96, 0x37, 0x0e, + 0xe5, 0x9b, 0xfa, 0x56, 0x63, 0x67, 0xd3, 0xfc, 0x0e, 0xe0, 0x4e, 0x91, 0xd7, 0x71, 0x1c, 0x04, + 0x38, 0x4a, 0xfe, 0x73, 0xf2, 0xa5, 0xb7, 0xd5, 0xfb, 0x78, 0x7b, 0x3b, 0xc2, 0xda, 0xaa, 0x08, + 0xcd, 0x6f, 0x00, 0x1a, 0xca, 0xd9, 0xac, 0x3e, 0xe1, 0xaf, 0x29, 0xc3, 0x3e, 0x3d, 0x55, 0x33, + 0xef, 0x63, 0x12, 0x93, 0x15, 0x54, 0x60, 0xe5, 0x35, 0x8c, 0xe0, 0xee, 0x64, 0x19, 0xac, 0x57, + 0x3b, 0xb5, 0x7b, 0x5b, 0x72, 0x9b, 0xae, 0xf7, 0xf6, 0x7c, 0x6e, 0x80, 0x8b, 0xb9, 0x01, 0x7e, + 0xcd, 0x0d, 0x70, 0xb6, 0x30, 0x2a, 0x17, 0x0b, 0xa3, 0xf2, 0x63, 0x61, 0x54, 0x3e, 0x3e, 
0xf7, + 0xa8, 0x9c, 0xc6, 0xa3, 0xd4, 0x8e, 0x75, 0x2f, 0xda, 0xd7, 0x03, 0x7b, 0x56, 0x3c, 0x27, 0x32, + 0x09, 0x89, 0x18, 0x35, 0xd4, 0x71, 0x1c, 0xfc, 0x0d, 0x00, 0x00, 0xff, 0xff, 0xe2, 0x90, 0x75, + 0x3b, 0x05, 0x05, 0x00, 0x00, } func (m *StateInfoIndex) Marshal() (dAtA []byte, err error) { @@ -419,6 +436,14 @@ func (m *StateInfo) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + n1, err1 := github_com_cosmos_gogoproto_types.StdTimeMarshalTo(m.CreatedAt, dAtA[i-github_com_cosmos_gogoproto_types.SizeOfStdTime(m.CreatedAt):]) + if err1 != nil { + return 0, err1 + } + i -= n1 + i = encodeVarintStateInfo(dAtA, i, uint64(n1)) + i-- + dAtA[i] = 0x52 { size, err := m.BDs.MarshalToSizedBuffer(dAtA[:i]) if err != nil { @@ -618,6 +643,8 @@ func (m *StateInfo) Size() (n int) { } l = m.BDs.Size() n += 1 + l + sovStateInfo(uint64(l)) + l = github_com_cosmos_gogoproto_types.SizeOfStdTime(m.CreatedAt) + n += 1 + l + sovStateInfo(uint64(l)) return n } @@ -998,6 +1025,39 @@ func (m *StateInfo) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex + case 10: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field CreatedAt", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStateInfo + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthStateInfo + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthStateInfo + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := github_com_cosmos_gogoproto_types.StdTimeUnmarshal(&m.CreatedAt, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipStateInfo(dAtA[iNdEx:]) diff --git a/x/rollapp/types/tx.pb.go b/x/rollapp/types/tx.pb.go index c01e86dd6..f37d5a9aa 100644 --- 
a/x/rollapp/types/tx.pb.go +++ b/x/rollapp/types/tx.pb.go @@ -666,18 +666,20 @@ var xxx_messageInfo_MsgAddAppResponse proto.InternalMessageInfo type MsgUpdateApp struct { // creator is the bech32-encoded address of the app owner Creator string `protobuf:"bytes,1,opt,name=creator,proto3" json:"creator,omitempty"` + // id is the unique App's id in the Rollapp + Id uint64 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` // name is the unique App's name (immutable) - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` // rollapp_id is the id of the Rollapp the App belongs to - RollappId string `protobuf:"bytes,3,opt,name=rollapp_id,json=rollappId,proto3" json:"rollapp_id,omitempty"` + RollappId string `protobuf:"bytes,4,opt,name=rollapp_id,json=rollappId,proto3" json:"rollapp_id,omitempty"` // description is the description of the App - Description string `protobuf:"bytes,4,opt,name=description,proto3" json:"description,omitempty"` + Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"` // image is the url to the App image - Image string `protobuf:"bytes,5,opt,name=image,proto3" json:"image,omitempty"` + Image string `protobuf:"bytes,6,opt,name=image,proto3" json:"image,omitempty"` // url is the URL to the App's website - Url string `protobuf:"bytes,6,opt,name=url,proto3" json:"url,omitempty"` + Url string `protobuf:"bytes,7,opt,name=url,proto3" json:"url,omitempty"` // order is the order of the App in the Rollapp - Order int32 `protobuf:"varint,7,opt,name=order,proto3" json:"order,omitempty"` + Order int32 `protobuf:"varint,8,opt,name=order,proto3" json:"order,omitempty"` } func (m *MsgUpdateApp) Reset() { *m = MsgUpdateApp{} } @@ -720,6 +722,13 @@ func (m *MsgUpdateApp) GetCreator() string { return "" } +func (m *MsgUpdateApp) GetId() uint64 { + if m != nil { + return m.Id + } + return 0 +} + func (m 
*MsgUpdateApp) GetName() string { if m != nil { return m.Name @@ -802,8 +811,8 @@ var xxx_messageInfo_MsgUpdateAppResponse proto.InternalMessageInfo type MsgRemoveApp struct { // creator is the bech32-encoded address of the app owner Creator string `protobuf:"bytes,1,opt,name=creator,proto3" json:"creator,omitempty"` - // name is the unique App's name - Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + // id is the unique App's id in the Rollapp + Id uint64 `protobuf:"varint,2,opt,name=id,proto3" json:"id,omitempty"` // rollapp_id is the id of the Rollapp the App belongs to RollappId string `protobuf:"bytes,3,opt,name=rollapp_id,json=rollappId,proto3" json:"rollapp_id,omitempty"` } @@ -848,11 +857,11 @@ func (m *MsgRemoveApp) GetCreator() string { return "" } -func (m *MsgRemoveApp) GetName() string { +func (m *MsgRemoveApp) GetId() uint64 { if m != nil { - return m.Name + return m.Id } - return "" + return 0 } func (m *MsgRemoveApp) GetRollappId() string { @@ -920,65 +929,67 @@ func init() { } var fileDescriptor_1a86300fb8647ecb = []byte{ - // 918 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x56, 0x4f, 0x6f, 0x1b, 0x45, - 0x14, 0xcf, 0xc6, 0x8e, 0x63, 0x3f, 0x3b, 0xc1, 0x19, 0xa2, 0xb0, 0x72, 0x8b, 0xb1, 0x5c, 0x21, - 0x02, 0x2d, 0xbb, 0x34, 0x0d, 0x08, 0x05, 0x2e, 0x49, 0x23, 0xb5, 0x05, 0x99, 0xc2, 0x36, 0x70, - 0xe0, 0x62, 0x6d, 0xbc, 0x93, 0xcd, 0x0a, 0xef, 0xcc, 0x32, 0x33, 0x76, 0x62, 0xb8, 0x71, 0xe1, - 0x80, 0x84, 0x10, 0x67, 0x3e, 0x04, 0x07, 0x3e, 0x03, 0x2a, 0xb7, 0x1e, 0x39, 0x21, 0x94, 0x1c, - 0xb8, 0xf0, 0x21, 0xd0, 0xcc, 0x8e, 0xc7, 0x4e, 0x9c, 0xd8, 0x9b, 0xd0, 0x4b, 0x4f, 0x3b, 0xef, - 0xed, 0xef, 0xfd, 0x99, 0xdf, 0x6f, 0xe6, 0xed, 0xc2, 0x1b, 0xc1, 0x20, 0xc6, 0x84, 0x47, 0x94, - 0x1c, 0x0f, 0xbe, 0x71, 0x8d, 0xe1, 0x32, 0xda, 0xed, 0xfa, 0x49, 0xe2, 0x8a, 0x63, 0x27, 0x61, - 0x54, 0x50, 0x54, 0x1f, 0x07, 0x3a, 0xc6, 0x70, 0x34, 0xb0, 0xf6, 0x4a, 0x87, 
0xf2, 0x98, 0x72, - 0x37, 0xe6, 0xa1, 0xdb, 0xbf, 0x2b, 0x1f, 0x69, 0x60, 0xed, 0xdd, 0x19, 0x15, 0xf6, 0xbb, 0xb4, - 0xf3, 0x55, 0x3b, 0xc0, 0xbc, 0xc3, 0xa2, 0x44, 0x50, 0xa6, 0xc3, 0xee, 0xcc, 0x08, 0xd3, 0x4f, - 0x8d, 0x7e, 0x7b, 0x06, 0x3a, 0xc6, 0xc2, 0x0f, 0x7c, 0xe1, 0x6b, 0xf8, 0x6a, 0x48, 0x43, 0xaa, - 0x96, 0xae, 0x5c, 0xa5, 0xde, 0xe6, 0x8f, 0x39, 0xa8, 0xb6, 0x78, 0x78, 0x9f, 0x61, 0x5f, 0x60, - 0x2f, 0x8d, 0x44, 0x36, 0x2c, 0x76, 0xa4, 0x83, 0x32, 0xdb, 0x6a, 0x58, 0xeb, 0x25, 0x6f, 0x68, - 0xa2, 0x57, 0x01, 0x74, 0xfa, 0x76, 0x14, 0xd8, 0xf3, 0xea, 0x65, 0x49, 0x7b, 0x1e, 0x05, 0xe8, - 0x36, 0xac, 0x44, 0x24, 0x12, 0x91, 0xdf, 0x6d, 0x73, 0xfc, 0x75, 0x0f, 0x93, 0x0e, 0x66, 0x76, - 0x59, 0xa1, 0xaa, 0xfa, 0xc5, 0x93, 0xa1, 0x1f, 0xad, 0xc2, 0x82, 0xdf, 0x8d, 0x7c, 0x6e, 0x57, - 0x14, 0x20, 0x35, 0xd0, 0xc7, 0x50, 0x1c, 0x36, 0x6e, 0x2f, 0x35, 0xac, 0xf5, 0xf2, 0x86, 0xeb, - 0x4c, 0x97, 0xc1, 0xd1, 0x6d, 0xb7, 0x74, 0x98, 0x67, 0x12, 0xa0, 0x3d, 0xa8, 0x84, 0x98, 0x60, - 0x1e, 0xf1, 0x76, 0x44, 0x0e, 0xa8, 0xbd, 0xac, 0x12, 0xde, 0x9e, 0x95, 0xf0, 0x41, 0x1a, 0xf3, - 0x88, 0x1c, 0xd0, 0x9d, 0xfc, 0xd3, 0xbf, 0x5e, 0x9b, 0xf3, 0xca, 0xe1, 0xc8, 0x85, 0x1e, 0xc0, - 0x62, 0x3f, 0x6e, 0x8b, 0x41, 0x82, 0xed, 0x97, 0x1a, 0xd6, 0xfa, 0xf2, 0x86, 0x93, 0xb1, 0x43, - 0xe7, 0x8b, 0xd6, 0xde, 0x20, 0xc1, 0x5e, 0xa1, 0x1f, 0xcb, 0xe7, 0x56, 0xe5, 0xbb, 0x7f, 0x7e, - 0x7d, 0x6b, 0xc8, 0xed, 0x47, 0xf9, 0x62, 0xae, 0x5a, 0x6e, 0xd6, 0xc0, 0x3e, 0xaf, 0x87, 0x87, - 0x79, 0x42, 0x09, 0xc7, 0xcd, 0xdf, 0xe6, 0xe1, 0x46, 0x8b, 0x87, 0x9f, 0x27, 0xc1, 0xe8, 0xa5, - 0xec, 0x88, 0xc5, 0xbe, 0x88, 0x28, 0x91, 0x8c, 0xd2, 0x23, 0x82, 0x87, 0xaa, 0xa5, 0xc6, 0xb5, - 0x34, 0xcb, 0x5d, 0xa2, 0xd9, 0x67, 0x63, 0xea, 0x2c, 0x5c, 0x4b, 0x1d, 0x45, 0xa8, 0x35, 0x45, - 0xa3, 0xc2, 0xf3, 0xd0, 0x68, 0x0b, 0x24, 0xb5, 0x29, 0x01, 0xcd, 0xd7, 0xe1, 0xd6, 0x14, 0xd6, - 0x0c, 0xbb, 0x3f, 0xcf, 0xc3, 0xb2, 0xc1, 0x3d, 0x11, 0xbe, 0xc0, 0x53, 0x2e, 0xc2, 0x4d, 0x18, - 0x51, 0x38, 0xc9, 
0x69, 0x03, 0xca, 0x5c, 0xf8, 0x4c, 0x3c, 0xc4, 0x51, 0x78, 0x28, 0x14, 0x9b, - 0x79, 0x6f, 0xdc, 0x25, 0xe3, 0x49, 0x2f, 0xde, 0x91, 0x73, 0x80, 0xdb, 0x79, 0xf5, 0x7e, 0xe4, - 0x40, 0x6b, 0x50, 0xd8, 0xdd, 0xfe, 0xd4, 0x17, 0x87, 0x8a, 0xe4, 0x92, 0xa7, 0x2d, 0xf4, 0x10, - 0x72, 0x3b, 0xbb, 0xdc, 0x5e, 0x54, 0x14, 0xbd, 0x33, 0x8b, 0x22, 0x95, 0x6c, 0xd7, 0x0c, 0x19, - 0xae, 0x79, 0x92, 0x29, 0x10, 0x82, 0x7c, 0xd7, 0xe7, 0xc2, 0x2e, 0x36, 0xac, 0xf5, 0xa2, 0xa7, - 0xd6, 0x13, 0xc7, 0xb1, 0x50, 0x5d, 0x6c, 0xda, 0xb0, 0x76, 0x96, 0x13, 0x43, 0xd7, 0x0f, 0x16, - 0xac, 0xb6, 0x78, 0xb8, 0xc7, 0x7c, 0xc2, 0x0f, 0x30, 0x7b, 0x2c, 0xa9, 0xe6, 0x87, 0x51, 0x82, - 0x6e, 0xc1, 0x52, 0xa7, 0xc7, 0x18, 0x26, 0xa2, 0x3d, 0x7e, 0x1a, 0x2b, 0xda, 0xa9, 0x80, 0xe8, - 0x06, 0x94, 0x08, 0x3e, 0xd2, 0x80, 0x94, 0xbf, 0x22, 0xc1, 0x47, 0x8f, 0x2f, 0x38, 0xb1, 0xb9, - 0x73, 0xec, 0x6e, 0x21, 0xd9, 0xe7, 0xd9, 0x1a, 0xcd, 0x3a, 0xdc, 0xbc, 0xa8, 0x19, 0xd3, 0xed, - 0xef, 0x16, 0x94, 0x5a, 0x3c, 0xdc, 0x0e, 0x82, 0xed, 0xa9, 0x03, 0x0e, 0x41, 0x9e, 0xf8, 0x31, - 0xd6, 0x2d, 0xa9, 0xf5, 0x8c, 0x76, 0xa4, 0xd8, 0xc3, 0x49, 0x1e, 0x51, 0xa2, 0xc4, 0x2c, 0x79, - 0xe3, 0x2e, 0x79, 0x2f, 0xa3, 0xd8, 0x0f, 0xb1, 0x56, 0x33, 0x35, 0x50, 0x15, 0x72, 0x3d, 0xd6, - 0x55, 0xe7, 0xbd, 0xe4, 0xc9, 0xa5, 0xba, 0xbf, 0x2c, 0xc0, 0x4c, 0x09, 0xbc, 0xe0, 0xa5, 0xc6, - 0x59, 0x59, 0x9a, 0x2f, 0xc3, 0x8a, 0xd9, 0x87, 0xd9, 0xdd, 0x1f, 0x16, 0x54, 0x8c, 0x4c, 0x2f, - 0xf8, 0x06, 0xd7, 0xd4, 0xb1, 0x32, 0x5b, 0x31, 0x7b, 0x8c, 0xd4, 0x16, 0x3d, 0x1c, 0xd3, 0xfe, - 0x73, 0xdf, 0xe2, 0x85, 0x2d, 0x98, 0x52, 0xc3, 0x16, 0x36, 0xfe, 0x2d, 0x40, 0xae, 0xc5, 0x43, - 0xf4, 0x2d, 0x2c, 0x9d, 0xfd, 0x60, 0xce, 0xbc, 0x8a, 0xe7, 0x47, 0x7a, 0xed, 0xfd, 0xab, 0x46, - 0x0c, 0x9b, 0x40, 0xbf, 0x58, 0x60, 0x5f, 0xfa, 0x05, 0xf8, 0x20, 0x43, 0xda, 0xcb, 0x82, 0x6b, - 0xf7, 0xff, 0x47, 0xb0, 0x69, 0xaf, 0x07, 0xe5, 0xf1, 0x09, 0xea, 0x64, 0xce, 0xa9, 0xf0, 0xb5, - 0xf7, 0xae, 0x86, 0x37, 0x65, 0xbf, 0xb7, 0x60, 0x65, 
0x72, 0x14, 0x6d, 0x66, 0xc8, 0x36, 0x11, - 0x55, 0xfb, 0xf0, 0x3a, 0x51, 0xa6, 0x93, 0x03, 0x28, 0xe8, 0x29, 0xf3, 0x66, 0x86, 0x3c, 0x29, - 0xb4, 0x76, 0x37, 0x33, 0xd4, 0xd4, 0xa1, 0x50, 0x1a, 0xdd, 0xf7, 0x3b, 0x99, 0x69, 0x93, 0xd5, - 0x36, 0xaf, 0x82, 0x1e, 0x2f, 0x38, 0xba, 0x7d, 0x59, 0x0a, 0x1a, 0x74, 0xa6, 0x82, 0x13, 0xd7, - 0x6d, 0xe7, 0x93, 0xa7, 0x27, 0x75, 0xeb, 0xd9, 0x49, 0xdd, 0xfa, 0xfb, 0xa4, 0x6e, 0xfd, 0x74, - 0x5a, 0x9f, 0x7b, 0x76, 0x5a, 0x9f, 0xfb, 0xf3, 0xb4, 0x3e, 0xf7, 0xe5, 0x66, 0x18, 0x89, 0xc3, - 0xde, 0xbe, 0xd3, 0xa1, 0xb1, 0x7b, 0xc9, 0x5f, 0x70, 0xff, 0x9e, 0x7b, 0x3c, 0xfa, 0xa3, 0x1f, - 0x24, 0x98, 0xef, 0x17, 0xd4, 0x2f, 0xef, 0xbd, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x91, 0x02, - 0x9b, 0x67, 0x00, 0x0c, 0x00, 0x00, + // 947 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x4d, 0x6f, 0xe3, 0x44, + 0x18, 0xae, 0x93, 0x34, 0x1f, 0x6f, 0xd2, 0x92, 0x0e, 0x55, 0xb1, 0xb2, 0x4b, 0x88, 0xb2, 0x42, + 0x14, 0x76, 0xb1, 0xd9, 0x6e, 0x41, 0xa8, 0x70, 0x69, 0xb7, 0xd2, 0xee, 0x82, 0xc2, 0x82, 0xb7, + 0x70, 0xe0, 0x12, 0xb9, 0xf1, 0xd4, 0xb5, 0x88, 0x67, 0x8c, 0x67, 0x92, 0x36, 0x70, 0xe3, 0xc2, + 0x01, 0x09, 0x21, 0xce, 0xfc, 0x08, 0x0e, 0xfc, 0x06, 0xb4, 0xc7, 0x1e, 0xe1, 0x82, 0x50, 0x7b, + 0xe0, 0xc2, 0x8f, 0x40, 0x1e, 0x8f, 0x27, 0x69, 0xf3, 0x61, 0xb7, 0xec, 0xc9, 0x33, 0xaf, 0x9f, + 0xf7, 0xeb, 0x79, 0x66, 0x5e, 0x1b, 0xde, 0x70, 0x46, 0x3e, 0x26, 0xcc, 0xa3, 0xe4, 0x74, 0xf4, + 0x8d, 0xa9, 0x36, 0x66, 0x48, 0xfb, 0x7d, 0x3b, 0x08, 0x4c, 0x7e, 0x6a, 0x04, 0x21, 0xe5, 0x14, + 0x35, 0x27, 0x81, 0x86, 0xda, 0x18, 0x12, 0xd8, 0x78, 0xa5, 0x47, 0x99, 0x4f, 0x99, 0xe9, 0x33, + 0xd7, 0x1c, 0xde, 0x8f, 0x1e, 0xb1, 0x63, 0xe3, 0xdd, 0x94, 0x0c, 0x87, 0x7d, 0xda, 0xfb, 0xaa, + 0xeb, 0x60, 0xd6, 0x0b, 0xbd, 0x80, 0xd3, 0x50, 0xba, 0xdd, 0x4b, 0x71, 0x93, 0x4f, 0x89, 0x7e, + 0x3b, 0x05, 0xed, 0x63, 0x6e, 0x3b, 0x36, 0xb7, 0x25, 0x7c, 0xdd, 0xa5, 0x2e, 0x15, 0x4b, 0x33, + 0x5a, 
0xc5, 0xd6, 0xf6, 0x8f, 0x79, 0xa8, 0x77, 0x98, 0xfb, 0x30, 0xc4, 0x36, 0xc7, 0x56, 0xec, + 0x89, 0x74, 0x28, 0xf5, 0x22, 0x03, 0x0d, 0x75, 0xad, 0xa5, 0x6d, 0x56, 0xac, 0x64, 0x8b, 0x5e, + 0x05, 0x90, 0xe1, 0xbb, 0x9e, 0xa3, 0xe7, 0xc4, 0xcb, 0x8a, 0xb4, 0x3c, 0x71, 0xd0, 0x5d, 0x58, + 0xf3, 0x88, 0xc7, 0x3d, 0xbb, 0xdf, 0x65, 0xf8, 0xeb, 0x01, 0x26, 0x3d, 0x1c, 0xea, 0x55, 0x81, + 0xaa, 0xcb, 0x17, 0xcf, 0x12, 0x3b, 0x5a, 0x87, 0x65, 0xbb, 0xef, 0xd9, 0x4c, 0xaf, 0x09, 0x40, + 0xbc, 0x41, 0x1f, 0x43, 0x39, 0x29, 0x5c, 0x5f, 0x69, 0x69, 0x9b, 0xd5, 0x2d, 0xd3, 0x58, 0x2c, + 0x83, 0x21, 0xcb, 0xee, 0x48, 0x37, 0x4b, 0x05, 0x40, 0x07, 0x50, 0x73, 0x31, 0xc1, 0xcc, 0x63, + 0x5d, 0x8f, 0x1c, 0x51, 0x7d, 0x55, 0x04, 0xbc, 0x9b, 0x16, 0xf0, 0x51, 0xec, 0xf3, 0x84, 0x1c, + 0xd1, 0xbd, 0xc2, 0xf3, 0xbf, 0x5e, 0x5b, 0xb2, 0xaa, 0xee, 0xd8, 0x84, 0x1e, 0x41, 0x69, 0xe8, + 0x77, 0xf9, 0x28, 0xc0, 0xfa, 0x4b, 0x2d, 0x6d, 0x73, 0x75, 0xcb, 0xc8, 0x58, 0xa1, 0xf1, 0x45, + 0xe7, 0x60, 0x14, 0x60, 0xab, 0x38, 0xf4, 0xa3, 0xe7, 0x4e, 0xed, 0xbb, 0x7f, 0x7e, 0x7d, 0x2b, + 0xe1, 0xf6, 0xa3, 0x42, 0x39, 0x5f, 0xaf, 0xb6, 0x1b, 0xa0, 0x5f, 0xd5, 0xc3, 0xc2, 0x2c, 0xa0, + 0x84, 0xe1, 0xf6, 0x6f, 0x39, 0xb8, 0xd5, 0x61, 0xee, 0xe7, 0x81, 0x33, 0x7e, 0x19, 0x55, 0x14, + 0xfa, 0x36, 0xf7, 0x28, 0x89, 0x18, 0xa5, 0x27, 0x04, 0x27, 0xaa, 0xc5, 0x9b, 0x1b, 0x69, 0x96, + 0x9f, 0xa3, 0xd9, 0x67, 0x13, 0xea, 0x2c, 0xdf, 0x48, 0x1d, 0x41, 0xa8, 0xb6, 0x40, 0xa3, 0xe2, + 0x8b, 0xd0, 0x68, 0x07, 0x22, 0x6a, 0x63, 0x02, 0xda, 0xaf, 0xc3, 0x9d, 0x05, 0xac, 0x29, 0x76, + 0x7f, 0xce, 0xc1, 0xaa, 0xc2, 0x3d, 0xe3, 0x36, 0xc7, 0x0b, 0x2e, 0xc2, 0x6d, 0x18, 0x53, 0x38, + 0xcd, 0x69, 0x0b, 0xaa, 0x8c, 0xdb, 0x21, 0x7f, 0x8c, 0x3d, 0xf7, 0x98, 0x0b, 0x36, 0x0b, 0xd6, + 0xa4, 0x29, 0xf2, 0x27, 0x03, 0x7f, 0x2f, 0x9a, 0x03, 0x4c, 0x2f, 0x88, 0xf7, 0x63, 0x03, 0xda, + 0x80, 0xe2, 0xfe, 0xee, 0xa7, 0x36, 0x3f, 0x16, 0x24, 0x57, 0x2c, 0xb9, 0x43, 0x8f, 0x21, 0xbf, + 0xb7, 0xcf, 0xf4, 0x92, 0xa0, 0xe8, 0x9d, 
0x34, 0x8a, 0x44, 0xb0, 0x7d, 0x35, 0x64, 0x98, 0xe4, + 0x29, 0x0a, 0x81, 0x10, 0x14, 0xfa, 0x36, 0xe3, 0x7a, 0xb9, 0xa5, 0x6d, 0x96, 0x2d, 0xb1, 0x9e, + 0x3a, 0x8e, 0xc5, 0x7a, 0xa9, 0xad, 0xc3, 0xc6, 0x65, 0x4e, 0x14, 0x5d, 0x3f, 0x68, 0xb0, 0xde, + 0x61, 0xee, 0x41, 0x68, 0x13, 0x76, 0x84, 0xc3, 0xa7, 0x11, 0xd5, 0xec, 0xd8, 0x0b, 0xd0, 0x1d, + 0x58, 0xe9, 0x0d, 0xc2, 0x10, 0x13, 0xde, 0x9d, 0x3c, 0x8d, 0x35, 0x69, 0x14, 0x40, 0x74, 0x0b, + 0x2a, 0x04, 0x9f, 0x48, 0x40, 0xcc, 0x5f, 0x99, 0xe0, 0x93, 0xa7, 0x33, 0x4e, 0x6c, 0xfe, 0x0a, + 0xbb, 0x3b, 0x28, 0xaa, 0xf3, 0x72, 0x8e, 0x76, 0x13, 0x6e, 0xcf, 0x2a, 0x46, 0x55, 0xfb, 0xbb, + 0x06, 0x95, 0x0e, 0x73, 0x77, 0x1d, 0x67, 0x77, 0xe1, 0x80, 0x43, 0x50, 0x20, 0xb6, 0x8f, 0x65, + 0x49, 0x62, 0x9d, 0x52, 0x4e, 0x24, 0x76, 0x32, 0xc9, 0x3d, 0x4a, 0x84, 0x98, 0x15, 0x6b, 0xd2, + 0x14, 0xdd, 0x4b, 0xcf, 0xb7, 0x5d, 0x2c, 0xd5, 0x8c, 0x37, 0xa8, 0x0e, 0xf9, 0x41, 0xd8, 0x17, + 0xe7, 0xbd, 0x62, 0x45, 0x4b, 0x71, 0x7f, 0x43, 0x07, 0x87, 0x42, 0xe0, 0x65, 0x2b, 0xde, 0x5c, + 0x96, 0xa5, 0xfd, 0x32, 0xac, 0xa9, 0x3e, 0x54, 0x77, 0x7f, 0x6a, 0x50, 0x53, 0x32, 0x2d, 0x6e, + 0x70, 0x15, 0x72, 0x72, 0x0a, 0x14, 0xac, 0x9c, 0xe7, 0xa8, 0x86, 0xf3, 0x73, 0x1b, 0x2e, 0xa4, + 0x34, 0xbc, 0xbc, 0xa0, 0xe1, 0xe2, 0x8c, 0x86, 0x4b, 0x33, 0x1a, 0x2e, 0xcf, 0x6f, 0x78, 0x43, + 0x1c, 0x33, 0xd5, 0x9a, 0xea, 0x19, 0x8b, 0x96, 0x2d, 0xec, 0xd3, 0xe1, 0x35, 0x5b, 0x4e, 0x39, + 0x5e, 0xb3, 0xd2, 0xab, 0x34, 0x49, 0xfa, 0xad, 0x7f, 0x8b, 0x90, 0xef, 0x30, 0x17, 0x7d, 0x0b, + 0x2b, 0x97, 0x3f, 0x9e, 0xa9, 0xd7, 0xf2, 0xea, 0x78, 0x6f, 0xbc, 0x7f, 0x5d, 0x8f, 0xa4, 0x08, + 0xf4, 0x8b, 0x06, 0xfa, 0xdc, 0xaf, 0xc1, 0x07, 0x19, 0xc2, 0xce, 0x73, 0x6e, 0x3c, 0xfc, 0x1f, + 0xce, 0xaa, 0xbc, 0x01, 0x54, 0x27, 0xa7, 0xa9, 0x91, 0x39, 0xa6, 0xc0, 0x37, 0xde, 0xbb, 0x1e, + 0x5e, 0xa5, 0xfd, 0x5e, 0x83, 0xb5, 0xe9, 0xb1, 0xb4, 0x9d, 0x21, 0xda, 0x94, 0x57, 0xe3, 0xc3, + 0x9b, 0x78, 0xa9, 0x4a, 0x8e, 0xa0, 0x28, 0x27, 0xce, 0x9b, 0x19, 0xe2, 0xc4, 
0xd0, 0xc6, 0xfd, + 0xcc, 0x50, 0x95, 0x87, 0x42, 0x65, 0x7c, 0xf7, 0xef, 0x65, 0xa6, 0x2d, 0xca, 0xb6, 0x7d, 0x1d, + 0xf4, 0x64, 0xc2, 0xf1, 0xcd, 0xcb, 0x92, 0x50, 0xa1, 0x33, 0x25, 0x9c, 0xba, 0x6e, 0x7b, 0x9f, + 0x3c, 0x3f, 0x6f, 0x6a, 0x67, 0xe7, 0x4d, 0xed, 0xef, 0xf3, 0xa6, 0xf6, 0xd3, 0x45, 0x73, 0xe9, + 0xec, 0xa2, 0xb9, 0xf4, 0xc7, 0x45, 0x73, 0xe9, 0xcb, 0x6d, 0xd7, 0xe3, 0xc7, 0x83, 0x43, 0xa3, + 0x47, 0x7d, 0x73, 0xce, 0x1f, 0xf1, 0xf0, 0x81, 0x79, 0x3a, 0xfe, 0xbb, 0x1f, 0x05, 0x98, 0x1d, + 0x16, 0xc5, 0xef, 0xef, 0x83, 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0xa0, 0x0c, 0x19, 0x5d, 0x0c, + 0x0c, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1747,42 +1758,47 @@ func (m *MsgUpdateApp) MarshalToSizedBuffer(dAtA []byte) (int, error) { if m.Order != 0 { i = encodeVarintTx(dAtA, i, uint64(m.Order)) i-- - dAtA[i] = 0x38 + dAtA[i] = 0x40 } if len(m.Url) > 0 { i -= len(m.Url) copy(dAtA[i:], m.Url) i = encodeVarintTx(dAtA, i, uint64(len(m.Url))) i-- - dAtA[i] = 0x32 + dAtA[i] = 0x3a } if len(m.Image) > 0 { i -= len(m.Image) copy(dAtA[i:], m.Image) i = encodeVarintTx(dAtA, i, uint64(len(m.Image))) i-- - dAtA[i] = 0x2a + dAtA[i] = 0x32 } if len(m.Description) > 0 { i -= len(m.Description) copy(dAtA[i:], m.Description) i = encodeVarintTx(dAtA, i, uint64(len(m.Description))) i-- - dAtA[i] = 0x22 + dAtA[i] = 0x2a } if len(m.RollappId) > 0 { i -= len(m.RollappId) copy(dAtA[i:], m.RollappId) i = encodeVarintTx(dAtA, i, uint64(len(m.RollappId))) i-- - dAtA[i] = 0x1a + dAtA[i] = 0x22 } if len(m.Name) > 0 { i -= len(m.Name) copy(dAtA[i:], m.Name) i = encodeVarintTx(dAtA, i, uint64(len(m.Name))) i-- - dAtA[i] = 0x12 + dAtA[i] = 0x1a + } + if m.Id != 0 { + i = encodeVarintTx(dAtA, i, uint64(m.Id)) + i-- + dAtA[i] = 0x10 } if len(m.Creator) > 0 { i -= len(m.Creator) @@ -1844,12 +1860,10 @@ func (m *MsgRemoveApp) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x1a } - if len(m.Name) > 0 { - i -= len(m.Name) - 
copy(dAtA[i:], m.Name) - i = encodeVarintTx(dAtA, i, uint64(len(m.Name))) + if m.Id != 0 { + i = encodeVarintTx(dAtA, i, uint64(m.Id)) i-- - dAtA[i] = 0x12 + dAtA[i] = 0x10 } if len(m.Creator) > 0 { i -= len(m.Creator) @@ -2100,6 +2114,9 @@ func (m *MsgUpdateApp) Size() (n int) { if l > 0 { n += 1 + l + sovTx(uint64(l)) } + if m.Id != 0 { + n += 1 + sovTx(uint64(m.Id)) + } l = len(m.Name) if l > 0 { n += 1 + l + sovTx(uint64(l)) @@ -2145,9 +2162,8 @@ func (m *MsgRemoveApp) Size() (n int) { if l > 0 { n += 1 + l + sovTx(uint64(l)) } - l = len(m.Name) - if l > 0 { - n += 1 + l + sovTx(uint64(l)) + if m.Id != 0 { + n += 1 + sovTx(uint64(m.Id)) } l = len(m.RollappId) if l > 0 { @@ -3608,6 +3624,25 @@ func (m *MsgUpdateApp) Unmarshal(dAtA []byte) error { m.Creator = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Id", wireType) + } + m.Id = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowTx + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Id |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) } @@ -3639,7 +3674,7 @@ func (m *MsgUpdateApp) Unmarshal(dAtA []byte) error { } m.Name = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 3: + case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field RollappId", wireType) } @@ -3671,7 +3706,7 @@ func (m *MsgUpdateApp) Unmarshal(dAtA []byte) error { } m.RollappId = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 4: + case 5: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Description", wireType) } @@ -3703,7 +3738,7 @@ func (m *MsgUpdateApp) Unmarshal(dAtA []byte) error { } m.Description = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 5: + case 6: if wireType != 2 { return fmt.Errorf("proto: 
wrong wireType = %d for field Image", wireType) } @@ -3735,7 +3770,7 @@ func (m *MsgUpdateApp) Unmarshal(dAtA []byte) error { } m.Image = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 6: + case 7: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Url", wireType) } @@ -3767,7 +3802,7 @@ func (m *MsgUpdateApp) Unmarshal(dAtA []byte) error { } m.Url = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 7: + case 8: if wireType != 0 { return fmt.Errorf("proto: wrong wireType = %d for field Order", wireType) } @@ -3919,10 +3954,10 @@ func (m *MsgRemoveApp) Unmarshal(dAtA []byte) error { m.Creator = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 2: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field Name", wireType) + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Id", wireType) } - var stringLen uint64 + m.Id = 0 for shift := uint(0); ; shift += 7 { if shift >= 64 { return ErrIntOverflowTx @@ -3932,24 +3967,11 @@ func (m *MsgRemoveApp) Unmarshal(dAtA []byte) error { } b := dAtA[iNdEx] iNdEx++ - stringLen |= uint64(b&0x7F) << shift + m.Id |= uint64(b&0x7F) << shift if b < 0x80 { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthTx - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthTx - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.Name = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field RollappId", wireType) diff --git a/x/sequencer/client/cli/query.go b/x/sequencer/client/cli/query.go index ee4d9992e..909b04228 100644 --- a/x/sequencer/client/cli/query.go +++ b/x/sequencer/client/cli/query.go @@ -10,7 +10,7 @@ import ( ) // GetQueryCmd returns the cli query commands for this module -func GetQueryCmd(queryRoute string) *cobra.Command { +func GetQueryCmd() *cobra.Command { // Group sequencer queries 
under a subcommand cmd := &cobra.Command{ Use: types.ModuleName, @@ -26,6 +26,7 @@ func GetQueryCmd(queryRoute string) *cobra.Command { cmd.AddCommand(CmdShowSequencersByRollapp()) cmd.AddCommand(CmdGetProposerByRollapp()) cmd.AddCommand(CmdGetNextProposerByRollapp()) + cmd.AddCommand(CmdGetAllProposers()) return cmd } diff --git a/x/sequencer/client/cli/query_sequencers_by_rollapp.go b/x/sequencer/client/cli/query_sequencers_by_rollapp.go index f3ab66b01..f86aa8fe7 100644 --- a/x/sequencer/client/cli/query_sequencers_by_rollapp.go +++ b/x/sequencer/client/cli/query_sequencers_by_rollapp.go @@ -101,3 +101,29 @@ func CmdGetNextProposerByRollapp() *cobra.Command { flags.AddQueryFlagsToCmd(cmd) return cmd } + +func CmdGetAllProposers() *cobra.Command { + cmd := &cobra.Command{ + Use: "list-proposer", + Short: "List all proposers", + RunE: func(cmd *cobra.Command, args []string) error { + params := &types.QueryProposersRequest{} + + clientCtx, err := client.GetClientQueryContext(cmd) + if err != nil { + return err + } + queryClient := types.NewQueryClient(clientCtx) + + res, err := queryClient.Proposers(cmd.Context(), params) + if err != nil { + return err + } + + return clientCtx.PrintProto(res) + }, + } + + flags.AddQueryFlagsToCmd(cmd) + return cmd +} diff --git a/x/sequencer/keeper/grpc_query_sequencers_by_rollapp.go b/x/sequencer/keeper/grpc_query_sequencers_by_rollapp.go index 45efada78..26054665a 100644 --- a/x/sequencer/keeper/grpc_query_sequencers_by_rollapp.go +++ b/x/sequencer/keeper/grpc_query_sequencers_by_rollapp.go @@ -4,9 +4,14 @@ import ( "context" "errors" + "github.com/cosmos/cosmos-sdk/store/prefix" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/dymensionxyz/dymension/v3/x/sequencer/types" + "github.com/cosmos/cosmos-sdk/types/query" "github.com/dymensionxyz/gerr-cosmos/gerrc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/status" + + "github.com/dymensionxyz/dymension/v3/x/sequencer/types" ) func (k Keeper) SequencersByRollapp(c 
context.Context, req *types.QueryGetSequencersByRollappRequest) (*types.QueryGetSequencersByRollappResponse, error) { @@ -77,3 +82,28 @@ func (k Keeper) GetNextProposerByRollapp(c context.Context, req *types.QueryGetN RotationInProgress: k.IsRotating(ctx, req.RollappId), }, nil } + +func (k Keeper) Proposers(c context.Context, req *types.QueryProposersRequest) (*types.QueryProposersResponse, error) { + if req == nil { + return nil, status.Error(codes.InvalidArgument, "invalid request") + } + + var proposers []types.Sequencer + ctx := sdk.UnwrapSDKContext(c) + + store := ctx.KVStore(k.storeKey) + sequencerStore := prefix.NewStore(store, types.ProposerByRollappKey("")) + + pageRes, err := query.Paginate(sequencerStore, req.Pagination, func(key []byte, value []byte) error { + proposer, ok := k.GetSequencer(ctx, string(value)) + if ok { + proposers = append(proposers, proposer) + } + return nil + }) + if err != nil { + return nil, status.Error(codes.Internal, err.Error()) + } + + return &types.QueryProposersResponse{Proposers: proposers, Pagination: pageRes}, nil +} diff --git a/x/sequencer/module.go b/x/sequencer/module.go index 9baa951df..40e35b8dc 100644 --- a/x/sequencer/module.go +++ b/x/sequencer/module.go @@ -87,7 +87,7 @@ func (a AppModuleBasic) GetTxCmd() *cobra.Command { // GetQueryCmd returns the capability module's root query command. func (AppModuleBasic) GetQueryCmd() *cobra.Command { - return cli.GetQueryCmd(types.StoreKey) + return cli.GetQueryCmd() } // ---------------------------------------------------------------------------- diff --git a/x/sequencer/types/query.pb.go b/x/sequencer/types/query.pb.go index 0a814bfc4..e5d24a7ee 100644 --- a/x/sequencer/types/query.pb.go +++ b/x/sequencer/types/query.pb.go @@ -680,6 +680,104 @@ func (m *QueryGetNextProposerByRollappResponse) GetRotationInProgress() bool { return false } +// Request type for the Proposers RPC method. 
+type QueryProposersRequest struct { + Pagination *query.PageRequest `protobuf:"bytes,1,opt,name=pagination,proto3" json:"pagination,omitempty"` +} + +func (m *QueryProposersRequest) Reset() { *m = QueryProposersRequest{} } +func (m *QueryProposersRequest) String() string { return proto.CompactTextString(m) } +func (*QueryProposersRequest) ProtoMessage() {} +func (*QueryProposersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_c6af1252721903a2, []int{14} +} +func (m *QueryProposersRequest) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *QueryProposersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_QueryProposersRequest.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *QueryProposersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryProposersRequest.Merge(m, src) +} +func (m *QueryProposersRequest) XXX_Size() int { + return m.Size() +} +func (m *QueryProposersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueryProposersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryProposersRequest proto.InternalMessageInfo + +func (m *QueryProposersRequest) GetPagination() *query.PageRequest { + if m != nil { + return m.Pagination + } + return nil +} + +// Response type for the Proposers RPC method. 
+type QueryProposersResponse struct { + Proposers []Sequencer `protobuf:"bytes,1,rep,name=proposers,proto3" json:"proposers"` + Pagination *query.PageResponse `protobuf:"bytes,2,opt,name=pagination,proto3" json:"pagination,omitempty"` +} + +func (m *QueryProposersResponse) Reset() { *m = QueryProposersResponse{} } +func (m *QueryProposersResponse) String() string { return proto.CompactTextString(m) } +func (*QueryProposersResponse) ProtoMessage() {} +func (*QueryProposersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_c6af1252721903a2, []int{15} +} +func (m *QueryProposersResponse) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *QueryProposersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_QueryProposersResponse.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *QueryProposersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueryProposersResponse.Merge(m, src) +} +func (m *QueryProposersResponse) XXX_Size() int { + return m.Size() +} +func (m *QueryProposersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueryProposersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueryProposersResponse proto.InternalMessageInfo + +func (m *QueryProposersResponse) GetProposers() []Sequencer { + if m != nil { + return m.Proposers + } + return nil +} + +func (m *QueryProposersResponse) GetPagination() *query.PageResponse { + if m != nil { + return m.Pagination + } + return nil +} + func init() { proto.RegisterType((*QueryParamsRequest)(nil), "dymensionxyz.dymension.sequencer.QueryParamsRequest") proto.RegisterType((*QueryParamsResponse)(nil), "dymensionxyz.dymension.sequencer.QueryParamsResponse") @@ -695,6 +793,8 @@ func init() { proto.RegisterType((*QueryGetProposerByRollappResponse)(nil), 
"dymensionxyz.dymension.sequencer.QueryGetProposerByRollappResponse") proto.RegisterType((*QueryGetNextProposerByRollappRequest)(nil), "dymensionxyz.dymension.sequencer.QueryGetNextProposerByRollappRequest") proto.RegisterType((*QueryGetNextProposerByRollappResponse)(nil), "dymensionxyz.dymension.sequencer.QueryGetNextProposerByRollappResponse") + proto.RegisterType((*QueryProposersRequest)(nil), "dymensionxyz.dymension.sequencer.QueryProposersRequest") + proto.RegisterType((*QueryProposersResponse)(nil), "dymensionxyz.dymension.sequencer.QueryProposersResponse") } func init() { @@ -702,60 +802,64 @@ func init() { } var fileDescriptor_c6af1252721903a2 = []byte{ - // 846 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x56, 0xdd, 0x4e, 0xdb, 0x48, - 0x14, 0x8e, 0xd9, 0xdd, 0x68, 0x73, 0x16, 0xad, 0xd0, 0x80, 0x76, 0x59, 0x2f, 0xca, 0xb2, 0xee, - 0x1f, 0x0a, 0xc5, 0xc3, 0x4f, 0x4b, 0x69, 0x51, 0x11, 0x0d, 0x90, 0x08, 0x95, 0x42, 0x08, 0xbd, - 0xaa, 0x54, 0xa5, 0x0e, 0x19, 0xb9, 0x91, 0x12, 0x8f, 0xf1, 0x38, 0x28, 0x29, 0x42, 0xaa, 0xda, - 0x17, 0x40, 0x42, 0x7d, 0x86, 0x3e, 0x41, 0xfb, 0x0c, 0x5c, 0xf4, 0x02, 0xa9, 0x37, 0xbd, 0x6a, - 0x2b, 0xe8, 0x7d, 0xfb, 0x08, 0x95, 0xc7, 0x13, 0x27, 0x21, 0x21, 0x76, 0x12, 0xb8, 0x8b, 0x67, - 0xce, 0xf9, 0xce, 0xf7, 0x9d, 0x99, 0xf9, 0x4e, 0xe0, 0x66, 0xae, 0x52, 0x24, 0x06, 0xcb, 0x53, - 0xa3, 0x5c, 0x79, 0x81, 0xbd, 0x0f, 0xcc, 0xc8, 0x4e, 0x89, 0x18, 0xdb, 0xc4, 0xc2, 0x3b, 0x25, - 0x62, 0x55, 0x54, 0xd3, 0xa2, 0x36, 0x45, 0xa3, 0xf5, 0xd1, 0xaa, 0xf7, 0xa1, 0x7a, 0xd1, 0xf2, - 0x90, 0x4e, 0x75, 0xca, 0x83, 0xb1, 0xf3, 0xcb, 0xcd, 0x93, 0x47, 0x74, 0x4a, 0xf5, 0x02, 0xc1, - 0x9a, 0x99, 0xc7, 0x9a, 0x61, 0x50, 0x5b, 0xb3, 0xf3, 0xd4, 0x60, 0x62, 0x37, 0xb6, 0x4d, 0x59, - 0x91, 0x32, 0x9c, 0xd5, 0x18, 0x71, 0xcb, 0xe1, 0xdd, 0xa9, 0x2c, 0xb1, 0xb5, 0x29, 0x6c, 0x6a, - 0x7a, 0xde, 0xe0, 0xc1, 0x22, 0x76, 0xc2, 0x97, 0xaf, 0xa9, 0x59, 0x5a, 0xb1, 0x0a, 0x3d, 0xe9, - 0x1b, 
0xee, 0xfd, 0x12, 0x19, 0x77, 0x7c, 0x33, 0xa8, 0x49, 0x2c, 0xcd, 0xce, 0x1b, 0x7a, 0x86, - 0xd9, 0x9a, 0x5d, 0x12, 0xa5, 0x94, 0x21, 0x40, 0x9b, 0x0e, 0xf7, 0x14, 0xaf, 0x9f, 0x76, 0xc2, - 0x99, 0xad, 0x3c, 0x85, 0xc1, 0x86, 0x55, 0x66, 0x52, 0x83, 0x11, 0x94, 0x80, 0xb0, 0xcb, 0x73, - 0x58, 0x1a, 0x95, 0xc6, 0xfe, 0x98, 0x1e, 0x53, 0xfd, 0x3a, 0xab, 0xba, 0x08, 0xf1, 0x5f, 0x8f, - 0x3e, 0xff, 0x17, 0x4a, 0x8b, 0x6c, 0x25, 0x01, 0xc3, 0x1c, 0x3e, 0x49, 0xec, 0xad, 0x6a, 0xa4, - 0x28, 0x8d, 0x62, 0x30, 0xe0, 0x65, 0x3f, 0xc8, 0xe5, 0x2c, 0xc2, 0xdc, 0x6a, 0x91, 0x74, 0xd3, - 0xba, 0x52, 0x80, 0x7f, 0x5a, 0xe0, 0x08, 0xb2, 0x1b, 0x10, 0xf1, 0x12, 0x04, 0xdf, 0x71, 0x7f, - 0xbe, 0x1e, 0x8e, 0xa0, 0x5c, 0xc3, 0x50, 0x9e, 0xc1, 0x5f, 0xbc, 0x9a, 0x17, 0x52, 0x6d, 0x17, - 0x4a, 0x00, 0xd4, 0x8e, 0x5c, 0xd4, 0xba, 0xae, 0xba, 0xf7, 0x43, 0x75, 0xee, 0x87, 0xea, 0x5e, - 0x47, 0x71, 0x3f, 0xd4, 0x94, 0xa6, 0x13, 0x91, 0x9b, 0xae, 0xcb, 0x54, 0xde, 0x49, 0xf0, 0x77, - 0x53, 0x09, 0x21, 0x67, 0x13, 0xc0, 0xa3, 0xe2, 0x74, 0xe4, 0x97, 0xee, 0xf4, 0xd4, 0x81, 0xa0, - 0x64, 0x03, 0xed, 0x3e, 0x4e, 0xfb, 0x86, 0x2f, 0x6d, 0x97, 0x4f, 0x03, 0xef, 0x38, 0x28, 0x4d, - 0xe7, 0xc0, 0xe2, 0x95, 0x34, 0x2d, 0x14, 0x34, 0xd3, 0xac, 0x76, 0x69, 0x04, 0x22, 0x96, 0xbb, - 0xb2, 0x9a, 0x13, 0x47, 0x5a, 0x5b, 0x50, 0xca, 0x70, 0xa5, 0x2d, 0xc6, 0xa5, 0xb5, 0x41, 0x79, - 0x23, 0x41, 0xac, 0x4d, 0xe9, 0x78, 0x65, 0x8b, 0x3f, 0x98, 0x40, 0x32, 0xd0, 0x2a, 0x84, 0xdd, - 0xf7, 0xc5, 0xfb, 0xf9, 0xe7, 0xf4, 0x94, 0x3f, 0xb7, 0x8d, 0xea, 0xcb, 0x14, 0x75, 0x04, 0x80, - 0xf2, 0x52, 0x82, 0xf1, 0x40, 0xbc, 0x2e, 0xaf, 0x35, 0x8b, 0x30, 0x5a, 0x65, 0x90, 0xb2, 0xa8, - 0x49, 0x19, 0xb1, 0x3a, 0x3c, 0xd6, 0x24, 0xfc, 0xdf, 0x06, 0x41, 0x30, 0x57, 0xa0, 0xdf, 0x14, - 0x9b, 0xce, 0xd3, 0x16, 0x28, 0x0d, 0x6b, 0xca, 0x32, 0x5c, 0xad, 0x02, 0xad, 0x93, 0x72, 0xb7, - 0x74, 0x5e, 0x4b, 0x70, 0xcd, 0x07, 0x46, 0x70, 0x8a, 0xc1, 0x80, 0x51, 0x17, 0x50, 0xc7, 0xab, - 0x69, 0x1d, 0xa9, 0x80, 0x2c, 0x31, 0x1d, 
0x56, 0x8d, 0x94, 0x45, 0x75, 0xee, 0x5a, 0xce, 0x05, - 0xf8, 0x3d, 0xdd, 0x62, 0x67, 0xfa, 0xb0, 0x1f, 0x7e, 0xe3, 0x2c, 0xd0, 0x5b, 0x09, 0xc2, 0xae, - 0x45, 0xa2, 0x5b, 0xfe, 0x47, 0xd5, 0xec, 0xd4, 0xf2, 0xed, 0x0e, 0xb3, 0x5c, 0x75, 0xca, 0xe4, - 0xab, 0x8f, 0xdf, 0x0e, 0xfb, 0x62, 0x68, 0x0c, 0x07, 0x9c, 0x4c, 0xe8, 0x83, 0x04, 0x11, 0xef, - 0xaa, 0xa0, 0x7b, 0x01, 0xcb, 0xb6, 0x70, 0x78, 0x79, 0xbe, 0xab, 0x5c, 0x41, 0x3c, 0xc1, 0x89, - 0x2f, 0xa2, 0x05, 0x1c, 0x7c, 0x46, 0xe2, 0xbd, 0xb3, 0x93, 0x63, 0x1f, 0xbd, 0x97, 0x00, 0x6a, - 0x8f, 0x0a, 0xcd, 0x05, 0xe4, 0xd4, 0xe4, 0xfd, 0xf2, 0xdd, 0x2e, 0x32, 0x85, 0x96, 0x19, 0xae, - 0x65, 0x02, 0x8d, 0x77, 0xa0, 0x05, 0x7d, 0x97, 0x60, 0xb0, 0x85, 0x1b, 0xa0, 0xe5, 0x2e, 0xba, - 0xda, 0xe4, 0xd1, 0xf2, 0x4a, 0x8f, 0x28, 0x42, 0xd9, 0x43, 0xae, 0x6c, 0x05, 0x2d, 0x75, 0xa0, - 0x8c, 0x65, 0xb2, 0x95, 0x8c, 0x78, 0xa9, 0x78, 0xcf, 0x7b, 0xb2, 0xfb, 0xe8, 0xa0, 0x0f, 0xfe, - 0x6d, 0xe3, 0x7f, 0x68, 0xad, 0x27, 0xce, 0x67, 0xec, 0x5d, 0x7e, 0x74, 0x41, 0x68, 0xa2, 0x13, - 0x8f, 0x79, 0x27, 0xd6, 0xd1, 0xda, 0x05, 0x74, 0x02, 0xef, 0xb9, 0x93, 0x61, 0x1f, 0x7d, 0x91, - 0x60, 0xa8, 0x95, 0xa3, 0xa2, 0x78, 0x70, 0xf6, 0xe7, 0x39, 0xa8, 0xbc, 0xd4, 0x13, 0x86, 0xd0, - 0xbd, 0xc0, 0x75, 0xcf, 0xa1, 0xd9, 0x00, 0x06, 0x23, 0x40, 0x1a, 0x0e, 0xfd, 0x87, 0x04, 0xc3, - 0xe7, 0x79, 0x34, 0x4a, 0x04, 0x67, 0xd8, 0x6e, 0x56, 0xc8, 0xc9, 0x9e, 0x71, 0x84, 0xda, 0x25, - 0xae, 0xf6, 0x3e, 0x9a, 0xf7, 0x57, 0xeb, 0x0c, 0x8f, 0x4c, 0x2b, 0xc9, 0xf1, 0xd4, 0xd1, 0x49, - 0x54, 0x3a, 0x3e, 0x89, 0x4a, 0x5f, 0x4f, 0xa2, 0xd2, 0xc1, 0x69, 0x34, 0x74, 0x7c, 0x1a, 0x0d, - 0x7d, 0x3a, 0x8d, 0x86, 0x9e, 0xcc, 0xea, 0x79, 0xfb, 0x79, 0x29, 0xab, 0x6e, 0xd3, 0xe2, 0x79, - 0x05, 0x76, 0x67, 0x70, 0xb9, 0xae, 0x8a, 0x5d, 0x31, 0x09, 0xcb, 0x86, 0xf9, 0x7f, 0xfc, 0x99, - 0x9f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x7a, 0x51, 0xb4, 0xed, 0x2f, 0x0d, 0x00, 0x00, + // 905 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x02, 0xff, 0xb4, 0x57, 0xdd, 0x4e, 0xdb, 0x48, + 0x14, 0xce, 0xb0, 0xbb, 0xd1, 0xe6, 0xec, 0x8f, 0xd0, 0xc0, 0xee, 0xb2, 0x5e, 0x94, 0x65, 0xbd, + 0x7f, 0x28, 0x80, 0xcd, 0x5f, 0x0b, 0x14, 0xb5, 0xa5, 0x01, 0x12, 0xa1, 0x52, 0x08, 0xa1, 0x57, + 0x95, 0xaa, 0xd4, 0x21, 0x23, 0x37, 0x52, 0xe2, 0x31, 0xb6, 0x83, 0x92, 0x22, 0xa4, 0xaa, 0x7d, + 0x01, 0xa4, 0xaa, 0xcf, 0xd0, 0xfb, 0x56, 0x55, 0x5f, 0x01, 0xa9, 0xbd, 0x40, 0xea, 0x4d, 0xaf, + 0x2a, 0x04, 0xbd, 0x6f, 0x1f, 0xa1, 0xf2, 0x78, 0xec, 0x24, 0x24, 0xc4, 0xce, 0x0f, 0x77, 0xf6, + 0xf8, 0x9c, 0xef, 0x7c, 0xdf, 0x99, 0x99, 0xf3, 0x25, 0x30, 0x9e, 0xab, 0x14, 0x89, 0x66, 0xe6, + 0xa9, 0x56, 0xae, 0x3c, 0x92, 0xbd, 0x17, 0xd9, 0x24, 0xbb, 0x25, 0xa2, 0xed, 0x10, 0x43, 0xde, + 0x2d, 0x11, 0xa3, 0x22, 0xe9, 0x06, 0xb5, 0x28, 0x1e, 0xa9, 0x8d, 0x96, 0xbc, 0x17, 0xc9, 0x8b, + 0x16, 0x06, 0x55, 0xaa, 0x52, 0x16, 0x2c, 0xdb, 0x4f, 0x4e, 0x9e, 0x30, 0xac, 0x52, 0xaa, 0x16, + 0x88, 0xac, 0xe8, 0x79, 0x59, 0xd1, 0x34, 0x6a, 0x29, 0x56, 0x9e, 0x6a, 0x26, 0xff, 0x1a, 0xdb, + 0xa1, 0x66, 0x91, 0x9a, 0x72, 0x56, 0x31, 0x89, 0x53, 0x4e, 0xde, 0x9b, 0xca, 0x12, 0x4b, 0x99, + 0x92, 0x75, 0x45, 0xcd, 0x6b, 0x2c, 0x98, 0xc7, 0x4e, 0xf8, 0xf2, 0xd5, 0x15, 0x43, 0x29, 0xba, + 0xd0, 0x93, 0xbe, 0xe1, 0xde, 0x13, 0xcf, 0x98, 0xf3, 0xcd, 0xa0, 0x3a, 0x31, 0x14, 0x2b, 0xaf, + 0xa9, 0x19, 0xd3, 0x52, 0xac, 0x12, 0x2f, 0x25, 0x0e, 0x02, 0xde, 0xb2, 0xb9, 0xa7, 0x58, 0xfd, + 0xb4, 0x1d, 0x6e, 0x5a, 0xe2, 0x7d, 0x18, 0xa8, 0x5b, 0x35, 0x75, 0xaa, 0x99, 0x04, 0x27, 0x20, + 0xec, 0xf0, 0x1c, 0x42, 0x23, 0x68, 0xf4, 0x87, 0xe9, 0x51, 0xc9, 0xaf, 0xb3, 0x92, 0x83, 0x10, + 0xff, 0xf6, 0xe8, 0xe3, 0x9f, 0xa1, 0x34, 0xcf, 0x16, 0x13, 0x30, 0xc4, 0xe0, 0x93, 0xc4, 0xda, + 0x76, 0x23, 0x79, 0x69, 0x1c, 0x83, 0x7e, 0x2f, 0xfb, 0x56, 0x2e, 0x67, 0x10, 0xd3, 0xa9, 0x16, + 0x49, 0x37, 0xac, 0x8b, 0x05, 0xf8, 0xbd, 0x09, 0x0e, 0x27, 0xbb, 0x09, 0x11, 0x2f, 0x81, 0xf3, + 0x1d, 0xf3, 0xe7, 0xeb, 0xe1, 0x70, 0xca, 0x55, 0x0c, 0xf1, 0x01, 0xfc, 0xca, 
0xaa, 0x79, 0x21, + 0x6e, 0xbb, 0x70, 0x02, 0xa0, 0xba, 0xe5, 0xbc, 0xd6, 0x7f, 0x92, 0x73, 0x3e, 0x24, 0xfb, 0x7c, + 0x48, 0xce, 0x71, 0xe4, 0xe7, 0x43, 0x4a, 0x29, 0x2a, 0xe1, 0xb9, 0xe9, 0x9a, 0x4c, 0xf1, 0x35, + 0x82, 0xdf, 0x1a, 0x4a, 0x70, 0x39, 0x5b, 0x00, 0x1e, 0x15, 0xbb, 0x23, 0xdf, 0x74, 0xa6, 0xa7, + 0x06, 0x04, 0x27, 0xeb, 0x68, 0xf7, 0x31, 0xda, 0xff, 0xfb, 0xd2, 0x76, 0xf8, 0xd4, 0xf1, 0x8e, + 0x83, 0xd8, 0xb0, 0x0f, 0x66, 0xbc, 0x92, 0xa6, 0x85, 0x82, 0xa2, 0xeb, 0x6e, 0x97, 0x86, 0x21, + 0x62, 0x38, 0x2b, 0x6b, 0x39, 0xbe, 0xa5, 0xd5, 0x05, 0xb1, 0x0c, 0x7f, 0xb7, 0xc4, 0xb8, 0xb4, + 0x36, 0x88, 0xcf, 0x11, 0xc4, 0x5a, 0x94, 0x8e, 0x57, 0xb6, 0xd9, 0x85, 0x09, 0x24, 0x03, 0xaf, + 0x41, 0xd8, 0xb9, 0x5f, 0xac, 0x9f, 0x3f, 0x4f, 0x4f, 0xf9, 0x73, 0xdb, 0x74, 0x6f, 0x26, 0xaf, + 0xc3, 0x01, 0xc4, 0xc7, 0x08, 0xc6, 0x02, 0xf1, 0xba, 0xbc, 0xd6, 0x2c, 0xc1, 0x88, 0xcb, 0x20, + 0x65, 0x50, 0x9d, 0x9a, 0xc4, 0x68, 0x73, 0x5b, 0x93, 0xf0, 0x57, 0x0b, 0x04, 0xce, 0x5c, 0x84, + 0x1f, 0x75, 0xfe, 0xd1, 0xbe, 0xda, 0x1c, 0xa5, 0x6e, 0x4d, 0x5c, 0x81, 0x7f, 0x5c, 0xa0, 0x0d, + 0x52, 0xee, 0x94, 0xce, 0x53, 0x04, 0xff, 0xfa, 0xc0, 0x70, 0x4e, 0x31, 0xe8, 0xd7, 0x6a, 0x02, + 0x6a, 0x78, 0x35, 0xac, 0x63, 0x09, 0xb0, 0xc1, 0xdd, 0x61, 0x4d, 0x4b, 0x19, 0x54, 0x65, 0x53, + 0xcb, 0x3e, 0x00, 0xdf, 0xa7, 0x9b, 0x7c, 0x11, 0x33, 0xf0, 0x8b, 0x33, 0x5e, 0x39, 0x48, 0xcf, + 0x07, 0xc9, 0x4b, 0xc4, 0x67, 0x55, 0x4d, 0x85, 0xea, 0x58, 0x74, 0xfb, 0xda, 0xc5, 0x21, 0xa9, + 0x62, 0xf4, 0x6c, 0x8a, 0x4c, 0xbf, 0xfd, 0x09, 0xbe, 0x63, 0xa4, 0xf1, 0x0b, 0x04, 0x61, 0xc7, + 0x38, 0xf0, 0xac, 0x3f, 0xb7, 0x46, 0xff, 0x12, 0xae, 0xb4, 0x99, 0xe5, 0xb0, 0x11, 0x27, 0x9f, + 0xbc, 0xff, 0xf4, 0xac, 0x2f, 0x86, 0x47, 0xe5, 0x80, 0x7e, 0x8d, 0xdf, 0x21, 0x88, 0x78, 0xbd, + 0xc1, 0xd7, 0x02, 0x96, 0x6d, 0xe2, 0x7b, 0xc2, 0x62, 0x47, 0xb9, 0x9c, 0x78, 0x82, 0x11, 0x5f, + 0xc2, 0x37, 0xe4, 0xe0, 0xbf, 0x1c, 0xe4, 0xfd, 0xf3, 0x7e, 0x7a, 0x80, 0xdf, 0x20, 0x80, 0xea, + 0xa8, 0xc1, 0xf3, 
0x01, 0x39, 0x35, 0x38, 0xa2, 0xb0, 0xd0, 0x41, 0x26, 0xd7, 0x32, 0xcb, 0xb4, + 0x48, 0x78, 0xbc, 0x0d, 0x2d, 0x26, 0xfe, 0x8c, 0x60, 0xa0, 0xc9, 0x90, 0xc4, 0x2b, 0x1d, 0xb4, + 0xb5, 0xc1, 0xba, 0x84, 0xd5, 0x2e, 0x51, 0xb8, 0xb4, 0xdb, 0x4c, 0xda, 0x2a, 0x5e, 0x6e, 0x47, + 0x5a, 0x26, 0x5b, 0xc9, 0xf0, 0x01, 0x26, 0xef, 0x7b, 0x93, 0xec, 0x00, 0x1f, 0xf6, 0xc1, 0x1f, + 0x2d, 0x6c, 0x01, 0xaf, 0x77, 0xc5, 0xf9, 0x9c, 0xeb, 0x09, 0x77, 0x7a, 0x84, 0xc6, 0x3b, 0x71, + 0x97, 0x75, 0x62, 0x03, 0xaf, 0xf7, 0xa0, 0x13, 0xf2, 0xbe, 0x63, 0x98, 0x07, 0xf8, 0x04, 0xc1, + 0x60, 0x33, 0xa3, 0xc1, 0xf1, 0xe0, 0xec, 0x2f, 0x32, 0x16, 0x61, 0xb9, 0x2b, 0x0c, 0xae, 0xfb, + 0x26, 0xd3, 0xbd, 0x80, 0xe7, 0x02, 0x4c, 0x18, 0x77, 0xc2, 0xd6, 0xed, 0xfa, 0x17, 0x04, 0x43, + 0x17, 0x79, 0x17, 0x4e, 0x04, 0xa7, 0xd8, 0xca, 0x43, 0x85, 0x64, 0xd7, 0x38, 0x5c, 0xee, 0x32, + 0x93, 0x7b, 0x1d, 0x2f, 0xfa, 0xcb, 0xb5, 0x4d, 0x35, 0xe3, 0x6a, 0xae, 0x93, 0xfc, 0x0a, 0x41, + 0xc4, 0xf3, 0x31, 0x3c, 0x17, 0x74, 0xb4, 0x9f, 0xf3, 0x56, 0x61, 0xbe, 0xfd, 0x44, 0xae, 0x62, + 0x86, 0xa9, 0x98, 0xc0, 0x63, 0x6d, 0x6c, 0x5a, 0x3c, 0x75, 0x74, 0x1a, 0x45, 0xc7, 0xa7, 0x51, + 0x74, 0x72, 0x1a, 0x45, 0x87, 0x67, 0xd1, 0xd0, 0xf1, 0x59, 0x34, 0xf4, 0xe1, 0x2c, 0x1a, 0xba, + 0x77, 0x55, 0xcd, 0x5b, 0x0f, 0x4b, 0x59, 0x69, 0x87, 0x16, 0x2f, 0x02, 0xdc, 0x9b, 0x91, 0xcb, + 0x35, 0xa8, 0x56, 0x45, 0x27, 0x66, 0x36, 0xcc, 0xfe, 0xb1, 0xcd, 0x7c, 0x0d, 0x00, 0x00, 0xff, + 0xff, 0xc6, 0x93, 0x72, 0x1a, 0xfd, 0x0e, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -784,6 +888,8 @@ type QueryClient interface { GetProposerByRollapp(ctx context.Context, in *QueryGetProposerByRollappRequest, opts ...grpc.CallOption) (*QueryGetProposerByRollappResponse, error) // Queries the next proposer by rollappId. 
GetNextProposerByRollapp(ctx context.Context, in *QueryGetNextProposerByRollappRequest, opts ...grpc.CallOption) (*QueryGetNextProposerByRollappResponse, error) + // Queries a list of proposers. + Proposers(ctx context.Context, in *QueryProposersRequest, opts ...grpc.CallOption) (*QueryProposersResponse, error) } type queryClient struct { @@ -857,6 +963,15 @@ func (c *queryClient) GetNextProposerByRollapp(ctx context.Context, in *QueryGet return out, nil } +func (c *queryClient) Proposers(ctx context.Context, in *QueryProposersRequest, opts ...grpc.CallOption) (*QueryProposersResponse, error) { + out := new(QueryProposersResponse) + err := c.cc.Invoke(ctx, "/dymensionxyz.dymension.sequencer.Query/Proposers", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + // QueryServer is the server API for Query service. type QueryServer interface { // Parameters queries the parameters of the module. @@ -873,6 +988,8 @@ type QueryServer interface { GetProposerByRollapp(context.Context, *QueryGetProposerByRollappRequest) (*QueryGetProposerByRollappResponse, error) // Queries the next proposer by rollappId. GetNextProposerByRollapp(context.Context, *QueryGetNextProposerByRollappRequest) (*QueryGetNextProposerByRollappResponse, error) + // Queries a list of proposers. + Proposers(context.Context, *QueryProposersRequest) (*QueryProposersResponse, error) } // UnimplementedQueryServer can be embedded to have forward compatible implementations. 
@@ -900,6 +1017,9 @@ func (*UnimplementedQueryServer) GetProposerByRollapp(ctx context.Context, req * func (*UnimplementedQueryServer) GetNextProposerByRollapp(ctx context.Context, req *QueryGetNextProposerByRollappRequest) (*QueryGetNextProposerByRollappResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetNextProposerByRollapp not implemented") } +func (*UnimplementedQueryServer) Proposers(ctx context.Context, req *QueryProposersRequest) (*QueryProposersResponse, error) { + return nil, status.Errorf(codes.Unimplemented, "method Proposers not implemented") +} func RegisterQueryServer(s grpc1.Server, srv QueryServer) { s.RegisterService(&_Query_serviceDesc, srv) @@ -1031,6 +1151,24 @@ func _Query_GetNextProposerByRollapp_Handler(srv interface{}, ctx context.Contex return interceptor(ctx, in, info, handler) } +func _Query_Proposers_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueryProposersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(QueryServer).Proposers(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/dymensionxyz.dymension.sequencer.Query/Proposers", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(QueryServer).Proposers(ctx, req.(*QueryProposersRequest)) + } + return interceptor(ctx, in, info, handler) +} + var _Query_serviceDesc = grpc.ServiceDesc{ ServiceName: "dymensionxyz.dymension.sequencer.Query", HandlerType: (*QueryServer)(nil), @@ -1063,6 +1201,10 @@ var _Query_serviceDesc = grpc.ServiceDesc{ MethodName: "GetNextProposerByRollapp", Handler: _Query_GetNextProposerByRollapp_Handler, }, + { + MethodName: "Proposers", + Handler: _Query_Proposers_Handler, + }, }, Streams: []grpc.StreamDesc{}, Metadata: "dymensionxyz/dymension/sequencer/query.proto", @@ -1540,6 +1682,90 @@ func (m 
*QueryGetNextProposerByRollappResponse) MarshalToSizedBuffer(dAtA []byte return len(dAtA) - i, nil } +func (m *QueryProposersRequest) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *QueryProposersRequest) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *QueryProposersRequest) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Pagination != nil { + { + size, err := m.Pagination.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQuery(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *QueryProposersResponse) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *QueryProposersResponse) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *QueryProposersResponse) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.Pagination != nil { + { + size, err := m.Pagination.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQuery(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + if len(m.Proposers) > 0 { + for iNdEx := len(m.Proposers) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Proposers[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintQuery(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + } + return len(dAtA) - i, nil +} + func encodeVarintQuery(dAtA []byte, offset int, v uint64) int { offset -= sovQuery(v) base := offset @@ 
-1741,6 +1967,38 @@ func (m *QueryGetNextProposerByRollappResponse) Size() (n int) { return n } +func (m *QueryProposersRequest) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Pagination != nil { + l = m.Pagination.Size() + n += 1 + l + sovQuery(uint64(l)) + } + return n +} + +func (m *QueryProposersResponse) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if len(m.Proposers) > 0 { + for _, e := range m.Proposers { + l = e.Size() + n += 1 + l + sovQuery(uint64(l)) + } + } + if m.Pagination != nil { + l = m.Pagination.Size() + n += 1 + l + sovQuery(uint64(l)) + } + return n +} + func sovQuery(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } @@ -2950,6 +3208,212 @@ func (m *QueryGetNextProposerByRollappResponse) Unmarshal(dAtA []byte) error { } return nil } +func (m *QueryProposersRequest) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: QueryProposersRequest: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: QueryProposersRequest: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Pagination", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQuery + } + postIndex := iNdEx + msglen + if postIndex < 
0 { + return ErrInvalidLengthQuery + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Pagination == nil { + m.Pagination = &query.PageRequest{} + } + if err := m.Pagination.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipQuery(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthQuery + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *QueryProposersResponse) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: QueryProposersResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: QueryProposersResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Proposers", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQuery + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQuery + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Proposers = append(m.Proposers, Sequencer{}) + if err := 
m.Proposers[len(m.Proposers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Pagination", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowQuery + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthQuery + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthQuery + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Pagination == nil { + m.Pagination = &query.PageResponse{} + } + if err := m.Pagination.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipQuery(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthQuery + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipQuery(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/x/sequencer/types/query.pb.gw.go b/x/sequencer/types/query.pb.gw.go index ea7d257aa..46fe6c481 100644 --- a/x/sequencer/types/query.pb.gw.go +++ b/x/sequencer/types/query.pb.gw.go @@ -385,6 +385,42 @@ func local_request_Query_GetNextProposerByRollapp_0(ctx context.Context, marshal } +var ( + filter_Query_Proposers_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)} +) + +func request_Query_Proposers_0(ctx context.Context, marshaler runtime.Marshaler, client QueryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq QueryProposersRequest + var metadata runtime.ServerMetadata + + if err := 
req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Query_Proposers_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := client.Proposers(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD)) + return msg, metadata, err + +} + +func local_request_Query_Proposers_0(ctx context.Context, marshaler runtime.Marshaler, server QueryServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) { + var protoReq QueryProposersRequest + var metadata runtime.ServerMetadata + + if err := req.ParseForm(); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Query_Proposers_0); err != nil { + return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err) + } + + msg, err := server.Proposers(ctx, &protoReq) + return msg, metadata, err + +} + // RegisterQueryHandlerServer registers the http handlers for service Query to "mux". // UnaryRPC :call QueryServer directly. // StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906. 
@@ -552,6 +588,29 @@ func RegisterQueryHandlerServer(ctx context.Context, mux *runtime.ServeMux, serv }) + mux.Handle("GET", pattern_Query_Proposers_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + var stream runtime.ServerTransportStream + ctx = grpc.NewContextWithServerTransportStream(ctx, &stream) + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := local_request_Query_Proposers_0(rctx, inboundMarshaler, server, req, pathParams) + md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer()) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Query_Proposers_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) 
+ + }) + return nil } @@ -733,6 +792,26 @@ func RegisterQueryHandlerClient(ctx context.Context, mux *runtime.ServeMux, clie }) + mux.Handle("GET", pattern_Query_Proposers_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) { + ctx, cancel := context.WithCancel(req.Context()) + defer cancel() + inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req) + rctx, err := runtime.AnnotateContext(ctx, mux, req) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + resp, md, err := request_Query_Proposers_0(rctx, inboundMarshaler, client, req, pathParams) + ctx = runtime.NewServerMetadataContext(ctx, md) + if err != nil { + runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err) + return + } + + forward_Query_Proposers_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...) + + }) + return nil } @@ -741,15 +820,17 @@ var ( pattern_Query_Sequencer_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 2, 1, 0, 4, 1, 5, 3}, []string{"dymensionxyz", "dymension", "sequencer", "sequencerAddress"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_Sequencers_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 2}, []string{"dymensionxyz", "dymension", "sequencer"}, "", runtime.AssumeColonVerbOpt(false))) + pattern_Query_Sequencers_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"dymensionxyz", "dymension", "sequencer", "sequencers"}, "", runtime.AssumeColonVerbOpt(false))) pattern_Query_SequencersByRollapp_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"dymensionxyz", "dymension", "sequencer", "sequencers_by_rollapp", "rollappId"}, "", runtime.AssumeColonVerbOpt(false))) pattern_Query_SequencersByRollappByStatus_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4, 1, 0, 4, 1, 5, 5}, 
[]string{"dymensionxyz", "dymension", "sequencer", "sequencers_by_rollapp", "rollappId", "status"}, "", runtime.AssumeColonVerbOpt(false))) - pattern_Query_GetProposerByRollapp_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"dymensionxyz", "dymension", "sequencer", "proposer", "rollappId"}, "", runtime.AssumeColonVerbOpt(false))) + pattern_Query_GetProposerByRollapp_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"dymensionxyz", "dymension", "sequencer", "proposers", "rollappId"}, "", runtime.AssumeColonVerbOpt(false))) pattern_Query_GetNextProposerByRollapp_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"dymensionxyz", "dymension", "sequencer", "next_proposer", "rollappId"}, "", runtime.AssumeColonVerbOpt(false))) + + pattern_Query_Proposers_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3}, []string{"dymensionxyz", "dymension", "sequencer", "proposers"}, "", runtime.AssumeColonVerbOpt(false))) ) var ( @@ -766,4 +847,6 @@ var ( forward_Query_GetProposerByRollapp_0 = runtime.ForwardResponseMessage forward_Query_GetNextProposerByRollapp_0 = runtime.ForwardResponseMessage + + forward_Query_Proposers_0 = runtime.ForwardResponseMessage ) diff --git a/x/streamer/keeper/abci.go b/x/streamer/keeper/abci.go new file mode 100644 index 000000000..c86de13f1 --- /dev/null +++ b/x/streamer/keeper/abci.go @@ -0,0 +1,64 @@ +package keeper + +import ( + "fmt" + + sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/dymensionxyz/sdk-utils/utils/uevent" + + "github.com/dymensionxyz/dymension/v3/x/streamer/types" +) + +func (k Keeper) EndBlock(ctx sdk.Context) error { + streams := k.GetActiveStreams(ctx) + + epochPointers, err := k.GetAllEpochPointers(ctx) + if err != nil { + return fmt.Errorf("get all epoch pointers: %w", err) + } + + // Sort epoch pointers to distribute to shorter epochs 
first + types.SortEpochPointers(epochPointers) + + maxIterations := k.GetParams(ctx).MaxIterationsPerBlock + totalIterations := uint64(0) + totalDistributed := sdk.NewCoins() + + for _, p := range epochPointers { + remainIterations := maxIterations - totalIterations + + if remainIterations == 0 { + break // no more iterations available for this block + } + + result := k.DistributeRewards(ctx, p, remainIterations, streams) + + totalIterations += result.Iterations + totalDistributed = totalDistributed.Add(result.DistributedCoins...) + streams = result.FilledStreams + + err = k.SaveEpochPointer(ctx, result.NewPointer) + if err != nil { + return fmt.Errorf("save epoch pointer: %w", err) + } + } + + // Save stream updates + for _, stream := range streams { + err = k.SetStream(ctx, &stream) + if err != nil { + return fmt.Errorf("set stream: %w", err) + } + } + + err = uevent.EmitTypedEvent(ctx, &types.EventEndBlock{ + Iterations: totalIterations, + MaxIterations: maxIterations, + Distributed: totalDistributed, + }) + if err != nil { + return fmt.Errorf("emit typed event: %w", err) + } + + return nil +} diff --git a/x/streamer/keeper/abci_test.go b/x/streamer/keeper/abci_test.go new file mode 100644 index 000000000..6ef87c046 --- /dev/null +++ b/x/streamer/keeper/abci_test.go @@ -0,0 +1,886 @@ +package keeper_test + +import ( + "fmt" + "time" + + "cosmossdk.io/math" + sdk "github.com/cosmos/cosmos-sdk/types" + + "github.com/dymensionxyz/dymension/v3/x/streamer/types" +) + +func (s *KeeperTestSuite) TestProcessEpochPointer() { + tests := []struct { + name string + maxIterationsPerBlock uint64 + numGauges int + blocksInEpoch int + streams []types.Stream + expectedBlockResults []blockResults + }{ + { + name: "1 block in the epoch", + maxIterationsPerBlock: 9, + numGauges: 16, + blocksInEpoch: 1, + streams: []types.Stream{ + { + Id: 1, + DistrEpochIdentifier: "hour", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: 
[]types.DistrRecord{ + {GaugeId: 1, Weight: math.NewInt(25)}, + {GaugeId: 2, Weight: math.NewInt(25)}, + {GaugeId: 3, Weight: math.NewInt(25)}, + {GaugeId: 4, Weight: math.NewInt(25)}, + }, + }, + }, + { + Id: 2, + DistrEpochIdentifier: "day", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 5, Weight: math.NewInt(25)}, + {GaugeId: 6, Weight: math.NewInt(25)}, + {GaugeId: 7, Weight: math.NewInt(25)}, + {GaugeId: 8, Weight: math.NewInt(25)}, + }, + }, + }, + { + Id: 3, + DistrEpochIdentifier: "day", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 9, Weight: math.NewInt(25)}, + {GaugeId: 10, Weight: math.NewInt(25)}, + {GaugeId: 11, Weight: math.NewInt(25)}, + {GaugeId: 12, Weight: math.NewInt(25)}, + }, + }, + }, + { + Id: 4, + DistrEpochIdentifier: "hour", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 13, Weight: math.NewInt(25)}, + {GaugeId: 14, Weight: math.NewInt(25)}, + {GaugeId: 15, Weight: math.NewInt(25)}, + {GaugeId: 16, Weight: math.NewInt(25)}, + }, + }, + }, + }, + expectedBlockResults: []blockResults{ + { + height: 0, + epochPointers: []types.EpochPointer{ + { + StreamId: types.MaxStreamID, + GaugeId: types.MaxStreamID, + EpochIdentifier: "hour", + EpochDuration: time.Hour, + }, + { + StreamId: 2, + GaugeId: 6, + EpochIdentifier: "day", + EpochDuration: 24 * time.Hour, + }, + // week epoch pointer is not used + { + StreamId: types.MinStreamID, + GaugeId: types.MinStreamID, + EpochIdentifier: "week", + EpochDuration: 7 * 24 * time.Hour, + }, + }, + distributedCoins: []distributedCoins{ + {streamID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {streamID: 3, coins: nil}, + {streamID: 4, coins: 
sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + }, + gauges: []gaugeCoins{ + // 1st stream + {gaugeID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 2nd stream + {gaugeID: 5, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 6, coins: nil}, + {gaugeID: 7, coins: nil}, + {gaugeID: 8, coins: nil}, + // 3rd stream + {gaugeID: 9, coins: nil}, + {gaugeID: 10, coins: nil}, + {gaugeID: 11, coins: nil}, + {gaugeID: 12, coins: nil}, + // 4th stream + {gaugeID: 13, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 14, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 15, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 16, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + }, + }, + }, + }, + { + name: "Several blocks in the epoch", + maxIterationsPerBlock: 5, + numGauges: 16, + blocksInEpoch: 2, + streams: []types.Stream{ + { + Id: 1, + DistrEpochIdentifier: "hour", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 1, Weight: math.NewInt(25)}, + {GaugeId: 2, Weight: math.NewInt(25)}, + {GaugeId: 3, Weight: math.NewInt(25)}, + {GaugeId: 4, Weight: math.NewInt(25)}, + }, + }, + }, + { + Id: 2, + DistrEpochIdentifier: "day", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 5, Weight: math.NewInt(25)}, + {GaugeId: 6, Weight: math.NewInt(25)}, + {GaugeId: 7, Weight: math.NewInt(25)}, + {GaugeId: 8, Weight: math.NewInt(25)}, + }, + }, + }, + { + Id: 3, + DistrEpochIdentifier: "day", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 9, Weight: math.NewInt(25)}, + 
{GaugeId: 10, Weight: math.NewInt(25)}, + {GaugeId: 11, Weight: math.NewInt(25)}, + {GaugeId: 12, Weight: math.NewInt(25)}, + }, + }, + }, + { + Id: 4, + DistrEpochIdentifier: "hour", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 13, Weight: math.NewInt(25)}, + {GaugeId: 14, Weight: math.NewInt(25)}, + {GaugeId: 15, Weight: math.NewInt(25)}, + {GaugeId: 16, Weight: math.NewInt(25)}, + }, + }, + }, + }, + expectedBlockResults: []blockResults{ + { + height: 0, + epochPointers: []types.EpochPointer{ + { + StreamId: 4, + GaugeId: 14, + EpochIdentifier: "hour", + EpochDuration: time.Hour, + }, + { + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + EpochDuration: 24 * time.Hour, + }, + // week epoch pointer is not used + { + StreamId: types.MinStreamID, + GaugeId: types.MinStreamID, + EpochIdentifier: "week", + EpochDuration: 7 * 24 * time.Hour, + }, + }, + distributedCoins: []distributedCoins{ + {streamID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 2, coins: nil}, + {streamID: 3, coins: nil}, + {streamID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + }, + gauges: []gaugeCoins{ + // 1st stream + {gaugeID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 2nd stream + {gaugeID: 5, coins: nil}, + {gaugeID: 6, coins: nil}, + {gaugeID: 7, coins: nil}, + {gaugeID: 8, coins: nil}, + // 3rd stream + {gaugeID: 9, coins: nil}, + {gaugeID: 10, coins: nil}, + {gaugeID: 11, coins: nil}, + {gaugeID: 12, coins: nil}, + // 4th stream + {gaugeID: 13, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 14, coins: nil}, + {gaugeID: 15, coins: nil}, + {gaugeID: 16, coins: nil}, + }, + }, + { + height: 1, + epochPointers: []types.EpochPointer{ + 
{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxStreamID, + EpochIdentifier: "hour", + EpochDuration: time.Hour, + }, + { + StreamId: 2, + GaugeId: 7, + EpochIdentifier: "day", + EpochDuration: 24 * time.Hour, + }, + // week epoch pointer is not used + { + StreamId: types.MinStreamID, + GaugeId: types.MinStreamID, + EpochIdentifier: "week", + EpochDuration: 7 * 24 * time.Hour, + }, + }, + distributedCoins: []distributedCoins{ + {streamID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 50))}, + {streamID: 3, coins: nil}, + {streamID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + }, + gauges: []gaugeCoins{ + // 1st stream + {gaugeID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 2nd stream + {gaugeID: 5, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 6, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 7, coins: nil}, + {gaugeID: 8, coins: nil}, + // 3rd stream + {gaugeID: 9, coins: nil}, + {gaugeID: 10, coins: nil}, + {gaugeID: 11, coins: nil}, + {gaugeID: 12, coins: nil}, + // 4th stream + {gaugeID: 13, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 14, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 15, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 16, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + }, + }, + }, + }, + { + name: "Send all reward in one single block", + maxIterationsPerBlock: 5, + numGauges: 4, + blocksInEpoch: 5, + streams: []types.Stream{ + { + Id: 1, + DistrEpochIdentifier: "hour", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 1, Weight: math.NewInt(1)}, + }, + }, + }, + { + Id: 
2, + DistrEpochIdentifier: "day", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 2, Weight: math.NewInt(1)}, + }, + }, + }, + { + Id: 3, + DistrEpochIdentifier: "day", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 3, Weight: math.NewInt(1)}, + }, + }, + }, + { + Id: 4, + DistrEpochIdentifier: "hour", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 4, Weight: math.NewInt(1)}, + }, + }, + }, + }, + expectedBlockResults: []blockResults{ + { + height: 0, + epochPointers: []types.EpochPointer{ + { + StreamId: types.MaxStreamID, + GaugeId: types.MaxStreamID, + EpochIdentifier: "hour", + EpochDuration: time.Hour, + }, + { + StreamId: types.MaxStreamID, + GaugeId: types.MaxStreamID, + EpochIdentifier: "day", + EpochDuration: 24 * time.Hour, + }, + // week epoch pointer is not used, however it points on the last gauge + { + StreamId: types.MaxStreamID, + GaugeId: types.MaxStreamID, + EpochIdentifier: "week", + EpochDuration: 7 * 24 * time.Hour, + }, + }, + distributedCoins: []distributedCoins{ + {streamID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1))}, + {streamID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1))}, + {streamID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1))}, + {streamID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1))}, + }, + gauges: []gaugeCoins{ + {gaugeID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1))}, + {gaugeID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1))}, + {gaugeID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1))}, + {gaugeID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 1))}, + }, + }, + }, + }, + { + name: "Many blocks", + maxIterationsPerBlock: 3, + numGauges: 16, + blocksInEpoch: 100, + streams: []types.Stream{ + { + Id: 1, + DistrEpochIdentifier: "hour", + 
Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 1, Weight: math.NewInt(25)}, + {GaugeId: 2, Weight: math.NewInt(25)}, + {GaugeId: 3, Weight: math.NewInt(25)}, + {GaugeId: 4, Weight: math.NewInt(25)}, + }, + }, + }, + { + Id: 2, + DistrEpochIdentifier: "day", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 5, Weight: math.NewInt(25)}, + {GaugeId: 6, Weight: math.NewInt(25)}, + {GaugeId: 7, Weight: math.NewInt(25)}, + {GaugeId: 8, Weight: math.NewInt(25)}, + }, + }, + }, + { + Id: 3, + DistrEpochIdentifier: "day", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 9, Weight: math.NewInt(25)}, + {GaugeId: 10, Weight: math.NewInt(25)}, + {GaugeId: 11, Weight: math.NewInt(25)}, + {GaugeId: 12, Weight: math.NewInt(25)}, + }, + }, + }, + { + Id: 4, + DistrEpochIdentifier: "hour", + Coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100)), + DistributeTo: &types.DistrInfo{ + Records: []types.DistrRecord{ + {GaugeId: 13, Weight: math.NewInt(25)}, + {GaugeId: 14, Weight: math.NewInt(25)}, + {GaugeId: 15, Weight: math.NewInt(25)}, + {GaugeId: 16, Weight: math.NewInt(25)}, + }, + }, + }, + }, + expectedBlockResults: []blockResults{ + { + height: 0, + epochPointers: []types.EpochPointer{ + { + StreamId: 1, + GaugeId: 4, + EpochIdentifier: "hour", + EpochDuration: time.Hour, + }, + { + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + EpochDuration: 24 * time.Hour, + }, + // week epoch pointer is not used + { + StreamId: types.MinStreamID, + GaugeId: types.MinStreamID, + EpochIdentifier: "week", + EpochDuration: 7 * 24 * time.Hour, + }, + }, + distributedCoins: []distributedCoins{ + {streamID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 75))}, + {streamID: 2, coins: nil}, + {streamID: 3, coins: nil}, + {streamID: 4, coins: nil}, + }, 
+ gauges: []gaugeCoins{ + // 1st stream + {gaugeID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 4, coins: nil}, + // 2nd stream + {gaugeID: 5, coins: nil}, + {gaugeID: 6, coins: nil}, + {gaugeID: 7, coins: nil}, + {gaugeID: 8, coins: nil}, + // 3rd stream + {gaugeID: 9, coins: nil}, + {gaugeID: 10, coins: nil}, + {gaugeID: 11, coins: nil}, + {gaugeID: 12, coins: nil}, + // 4th stream + {gaugeID: 13, coins: nil}, + {gaugeID: 14, coins: nil}, + {gaugeID: 15, coins: nil}, + {gaugeID: 16, coins: nil}, + }, + }, + { + height: 1, + epochPointers: []types.EpochPointer{ + { + StreamId: 4, + GaugeId: 15, + EpochIdentifier: "hour", + EpochDuration: time.Hour, + }, + { + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + EpochDuration: 24 * time.Hour, + }, + // week epoch pointer is not used + { + StreamId: types.MinStreamID, + GaugeId: types.MinStreamID, + EpochIdentifier: "week", + EpochDuration: 7 * 24 * time.Hour, + }, + }, + distributedCoins: []distributedCoins{ + {streamID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 2, coins: nil}, + {streamID: 3, coins: nil}, + {streamID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 50))}, + }, + gauges: []gaugeCoins{ + // 1st stream + {gaugeID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 2nd stream + {gaugeID: 5, coins: nil}, + {gaugeID: 6, coins: nil}, + {gaugeID: 7, coins: nil}, + {gaugeID: 8, coins: nil}, + // 3rd stream + {gaugeID: 9, coins: nil}, + {gaugeID: 10, coins: nil}, + {gaugeID: 11, coins: nil}, + {gaugeID: 12, coins: nil}, + // 4th stream + {gaugeID: 13, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 14, 
coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 15, coins: nil}, + {gaugeID: 16, coins: nil}, + }, + }, + { + height: 2, + epochPointers: []types.EpochPointer{ + { + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "hour", + EpochDuration: time.Hour, + }, + { + StreamId: 2, + GaugeId: 6, + EpochIdentifier: "day", + EpochDuration: 24 * time.Hour, + }, + // week epoch pointer is not used + { + StreamId: types.MinStreamID, + GaugeId: types.MinStreamID, + EpochIdentifier: "week", + EpochDuration: 7 * 24 * time.Hour, + }, + }, + distributedCoins: []distributedCoins{ + {streamID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {streamID: 3, coins: nil}, + {streamID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + }, + gauges: []gaugeCoins{ + // 1st stream + {gaugeID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 2nd stream + {gaugeID: 5, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 6, coins: nil}, + {gaugeID: 7, coins: nil}, + {gaugeID: 8, coins: nil}, + // 3rd stream + {gaugeID: 9, coins: nil}, + {gaugeID: 10, coins: nil}, + {gaugeID: 11, coins: nil}, + {gaugeID: 12, coins: nil}, + // 4th stream + {gaugeID: 13, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 14, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 15, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 16, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + }, + }, + { + height: 3, + epochPointers: []types.EpochPointer{ + { + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "hour", + EpochDuration: time.Hour, + }, + { + StreamId: 3, + GaugeId: 9, + EpochIdentifier: "day", + 
EpochDuration: 24 * time.Hour, + }, + // week epoch pointer is not used + { + StreamId: types.MinStreamID, + GaugeId: types.MinStreamID, + EpochIdentifier: "week", + EpochDuration: 7 * 24 * time.Hour, + }, + }, + distributedCoins: []distributedCoins{ + {streamID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 3, coins: nil}, + {streamID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + }, + gauges: []gaugeCoins{ + // 1st stream + {gaugeID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 2nd stream + {gaugeID: 5, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 6, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 7, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 8, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 3rd stream + {gaugeID: 9, coins: nil}, + {gaugeID: 10, coins: nil}, + {gaugeID: 11, coins: nil}, + {gaugeID: 12, coins: nil}, + // 4th stream + {gaugeID: 13, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 14, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 15, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 16, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + }, + }, + { + height: 3, + epochPointers: []types.EpochPointer{ + { + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "hour", + EpochDuration: time.Hour, + }, + { + StreamId: 3, + GaugeId: 12, + EpochIdentifier: "day", + EpochDuration: 24 * time.Hour, + }, + // week epoch pointer is not used + { + StreamId: types.MinStreamID, + GaugeId: types.MinStreamID, + EpochIdentifier: "week", + EpochDuration: 7 * 24 * time.Hour, + }, + }, + distributedCoins: 
[]distributedCoins{ + {streamID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 75))}, + {streamID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + }, + gauges: []gaugeCoins{ + // 1st stream + {gaugeID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 2nd stream + {gaugeID: 5, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 6, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 7, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 8, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 3rd stream + {gaugeID: 9, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 10, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 11, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 12, coins: nil}, + // 4th stream + {gaugeID: 13, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 14, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 15, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 16, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + }, + }, + { + height: 4, + epochPointers: []types.EpochPointer{ + { + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "hour", + EpochDuration: time.Hour, + }, + { + StreamId: types.MaxStreamID, + GaugeId: types.MaxStreamID, + EpochIdentifier: "day", + EpochDuration: 24 * time.Hour, + }, + // week epoch pointer is not used, however it points on the last gauge + { + StreamId: types.MaxStreamID, + GaugeId: types.MaxStreamID, + EpochIdentifier: "week", + EpochDuration: 7 * 24 * time.Hour, + }, + }, + distributedCoins: []distributedCoins{ 
+ {streamID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + {streamID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 100))}, + }, + gauges: []gaugeCoins{ + // 1st stream + {gaugeID: 1, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 2, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 3, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 4, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 2nd stream + {gaugeID: 5, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 6, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 7, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 8, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 3rd stream + {gaugeID: 9, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 10, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 11, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 12, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + // 4th stream + {gaugeID: 13, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 14, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 15, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + {gaugeID: 16, coins: sdk.NewCoins(sdk.NewInt64Coin("udym", 25))}, + }, + }, + }, + }, + } + + // Run tests + for _, tc := range tests { + s.Run(tc.name, func() { + s.SetupTest() + + s.CreateGaugesUntil(tc.numGauges) + + s.Require().LessOrEqual(len(tc.expectedBlockResults), tc.blocksInEpoch) + + // Update module params + params := s.App.StreamerKeeper.GetParams(s.Ctx) + params.MaxIterationsPerBlock = tc.maxIterationsPerBlock + s.App.StreamerKeeper.SetParams(s.Ctx, params) + + for _, stream := range tc.streams { + s.CreateStream(stream.DistributeTo.Records, stream.Coins, time.Now().Add(-time.Minute), 
stream.DistrEpochIdentifier, 1) + } + + // Start epochs + err := s.App.StreamerKeeper.BeforeEpochStart(s.Ctx, "hour") + s.Require().NoError(err) + err = s.App.StreamerKeeper.BeforeEpochStart(s.Ctx, "day") + s.Require().NoError(err) + + for i := range tc.blocksInEpoch { + err = s.App.StreamerKeeper.EndBlock(s.Ctx) + s.Require().NoError(err) + + // Check expected rewards against actual rewards received + gauges := s.App.IncentivesKeeper.GetGauges(s.Ctx) + actualGauges := make(gaugeCoinsSlice, 0, len(gauges)) + for _, gauge := range gauges { + actualGauges = append(actualGauges, gaugeCoins{gaugeID: gauge.Id, coins: gauge.Coins}) + } + + // Check block results + idx := i + if idx >= len(tc.expectedBlockResults) { + idx = len(tc.expectedBlockResults) - 1 + } + expected := tc.expectedBlockResults[idx] + + // Verify epoch pointers are valid + pointers, err := s.App.StreamerKeeper.GetAllEpochPointers(s.Ctx) + s.Require().NoError(err) + // Equality here is important! Pointers must be filled from shorter to longer. + types.SortEpochPointers(pointers) + s.Require().Equal(expected.epochPointers, pointers) + + // Verify gauges are rewarded. Equality here is important! + s.Require().Equal(expected.gauges, actualGauges, "block height: %d\nexpect: %s\nactual: %s", i, expected, actualGauges) + + // Verify streams are valid + active := s.App.StreamerKeeper.GetActiveStreams(s.Ctx) + actualActive := make(distributedCoinsSlice, 0, len(gauges)) + for _, a := range active { + actualActive = append(actualActive, distributedCoins{streamID: a.Id, coins: a.DistributedCoins}) + } + // Equality here is important! 
+ s.Require().Equal(expected.distributedCoins, actualActive) + } + }) + } +} + +type gaugeCoins struct { + gaugeID uint64 + coins sdk.Coins +} + +func (g gaugeCoins) String() string { + return fmt.Sprintf("gaugeID: %d, coins: %s", g.gaugeID, g.coins) +} + +type gaugeCoinsSlice []gaugeCoins + +func (s gaugeCoinsSlice) String() string { + var result string + result += "[" + for i, v := range s { + result += v.String() + if i < len(s)-1 { + result += ", " + } + } + result += "]" + return result +} + +type distributedCoins struct { + streamID uint64 + coins sdk.Coins +} + +func (d distributedCoins) String() string { + return fmt.Sprintf("streamID: %d, coins: %s", d.streamID, d.coins) +} + +type distributedCoinsSlice []distributedCoins + +func (s distributedCoinsSlice) String() string { + var result string + result += "[" + for i, v := range s { + result += v.String() + if i < len(s)-1 { + result += ", " + } + } + result += "]" + return result +} + +type blockResults struct { + height uint64 + epochPointers []types.EpochPointer + distributedCoins distributedCoinsSlice + gauges gaugeCoinsSlice +} + +func (b blockResults) String() string { + return fmt.Sprintf("height: %d, epochPointer: %v, distributedCoins: %s, gauges: %s", b.height, b.epochPointers, b.distributedCoins, b.gauges) +} diff --git a/x/streamer/keeper/distr_info_test.go b/x/streamer/keeper/distr_info_test.go index 13709a909..37d08c398 100644 --- a/x/streamer/keeper/distr_info_test.go +++ b/x/streamer/keeper/distr_info_test.go @@ -5,8 +5,9 @@ import ( "time" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/dymensionxyz/dymension/v3/x/streamer/types" "github.com/stretchr/testify/require" + + "github.com/dymensionxyz/dymension/v3/x/streamer/types" ) func (suite *KeeperTestSuite) TestAllocateToGauges() { @@ -49,18 +50,11 @@ func (suite *KeeperTestSuite) TestAllocateToGauges() { }, } - for name, test := range tests { + for _, test := range tests { suite.Run(test.name, func() { var streams []types.Stream - 
err := suite.CreateGauge() - suite.Require().NoError(err) - err = suite.CreateGauge() - suite.Require().NoError(err) - err = suite.CreateGauge() - suite.Require().NoError(err) - - keeper := suite.App.StreamerKeeper + suite.CreateGauges(3) // create a stream suite.CreateStream(test.testingDistrRecord, sdk.NewCoins(test.mintedCoins), time.Now(), "day", 1) @@ -68,13 +62,8 @@ func (suite *KeeperTestSuite) TestAllocateToGauges() { // move all created streams from upcoming to active suite.Ctx = suite.Ctx.WithBlockTime(time.Now()) streams = suite.App.StreamerKeeper.GetStreams(suite.Ctx) - for _, stream := range streams { - err := suite.App.StreamerKeeper.MoveUpcomingStreamToActiveStream(suite.Ctx, stream) - suite.Require().NoError(err) - } - _, err = keeper.Distribute(suite.Ctx, streams) - suite.Require().NoError(err, name) + suite.DistributeAllRewards(streams) for i := 0; i < len(test.testingDistrRecord); i++ { if test.testingDistrRecord[i].GaugeId == 0 { @@ -82,7 +71,7 @@ func (suite *KeeperTestSuite) TestAllocateToGauges() { } gauge, err := suite.App.IncentivesKeeper.GetGaugeByID(suite.Ctx, test.testingDistrRecord[i].GaugeId) suite.Require().NoError(err) - suite.Require().Equal(test.expectedGaugesBalances[i], gauge.Coins) + suite.Require().ElementsMatch(test.expectedGaugesBalances[i], gauge.Coins) } }) } diff --git a/x/streamer/keeper/distribute.go b/x/streamer/keeper/distribute.go index 576b30e6b..4adb1352c 100644 --- a/x/streamer/keeper/distribute.go +++ b/x/streamer/keeper/distribute.go @@ -3,143 +3,112 @@ package keeper import ( "fmt" + "cosmossdk.io/math" sdk "github.com/cosmos/cosmos-sdk/types" "github.com/osmosis-labs/osmosis/v15/osmoutils" + "github.com/dymensionxyz/dymension/v3/utils/pagination" "github.com/dymensionxyz/dymension/v3/x/streamer/types" ) -// DistributeByWeights allocates and distributes coin according a gauge’s proportional weight that is recorded in the record. 
-func (k Keeper) DistributeByWeights(ctx sdk.Context, coins sdk.Coins, distrInfo *types.DistrInfo) (sdk.Coins, error) { - logger := k.Logger(ctx) - +func (k Keeper) DistributeToGauge(ctx sdk.Context, coins sdk.Coins, record types.DistrRecord, totalWeight math.Int) (sdk.Coins, error) { if coins.Empty() { - return coins, fmt.Errorf("coins to allocate cannot be empty") + return sdk.Coins{}, fmt.Errorf("coins to allocate cannot be empty") } - if distrInfo.TotalWeight.IsZero() { + if totalWeight.IsZero() { return sdk.Coins{}, fmt.Errorf("distribution total weight cannot be zero") } - totalDistrCoins := sdk.NewCoins() - totalWeightDec := sdk.NewDecFromInt(distrInfo.TotalWeight) + totalAllocated := sdk.NewCoins() for _, coin := range coins { if coin.IsZero() { continue } + assetAmountDec := sdk.NewDecFromInt(coin.Amount) - for _, record := range distrInfo.Records { - allocatingAmount := assetAmountDec.Mul(sdk.NewDecFromInt(record.Weight).Quo(totalWeightDec)).TruncateInt() - - // when weight is too small and no amount is allocated, just skip this to avoid zero coin send issues - if !allocatingAmount.IsPositive() { - logger.Info(fmt.Sprintf("allocating amount for (%d, %s) record is not positive", record.GaugeId, record.Weight.String())) - continue - } - - _, err := k.ik.GetGaugeByID(ctx, record.GaugeId) - if err != nil { - logger.Error(fmt.Sprintf("failed to get gauge %d", record.GaugeId), "error", err.Error()) - continue - } - - allocatedCoin := sdk.Coin{Denom: coin.Denom, Amount: allocatingAmount} - err = k.ik.AddToGaugeRewards(ctx, k.ak.GetModuleAddress(types.ModuleName), sdk.NewCoins(allocatedCoin), record.GaugeId) - if err != nil { - logger.Error("failed to add to gauge rewards", "error", err.Error()) - continue - } - totalDistrCoins = totalDistrCoins.Add(allocatedCoin) - } - } + weightDec := sdk.NewDecFromInt(record.Weight) + totalDec := sdk.NewDecFromInt(totalWeight) + allocatingAmount := assetAmountDec.Mul(weightDec.Quo(totalDec)).TruncateInt() - return 
totalDistrCoins, nil -} + // when weight is too small and no amount is allocated, just skip this to avoid zero coin send issues + if !allocatingAmount.IsPositive() { + k.Logger(ctx).Info(fmt.Sprintf("allocating amount for (%d, %s) record is not positive", record.GaugeId, record.Weight.String())) + continue + } -// Distribute distributes coins from an array of streams to all eligible locks. -func (k Keeper) Distribute(ctx sdk.Context, streams []types.Stream) (sdk.Coins, error) { - totalDistributedCoins := sdk.Coins{} - streamDistributedCoins := sdk.Coins{} - for _, stream := range streams { - wrappedDistributeFn := func(ctx sdk.Context) error { - var err error - streamDistributedCoins, err = k.distributeStream(ctx, stream) - return err + _, err := k.ik.GetGaugeByID(ctx, record.GaugeId) + if err != nil { + return sdk.Coins{}, fmt.Errorf("get gauge %d: %w", record.GaugeId, err) } - err := osmoutils.ApplyFuncIfNoError(ctx, wrappedDistributeFn) + allocatedCoin := sdk.Coin{Denom: coin.Denom, Amount: allocatingAmount} + err = k.ik.AddToGaugeRewards(ctx, k.ak.GetModuleAddress(types.ModuleName), sdk.NewCoins(allocatedCoin), record.GaugeId) if err != nil { - ctx.Logger().Error("Failed to distribute stream", "streamID", stream.Id, "error", err.Error()) - continue + return sdk.Coins{}, fmt.Errorf("add rewards to gauge %d: %w", record.GaugeId, err) } - totalDistributedCoins = totalDistributedCoins.Add(streamDistributedCoins...) + + totalAllocated = totalAllocated.Add(allocatedCoin) } - return totalDistributedCoins, nil + return totalAllocated, nil } -// distributeStream runs the distribution logic for a stream, and adds the sends to -// the distrInfo struct. It also updates the stream for the distribution. -func (k Keeper) distributeStream(ctx sdk.Context, stream types.Stream) (sdk.Coins, error) { - totalDistrCoins := sdk.NewCoins() - remainCoins := stream.Coins.Sub(stream.DistributedCoins...) 
- remainEpochs := stream.NumEpochsPaidOver - stream.FilledEpochs - - for _, coin := range remainCoins { - epochAmt := coin.Amount.Quo(sdk.NewInt(int64(remainEpochs))) - if epochAmt.IsPositive() { - totalDistrCoins = totalDistrCoins.Add(sdk.Coin{Denom: coin.Denom, Amount: epochAmt}) - } - } +type DistributeRewardsResult struct { + NewPointer types.EpochPointer + FilledStreams []types.Stream + DistributedCoins sdk.Coins + Iterations uint64 +} - // If the stream uses a sponsorship plan, query it and update stream distr info. The distribution - // might be empty and this is a valid scenario. In that case, we'll just skip at without - // filling the epoch. - if stream.Sponsored { - distr, err := k.sk.GetDistribution(ctx) +// DistributeRewards distributes all streams rewards to the corresponding gauges starting with +// the specified pointer and considering the limit. +func (k Keeper) DistributeRewards( + ctx sdk.Context, + pointer types.EpochPointer, + limit uint64, + streams []types.Stream, +) DistributeRewardsResult { + totalDistributed := sdk.NewCoins() + + // Temporary map for convenient calculations + streamUpdates := make(map[uint64]sdk.Coins, len(streams)) + + // Distribute to all the remaining gauges that are left after EndBlock + newPointer, iterations := IterateEpochPointer(pointer, streams, limit, func(v StreamGauge) pagination.Stop { + var distributed sdk.Coins + err := osmoutils.ApplyFuncIfNoError(ctx, func(ctx sdk.Context) error { + var err error + distributed, err = k.DistributeToGauge(ctx, v.Stream.EpochCoins, v.Gauge, v.Stream.DistributeTo.TotalWeight) + return err + }) if err != nil { - return sdk.Coins{}, fmt.Errorf("failed to get sponsorship distribution: %w", err) + // Ignore this gauge + k.Logger(ctx). + With("streamID", v.Stream.Id, "gaugeID", v.Gauge.GaugeId, "error", err.Error()). 
+ Error("Failed to distribute to gauge") + return pagination.Continue } - info := types.DistrInfoFromDistribution(distr) - // Update stream distr info - stream.DistributeTo = info - } - totalDistrCoins, err := k.DistributeByWeights(ctx, totalDistrCoins, stream.DistributeTo) - if err != nil { - return nil, err - } + totalDistributed = totalDistributed.Add(distributed...) - err = k.updateStreamPostDistribute(ctx, stream, totalDistrCoins) - if err != nil { - return nil, err - } + // Update distributed coins for the stream + update := streamUpdates[v.Stream.Id] + update = update.Add(distributed...) + streamUpdates[v.Stream.Id] = update - ctx.EventManager().EmitEvents(sdk.Events{ - sdk.NewEvent( - types.TypeEvtDistribution, - sdk.NewAttribute(types.AttributeStreamID, osmoutils.Uint64ToString(stream.Id)), - sdk.NewAttribute(types.AttributeAmount, totalDistrCoins.String()), - ), + return pagination.Continue }) - return totalDistrCoins, nil -} -// updateStreamPostDistribute increments the stream's filled epochs field. -// Also adds the coins that were just distributed to the stream's distributed coins field. -func (k Keeper) updateStreamPostDistribute(ctx sdk.Context, stream types.Stream, newlyDistributedCoins sdk.Coins) error { - stream.FilledEpochs += 1 - stream.DistributedCoins = stream.DistributedCoins.Add(newlyDistributedCoins...) - if err := k.setStream(ctx, &stream); err != nil { - return err + for i, s := range streams { + s.DistributedCoins = s.DistributedCoins.Add(streamUpdates[s.Id]...) 
+ streams[i] = s } - // Check if stream has completed its distribution - if stream.FilledEpochs >= stream.NumEpochsPaidOver { - if err := k.moveActiveStreamToFinishedStream(ctx, stream); err != nil { - return err - } + return DistributeRewardsResult{ + NewPointer: newPointer, + FilledStreams: streams, // Make sure that the returning slice is always sorted + DistributedCoins: totalDistributed, + Iterations: iterations, } - - return nil } diff --git a/x/streamer/keeper/distribute_test.go b/x/streamer/keeper/distribute_test.go index 2ff0f70b1..64986d932 100644 --- a/x/streamer/keeper/distribute_test.go +++ b/x/streamer/keeper/distribute_test.go @@ -53,39 +53,44 @@ func (suite *KeeperTestSuite) TestDistribute() { }, }, } + for _, tc := range tests { - suite.SetupTest() - // setup streams and defined in the above tests, then distribute to them - - var streams []types.Stream - gaugesExpectedRewards := make(map[uint64]sdk.Coins) - for _, stream := range tc.streams { - // create a stream - _, newstream := suite.CreateStream(stream.distrInfo, stream.coins, time.Now(), "day", stream.numOfEpochs) - streams = append(streams, *newstream) - - // calculate expected rewards - for _, coin := range stream.coins { - epochAmt := coin.Amount.Quo(sdk.NewInt(int64(stream.numOfEpochs))) - if !epochAmt.IsPositive() { - continue - } - for _, record := range newstream.DistributeTo.Records { - expectedAmtFromStream := epochAmt.Mul(record.Weight).Quo(newstream.DistributeTo.TotalWeight) - expectedCoins := sdk.Coin{Denom: coin.Denom, Amount: expectedAmtFromStream} - gaugesExpectedRewards[record.GaugeId] = gaugesExpectedRewards[record.GaugeId].Add(expectedCoins) + suite.Run(tc.name, func() { + suite.SetupTest() + // Setup streams and defined in the above tests, then distribute to them + + var streams []types.Stream + gaugesExpectedRewards := make(map[uint64]sdk.Coins) + for _, stream := range tc.streams { + // Create a stream, move it from upcoming to active and update its parameters + _, 
newStream := suite.CreateStream(stream.distrInfo, stream.coins, time.Now().Add(-time.Minute), "day", stream.numOfEpochs) + + streams = append(streams, *newStream) + + // Calculate expected rewards + for _, coin := range stream.coins { + epochAmt := coin.Amount.Quo(sdk.NewInt(int64(stream.numOfEpochs))) + if !epochAmt.IsPositive() { + continue + } + for _, record := range newStream.DistributeTo.Records { + expectedAmtFromStream := epochAmt.Mul(record.Weight).Quo(newStream.DistributeTo.TotalWeight) + expectedCoins := sdk.Coin{Denom: coin.Denom, Amount: expectedAmtFromStream} + gaugesExpectedRewards[record.GaugeId] = gaugesExpectedRewards[record.GaugeId].Add(expectedCoins) + } } } - } - _, err := suite.App.StreamerKeeper.Distribute(suite.Ctx, streams) - suite.Require().NoError(err) - // check expected rewards against actual rewards received - gauges := suite.App.IncentivesKeeper.GetGauges(suite.Ctx) - suite.Require().Equal(len(gaugesExpectedRewards), len(gauges), tc.name) - for _, gauge := range gauges { - suite.Require().Equal(gaugesExpectedRewards[gauge.Id], gauge.Coins, tc.name) - } + // Trigger the distribution + suite.DistributeAllRewards(streams) + + // Check expected rewards against actual rewards received + gauges := suite.App.IncentivesKeeper.GetGauges(suite.Ctx) + suite.Require().Equal(len(gaugesExpectedRewards), len(gauges), tc.name) + for _, gauge := range gauges { + suite.Require().ElementsMatch(gaugesExpectedRewards[gauge.Id], gauge.Coins, tc.name) + } + }) } } @@ -110,8 +115,8 @@ func (suite *KeeperTestSuite) TestSponsoredDistribute() { hasIntermediateDistr bool // the vote that forms the intermediate distribution intermediateVote sponsorshiptypes.MsgVote - // number of epochs filled after the Distribute call - filledEpochs uint64 + // is the epoch filled as a side effect + fillEpochs bool }{ { name: "single-coin stream, no initial nor intermediate distributions", @@ -124,7 +129,7 @@ func (suite *KeeperTestSuite) TestSponsoredDistribute() { initialVote: 
sponsorshiptypes.MsgVote{}, hasIntermediateDistr: false, intermediateVote: sponsorshiptypes.MsgVote{}, - filledEpochs: 0, + fillEpochs: false, }, { name: "single-coin stream, initial distribution", @@ -143,7 +148,7 @@ func (suite *KeeperTestSuite) TestSponsoredDistribute() { }, hasIntermediateDistr: false, intermediateVote: sponsorshiptypes.MsgVote{}, - filledEpochs: 1, + fillEpochs: true, }, { name: "single-coin stream, intermediate distribution", @@ -162,7 +167,7 @@ func (suite *KeeperTestSuite) TestSponsoredDistribute() { {GaugeId: 2, Weight: sponsorshiptypes.DYM.MulRaw(90)}, }, }, - filledEpochs: 1, + fillEpochs: true, }, { name: "single-coin stream, initial and intermediate distributions", @@ -187,7 +192,7 @@ func (suite *KeeperTestSuite) TestSponsoredDistribute() { {GaugeId: 2, Weight: sponsorshiptypes.DYM.MulRaw(90)}, }, }, - filledEpochs: 1, + fillEpochs: true, }, { name: "stream distr info doesn't play any role", @@ -222,7 +227,7 @@ func (suite *KeeperTestSuite) TestSponsoredDistribute() { {GaugeId: 2, Weight: sponsorshiptypes.DYM.MulRaw(90)}, }, }, - filledEpochs: 1, + fillEpochs: true, }, } for _, tc := range tests { @@ -235,7 +240,7 @@ func (suite *KeeperTestSuite) TestSponsoredDistribute() { } // Create a stream - sID, s := suite.CreateSponsoredStream(tc.stream.distrInfo, tc.stream.coins, time.Now(), "day", tc.stream.numOfEpochs) + sID, s := suite.CreateSponsoredStream(tc.stream.distrInfo, tc.stream.coins, time.Now().Add(-time.Minute), "day", tc.stream.numOfEpochs) // Check that the stream distr matches the current sponsorship distr actualDistr, err := suite.App.StreamerKeeper.GetStreamByID(suite.Ctx, sID) @@ -252,7 +257,13 @@ func (suite *KeeperTestSuite) TestSponsoredDistribute() { } // Distribute - _, err = suite.App.StreamerKeeper.Distribute(suite.Ctx, []types.Stream{*actualDistr}) + // First, simulate the epoch start. This moves gauges from upcoming to active and + // updates corresponding streams parameters. 
+ err = suite.App.StreamerKeeper.BeforeEpochStart(suite.Ctx, "day") + suite.Require().NoError(err) + + // Then, simulate the epoch end. This triggers the distribution of the rewards. + _, err = suite.App.StreamerKeeper.AfterEpochEnd(suite.Ctx, "day") suite.Require().NoError(err) // Check that the stream distr matches the current sponsorship distr @@ -266,7 +277,7 @@ func (suite *KeeperTestSuite) TestSponsoredDistribute() { // Check the state actual, err := suite.App.StreamerKeeper.GetStreamByID(suite.Ctx, sID) suite.Require().NoError(err) - suite.Require().Equal(tc.filledEpochs, actual.FilledEpochs) + suite.Require().Equal(tc.fillEpochs, actual.FilledEpochs > 0) // Calculate expected rewards. The result is based on the merged initial and intermediate distributions. expectedDistr := types.DistrInfoFromDistribution(initialDistr.Merge(intermediateDistr)) @@ -326,17 +337,13 @@ func (suite *KeeperTestSuite) TestGetModuleToDistributeCoins() { // move all created streams from upcoming to active suite.Ctx = suite.Ctx.WithBlockTime(time.Now()) streams := suite.App.StreamerKeeper.GetStreams(suite.Ctx) - for _, stream := range streams { - err := suite.App.StreamerKeeper.MoveUpcomingStreamToActiveStream(suite.Ctx, stream) - suite.Require().NoError(err) - } // distribute coins to stakers - distrCoins, err := suite.App.StreamerKeeper.Distribute(suite.Ctx, streams) + distrCoins := suite.DistributeAllRewards(streams) suite.Require().NoError(err) suite.Require().Equal(sdk.Coins{sdk.NewInt64Coin("stake", 20000), sdk.NewInt64Coin("udym", 10000)}, distrCoins) // check stream changes after distribution coins = suite.App.StreamerKeeper.GetModuleToDistributeCoins(suite.Ctx) - suite.Require().Equal(coins, streamCoins.Add(streamCoins2...).Sub(distrCoins...)) + suite.Require().ElementsMatch(coins, streamCoins.Add(streamCoins2...).Sub(distrCoins...)) } diff --git a/x/streamer/keeper/gauges_hooks.go b/x/streamer/keeper/gauges_hooks.go index 9c72026e1..3fb950c98 100644 --- 
a/x/streamer/keeper/gauges_hooks.go +++ b/x/streamer/keeper/gauges_hooks.go @@ -37,9 +37,6 @@ func (k Keeper) CreatePoolGauge(ctx sdk.Context, poolId uint64) error { } func (k Keeper) CreateRollappGauge(ctx sdk.Context, rollappID string) error { - _, err := k.ik.CreateRollappGauge( - ctx, - rollappID, - ) + _, err := k.ik.CreateRollappGauge(ctx, rollappID) return err } diff --git a/x/streamer/keeper/genesis.go b/x/streamer/keeper/genesis.go index 80ed96706..ac9f31d5c 100644 --- a/x/streamer/keeper/genesis.go +++ b/x/streamer/keeper/genesis.go @@ -2,6 +2,7 @@ package keeper import ( "fmt" + "slices" sdk "github.com/cosmos/cosmos-sdk/types" @@ -16,21 +17,45 @@ func (k Keeper) InitGenesis(ctx sdk.Context, genState types.GenesisState) { } k.SetParams(ctx, genState.Params) + + slices.SortFunc(genState.Streams, CmpStreams) + for _, stream := range genState.Streams { - stream := stream err := k.SetStreamWithRefKey(ctx, &stream) if err != nil { panic(err) } } + k.SetLastStreamID(ctx, genState.LastStreamId) + + // Create epoch pointers for all epoch infos + for _, epoch := range k.ek.AllEpochInfos(ctx) { + err := k.SaveEpochPointer(ctx, types.NewEpochPointer(epoch.Identifier, epoch.Duration)) + if err != nil { + panic(err) + } + } + + // Fill epoch pointers specified in the genesis + for _, pointer := range genState.EpochPointers { + err := k.SaveEpochPointer(ctx, pointer) + if err != nil { + panic(err) + } + } } // ExportGenesis returns the x/streamer module's exported genesis. 
func (k Keeper) ExportGenesis(ctx sdk.Context) *types.GenesisState { + pointers, err := k.GetAllEpochPointers(ctx) + if err != nil { + panic(err) + } return &types.GenesisState{ - Params: k.GetParams(ctx), - Streams: k.GetNotFinishedStreams(ctx), - LastStreamId: k.GetLastStreamID(ctx), + Params: k.GetParams(ctx), + Streams: k.GetNotFinishedStreams(ctx), + LastStreamId: k.GetLastStreamID(ctx), + EpochPointers: pointers, } } diff --git a/x/streamer/keeper/genesis_test.go b/x/streamer/keeper/genesis_test.go index 053c5ebf2..ab515e2d5 100644 --- a/x/streamer/keeper/genesis_test.go +++ b/x/streamer/keeper/genesis_test.go @@ -63,15 +63,18 @@ func TestStreamerExportGenesis(t *testing.T) { require.NoError(t, err) // ensure the first stream listed in the exported genesis explicitly matches expectation + const numEpochsPaidOver = 30 require.Equal(t, genesis.Streams[0], types.Stream{ Id: streamID, DistributeTo: distInfo, Coins: coins, - NumEpochsPaidOver: 30, + StartTime: startTime.UTC(), DistrEpochIdentifier: "day", + NumEpochsPaidOver: numEpochsPaidOver, FilledEpochs: 0, DistributedCoins: sdk.Coins(nil), - StartTime: startTime.UTC(), + Sponsored: false, + EpochCoins: coins.QuoInt(math.NewInt(numEpochsPaidOver)), }) } @@ -114,10 +117,16 @@ func TestStreamerInitGenesis(t *testing.T) { } // initialize genesis with specified parameter, the stream created earlier, and lockable durations + expectedPointer := types.EpochPointer{ + StreamId: 1, + GaugeId: 1, + EpochIdentifier: "day", + } app.StreamerKeeper.InitGenesis(ctx, types.GenesisState{ - Params: types.Params{}, - Streams: []types.Stream{stream}, - LastStreamId: 1, + Params: types.Params{}, + Streams: []types.Stream{stream}, + LastStreamId: 1, + EpochPointers: []types.EpochPointer{expectedPointer}, }) // check that the stream created earlier was initialized through initGenesis and still exists on chain @@ -126,6 +135,9 @@ func TestStreamerInitGenesis(t *testing.T) { require.Len(t, streams, 1) require.Equal(t, streams[0], 
stream) require.Equal(t, lastStreamID, uint64(1)) + ep, err := app.StreamerKeeper.GetEpochPointer(ctx, "day") + require.NoError(t, err) + require.Equal(t, expectedPointer, ep) } func TestStreamerOrder(t *testing.T) { diff --git a/x/streamer/keeper/grpc_query_test.go b/x/streamer/keeper/grpc_query_test.go index 744eb4b27..8b07ef369 100644 --- a/x/streamer/keeper/grpc_query_test.go +++ b/x/streamer/keeper/grpc_query_test.go @@ -185,26 +185,33 @@ func (suite *KeeperTestSuite) TestGRPCToDistributeCoins() { stream, err := suite.querier.GetStreamByID(suite.Ctx, streamID) suite.Require().NoError(err) suite.Require().NotNil(stream) - streams := []types.Stream{*stream} // check to distribute coins after stream creation, but before stream active res, err = suite.querier.ModuleToDistributeCoins(sdk.WrapSDKContext(suite.Ctx), &types.ModuleToDistributeCoinsRequest{}) suite.Require().NoError(err) suite.Require().Equal(res.Coins, coins) + // check to distribute coins after stream creation + // ensure this equals the coins within the previously created non-perpetual stream + res, err = suite.querier.ModuleToDistributeCoins(sdk.WrapSDKContext(suite.Ctx), &types.ModuleToDistributeCoinsRequest{}) + suite.Require().NoError(err) + suite.Require().Equal(res.Coins, coins) + // move stream from an upcoming to an active status - // suite.Ctx = suite.Ctx.WithBlockTime(startTime) - err = suite.querier.MoveUpcomingStreamToActiveStream(suite.Ctx, *stream) + // this simulates the new epoch start + // the stream is moved to active and its rewards are to be distributed during the epoch + err = suite.App.StreamerKeeper.BeforeEpochStart(suite.Ctx, "day") suite.Require().NoError(err) - // check to distribute coins after stream creation - // ensure this equals the coins within the previously created non perpetual stream + // check to distribute coins after the epoch start + // ensure this equals the coins within the previously created non-perpetual stream + // the rewards are not distributed yet res, 
err = suite.querier.ModuleToDistributeCoins(sdk.WrapSDKContext(suite.Ctx), &types.ModuleToDistributeCoinsRequest{}) suite.Require().NoError(err) suite.Require().Equal(res.Coins, coins) - // distribute coins to stakers - distrCoins, err := suite.querier.Distribute(suite.Ctx, streams) + // trigger the epoch end. this will distribute all rewards assigned to this epoch + distrCoins, err := suite.App.StreamerKeeper.AfterEpochEnd(suite.Ctx, "day") suite.Require().NoError(err) suite.Require().Equal(distrCoins, sdk.Coins{sdk.NewInt64Coin("stake", 10000)}) @@ -216,18 +223,29 @@ func (suite *KeeperTestSuite) TestGRPCToDistributeCoins() { suite.Require().NotNil(stream) suite.Require().Equal(stream.FilledEpochs, uint64(1)) suite.Require().Equal(stream.DistributedCoins, sdk.Coins{sdk.NewInt64Coin("stake", 10000)}) - streams = []types.Stream{*stream} // check that the to distribute coins is equal to the initial stream coin balance minus what has been distributed already (10-4=6) res, err = suite.querier.ModuleToDistributeCoins(sdk.WrapSDKContext(suite.Ctx), &types.ModuleToDistributeCoinsRequest{}) suite.Require().NoError(err) suite.Require().Equal(res.Coins, coins.Sub(distrCoins...)) - // distribute second round to stakers - distrCoins, err = suite.querier.Distribute(suite.Ctx, streams) + // trigger the next epoch start and then the next epoch end. + // this simulates the executed epoch and consequently distributes the second round. 
+ err = suite.App.StreamerKeeper.BeforeEpochStart(suite.Ctx, "day") + suite.Require().NoError(err) + distrCoins, err = suite.App.StreamerKeeper.AfterEpochEnd(suite.Ctx, "day") suite.Require().NoError(err) suite.Require().Equal(sdk.Coins{sdk.NewInt64Coin("stake", 10000)}, distrCoins) + // check stream changes after distribution + // ensure the stream's filled epochs have been increased by 1 + // ensure we have distributed 4 out of the 10 stake tokens + stream, err = suite.querier.GetStreamByID(suite.Ctx, streamID) + suite.Require().NoError(err) + suite.Require().NotNil(stream) + suite.Require().Equal(stream.FilledEpochs, uint64(2)) + suite.Require().Equal(stream.DistributedCoins, sdk.Coins{sdk.NewInt64Coin("stake", 20000)}) + // now that all coins have been distributed (4 in first found 6 in the second round) // to distribute coins should be null res, err = suite.querier.ModuleToDistributeCoins(sdk.WrapSDKContext(suite.Ctx), &types.ModuleToDistributeCoinsRequest{}) diff --git a/x/streamer/keeper/hooks.go b/x/streamer/keeper/hooks.go index bf216cf82..2c1fd4667 100644 --- a/x/streamer/keeper/hooks.go +++ b/x/streamer/keeper/hooks.go @@ -1,6 +1,9 @@ package keeper import ( + "fmt" + + "github.com/dymensionxyz/sdk-utils/utils/uevent" epochstypes "github.com/osmosis-labs/osmosis/v15/x/epochs/types" gammtypes "github.com/osmosis-labs/osmosis/v15/x/gamm/types" @@ -33,55 +36,114 @@ func (k Keeper) Hooks() Hooks { /* epoch hooks */ /* -------------------------------------------------------------------------- */ -// BeforeEpochStart is the epoch start hook. -func (k Keeper) BeforeEpochStart(ctx sdk.Context, epochIdentifier string, epochNumber int64) error { +// BeforeEpochStart updates the streams based on a new epoch and emits an event. +// It moves upcoming streams to active if the start time has been reached. +// It updates active streams with respect to the new epoch and saves them. +// Finally, it emits an event with the number of active streams. 
+func (k Keeper) BeforeEpochStart(ctx sdk.Context, epochIdentifier string) error { + // Move upcoming streams to active if start time reached + upcomingStreams := k.GetUpcomingStreams(ctx) + for _, s := range upcomingStreams { + if !ctx.BlockTime().Before(s.StartTime) { + err := k.moveUpcomingStreamToActiveStream(ctx, s) + if err != nil { + return fmt.Errorf("move upcoming stream to active stream: %w", err) + } + } + } + + toStart := k.GetActiveStreamsForEpoch(ctx, epochIdentifier) + + // Update streams with respect to a new epoch and save them + for _, s := range toStart { + updated, err := k.UpdateStreamAtEpochStart(ctx, s) + if err != nil { + return fmt.Errorf("update stream '%d' at epoch start: %w", s.Id, err) + } + // Save the stream + err = k.SetStream(ctx, &updated) + if err != nil { + return fmt.Errorf("set stream: %w", err) + } + } + + err := uevent.EmitTypedEvent(ctx, &types.EventEpochStart{ + ActiveStreamsNum: uint64(len(toStart)), + }) + if err != nil { + return fmt.Errorf("emit typed event: %w", err) + } + return nil } -// AfterEpochEnd is the epoch end hook. -func (k Keeper) AfterEpochEnd(ctx sdk.Context, epochIdentifier string, epochNumber int64) error { - streams := k.GetUpcomingStreams(ctx) - // move to active if start time reached - for _, stream := range streams { - if !ctx.BlockTime().Before(stream.StartTime) { - if err := k.moveUpcomingStreamToActiveStream(ctx, stream); err != nil { - return err - } - } +// AfterEpochEnd distributes rewards, updates streams, and saves the changes to the state after the epoch end. +// It distributes rewards to streams that have the specified epoch identifier or aborts if there are no streams +// in this epoch. After the distribution, it resets the epoch pointer to the very first gauge. 
+func (k Keeper) AfterEpochEnd(ctx sdk.Context, epochIdentifier string) (sdk.Coins, error) { + toDistribute := k.GetActiveStreamsForEpoch(ctx, epochIdentifier) + + if len(toDistribute) == 0 { + // Nothing to distribute + return sdk.Coins{}, nil + } + + epochPointer, err := k.GetEpochPointer(ctx, epochIdentifier) + if err != nil { + return sdk.Coins{}, fmt.Errorf("get epoch pointer for epoch '%s': %w", epochIdentifier, err) } - // distribute due to epoch event - streams = k.GetActiveStreams(ctx) - distrStreams := []types.Stream{} - for _, stream := range streams { - // begin distribution if it's correct epoch - if epochIdentifier != stream.DistrEpochIdentifier { - continue + distrResult := k.DistributeRewards(ctx, epochPointer, types.IterationsNoLimit, toDistribute) + + // Update streams with respect to a new epoch and save them + for _, s := range distrResult.FilledStreams { + updated, err := k.UpdateStreamAtEpochEnd(ctx, s) + if err != nil { + return sdk.Coins{}, fmt.Errorf("update stream '%d' at epoch end: %w", s.Id, err) + } + // Save the stream + err = k.SetStream(ctx, &updated) + if err != nil { + return sdk.Coins{}, fmt.Errorf("set stream: %w", err) } - distrStreams = append(distrStreams, stream) } - if len(distrStreams) == 0 { - return nil + // Reset the epoch pointer + distrResult.NewPointer.SetToFirstGauge() + err = k.SaveEpochPointer(ctx, distrResult.NewPointer) + if err != nil { + return sdk.Coins{}, fmt.Errorf("save epoch pointer: %w", err) } - distributedAmt, err := k.Distribute(ctx, distrStreams) + err = ctx.EventManager().EmitTypedEvent(&types.EventEpochEnd{ + Iterations: distrResult.Iterations, + Distributed: distrResult.DistributedCoins, + }) if err != nil { - return err + return sdk.Coins{}, fmt.Errorf("emit typed event: %w", err) } - ctx.Logger().Info("Streamer distributed coins", "amount", distributedAmt.String()) - return nil + ctx.Logger().Info("Streamer distributed coins", "amount", distrResult.DistributedCoins.String()) + + return 
distrResult.DistributedCoins, nil } // BeforeEpochStart is the epoch start hook. -func (h Hooks) BeforeEpochStart(ctx sdk.Context, epochIdentifier string, epochNumber int64) error { - return h.k.BeforeEpochStart(ctx, epochIdentifier, epochNumber) +func (h Hooks) BeforeEpochStart(ctx sdk.Context, epochIdentifier string, _ int64) error { + err := h.k.BeforeEpochStart(ctx, epochIdentifier) + if err != nil { + return fmt.Errorf("x/streamer: before epoch '%s' start: %w", epochIdentifier, err) + } + return nil } // AfterEpochEnd is the epoch end hook. -func (h Hooks) AfterEpochEnd(ctx sdk.Context, epochIdentifier string, epochNumber int64) error { - return h.k.AfterEpochEnd(ctx, epochIdentifier, epochNumber) +func (h Hooks) AfterEpochEnd(ctx sdk.Context, epochIdentifier string, _ int64) error { + _, err := h.k.AfterEpochEnd(ctx, epochIdentifier) + if err != nil { + return fmt.Errorf("x/streamer: after epoch '%s' end: %w", epochIdentifier, err) + } + return nil } /* -------------------------------------------------------------------------- */ diff --git a/x/streamer/keeper/hooks_test.go b/x/streamer/keeper/hooks_test.go index 0340dbc9a..6a78957d4 100644 --- a/x/streamer/keeper/hooks_test.go +++ b/x/streamer/keeper/hooks_test.go @@ -13,7 +13,7 @@ import ( var _ = suite.TestingSuite(nil) -var singleDistrInfo []types.DistrRecord = []types.DistrRecord{ +var singleDistrInfo = []types.DistrRecord{ { GaugeId: 1, Weight: math.NewInt(100), @@ -59,6 +59,10 @@ func (suite *KeeperTestSuite) TestHookOperation() { /* ----------- call the epoch hook with month (no stream related) ----------- */ ctx := suite.Ctx.WithBlockTime(time.Now()) + + err = suite.App.StreamerKeeper.Hooks().BeforeEpochStart(ctx, "month", 0) + suite.Require().NoError(err) + err = suite.App.StreamerKeeper.Hooks().AfterEpochEnd(ctx, "month", 0) suite.Require().NoError(err) @@ -67,6 +71,9 @@ func (suite *KeeperTestSuite) TestHookOperation() { suite.Require().Len(streams, 3) /* --------- call the epoch hook with day (2 
active and one future) --------- */ + err = suite.App.StreamerKeeper.Hooks().BeforeEpochStart(ctx, "day", 0) + suite.Require().NoError(err) + err = suite.App.StreamerKeeper.Hooks().AfterEpochEnd(ctx, "day", 0) suite.Require().NoError(err) @@ -84,6 +91,9 @@ func (suite *KeeperTestSuite) TestHookOperation() { suite.Require().Equal(sdk.NewCoins(sdk.NewInt64Coin("stake", 2000)).String(), gauge.Coins.String()) /* ------------------------- call weekly epoch hook ------------------------- */ + err = suite.App.StreamerKeeper.Hooks().BeforeEpochStart(ctx, "week", 0) + suite.Require().NoError(err) + err = suite.App.StreamerKeeper.Hooks().AfterEpochEnd(ctx, "week", 0) suite.Require().NoError(err) @@ -101,6 +111,9 @@ func (suite *KeeperTestSuite) TestHookOperation() { suite.Require().Equal(sdk.NewCoins(sdk.NewInt64Coin("stake", 3000)).String(), gauge.Coins.String()) /* ------- call daily epoch hook again, check both stream distirubute ------- */ + err = suite.App.StreamerKeeper.Hooks().BeforeEpochStart(ctx, "day", 0) + suite.Require().NoError(err) + err = suite.App.StreamerKeeper.Hooks().AfterEpochEnd(ctx, "day", 0) suite.Require().NoError(err) @@ -110,6 +123,9 @@ func (suite *KeeperTestSuite) TestHookOperation() { suite.Require().Equal(sdk.NewCoins(sdk.NewInt64Coin("stake", 5000)).String(), gauge.Coins.String()) /* ------- call daily epoch hook again, check both stream distirubute ------- */ + err = suite.App.StreamerKeeper.Hooks().BeforeEpochStart(ctx, "day", 0) + suite.Require().NoError(err) + err = suite.App.StreamerKeeper.Hooks().AfterEpochEnd(ctx, "day", 0) suite.Require().NoError(err) diff --git a/x/streamer/keeper/keeper.go b/x/streamer/keeper/keeper.go index 6afde48d9..faf4978d3 100644 --- a/x/streamer/keeper/keeper.go +++ b/x/streamer/keeper/keeper.go @@ -4,10 +4,13 @@ import ( "fmt" "time" + "cosmossdk.io/collections" "github.com/cometbft/cometbft/libs/log" + "github.com/cosmos/cosmos-sdk/codec" "github.com/osmosis-labs/osmosis/v15/osmoutils" epochstypes 
"github.com/osmosis-labs/osmosis/v15/x/epochs/types" + "github.com/dymensionxyz/dymension/v3/internal/collcompat" "github.com/dymensionxyz/dymension/v3/x/streamer/types" storetypes "github.com/cosmos/cosmos-sdk/store/types" @@ -25,14 +28,28 @@ type Keeper struct { ak types.AccountKeeper ik types.IncentivesKeeper sk types.SponsorshipKeeper + + // epochPointers holds a mapping from the epoch identifier to EpochPointer. + epochPointers collections.Map[string, types.EpochPointer] } // NewKeeper returns a new instance of the incentive module keeper struct. -func NewKeeper(storeKey storetypes.StoreKey, paramSpace paramtypes.Subspace, bk types.BankKeeper, ek types.EpochKeeper, ak types.AccountKeeper, ik types.IncentivesKeeper, sk types.SponsorshipKeeper) *Keeper { +func NewKeeper( + cdc codec.BinaryCodec, + storeKey storetypes.StoreKey, + paramSpace paramtypes.Subspace, + bk types.BankKeeper, + ek types.EpochKeeper, + ak types.AccountKeeper, + ik types.IncentivesKeeper, + sk types.SponsorshipKeeper, +) *Keeper { if !paramSpace.HasKeyTable() { paramSpace = paramSpace.WithKeyTable(types.ParamKeyTable()) } + sb := collections.NewSchemaBuilder(collcompat.NewKVStoreService(storeKey)) + return &Keeper{ storeKey: storeKey, paramSpace: paramSpace, @@ -41,6 +58,13 @@ func NewKeeper(storeKey storetypes.StoreKey, paramSpace paramtypes.Subspace, bk ak: ak, ik: ik, sk: sk, + epochPointers: collections.NewMap( + sb, + types.KeyPrefixEpochPointers, + "epoch_pointers", + collections.StringKey, + collcompat.ProtoValue[types.EpochPointer](cdc), + ), } } @@ -100,7 +124,7 @@ func (k Keeper) CreateStream(ctx sdk.Context, coins sdk.Coins, records []types.D sponsored, ) - err := k.setStream(ctx, &stream) + err := k.SetStream(ctx, &stream) if err != nil { return 0, err } diff --git a/x/streamer/keeper/keeper_replace_update_distribution.go b/x/streamer/keeper/keeper_replace_update_distribution.go index be63ab7c6..bedcb9177 100644 --- a/x/streamer/keeper/keeper_replace_update_distribution.go +++ 
b/x/streamer/keeper/keeper_replace_update_distribution.go @@ -22,7 +22,7 @@ func (k Keeper) ReplaceDistrRecords(ctx sdk.Context, streamId uint64, records [] stream.DistributeTo = distrInfo - err = k.setStream(ctx, stream) + err = k.SetStream(ctx, stream) if err != nil { return err } @@ -70,7 +70,7 @@ func (k Keeper) UpdateDistrRecords(ctx sdk.Context, streamId uint64, records []t stream.DistributeTo = distrInfo - err = k.setStream(ctx, stream) + err = k.SetStream(ctx, stream) if err != nil { return err } diff --git a/x/streamer/keeper/keeper_terminate_stream_test.go b/x/streamer/keeper/keeper_terminate_stream_test.go index 6289ac6ad..1d0eb6958 100644 --- a/x/streamer/keeper/keeper_terminate_stream_test.go +++ b/x/streamer/keeper/keeper_terminate_stream_test.go @@ -4,7 +4,6 @@ import ( "time" sdk "github.com/cosmos/cosmos-sdk/types" - "github.com/dymensionxyz/dymension/v3/x/streamer/types" "github.com/stretchr/testify/suite" ) @@ -108,17 +107,16 @@ func (suite *KeeperTestSuite) TestTerminateStream_ModuleDistributedCoins() { coinUpcoming := sdk.NewInt64Coin("stake", 10000) coinActive := coinUpcoming - err := suite.CreateGauge() - suite.Require().NoError(err) - err = suite.CreateGauge() - suite.Require().NoError(err) + suite.CreateGauges(2) // create upcoming stream id1, _ := suite.CreateStream(defaultDistrInfo, sdk.Coins{coinUpcoming}, time.Now().Add(10*time.Minute), "day", 30) // create active stream - id2, stream2 := suite.CreateStream(defaultDistrInfo, sdk.Coins{coinActive}, time.Time{}, "day", 10) - err = suite.App.StreamerKeeper.MoveUpcomingStreamToActiveStream(suite.Ctx, *stream2) + id2, _ := suite.CreateStream(defaultDistrInfo, sdk.Coins{coinActive}, time.Time{}, "day", 10) + + // simulate the epoch start: activate the stream + err := suite.App.StreamerKeeper.BeforeEpochStart(suite.Ctx, "day") suite.Require().NoError(err) toDist := suite.App.StreamerKeeper.GetModuleToDistributeCoins(suite.Ctx) @@ -134,9 +132,10 @@ func (suite *KeeperTestSuite) 
TestTerminateStream_ModuleDistributedCoins() { toDist = suite.App.StreamerKeeper.GetModuleToDistributeCoins(suite.Ctx) suite.Require().Equal(sdk.ZeroInt(), toDist.AmountOf("stake")) - distributed, err := suite.App.StreamerKeeper.Distribute(suite.Ctx, []types.Stream{*stream2}) + // simulate the epoch end + distributed, err := suite.App.StreamerKeeper.AfterEpochEnd(suite.Ctx, "day") suite.Require().NoError(err) - suite.Require().True(distributed.AmountOf("stake").IsPositive()) + suite.Require().Empty(distributed) /* ---------------------- check ModuleDistributedCoins ---------------------- */ expectedDist := suite.App.StreamerKeeper.GetModuleDistributedCoins(suite.Ctx) diff --git a/x/streamer/keeper/store.go b/x/streamer/keeper/store.go index a70e81328..c64f82885 100644 --- a/x/streamer/keeper/store.go +++ b/x/streamer/keeper/store.go @@ -4,10 +4,10 @@ import ( "encoding/json" "fmt" + sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/gogoproto/proto" - "github.com/dymensionxyz/dymension/v3/x/streamer/types" - sdk "github.com/cosmos/cosmos-sdk/types" + "github.com/dymensionxyz/dymension/v3/x/streamer/types" ) // GetLastStreamID returns the last used stream ID. @@ -42,7 +42,7 @@ func (k Keeper) CreateStreamRefKeys(ctx sdk.Context, stream *types.Stream, combi // SetStreamWithRefKey takes a single stream and assigns a key. // Takes combinedKey (the keyPrefix for upcoming, active, or finished streams combined with stream start time) and adds a reference to the respective stream ID. func (k Keeper) SetStreamWithRefKey(ctx sdk.Context, stream *types.Stream) error { - err := k.setStream(ctx, stream) + err := k.SetStream(ctx, stream) if err != nil { return err } @@ -67,8 +67,8 @@ func streamStoreKey(ID uint64) []byte { return combineKeys(types.KeyPrefixPeriodStream, sdk.Uint64ToBigEndian(ID)) } -// setStream set the stream inside store. -func (k Keeper) setStream(ctx sdk.Context, stream *types.Stream) error { +// SetStream set the stream inside store. 
+func (k Keeper) SetStream(ctx sdk.Context, stream *types.Stream) error { store := ctx.KVStore(k.storeKey) bz, err := proto.Marshal(stream) if err != nil { @@ -127,3 +127,23 @@ func (k Keeper) deleteStreamRefByKey(ctx sdk.Context, key []byte, streamID uint6 } return nil } + +func (k Keeper) GetAllEpochPointers(ctx sdk.Context) ([]types.EpochPointer, error) { + iter, err := k.epochPointers.Iterate(ctx, nil) + if err != nil { + return nil, err + } + return iter.Values() +} + +func (k Keeper) GetEpochPointer(ctx sdk.Context, epochIdentifier string) (types.EpochPointer, error) { + return k.epochPointers.Get(ctx, epochIdentifier) +} + +func (k Keeper) HasEpochPointer(ctx sdk.Context, epochIdentifier string) (bool, error) { + return k.epochPointers.Has(ctx, epochIdentifier) +} + +func (k Keeper) SaveEpochPointer(ctx sdk.Context, p types.EpochPointer) error { + return k.epochPointers.Set(ctx, p.EpochIdentifier, p) +} diff --git a/x/streamer/keeper/stream.go b/x/streamer/keeper/stream.go index b0a4013a8..7aacfe47c 100644 --- a/x/streamer/keeper/stream.go +++ b/x/streamer/keeper/stream.go @@ -4,13 +4,56 @@ import ( "fmt" "sort" + "cosmossdk.io/math" + sdk "github.com/cosmos/cosmos-sdk/types" "github.com/cosmos/gogoproto/proto" "github.com/dymensionxyz/dymension/v3/x/streamer/types" - - sdk "github.com/cosmos/cosmos-sdk/types" ) +// UpdateStreamAtEpochStart updates the stream for a new epoch: estimates coins that streamer will +// distribute during this epoch and updates a sponsored distribution if needed. +func (k Keeper) UpdateStreamAtEpochStart(ctx sdk.Context, stream types.Stream) (types.Stream, error) { + remainCoins := stream.Coins.Sub(stream.DistributedCoins...) + remainEpochs := stream.NumEpochsPaidOver - stream.FilledEpochs + epochCoins := remainCoins.QuoInt(math.NewIntFromUint64(remainEpochs)) + + // If the stream uses a sponsorship plan, query it and update stream distr info. The distribution + // might be empty and this is a valid scenario. 
In that case, we'll just skip without filling the epoch. + if stream.Sponsored { + distr, err := k.sk.GetDistribution(ctx) + if err != nil { + return types.Stream{}, fmt.Errorf("get sponsorship distribution: %w", err) + } + // Update stream distr info + stream.DistributeTo = types.DistrInfoFromDistribution(distr) + } + + // Add coins to distribute during the next epoch + stream.EpochCoins = epochCoins + + return stream, nil +} + +// UpdateStreamAtEpochEnd updates the stream at the end of the epoch: increases the filled epoch number +// and makes the stream finished if needed. +func (k Keeper) UpdateStreamAtEpochEnd(ctx sdk.Context, stream types.Stream) (types.Stream, error) { + // Don't fill streams in which there's nothing to fill. This might happen when using sponsored streams. + if !stream.DistributeTo.TotalWeight.IsZero() { + stream.FilledEpochs += 1 + } + + // Check if stream has completed its distribution. This is a post factum check. + if stream.FilledEpochs >= stream.NumEpochsPaidOver { + err := k.moveActiveStreamToFinishedStream(ctx, stream) + if err != nil { + return types.Stream{}, fmt.Errorf("move active stream to finished stream: %w", err) + } + } + + return stream, nil +} + // GetStreamByID returns stream from stream ID. func (k Keeper) GetStreamByID(ctx sdk.Context, streamID uint64) (*types.Stream, error) { stream := types.Stream{} @@ -26,19 +69,6 @@ func (k Keeper) GetStreamByID(ctx sdk.Context, streamID uint64) (*types.Stream, return &stream, nil } -// GetStreamFromIDs returns multiple streams from a streamIDs array. -func (k Keeper) GetStreamFromIDs(ctx sdk.Context, streamIDs []uint64) ([]types.Stream, error) { - streams := []types.Stream{} - for _, streamID := range streamIDs { - stream, err := k.GetStreamByID(ctx, streamID) - if err != nil { - return []types.Stream{}, err - } - streams = append(streams, *stream) - } - return streams, nil -} - // GetStreams returns upcoming, active, and finished streams. 
func (k Keeper) GetStreams(ctx sdk.Context) []types.Stream { streams := k.getStreamsFromIterator(ctx, k.StreamsIterator(ctx)) @@ -59,6 +89,18 @@ func (k Keeper) GetActiveStreams(ctx sdk.Context) []types.Stream { return k.getStreamsFromIterator(ctx, k.ActiveStreamsIterator(ctx)) } +// GetActiveStreamsForEpoch returns active streams with the specified epoch identifier. +func (k Keeper) GetActiveStreamsForEpoch(ctx sdk.Context, epochIdentifier string) []types.Stream { + streams := k.getStreamsFromIterator(ctx, k.ActiveStreamsIterator(ctx)) + activeStreams := make([]types.Stream, 0) + for _, stream := range streams { + if stream.DistrEpochIdentifier == epochIdentifier { + activeStreams = append(activeStreams, stream) + } + } + return activeStreams +} + // GetUpcomingStreams returns upcoming streams. func (k Keeper) GetUpcomingStreams(ctx sdk.Context) []types.Stream { return k.getStreamsFromIterator(ctx, k.UpcomingStreamsIterator(ctx)) diff --git a/x/streamer/keeper/stream_iterator.go b/x/streamer/keeper/stream_iterator.go new file mode 100644 index 000000000..ce8ff28fc --- /dev/null +++ b/x/streamer/keeper/stream_iterator.go @@ -0,0 +1,147 @@ +package keeper + +import ( + "slices" + + "github.com/dymensionxyz/dymension/v3/utils/pagination" + "github.com/dymensionxyz/dymension/v3/x/streamer/types" +) + +func IterateEpochPointer( + p types.EpochPointer, + streams []types.Stream, + maxIterations uint64, + cb func(v StreamGauge) pagination.Stop, +) (types.EpochPointer, uint64) { + iter := NewStreamIterator(streams, p.StreamId, p.GaugeId, p.EpochIdentifier) + iterations := pagination.Paginate(iter, maxIterations, cb) + + // Set pointer to the next unprocessed gauge. If the iterator is invalid, then + // the last gauge is reached. Use special values in that case. 
+ if iter.Valid() { + v := iter.Value() + p.Set(v.Stream.Id, v.Gauge.GaugeId) + } else { + p.SetToLastGauge() + } + + return p, iterations +} + +// StreamGauge is a special type to help StreamIterator implement pagination.Iterator. +type StreamGauge struct { + Stream types.Stream + Gauge types.DistrRecord +} + +var _ pagination.Iterator[StreamGauge] = new(StreamIterator) + +type StreamIterator struct { + data []types.Stream + streamIdx int + gaugeIdx int + epochIdentifier string +} + +// NewStreamIterator a new StreamIterator starting from the provided stream and gauge IDs. First, it finds a starting +// position in the stream slice. Then it checks if it is valid and tries to find the next appropriate stream if not. +func NewStreamIterator(data []types.Stream, startStreamID uint64, startGaugeID uint64, epochIdentifier string) *StreamIterator { + // streamIdx is the position where the stream is found, or the position where it would appear in the sort order + streamIdx, _ := slices.BinarySearchFunc(data, startStreamID, func(stream types.Stream, targetID uint64) int { + return cmpUint64(stream.Id, targetID) + }) + + // startStreamID is greater than all the existing streams, the pointer is initially invalid + if streamIdx >= len(data) { + return &StreamIterator{ + data: data, + streamIdx: streamIdx, + gaugeIdx: 0, + epochIdentifier: epochIdentifier, + } + } + + // gaugeIdx is the position where the gauge is found, or the position where it would appear in the sort order + gaugeIdx, _ := slices.BinarySearchFunc(data[streamIdx].DistributeTo.Records, startGaugeID, func(record types.DistrRecord, targetID uint64) int { + return cmpUint64(record.GaugeId, targetID) + }) + + iter := &StreamIterator{ + data: data, + streamIdx: streamIdx, + gaugeIdx: gaugeIdx, + epochIdentifier: epochIdentifier, + } + + if !iter.validInvariants() { + iter.findNextStream() + } + + return iter +} + +// Next iterates to the next appropriate gauge. It can make the iterator invalid. 
+func (i *StreamIterator) Next() { + i.gaugeIdx++ + + if !i.validInvariants() { + i.findNextStream() + } +} + +// findNextStream find the next appropriate stream. +func (i *StreamIterator) findNextStream() { + // Put the pointer to the next stream + i.gaugeIdx = 0 + i.streamIdx++ + for ; i.streamIdx < len(i.data); i.streamIdx++ { + if i.validInvariants() { + return + } + } +} + +// validInvariants validates the iterator invariants: +// 1. streamIdx is less than the number of streams: the iterator points to the existing stream +// 2. Stream is non-empty: there are some gauges assigned to this stream +// 3. Stream epoch identifier matches the provided +// 4. gaugeIdx is less than the number of gauges: the iterator points to the existing gauge +func (i StreamIterator) validInvariants() bool { + ///// 1. streamIdx is less than the number of streams + return i.streamIdx < len(i.data) && + + // 2. stream is non-empty + len(i.data[i.streamIdx].DistributeTo.Records) != 0 && + + // 3. stream epoch identifier matches the provided + i.data[i.streamIdx].DistrEpochIdentifier == i.epochIdentifier && + + // 4. 
gaugeIdx is less than the number of gauges + i.gaugeIdx < len(i.data[i.streamIdx].DistributeTo.Records) +} + +func (i StreamIterator) Value() StreamGauge { + return StreamGauge{ + Stream: i.data[i.streamIdx], + Gauge: i.data[i.streamIdx].DistributeTo.Records[i.gaugeIdx], + } +} + +func (i StreamIterator) Valid() bool { + return i.validInvariants() +} + +func CmpStreams(a, b types.Stream) int { + return cmpUint64(a.Id, b.Id) +} + +func cmpUint64(a, b uint64) int { + switch { + case a < b: + return -1 + case a > b: + return 1 + default: + return 0 + } +} diff --git a/x/streamer/keeper/stream_iterator_test.go b/x/streamer/keeper/stream_iterator_test.go new file mode 100644 index 000000000..5d8dee748 --- /dev/null +++ b/x/streamer/keeper/stream_iterator_test.go @@ -0,0 +1,1305 @@ +package keeper_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/dymensionxyz/dymension/v3/utils/pagination" + "github.com/dymensionxyz/dymension/v3/x/streamer/keeper" + "github.com/dymensionxyz/dymension/v3/x/streamer/types" +) + +func TestStreamIterator(t *testing.T) { + t.Parallel() + + // newStream is a helper function + newStream := func(id uint64, epochID string, gaugeIDs ...uint64) types.Stream { + g := make([]types.DistrRecord, 0, len(gaugeIDs)) + for _, gID := range gaugeIDs { + g = append(g, types.DistrRecord{GaugeId: gID}) + } + return types.Stream{ + Id: id, + DistributeTo: &types.DistrInfo{Records: g}, + DistrEpochIdentifier: epochID, + } + } + + tests := []struct { + name string + maxIters uint64 + pointer types.EpochPointer + streams []types.Stream + expectedIters uint64 + expectedTraversal [][2]uint64 // holds an expected stream slice traversal. [2]uint64 is a pair of streamID and gaugeID. 
+ expectedPointer types.EpochPointer + }{ + { + name: "No streams", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{}, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + {1, 3}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant empty stream", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day"), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant non-empty stream", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 2, 3), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant empty stream", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour"), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + 
name: "Two streams: 1st is relevant", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour", 1, 2, 3), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + {1, 3}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Two streams: 2nd is relevant", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 2, 3), + newStream(2, "day", 1, 2, 3), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {2, 1}, + {2, 2}, + {2, 3}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Two streams: 1, 2 relevant", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "day", 1, 2, 3), + }, + expectedIters: 6, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + {1, 3}, + {2, 1}, + {2, 2}, + {2, 3}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Two streams: none relevant", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 2, 3), + newStream(2, "hour", 1, 2, 3), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Two streams: 1st is relevant but empty", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, 
+ EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day"), + newStream(2, "hour", 1, 2, 3), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Two streams: 2nd is relevant but empty", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 2, 3), + newStream(2, "day"), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Two streams: 1, 2 relevant but empty", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day"), + newStream(2, "day"), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Two streams: 1st is relevant, irrelevant is empty", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour"), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + {1, 3}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Two streams: 2nd is relevant, irrelevant is empty", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour"), + newStream(2, "day", 1, 2, 3), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {2, 1}, + {2, 2}, + {2, 3}, + }, + 
expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Two streams: both irrelevant are empty", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour"), + newStream(2, "hour"), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Pointer at the very beginning 1", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour", 2, 3, 4), + newStream(3, "hour", 1, 5, 6), + newStream(4, "day", 2, 5, 7), + }, + expectedIters: 6, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + {1, 3}, + {4, 2}, + {4, 5}, + {4, 7}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Pointer at the very beginning 2", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "hour", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour", 2, 3, 4), + newStream(3, "hour", 1, 5, 6), + newStream(4, "day", 2, 5, 7), + }, + expectedIters: 6, + expectedTraversal: [][2]uint64{ + {2, 2}, + {2, 3}, + {2, 4}, + {3, 1}, + {3, 5}, + {3, 6}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "hour", + }, + }, + { + name: "Pointer at the very end 1", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour", 2, 3, 4), + newStream(3, "hour", 1, 
5, 6), + newStream(4, "day", 2, 5, 7), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Pointer at the very end 2", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "hour", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour", 2, 3, 4), + newStream(3, "hour", 1, 5, 6), + newStream(4, "day", 2, 5, 7), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "hour", + }, + }, + { + name: "Empty stream 1", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "hour", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour", 2, 3, 4), + newStream(3, "hour"), + newStream(4, "hour", 1, 5, 6), + newStream(5, "day", 2, 5, 7), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "hour", + }, + }, + { + name: "Empty stream 2", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "hour", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour", 2, 3, 4), + newStream(3, "hour"), + newStream(4, "hour", 1, 5, 6), + newStream(5, "day", 2, 5, 7), + }, + expectedIters: 6, + expectedTraversal: [][2]uint64{ + {2, 2}, + {2, 3}, + {2, 4}, + {4, 1}, + {4, 5}, + {4, 6}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "hour", + }, + }, + { + name: "Empty stream 3: the last stream is empty", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 
0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour", 2, 3, 4), + newStream(3, "hour"), + newStream(4, "hour", 1, 5, 6), + newStream(5, "day"), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + {1, 3}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Empty stream 4: the first stream is empty", + maxIters: 100, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day"), + newStream(2, "hour", 2, 3, 4), + newStream(3, "hour"), + newStream(4, "hour", 1, 5, 6), + newStream(5, "day", 1, 2, 3), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {5, 1}, + {5, 2}, + {5, 3}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + // All further cases intend to test with limited iterations + { + name: "One relevant stream, 0 iterations", + maxIters: 0, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 1, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream, 1 iteration", + maxIters: 1, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + }, + expectedIters: 1, + expectedTraversal: [][2]uint64{ + {1, 1}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 2, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream, 2 iterations", + maxIters: 2, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: 
[]types.Stream{ + newStream(1, "day", 1, 2, 3), + }, + expectedIters: 2, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 3, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream, iterations equal to num of gauges", + maxIters: 3, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + {1, 3}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream, iterations is greater than num of gauges", + maxIters: 4, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + {1, 3}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant stream, 0 iterations", + maxIters: 0, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 2, 3), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant stream, 1 iteration", + maxIters: 1, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 2, 3), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One 
irrelevant stream, 2 iterations", + maxIters: 2, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 2, 3), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant stream, iterations equal to num of gauges", + maxIters: 3, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 2, 3), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant stream, iterations is greater than num of gauges", + maxIters: 4, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 2, 3), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream, start from the valid gauge, 1 iteration", + maxIters: 1, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + }, + expectedIters: 1, + expectedTraversal: [][2]uint64{ + {1, 5}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 8, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream, start from the valid gauge, 2 iterations", + maxIters: 2, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + }, + expectedIters: 2, + expectedTraversal: [][2]uint64{ + {1, 5}, + {1, 8}, + }, + 
expectedPointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 12, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream, start from the valid gauge, iteration equal to num of gauges", + maxIters: 3, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {1, 5}, + {1, 8}, + {1, 12}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream, start from the valid gauge, iteration is greater than num of gauges", + maxIters: 4, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {1, 5}, + {1, 8}, + {1, 12}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream, start from the invalid gauge, 1 iteration", + maxIters: 1, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 3, // invalid + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + }, + expectedIters: 1, + expectedTraversal: [][2]uint64{ + {1, 5}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 8, + EpochIdentifier: "day", + }, + }, + { + name: "One relevant stream, start from the invalid gauge, 2 iterations", + maxIters: 2, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 3, // invalid + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + }, + expectedIters: 2, + expectedTraversal: [][2]uint64{ + {1, 5}, + {1, 8}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 12, + EpochIdentifier: "day", + }, + }, + { + name: 
"One irrelevant stream, start from the valid gauge, 1 iteration", + maxIters: 1, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 5, 8, 12), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant stream, start from the valid gauge, 2 iterations", + maxIters: 2, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 5, 8, 12), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant stream, start from the valid gauge, iteration equal to num of gauges", + maxIters: 3, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 5, 8, 12), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant stream, start from the valid gauge, iteration is greater than num of gauges", + maxIters: 4, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 5, 8, 12), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant stream, start from the invalid gauge, 1 iteration", + maxIters: 1, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 3, // invalid + EpochIdentifier: "day", + }, + streams: []types.Stream{ + 
newStream(1, "hour", 1, 5, 8, 12), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "One irrelevant stream, start from the invalid gauge, 2 iterations", + maxIters: 2, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 3, // invalid + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "hour", 1, 5, 8, 12), + }, + expectedIters: 0, + expectedTraversal: nil, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Three streams: 1, 3 are relevant, start from the valid gauge, 1 iteration", + maxIters: 1, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + newStream(2, "hour", 1, 5, 8, 12), + newStream(3, "day", 1, 5, 8, 12), + }, + expectedIters: 1, + expectedTraversal: [][2]uint64{ + {1, 5}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 8, + EpochIdentifier: "day", + }, + }, + { + name: "Three streams: 1, 3 are relevant, start from the invalid gauge, 1 iteration", + maxIters: 1, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 6, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + newStream(2, "hour", 1, 5, 8, 12), + newStream(3, "day", 1, 5, 8, 12), + }, + expectedIters: 1, + expectedTraversal: [][2]uint64{ + {1, 8}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 12, + EpochIdentifier: "day", + }, + }, + { + name: "Three streams: 1, 3 are relevant, start from the invalid gauge, stop at the last element of the stream", + maxIters: 2, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 6, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + newStream(2, "hour", 
1, 5, 8, 12), + newStream(3, "day", 1, 5, 8, 12), + }, + expectedIters: 2, + expectedTraversal: [][2]uint64{ + {1, 8}, + {1, 12}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 3, + GaugeId: 1, + EpochIdentifier: "day", + }, + }, + { + name: "Three streams: 1, 3 are relevant, start from the invalid gauge, stop at the next stream", + maxIters: 3, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 6, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + newStream(2, "hour", 1, 5, 8, 12), + newStream(3, "day", 1, 5, 8, 12), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {1, 8}, + {1, 12}, + {3, 1}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 3, + GaugeId: 5, + EpochIdentifier: "day", + }, + }, + { + name: "Three streams: 1, 3 are relevant, start from the invalid gauge, more iterations than gauges", + maxIters: 6, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 6, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + newStream(2, "hour", 1, 5, 8, 12), + newStream(3, "day", 1, 5, 8, 12), + }, + expectedIters: 6, + expectedTraversal: [][2]uint64{ + {1, 8}, + {1, 12}, + {3, 1}, + {3, 5}, + {3, 8}, + {3, 12}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Three streams: 1, 3 are relevant, start from the invalid gauge, 3 is empty", + maxIters: 6, + pointer: types.EpochPointer{ + StreamId: 1, + GaugeId: 6, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + newStream(2, "hour", 1, 5, 8, 12), + newStream(3, "day"), + }, + expectedIters: 2, + expectedTraversal: [][2]uint64{ + {1, 8}, + {1, 12}, + }, + expectedPointer: types.EpochPointer{ + StreamId: types.MaxStreamID, + GaugeId: types.MaxGaugeID, + EpochIdentifier: "day", + }, + }, + { + name: "Three streams: 1, 3 are relevant, start from the 
invalid stream, 1 iteration", + maxIters: 1, + pointer: types.EpochPointer{ + StreamId: 2, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + newStream(2, "hour", 1, 5, 8, 12), + newStream(3, "day", 1, 5, 8, 12), + }, + expectedIters: 1, + expectedTraversal: [][2]uint64{ + {3, 1}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 3, + GaugeId: 5, + EpochIdentifier: "day", + }, + }, + { + name: "Three streams: 1, 3 are relevant, start from the invalid stream, 3 iterations", + maxIters: 3, + pointer: types.EpochPointer{ + StreamId: 2, + GaugeId: 5, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + newStream(2, "hour", 1, 5, 8, 12), + newStream(3, "day", 1, 5, 8, 12), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {3, 1}, + {3, 5}, + {3, 8}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 3, + GaugeId: 12, + EpochIdentifier: "day", + }, + }, + { + name: "Three streams: 1, 2, 3 are relevant, start from the relevant but empty stream, 1 iteration", + maxIters: 1, + pointer: types.EpochPointer{ + StreamId: 2, + GaugeId: 1, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 5, 8, 12), + newStream(2, "day"), + newStream(3, "day", 1, 5, 8, 12), + }, + expectedIters: 1, + expectedTraversal: [][2]uint64{ + {3, 1}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 3, + GaugeId: 5, + EpochIdentifier: "day", + }, + }, + { + name: "Pointer stops at the middle gauge of the stream", + maxIters: 4, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour", 2, 3, 4), + newStream(3, "hour", 1, 5, 6), + newStream(4, "day", 2, 5, 7), + }, + expectedIters: 4, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + {1, 3}, + {4, 2}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 4, + GaugeId: 5, + 
EpochIdentifier: "day", + }, + }, + { + name: "Pointer stops at the last gauge of the stream", + maxIters: 3, + pointer: types.EpochPointer{ + StreamId: 0, + GaugeId: 0, + EpochIdentifier: "day", + }, + streams: []types.Stream{ + newStream(1, "day", 1, 2, 3), + newStream(2, "hour", 2, 3, 4), + newStream(3, "hour", 1, 5, 6), + newStream(4, "day", 2, 5, 7), + }, + expectedIters: 3, + expectedTraversal: [][2]uint64{ + {1, 1}, + {1, 2}, + {1, 3}, + }, + expectedPointer: types.EpochPointer{ + StreamId: 4, + GaugeId: 2, + EpochIdentifier: "day", + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + var traversal [][2]uint64 + newPointer, iters := keeper.IterateEpochPointer(tc.pointer, tc.streams, tc.maxIters, func(v keeper.StreamGauge) pagination.Stop { + traversal = append(traversal, [2]uint64{v.Stream.Id, v.Gauge.GaugeId}) + return pagination.Continue + }) + + require.Equal(t, tc.expectedIters, iters) + require.Equal(t, tc.expectedTraversal, traversal) + require.Equal(t, tc.expectedPointer, newPointer) + }) + } +} diff --git a/x/streamer/keeper/suite_test.go b/x/streamer/keeper/suite_test.go index 137bfe2bf..0a93fcc02 100644 --- a/x/streamer/keeper/suite_test.go +++ b/x/streamer/keeper/suite_test.go @@ -1,6 +1,7 @@ package keeper_test import ( + "slices" "testing" "time" @@ -25,7 +26,7 @@ const ( NonSponsored = false ) -var defaultDistrInfo []types.DistrRecord = []types.DistrRecord{ +var defaultDistrInfo = []types.DistrRecord{ { GaugeId: 1, Weight: math.NewInt(50), @@ -75,8 +76,8 @@ func (suite *KeeperTestSuite) CreateGauge() error { } // CreateStream creates a non-sponsored stream struct given the required params. 
-func (suite *KeeperTestSuite) CreateStream(distrTo []types.DistrRecord, coins sdk.Coins, startTime time.Time, epochIdetifier string, numEpoch uint64) (uint64, *types.Stream) { - streamID, err := suite.App.StreamerKeeper.CreateStream(suite.Ctx, coins, distrTo, startTime, epochIdetifier, numEpoch, NonSponsored) +func (suite *KeeperTestSuite) CreateStream(distrTo []types.DistrRecord, coins sdk.Coins, startTime time.Time, epochIdentifier string, numEpoch uint64) (uint64, *types.Stream) { + streamID, err := suite.App.StreamerKeeper.CreateStream(suite.Ctx, coins, distrTo, startTime, epochIdentifier, numEpoch, NonSponsored) suite.Require().NoError(err) stream, err := suite.App.StreamerKeeper.GetStreamByID(suite.Ctx, streamID) suite.Require().NoError(err) @@ -100,15 +101,18 @@ func (suite *KeeperTestSuite) ExpectedDefaultStream(streamID uint64, starttime t distInfo, err := types.NewDistrInfo(defaultDistrInfo) suite.Require().NoError(err) + const numEpochsPaidOver = 30 return types.Stream{ Id: streamID, DistributeTo: distInfo, Coins: coins, StartTime: starttime, DistrEpochIdentifier: "day", - NumEpochsPaidOver: 30, + NumEpochsPaidOver: numEpochsPaidOver, FilledEpochs: 0, DistributedCoins: sdk.Coins{}, + Sponsored: false, + EpochCoins: coins.QuoInt(math.NewInt(numEpochsPaidOver)), } } @@ -121,6 +125,18 @@ func (suite *KeeperTestSuite) CreateGauges(num int) { } } +func (suite *KeeperTestSuite) CreateGaugesUntil(num int) { + suite.T().Helper() + + gauges := suite.App.IncentivesKeeper.GetGauges(suite.Ctx) + remain := num - len(gauges) + + for i := 0; i < remain; i++ { + err := suite.CreateGauge() + suite.Require().NoError(err) + } +} + func (suite *KeeperTestSuite) Distribution() sponsorshiptypes.Distribution { queryServer := sponsorshipkeeper.NewQueryServer(suite.App.SponsorshipKeeper) d, err := queryServer.Distribution(suite.Ctx, new(sponsorshiptypes.QueryDistributionRequest)) @@ -210,3 +226,22 @@ func (suite *KeeperTestSuite) Delegate(delAddr sdk.AccAddress, valAddr 
sdk.ValAd return del } + +func (suite *KeeperTestSuite) DistributeAllRewards(streams []types.Stream) sdk.Coins { + rewards := sdk.Coins{} + suite.Require().True(slices.IsSortedFunc(streams, keeper.CmpStreams)) + for _, stream := range streams { + epoch := suite.App.EpochsKeeper.GetEpochInfo(suite.Ctx, stream.DistrEpochIdentifier) + res := suite.App.StreamerKeeper.DistributeRewards( + suite.Ctx, + types.NewEpochPointer(epoch.Identifier, epoch.Duration), + types.IterationsNoLimit, + []types.Stream{stream}, + ) + suite.Require().Len(res.FilledStreams, 1) + err := suite.App.StreamerKeeper.SetStream(suite.Ctx, &res.FilledStreams[0]) + suite.Require().NoError(err) + rewards = rewards.Add(res.DistributedCoins...) + } + return rewards +} diff --git a/x/streamer/module.go b/x/streamer/module.go index de8d65ce8..651075d32 100644 --- a/x/streamer/module.go +++ b/x/streamer/module.go @@ -155,7 +155,11 @@ func (am AppModule) BeginBlock(_ sdk.Context, _ abci.RequestBeginBlock) {} // EndBlock executes all ABCI EndBlock logic respective to the module. // Returns a nil validatorUpdate struct array. -func (am AppModule) EndBlock(_ sdk.Context, _ abci.RequestEndBlock) []abci.ValidatorUpdate { +func (am AppModule) EndBlock(ctx sdk.Context, _ abci.RequestEndBlock) []abci.ValidatorUpdate { + err := am.keeper.EndBlock(ctx) + if err != nil { + panic(err) + } return []abci.ValidatorUpdate{} } diff --git a/x/streamer/types/constants.go b/x/streamer/types/constants.go new file mode 100644 index 000000000..2578b0bc1 --- /dev/null +++ b/x/streamer/types/constants.go @@ -0,0 +1,5 @@ +package types + +const ( + DefaultMaxIterationsPerBlock = 500 +) diff --git a/x/streamer/types/events.pb.go b/x/streamer/types/events.pb.go new file mode 100644 index 000000000..bd102b17f --- /dev/null +++ b/x/streamer/types/events.pb.go @@ -0,0 +1,785 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. 
+// source: dymensionxyz/dymension/streamer/events.proto + +package types + +import ( + fmt "fmt" + github_com_cosmos_cosmos_sdk_types "github.com/cosmos/cosmos-sdk/types" + types "github.com/cosmos/cosmos-sdk/types" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" + io "io" + math "math" + math_bits "math/bits" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package + +type EventEndBlock struct { + Iterations uint64 `protobuf:"varint,1,opt,name=iterations,proto3" json:"iterations,omitempty"` + MaxIterations uint64 `protobuf:"varint,2,opt,name=max_iterations,json=maxIterations,proto3" json:"max_iterations,omitempty"` + // Distributed is the total amount of coins that have been distributed + Distributed github_com_cosmos_cosmos_sdk_types.Coins `protobuf:"bytes,3,rep,name=distributed,proto3,castrepeated=github.com/cosmos/cosmos-sdk/types.Coins" json:"distributed"` +} + +func (m *EventEndBlock) Reset() { *m = EventEndBlock{} } +func (m *EventEndBlock) String() string { return proto.CompactTextString(m) } +func (*EventEndBlock) ProtoMessage() {} +func (*EventEndBlock) Descriptor() ([]byte, []int) { + return fileDescriptor_4840a29c1bf68fa5, []int{0} +} +func (m *EventEndBlock) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *EventEndBlock) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_EventEndBlock.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], 
nil + } +} +func (m *EventEndBlock) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventEndBlock.Merge(m, src) +} +func (m *EventEndBlock) XXX_Size() int { + return m.Size() +} +func (m *EventEndBlock) XXX_DiscardUnknown() { + xxx_messageInfo_EventEndBlock.DiscardUnknown(m) +} + +var xxx_messageInfo_EventEndBlock proto.InternalMessageInfo + +func (m *EventEndBlock) GetIterations() uint64 { + if m != nil { + return m.Iterations + } + return 0 +} + +func (m *EventEndBlock) GetMaxIterations() uint64 { + if m != nil { + return m.MaxIterations + } + return 0 +} + +func (m *EventEndBlock) GetDistributed() github_com_cosmos_cosmos_sdk_types.Coins { + if m != nil { + return m.Distributed + } + return nil +} + +type EventEpochEnd struct { + Iterations uint64 `protobuf:"varint,1,opt,name=iterations,proto3" json:"iterations,omitempty"` + // Distributed is the total amount of coins that have been distributed + Distributed github_com_cosmos_cosmos_sdk_types.Coins `protobuf:"bytes,2,rep,name=distributed,proto3,castrepeated=github.com/cosmos/cosmos-sdk/types.Coins" json:"distributed"` +} + +func (m *EventEpochEnd) Reset() { *m = EventEpochEnd{} } +func (m *EventEpochEnd) String() string { return proto.CompactTextString(m) } +func (*EventEpochEnd) ProtoMessage() {} +func (*EventEpochEnd) Descriptor() ([]byte, []int) { + return fileDescriptor_4840a29c1bf68fa5, []int{1} +} +func (m *EventEpochEnd) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *EventEpochEnd) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_EventEpochEnd.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *EventEpochEnd) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventEpochEnd.Merge(m, src) +} +func (m *EventEpochEnd) XXX_Size() int { + return m.Size() +} +func (m *EventEpochEnd) XXX_DiscardUnknown() { + 
xxx_messageInfo_EventEpochEnd.DiscardUnknown(m) +} + +var xxx_messageInfo_EventEpochEnd proto.InternalMessageInfo + +func (m *EventEpochEnd) GetIterations() uint64 { + if m != nil { + return m.Iterations + } + return 0 +} + +func (m *EventEpochEnd) GetDistributed() github_com_cosmos_cosmos_sdk_types.Coins { + if m != nil { + return m.Distributed + } + return nil +} + +type EventEpochStart struct { + ActiveStreamsNum uint64 `protobuf:"varint,1,opt,name=active_streams_num,json=activeStreamsNum,proto3" json:"active_streams_num,omitempty"` +} + +func (m *EventEpochStart) Reset() { *m = EventEpochStart{} } +func (m *EventEpochStart) String() string { return proto.CompactTextString(m) } +func (*EventEpochStart) ProtoMessage() {} +func (*EventEpochStart) Descriptor() ([]byte, []int) { + return fileDescriptor_4840a29c1bf68fa5, []int{2} +} +func (m *EventEpochStart) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *EventEpochStart) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_EventEpochStart.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *EventEpochStart) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventEpochStart.Merge(m, src) +} +func (m *EventEpochStart) XXX_Size() int { + return m.Size() +} +func (m *EventEpochStart) XXX_DiscardUnknown() { + xxx_messageInfo_EventEpochStart.DiscardUnknown(m) +} + +var xxx_messageInfo_EventEpochStart proto.InternalMessageInfo + +func (m *EventEpochStart) GetActiveStreamsNum() uint64 { + if m != nil { + return m.ActiveStreamsNum + } + return 0 +} + +func init() { + proto.RegisterType((*EventEndBlock)(nil), "dymensionxyz.dymension.streamer.EventEndBlock") + proto.RegisterType((*EventEpochEnd)(nil), "dymensionxyz.dymension.streamer.EventEpochEnd") + proto.RegisterType((*EventEpochStart)(nil), 
"dymensionxyz.dymension.streamer.EventEpochStart") +} + +func init() { + proto.RegisterFile("dymensionxyz/dymension/streamer/events.proto", fileDescriptor_4840a29c1bf68fa5) +} + +var fileDescriptor_4840a29c1bf68fa5 = []byte{ + // 359 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x92, 0xc1, 0x4e, 0xc2, 0x40, + 0x10, 0x40, 0x5b, 0x30, 0x1e, 0x96, 0xa0, 0xa6, 0xf1, 0x80, 0x1c, 0x16, 0x42, 0x62, 0xc2, 0x01, + 0x77, 0x45, 0xe2, 0xd9, 0x04, 0xc3, 0xc1, 0x8b, 0x26, 0x70, 0xf3, 0x42, 0xb6, 0xed, 0x06, 0x36, + 0xb8, 0xbb, 0xa4, 0x3b, 0x6d, 0x8a, 0x5f, 0xe1, 0x17, 0xf8, 0x01, 0xfe, 0x87, 0x09, 0x47, 0x8e, + 0x9e, 0xd4, 0xc0, 0x8f, 0x18, 0xda, 0x02, 0x8d, 0x89, 0xe1, 0xe4, 0xa9, 0x33, 0x9d, 0x37, 0x9b, + 0x37, 0x99, 0x41, 0x2d, 0x7f, 0x26, 0xb9, 0x32, 0x42, 0xab, 0x78, 0xf6, 0x4c, 0xb7, 0x09, 0x35, + 0x10, 0x70, 0x26, 0x79, 0x40, 0x79, 0xc4, 0x15, 0x18, 0x32, 0x0d, 0x34, 0x68, 0xa7, 0x96, 0xa7, + 0xc9, 0x36, 0x21, 0x1b, 0xba, 0x7a, 0x3a, 0xd2, 0x23, 0x9d, 0xb0, 0x74, 0x1d, 0xa5, 0x6d, 0x55, + 0xec, 0x69, 0x23, 0xb5, 0xa1, 0x2e, 0x33, 0x9c, 0x46, 0x6d, 0x97, 0x03, 0x6b, 0x53, 0x4f, 0x0b, + 0x95, 0xd5, 0xc9, 0x3e, 0x89, 0x4d, 0x90, 0xf2, 0x8d, 0x77, 0x1b, 0x95, 0x7b, 0x6b, 0xaf, 0x9e, + 0xf2, 0xbb, 0x4f, 0xda, 0x9b, 0x38, 0x18, 0x21, 0x01, 0x3c, 0x60, 0x20, 0xb4, 0x32, 0x15, 0xbb, + 0x6e, 0x37, 0x0f, 0xfa, 0xb9, 0x3f, 0xce, 0x39, 0x3a, 0x92, 0x2c, 0x1e, 0xe6, 0x98, 0x42, 0xc2, + 0x94, 0x25, 0x8b, 0xef, 0x76, 0x98, 0x44, 0x25, 0x5f, 0x18, 0x08, 0x84, 0x1b, 0x02, 0xf7, 0x2b, + 0xc5, 0x7a, 0xb1, 0x59, 0xba, 0x3a, 0x23, 0xa9, 0x3e, 0x59, 0xeb, 0x93, 0x4c, 0x9f, 0xdc, 0x6a, + 0xa1, 0xba, 0x97, 0xf3, 0xcf, 0x9a, 0xf5, 0xf6, 0x55, 0x6b, 0x8e, 0x04, 0x8c, 0x43, 0x97, 0x78, + 0x5a, 0xd2, 0x6c, 0xd6, 0xf4, 0x73, 0x61, 0xfc, 0x09, 0x85, 0xd9, 0x94, 0x9b, 0xa4, 0xc1, 0xf4, + 0xf3, 0xef, 0x37, 0x5e, 0xb7, 0x73, 0x4c, 0xb5, 0x37, 0xee, 0x29, 0x7f, 0xef, 0x1c, 0xbf, 0x04, + 0x0b, 0xff, 0x2c, 0x78, 0x83, 0x8e, 0x77, 0x7e, 0x03, 
0x60, 0x01, 0x38, 0x2d, 0xe4, 0x30, 0x0f, + 0x44, 0xc4, 0x87, 0xe9, 0x52, 0xcc, 0x50, 0x85, 0x32, 0x33, 0x3d, 0x49, 0x2b, 0x83, 0xb4, 0x70, + 0x1f, 0xca, 0xee, 0xc3, 0x7c, 0x89, 0xed, 0xc5, 0x12, 0xdb, 0xdf, 0x4b, 0x6c, 0xbf, 0xac, 0xb0, + 0xb5, 0x58, 0x61, 0xeb, 0x63, 0x85, 0xad, 0xc7, 0xeb, 0x9c, 0xd1, 0x1f, 0xeb, 0x8f, 0x3a, 0x34, + 0xde, 0xdd, 0x40, 0x22, 0xe9, 0x1e, 0x26, 0x17, 0xd0, 0xf9, 0x09, 0x00, 0x00, 0xff, 0xff, 0xa9, + 0x1c, 0xaa, 0x03, 0xb8, 0x02, 0x00, 0x00, +} + +func (m *EventEndBlock) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *EventEndBlock) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *EventEndBlock) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if len(m.Distributed) > 0 { + for iNdEx := len(m.Distributed) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Distributed[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintEvents(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + } + } + if m.MaxIterations != 0 { + i = encodeVarintEvents(dAtA, i, uint64(m.MaxIterations)) + i-- + dAtA[i] = 0x10 + } + if m.Iterations != 0 { + i = encodeVarintEvents(dAtA, i, uint64(m.Iterations)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func (m *EventEpochEnd) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *EventEpochEnd) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *EventEpochEnd) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if 
len(m.Distributed) > 0 { + for iNdEx := len(m.Distributed) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.Distributed[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintEvents(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + } + if m.Iterations != 0 { + i = encodeVarintEvents(dAtA, i, uint64(m.Iterations)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func (m *EventEpochStart) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *EventEpochStart) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *EventEpochStart) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.ActiveStreamsNum != 0 { + i = encodeVarintEvents(dAtA, i, uint64(m.ActiveStreamsNum)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func encodeVarintEvents(dAtA []byte, offset int, v uint64) int { + offset -= sovEvents(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *EventEndBlock) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Iterations != 0 { + n += 1 + sovEvents(uint64(m.Iterations)) + } + if m.MaxIterations != 0 { + n += 1 + sovEvents(uint64(m.MaxIterations)) + } + if len(m.Distributed) > 0 { + for _, e := range m.Distributed { + l = e.Size() + n += 1 + l + sovEvents(uint64(l)) + } + } + return n +} + +func (m *EventEpochEnd) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Iterations != 0 { + n += 1 + sovEvents(uint64(m.Iterations)) + } + if len(m.Distributed) > 0 { + for _, e := range m.Distributed { + l = e.Size() + n += 1 + l + sovEvents(uint64(l)) + } + } + return n +} + +func (m 
*EventEpochStart) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.ActiveStreamsNum != 0 { + n += 1 + sovEvents(uint64(m.ActiveStreamsNum)) + } + return n +} + +func sovEvents(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozEvents(x uint64) (n int) { + return sovEvents(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *EventEndBlock) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEvents + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: EventEndBlock: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: EventEndBlock: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Iterations", wireType) + } + m.Iterations = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEvents + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Iterations |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field MaxIterations", wireType) + } + m.MaxIterations = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEvents + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.MaxIterations |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Distributed", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if 
shift >= 64 { + return ErrIntOverflowEvents + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthEvents + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthEvents + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Distributed = append(m.Distributed, types.Coin{}) + if err := m.Distributed[len(m.Distributed)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipEvents(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthEvents + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *EventEpochEnd) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEvents + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: EventEpochEnd: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: EventEpochEnd: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field Iterations", wireType) + } + m.Iterations = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEvents + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.Iterations |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType 
!= 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Distributed", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEvents + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthEvents + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthEvents + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Distributed = append(m.Distributed, types.Coin{}) + if err := m.Distributed[len(m.Distributed)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipEvents(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthEvents + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *EventEpochStart) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEvents + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: EventEpochStart: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: EventEpochStart: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field ActiveStreamsNum", wireType) + } + m.ActiveStreamsNum = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowEvents + } + if 
iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.ActiveStreamsNum |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + default: + iNdEx = preIndex + skippy, err := skipEvents(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthEvents + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipEvents(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + depth := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowEvents + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowEvents + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + case 1: + iNdEx += 8 + case 2: + var length int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowEvents + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthEvents + } + iNdEx += length + case 3: + depth++ + case 4: + if depth == 0 { + return 0, ErrUnexpectedEndOfGroupEvents + } + depth-- + case 5: + iNdEx += 4 + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + if iNdEx < 0 { + return 0, ErrInvalidLengthEvents + } + if depth == 0 { + return iNdEx, nil + } + } + return 0, io.ErrUnexpectedEOF +} + +var ( + ErrInvalidLengthEvents = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowEvents = fmt.Errorf("proto: 
integer overflow") + ErrUnexpectedEndOfGroupEvents = fmt.Errorf("proto: unexpected end of group") +) diff --git a/x/streamer/types/expected_keepers.go b/x/streamer/types/expected_keepers.go index f0275f967..51572aead 100644 --- a/x/streamer/types/expected_keepers.go +++ b/x/streamer/types/expected_keepers.go @@ -21,6 +21,7 @@ type BankKeeper interface { // EpochKeeper defines the expected interface needed to retrieve epoch info. type EpochKeeper interface { GetEpochInfo(ctx sdk.Context, identifier string) epochstypes.EpochInfo + AllEpochInfos(ctx sdk.Context) []epochstypes.EpochInfo } type AccountKeeper interface { diff --git a/x/streamer/types/genesis.go b/x/streamer/types/genesis.go index 1029ef8f0..14508379c 100644 --- a/x/streamer/types/genesis.go +++ b/x/streamer/types/genesis.go @@ -12,9 +12,10 @@ const DefaultIndex uint64 = 1 // DefaultGenesis returns the default genesis state func DefaultGenesis() *GenesisState { return &GenesisState{ - Params: DefaultParams(), - Streams: []Stream{}, - LastStreamId: 0, + Params: DefaultParams(), + Streams: []Stream{}, + LastStreamId: 0, + EpochPointers: []EpochPointer{}, } } diff --git a/x/streamer/types/genesis.pb.go b/x/streamer/types/genesis.pb.go index b9d0c65d8..6f2ab6469 100644 --- a/x/streamer/types/genesis.pb.go +++ b/x/streamer/types/genesis.pb.go @@ -34,6 +34,8 @@ type GenesisState struct { // last_stream_id is what the stream number will increment from when creating // the next stream after genesis LastStreamId uint64 `protobuf:"varint,3,opt,name=last_stream_id,json=lastStreamId,proto3" json:"last_stream_id,omitempty"` + // EpochPointers are pointers to the last rewarded gauges + EpochPointers []EpochPointer `protobuf:"bytes,4,rep,name=epoch_pointers,json=epochPointers,proto3" json:"epoch_pointers"` } func (m *GenesisState) Reset() { *m = GenesisState{} } @@ -90,6 +92,13 @@ func (m *GenesisState) GetLastStreamId() uint64 { return 0 } +func (m *GenesisState) GetEpochPointers() []EpochPointer { + if m != nil { + 
return m.EpochPointers + } + return nil +} + func init() { proto.RegisterType((*GenesisState)(nil), "dymensionxyz.dymension.streamer.GenesisState") } @@ -99,25 +108,28 @@ func init() { } var fileDescriptor_29bb00b957b5a0f5 = []byte{ - // 278 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4d, 0xa9, 0xcc, 0x4d, - 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0xab, 0xa8, 0xac, 0xd2, 0x87, 0x73, 0xf4, 0x8b, 0x4b, 0x8a, 0x52, - 0x13, 0x73, 0x53, 0x8b, 0xf4, 0xd3, 0x53, 0xf3, 0x52, 0x8b, 0x33, 0x8b, 0xf5, 0x0a, 0x8a, 0xf2, - 0x4b, 0xf2, 0x85, 0xe4, 0x91, 0x95, 0xeb, 0xc1, 0x39, 0x7a, 0x30, 0xe5, 0x52, 0x22, 0xe9, 0xf9, - 0xe9, 0xf9, 0x60, 0xb5, 0xfa, 0x20, 0x16, 0x44, 0x9b, 0x94, 0x5c, 0x7a, 0x7e, 0x7e, 0x7a, 0x4e, - 0xaa, 0x3e, 0x98, 0x97, 0x54, 0x9a, 0xa6, 0x9f, 0x52, 0x5a, 0x94, 0x58, 0x02, 0xd2, 0x08, 0x91, - 0xd7, 0x21, 0xe4, 0x8a, 0x82, 0xc4, 0xa2, 0xc4, 0xdc, 0x62, 0x62, 0x55, 0x43, 0x18, 0x10, 0xd5, - 0x4a, 0x47, 0x18, 0xb9, 0x78, 0xdc, 0x21, 0x9e, 0x08, 0x2e, 0x49, 0x2c, 0x49, 0x15, 0x72, 0xe5, - 0x62, 0x83, 0x18, 0x27, 0xc1, 0xa8, 0xc0, 0xa8, 0xc1, 0x6d, 0xa4, 0xae, 0x47, 0xc0, 0x53, 0x7a, - 0x01, 0x60, 0xe5, 0x4e, 0x2c, 0x27, 0xee, 0xc9, 0x33, 0x04, 0x41, 0x35, 0x0b, 0xb9, 0x73, 0xb1, - 0x43, 0x14, 0x14, 0x4b, 0x30, 0x29, 0x30, 0x13, 0x65, 0x4e, 0x30, 0x98, 0x01, 0x35, 0x07, 0xa6, - 0x5b, 0x48, 0x85, 0x8b, 0x2f, 0x27, 0xb1, 0xb8, 0x24, 0x1e, 0xc2, 0x8f, 0xcf, 0x4c, 0x91, 0x60, - 0x56, 0x60, 0xd4, 0x60, 0x09, 0xe2, 0x01, 0x89, 0x42, 0xb4, 0x78, 0xa6, 0x38, 0xf9, 0x9f, 0x78, - 0x24, 0xc7, 0x78, 0xe1, 0x91, 0x1c, 0xe3, 0x83, 0x47, 0x72, 0x8c, 0x13, 0x1e, 0xcb, 0x31, 0x5c, - 0x78, 0x2c, 0xc7, 0x70, 0xe3, 0xb1, 0x1c, 0x43, 0x94, 0x69, 0x7a, 0x66, 0x49, 0x46, 0x69, 0x92, - 0x5e, 0x72, 0x7e, 0xae, 0x3e, 0x8e, 0x90, 0x29, 0x33, 0xd6, 0xaf, 0x40, 0x04, 0x4f, 0x49, 0x65, - 0x41, 0x6a, 0x71, 0x12, 0x1b, 0x38, 0x78, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, 0x9e, 0x84, - 0x7f, 0x68, 0x02, 0x02, 0x00, 0x00, + // 322 bytes 
of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x90, 0xc1, 0x6a, 0x32, 0x31, + 0x14, 0x85, 0x27, 0x2a, 0xfe, 0x90, 0xdf, 0xba, 0x18, 0xba, 0x18, 0x5c, 0x44, 0x29, 0x85, 0xba, + 0xa8, 0x09, 0x54, 0xfa, 0x02, 0x82, 0x48, 0x57, 0x15, 0xdd, 0xb9, 0x91, 0xe8, 0xdc, 0xc6, 0x01, + 0x67, 0x32, 0x24, 0xb1, 0x68, 0x9f, 0xa2, 0x8f, 0xd0, 0xc7, 0x71, 0xe9, 0xb2, 0xab, 0x52, 0xf4, + 0x45, 0xca, 0x24, 0xa3, 0x75, 0x53, 0x9c, 0xdd, 0x3d, 0x77, 0xbe, 0x73, 0xcf, 0xe4, 0xe0, 0x4e, + 0xb8, 0x89, 0x21, 0xd1, 0x91, 0x4c, 0xd6, 0x9b, 0x37, 0x76, 0x12, 0x4c, 0x1b, 0x05, 0x3c, 0x06, + 0xc5, 0x04, 0x24, 0xa0, 0x23, 0x4d, 0x53, 0x25, 0x8d, 0xf4, 0x9b, 0xe7, 0x38, 0x3d, 0x09, 0x7a, + 0xc4, 0x1b, 0xd7, 0x42, 0x0a, 0x69, 0x59, 0x96, 0x4d, 0xce, 0xd6, 0x20, 0x42, 0x4a, 0xb1, 0x04, + 0x66, 0xd5, 0x6c, 0xf5, 0xc2, 0xc2, 0x95, 0xe2, 0x26, 0x33, 0xba, 0xef, 0xf7, 0x97, 0xfe, 0x22, + 0xe5, 0x8a, 0xc7, 0xba, 0x28, 0xed, 0x86, 0x9c, 0xa6, 0xc5, 0x68, 0x50, 0x8e, 0xbf, 0xf9, 0x28, + 0xe1, 0xda, 0xc0, 0x3d, 0x7a, 0x6c, 0xb8, 0x01, 0xbf, 0x8f, 0xab, 0x2e, 0x3e, 0x40, 0x2d, 0xd4, + 0xfe, 0xff, 0x70, 0x47, 0x2f, 0x94, 0x40, 0x87, 0x16, 0xef, 0x55, 0xb6, 0x5f, 0x4d, 0x6f, 0x94, + 0x9b, 0xfd, 0x01, 0xfe, 0xe7, 0x00, 0x1d, 0x94, 0x5a, 0xe5, 0x42, 0x77, 0xc6, 0x76, 0xc8, 0xef, + 0x1c, 0xdd, 0xfe, 0x2d, 0xae, 0x2f, 0xb9, 0x36, 0x53, 0xa7, 0xa7, 0x51, 0x18, 0x94, 0x5b, 0xa8, + 0x5d, 0x19, 0xd5, 0xb2, 0xad, 0xb3, 0x3c, 0x85, 0xfe, 0x04, 0xd7, 0x21, 0x95, 0xf3, 0xc5, 0x34, + 0x95, 0x51, 0x62, 0x40, 0xe9, 0xa0, 0x62, 0x53, 0x3b, 0x17, 0x53, 0xfb, 0x99, 0x6d, 0xe8, 0x5c, + 0x79, 0xf6, 0x15, 0x9c, 0xed, 0x74, 0xef, 0x79, 0xbb, 0x27, 0x68, 0xb7, 0x27, 0xe8, 0x7b, 0x4f, + 0xd0, 0xfb, 0x81, 0x78, 0xbb, 0x03, 0xf1, 0x3e, 0x0f, 0xc4, 0x9b, 0x3c, 0x8a, 0xc8, 0x2c, 0x56, + 0x33, 0x3a, 0x97, 0x31, 0xfb, 0xa3, 0xf7, 0xd7, 0x2e, 0x5b, 0xff, 0x96, 0x6f, 0x36, 0x29, 0xe8, + 0x59, 0xd5, 0x56, 0xdf, 0xfd, 0x09, 0x00, 0x00, 0xff, 0xff, 0xa8, 0x70, 0x4f, 0x91, 0x8e, 0x02, + 
0x00, 0x00, } func (m *GenesisState) Marshal() (dAtA []byte, err error) { @@ -140,6 +152,20 @@ func (m *GenesisState) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if len(m.EpochPointers) > 0 { + for iNdEx := len(m.EpochPointers) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.EpochPointers[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintGenesis(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x22 + } + } if m.LastStreamId != 0 { i = encodeVarintGenesis(dAtA, i, uint64(m.LastStreamId)) i-- @@ -200,6 +226,12 @@ func (m *GenesisState) Size() (n int) { if m.LastStreamId != 0 { n += 1 + sovGenesis(uint64(m.LastStreamId)) } + if len(m.EpochPointers) > 0 { + for _, e := range m.EpochPointers { + l = e.Size() + n += 1 + l + sovGenesis(uint64(l)) + } + } return n } @@ -324,6 +356,40 @@ func (m *GenesisState) Unmarshal(dAtA []byte) error { break } } + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field EpochPointers", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowGenesis + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthGenesis + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthGenesis + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.EpochPointers = append(m.EpochPointers, EpochPointer{}) + if err := m.EpochPointers[len(m.EpochPointers)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipGenesis(dAtA[iNdEx:]) diff --git a/x/streamer/types/keys.go b/x/streamer/types/keys.go index b16499ea6..14eb55451 100644 --- a/x/streamer/types/keys.go +++ b/x/streamer/types/keys.go @@ -1,6 +1,6 @@ package types -var ( +const ( // ModuleName defines 
the module name. ModuleName = "streamer" @@ -10,12 +10,11 @@ var ( // RouterKey is the message route for slashing. RouterKey = ModuleName - // QuerierRoute defines the module's query routing key. - QuerierRoute = ModuleName - // MemStoreKey defines the in-memory store key. MemStoreKey = "mem_capability" +) +var ( // KeyPrefixTimestamp defines prefix key for timestamp iterator key. KeyPrefixTimestamp = []byte{0x01} @@ -39,4 +38,7 @@ var ( // KeyIndexSeparator defines key for merging bytes. KeyIndexSeparator = []byte{0x07} + + // KeyPrefixEpochPointers defines a prefix key holding EpochPointer objects. + KeyPrefixEpochPointers = []byte{0x08} ) diff --git a/x/streamer/types/params.go b/x/streamer/types/params.go index 4f3215e35..b1d2f5ad2 100644 --- a/x/streamer/types/params.go +++ b/x/streamer/types/params.go @@ -1,9 +1,15 @@ package types import ( + "fmt" + paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" ) +const ( + KeyMaxIterationsPerBlock = "MaxIterationsPerBlock" +) + var _ paramtypes.ParamSet = (*Params)(nil) // ParamKeyTable the param key table for launch module @@ -12,18 +18,31 @@ func ParamKeyTable() paramtypes.KeyTable { } // NewParams creates a new Params instance -func NewParams() Params { - return Params{} +func NewParams(maxIterationsPerBlock uint64) Params { + return Params{ + MaxIterationsPerBlock: maxIterationsPerBlock, + } } // DefaultParams returns a default set of parameters func DefaultParams() Params { - return NewParams() + return NewParams(DefaultMaxIterationsPerBlock) } // ParamSetPairs get the params.ParamSet func (p *Params) ParamSetPairs() paramtypes.ParamSetPairs { - return paramtypes.ParamSetPairs{} + return paramtypes.ParamSetPairs{ + paramtypes.NewParamSetPair([]byte(KeyMaxIterationsPerBlock), &p.MaxIterationsPerBlock, validateMaxIterationsPerBlock), + } +} + +// validateDisputePeriodInBlocks validates the DisputePeriodInBlocks param +func validateMaxIterationsPerBlock(v interface{}) error { + _, ok := v.(uint64) + if !ok { + 
return fmt.Errorf("invalid parameter type: %T", v) + } + return nil } // Validate validates the set of params diff --git a/x/streamer/types/params.pb.go b/x/streamer/types/params.pb.go index d91c3fd75..0f2f7a4b8 100644 --- a/x/streamer/types/params.pb.go +++ b/x/streamer/types/params.pb.go @@ -25,6 +25,9 @@ const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package // Params holds parameters for the streamer module type Params struct { + // MaxIterationPerBlock defines the maximum number of gauges that could be processed in a single block. + // This param is used during the pagination process. + MaxIterationsPerBlock uint64 `protobuf:"varint,1,opt,name=max_iterations_per_block,json=maxIterationsPerBlock,proto3" json:"max_iterations_per_block,omitempty"` } func (m *Params) Reset() { *m = Params{} } @@ -60,6 +63,13 @@ func (m *Params) XXX_DiscardUnknown() { var xxx_messageInfo_Params proto.InternalMessageInfo +func (m *Params) GetMaxIterationsPerBlock() uint64 { + if m != nil { + return m.MaxIterationsPerBlock + } + return 0 +} + func init() { proto.RegisterType((*Params)(nil), "dymensionxyz.dymension.streamer.Params") } @@ -69,17 +79,20 @@ func init() { } var fileDescriptor_aeb7e6340b70fcc4 = []byte{ - // 152 bytes of a gzipped FileDescriptorProto + // 200 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x49, 0xa9, 0xcc, 0x4d, 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0xab, 0xa8, 0xac, 0xd2, 0x87, 0x73, 0xf4, 0x8b, 0x4b, 0x8a, 0x52, 0x13, 0x73, 0x53, 0x8b, 0xf4, 0x0b, 0x12, 0x8b, 0x12, 0x73, 0x8b, 0xf5, 0x0a, 0x8a, 0xf2, 0x4b, 0xf2, 0x85, 0xe4, 0x91, 0x55, 0xeb, 0xc1, 0x39, 0x7a, 0x30, 0xd5, 0x52, 0x22, 0xe9, 0xf9, 0xe9, - 0xf9, 0x60, 0xb5, 0xfa, 0x20, 0x16, 0x44, 0x9b, 0x12, 0x07, 0x17, 0x5b, 0x00, 0xd8, 0x18, 0x27, - 0xff, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c, 0x92, 0x63, 0x7c, 0xf0, 0x48, 0x8e, 0x71, 0xc2, 0x63, - 0x39, 0x86, 0x0b, 0x8f, 0xe5, 0x18, 0x6e, 0x3c, 0x96, 0x63, 0x88, 0x32, 0x4d, 
0xcf, 0x2c, 0xc9, - 0x28, 0x4d, 0xd2, 0x4b, 0xce, 0xcf, 0xd5, 0xc7, 0xe1, 0xa6, 0x32, 0x63, 0xfd, 0x0a, 0x84, 0xc3, - 0x4a, 0x2a, 0x0b, 0x52, 0x8b, 0x93, 0xd8, 0xc0, 0x36, 0x18, 0x03, 0x02, 0x00, 0x00, 0xff, 0xff, - 0xf8, 0xb1, 0x1f, 0x37, 0xc8, 0x00, 0x00, 0x00, + 0xf9, 0x60, 0xb5, 0xfa, 0x20, 0x16, 0x44, 0x9b, 0x92, 0x23, 0x17, 0x5b, 0x00, 0xd8, 0x18, 0x21, + 0x73, 0x2e, 0x89, 0xdc, 0xc4, 0x8a, 0xf8, 0xcc, 0x92, 0xd4, 0xa2, 0xc4, 0x92, 0xcc, 0xfc, 0xbc, + 0xe2, 0xf8, 0x82, 0xd4, 0xa2, 0xf8, 0xa4, 0x9c, 0xfc, 0xe4, 0x6c, 0x09, 0x46, 0x05, 0x46, 0x0d, + 0x96, 0x20, 0xd1, 0xdc, 0xc4, 0x0a, 0x4f, 0xb8, 0x74, 0x40, 0x6a, 0x91, 0x13, 0x48, 0xd2, 0xc9, + 0xff, 0xc4, 0x23, 0x39, 0xc6, 0x0b, 0x8f, 0xe4, 0x18, 0x1f, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, + 0x8e, 0xe1, 0xc2, 0x63, 0x39, 0x86, 0x1b, 0x8f, 0xe5, 0x18, 0xa2, 0x4c, 0xd3, 0x33, 0x4b, 0x32, + 0x4a, 0x93, 0xf4, 0x92, 0xf3, 0x73, 0xf5, 0x71, 0x78, 0xa6, 0xcc, 0x58, 0xbf, 0x02, 0xe1, 0xa3, + 0x92, 0xca, 0x82, 0xd4, 0xe2, 0x24, 0x36, 0xb0, 0xd3, 0x8c, 0x01, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x46, 0xa8, 0xa3, 0xd0, 0x01, 0x01, 0x00, 0x00, } func (m *Params) Marshal() (dAtA []byte, err error) { @@ -102,6 +115,11 @@ func (m *Params) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if m.MaxIterationsPerBlock != 0 { + i = encodeVarintParams(dAtA, i, uint64(m.MaxIterationsPerBlock)) + i-- + dAtA[i] = 0x8 + } return len(dAtA) - i, nil } @@ -122,6 +140,9 @@ func (m *Params) Size() (n int) { } var l int _ = l + if m.MaxIterationsPerBlock != 0 { + n += 1 + sovParams(uint64(m.MaxIterationsPerBlock)) + } return n } @@ -160,6 +181,25 @@ func (m *Params) Unmarshal(dAtA []byte) error { return fmt.Errorf("proto: Params: illegal tag %d (wire type %d)", fieldNum, wire) } switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field MaxIterationsPerBlock", wireType) + } + m.MaxIterationsPerBlock = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 
ErrIntOverflowParams + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.MaxIterationsPerBlock |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } default: iNdEx = preIndex skippy, err := skipParams(dAtA[iNdEx:]) diff --git a/x/streamer/types/stream.go b/x/streamer/types/stream.go index cdad9c0de..072f32e0a 100644 --- a/x/streamer/types/stream.go +++ b/x/streamer/types/stream.go @@ -3,6 +3,7 @@ package types import ( time "time" + "cosmossdk.io/math" sdk "github.com/cosmos/cosmos-sdk/types" ) @@ -18,6 +19,7 @@ func NewStream(id uint64, distrTo *DistrInfo, coins sdk.Coins, startTime time.Ti FilledEpochs: 0, DistributedCoins: sdk.Coins{}, Sponsored: sponsored, + EpochCoins: coins.QuoInt(math.NewIntFromUint64(numEpochsPaidOver)), } } diff --git a/x/streamer/types/stream.pb.go b/x/streamer/types/stream.pb.go index 412eb3de9..167248617 100644 --- a/x/streamer/types/stream.pb.go +++ b/x/streamer/types/stream.pb.go @@ -56,6 +56,8 @@ type Stream struct { DistributedCoins github_com_cosmos_cosmos_sdk_types.Coins `protobuf:"bytes,8,rep,name=distributed_coins,json=distributedCoins,proto3,castrepeated=github.com/cosmos/cosmos-sdk/types.Coins" json:"distributed_coins"` // Sponsored indicates if the stream is based on the sponsorship distribution. Sponsored bool `protobuf:"varint,9,opt,name=sponsored,proto3" json:"sponsored,omitempty"` + // EpochCoins are coins that need to be distributed in this epoch. 
+ EpochCoins github_com_cosmos_cosmos_sdk_types.Coins `protobuf:"bytes,10,rep,name=epoch_coins,json=epochCoins,proto3,castrepeated=github.com/cosmos/cosmos-sdk/types.Coins" json:"epoch_coins"` } func (m *Stream) Reset() { *m = Stream{} } @@ -154,6 +156,13 @@ func (m *Stream) GetSponsored() bool { return false } +func (m *Stream) GetEpochCoins() github_com_cosmos_cosmos_sdk_types.Coins { + if m != nil { + return m.EpochCoins + } + return nil +} + func init() { proto.RegisterType((*Stream)(nil), "dymensionxyz.dymension.streamer.Stream") } @@ -163,40 +172,41 @@ func init() { } var fileDescriptor_19586ad841c00cd9 = []byte{ - // 518 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x52, 0x4f, 0x6f, 0xd3, 0x30, - 0x1c, 0x6d, 0xba, 0xae, 0xac, 0xde, 0x86, 0xa8, 0x55, 0xa1, 0x50, 0xb1, 0xa4, 0x94, 0x4b, 0x84, - 0xc0, 0xde, 0x1f, 0x71, 0xe1, 0x58, 0xe0, 0xb0, 0x53, 0x51, 0x98, 0x04, 0xe2, 0x12, 0x39, 0xb5, - 0xdb, 0x59, 0x34, 0x71, 0x64, 0x3b, 0x55, 0xcb, 0xa7, 0xd8, 0xe7, 0xe0, 0x93, 0xec, 0xb8, 0x23, - 0xa7, 0x0e, 0xb5, 0x67, 0x2e, 0xfb, 0x04, 0x28, 0x76, 0xdb, 0x54, 0x08, 0xb4, 0x0b, 0xa7, 0xc4, - 0xef, 0xf7, 0xde, 0xef, 0xf7, 0x7b, 0xcf, 0x06, 0x2f, 0xe9, 0x2c, 0x61, 0xa9, 0xe2, 0x22, 0x9d, - 0xce, 0xbe, 0xe1, 0xcd, 0x01, 0x2b, 0x2d, 0x19, 0x49, 0x98, 0x5c, 0xfd, 0xa0, 0x4c, 0x0a, 0x2d, - 0xa0, 0xbf, 0xcd, 0x46, 0x9b, 0x03, 0x5a, 0xb3, 0xdb, 0xad, 0x91, 0x18, 0x09, 0xc3, 0xc5, 0xc5, - 0x9f, 0x95, 0xb5, 0xbd, 0x91, 0x10, 0xa3, 0x31, 0xc3, 0xe6, 0x14, 0xe7, 0x43, 0x4c, 0x73, 0x49, - 0x74, 0x21, 0xb4, 0x75, 0xff, 0xcf, 0xba, 0xe6, 0x09, 0x53, 0x9a, 0x24, 0xd9, 0xba, 0xc1, 0x40, - 0xa8, 0x44, 0x28, 0x1c, 0x13, 0xc5, 0xf0, 0xe4, 0x24, 0x66, 0x9a, 0x9c, 0xe0, 0x81, 0xe0, 0xeb, - 0x06, 0xc7, 0xf7, 0xb9, 0xa0, 0x5c, 0x69, 0x19, 0xf1, 0x74, 0xb8, 0x5a, 0xa9, 0xfb, 0xab, 0x06, - 0xea, 0x1f, 0x4d, 0x15, 0x3e, 0x04, 0x55, 0x4e, 0x5d, 0xa7, 0xe3, 0x04, 0xb5, 0xb0, 0xca, 0x29, - 0xec, 0x83, 0x43, 0x43, 0xe7, 0x71, 0xae, 0x59, 
0xa4, 0x85, 0x5b, 0xed, 0x38, 0xc1, 0xfe, 0xe9, - 0x0b, 0x74, 0x8f, 0x79, 0xf4, 0xae, 0x50, 0x9d, 0xa7, 0x43, 0x11, 0x1e, 0x94, 0x0d, 0x2e, 0x04, - 0x24, 0x60, 0xb7, 0xd8, 0x55, 0xb9, 0x3b, 0x9d, 0x9d, 0x60, 0xff, 0xf4, 0x09, 0xb2, 0x6e, 0x50, - 0xe1, 0x06, 0xad, 0xdc, 0xa0, 0xb7, 0x82, 0xa7, 0xbd, 0xe3, 0xeb, 0xb9, 0x5f, 0xf9, 0x7e, 0xeb, - 0x07, 0x23, 0xae, 0x2f, 0xf3, 0x18, 0x0d, 0x44, 0x82, 0x57, 0xd6, 0xed, 0xe7, 0x95, 0xa2, 0x5f, - 0xb1, 0x9e, 0x65, 0x4c, 0x19, 0x81, 0x0a, 0x6d, 0x67, 0xf8, 0x19, 0x00, 0xa5, 0x89, 0xd4, 0x51, - 0x91, 0x9c, 0x5b, 0x33, 0x0b, 0xb7, 0x91, 0x8d, 0x15, 0xad, 0x63, 0x45, 0x17, 0xeb, 0x58, 0x7b, - 0x47, 0xc5, 0xa0, 0xbb, 0xb9, 0xdf, 0x9c, 0x91, 0x64, 0xfc, 0xa6, 0x5b, 0x6a, 0xbb, 0x57, 0xb7, - 0xbe, 0x13, 0x36, 0x0c, 0x50, 0xd0, 0xe1, 0x27, 0xf0, 0xd8, 0x86, 0xc7, 0x32, 0x31, 0xb8, 0x8c, - 0x38, 0x65, 0xa9, 0xe6, 0x43, 0xce, 0xa4, 0xbb, 0xdb, 0x71, 0x82, 0x46, 0xef, 0xd9, 0xdd, 0xdc, - 0x3f, 0xb2, 0x5d, 0xfe, 0xce, 0xeb, 0x86, 0x2d, 0x53, 0x78, 0x5f, 0xe0, 0xe7, 0x1b, 0x18, 0x62, - 0xd0, 0x4a, 0xf3, 0xc4, 0xd2, 0x55, 0x94, 0x11, 0x4e, 0x23, 0x31, 0x61, 0xd2, 0xad, 0x9b, 0x8b, - 0x68, 0xa6, 0x79, 0x62, 0x14, 0xea, 0x03, 0xe1, 0xb4, 0x3f, 0x61, 0x12, 0x3e, 0x07, 0x87, 0x43, - 0x3e, 0x1e, 0x33, 0xba, 0xd2, 0xb8, 0x0f, 0x0c, 0xf3, 0xc0, 0x82, 0x96, 0x0c, 0xa7, 0xa0, 0x59, - 0x66, 0x4f, 0x23, 0x9b, 0xfb, 0xde, 0xff, 0xcf, 0xfd, 0xd1, 0xd6, 0x14, 0x83, 0xc0, 0xa7, 0xa0, - 0xa1, 0x32, 0x91, 0x2a, 0x21, 0x19, 0x75, 0x1b, 0x1d, 0x27, 0xd8, 0x0b, 0x4b, 0xa0, 0xd7, 0xbf, - 0x5e, 0x78, 0xce, 0xcd, 0xc2, 0x73, 0x7e, 0x2e, 0x3c, 0xe7, 0x6a, 0xe9, 0x55, 0x6e, 0x96, 0x5e, - 0xe5, 0xc7, 0xd2, 0xab, 0x7c, 0x79, 0xbd, 0x35, 0xf3, 0x1f, 0xcf, 0x78, 0x72, 0x86, 0xa7, 0xe5, - 0x5b, 0x36, 0x6b, 0xc4, 0x75, 0x73, 0xab, 0x67, 0xbf, 0x03, 0x00, 0x00, 0xff, 0xff, 0x8c, 0x33, - 0x70, 0x77, 0xc1, 0x03, 0x00, 0x00, + // 533 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x53, 0xcb, 0x6e, 0x13, 0x31, + 
0x14, 0xcd, 0xa4, 0x4d, 0x68, 0x9c, 0x16, 0x11, 0x2b, 0x42, 0x43, 0x44, 0x67, 0x42, 0xd8, 0x8c, + 0x10, 0xd8, 0x7d, 0x88, 0x0d, 0xcb, 0x00, 0x8b, 0xae, 0x82, 0x86, 0x4a, 0x20, 0x36, 0x23, 0x4f, + 0xec, 0xa4, 0x16, 0x99, 0xf1, 0xc8, 0xf6, 0x44, 0x09, 0x5f, 0xd1, 0xef, 0xe0, 0x4b, 0xba, 0xec, + 0x12, 0x09, 0x29, 0x45, 0xc9, 0x1f, 0xf4, 0x0b, 0xd0, 0xd8, 0x79, 0x09, 0x51, 0x75, 0xd3, 0x55, + 0xec, 0x7b, 0xcf, 0x39, 0xf7, 0x9e, 0x93, 0x31, 0x78, 0x4d, 0xa7, 0x09, 0x4b, 0x15, 0x17, 0xe9, + 0x64, 0xfa, 0x03, 0xaf, 0x2f, 0x58, 0x69, 0xc9, 0x48, 0xc2, 0xe4, 0xf2, 0x80, 0x32, 0x29, 0xb4, + 0x80, 0xfe, 0x36, 0x1a, 0xad, 0x2f, 0x68, 0x85, 0x6e, 0x35, 0x87, 0x62, 0x28, 0x0c, 0x16, 0x17, + 0x27, 0x4b, 0x6b, 0x79, 0x43, 0x21, 0x86, 0x23, 0x86, 0xcd, 0x2d, 0xce, 0x07, 0x98, 0xe6, 0x92, + 0xe8, 0x82, 0x68, 0xfb, 0xfe, 0xbf, 0x7d, 0xcd, 0x13, 0xa6, 0x34, 0x49, 0xb2, 0x95, 0x40, 0x5f, + 0xa8, 0x44, 0x28, 0x1c, 0x13, 0xc5, 0xf0, 0xf8, 0x38, 0x66, 0x9a, 0x1c, 0xe3, 0xbe, 0xe0, 0x2b, + 0x81, 0xa3, 0xfb, 0x5c, 0x50, 0xae, 0xb4, 0x8c, 0x78, 0x3a, 0x58, 0xae, 0xd4, 0xf9, 0x5d, 0x01, + 0xd5, 0xcf, 0xa6, 0x0b, 0x1f, 0x83, 0x32, 0xa7, 0xae, 0xd3, 0x76, 0x82, 0xdd, 0xb0, 0xcc, 0x29, + 0xec, 0x81, 0x03, 0x03, 0xe7, 0x71, 0xae, 0x59, 0xa4, 0x85, 0x5b, 0x6e, 0x3b, 0x41, 0xfd, 0xe4, + 0x15, 0xba, 0xc7, 0x3c, 0xfa, 0x50, 0xb0, 0xce, 0xd2, 0x81, 0x08, 0xf7, 0x37, 0x02, 0xe7, 0x02, + 0x12, 0x50, 0x29, 0x76, 0x55, 0xee, 0x4e, 0x7b, 0x27, 0xa8, 0x9f, 0x3c, 0x43, 0xd6, 0x0d, 0x2a, + 0xdc, 0xa0, 0xa5, 0x1b, 0xf4, 0x5e, 0xf0, 0xb4, 0x7b, 0x74, 0x35, 0xf3, 0x4b, 0x3f, 0x6f, 0xfc, + 0x60, 0xc8, 0xf5, 0x45, 0x1e, 0xa3, 0xbe, 0x48, 0xf0, 0xd2, 0xba, 0xfd, 0x79, 0xa3, 0xe8, 0x77, + 0xac, 0xa7, 0x19, 0x53, 0x86, 0xa0, 0x42, 0xab, 0x0c, 0xbf, 0x02, 0xa0, 0x34, 0x91, 0x3a, 0x2a, + 0x92, 0x73, 0x77, 0xcd, 0xc2, 0x2d, 0x64, 0x63, 0x45, 0xab, 0x58, 0xd1, 0xf9, 0x2a, 0xd6, 0xee, + 0x61, 0x31, 0xe8, 0x76, 0xe6, 0x37, 0xa6, 0x24, 0x19, 0xbd, 0xeb, 0x6c, 0xb8, 0x9d, 0xcb, 0x1b, + 0xdf, 0x09, 0x6b, 0xa6, 0x50, 0xc0, 
0xe1, 0x17, 0xf0, 0xd4, 0x86, 0xc7, 0x32, 0xd1, 0xbf, 0x88, + 0x38, 0x65, 0xa9, 0xe6, 0x03, 0xce, 0xa4, 0x5b, 0x69, 0x3b, 0x41, 0xad, 0xfb, 0xe2, 0x76, 0xe6, + 0x1f, 0x5a, 0x95, 0xff, 0xe3, 0x3a, 0x61, 0xd3, 0x34, 0x3e, 0x16, 0xf5, 0xb3, 0x75, 0x19, 0x62, + 0xd0, 0x4c, 0xf3, 0xc4, 0xc2, 0x55, 0x94, 0x11, 0x4e, 0x23, 0x31, 0x66, 0xd2, 0xad, 0x9a, 0x3f, + 0xa2, 0x91, 0xe6, 0x89, 0x61, 0xa8, 0x4f, 0x84, 0xd3, 0xde, 0x98, 0x49, 0xf8, 0x12, 0x1c, 0x0c, + 0xf8, 0x68, 0xc4, 0xe8, 0x92, 0xe3, 0x3e, 0x32, 0xc8, 0x7d, 0x5b, 0xb4, 0x60, 0x38, 0x01, 0x8d, + 0x4d, 0xf6, 0x34, 0xb2, 0xb9, 0xef, 0x3d, 0x7c, 0xee, 0x4f, 0xb6, 0xa6, 0x98, 0x0a, 0x7c, 0x0e, + 0x6a, 0x2a, 0x13, 0xa9, 0x12, 0x92, 0x51, 0xb7, 0xd6, 0x76, 0x82, 0xbd, 0x70, 0x53, 0x80, 0x23, + 0x50, 0xb7, 0xc1, 0xd8, 0x8d, 0xc0, 0xc3, 0x6f, 0x04, 0x8c, 0xbe, 0x39, 0x77, 0x7b, 0x57, 0x73, + 0xcf, 0xb9, 0x9e, 0x7b, 0xce, 0x9f, 0xb9, 0xe7, 0x5c, 0x2e, 0xbc, 0xd2, 0xf5, 0xc2, 0x2b, 0xfd, + 0x5a, 0x78, 0xa5, 0x6f, 0x6f, 0xb7, 0xf4, 0xee, 0x78, 0x34, 0xe3, 0x53, 0x3c, 0xd9, 0xbc, 0x1c, + 0x33, 0x22, 0xae, 0x9a, 0x6f, 0xe8, 0xf4, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x95, 0x10, 0x61, + 0xc8, 0x2f, 0x04, 0x00, 0x00, } func (m *Stream) Marshal() (dAtA []byte, err error) { @@ -219,6 +229,20 @@ func (m *Stream) MarshalToSizedBuffer(dAtA []byte) (int, error) { _ = i var l int _ = l + if len(m.EpochCoins) > 0 { + for iNdEx := len(m.EpochCoins) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.EpochCoins[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintStream(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x52 + } + } if m.Sponsored { i-- if m.Sponsored { @@ -353,6 +377,12 @@ func (m *Stream) Size() (n int) { if m.Sponsored { n += 2 } + if len(m.EpochCoins) > 0 { + for _, e := range m.EpochCoins { + l = e.Size() + n += 1 + l + sovStream(uint64(l)) + } + } return n } @@ -637,6 +667,40 @@ func (m *Stream) Unmarshal(dAtA []byte) error { } } m.Sponsored = bool(v != 0) + case 10: + if 
wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field EpochCoins", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStream + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthStream + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthStream + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.EpochCoins = append(m.EpochCoins, types.Coin{}) + if err := m.EpochCoins[len(m.EpochCoins)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipStream(dAtA[iNdEx:]) diff --git a/x/streamer/types/streamer.go b/x/streamer/types/streamer.go new file mode 100644 index 000000000..c45559b36 --- /dev/null +++ b/x/streamer/types/streamer.go @@ -0,0 +1,60 @@ +package types + +import ( + "math" + "slices" + "time" +) + +const ( + IterationsNoLimit uint64 = math.MaxUint64 + + MaxStreamID uint64 = math.MaxUint64 + MaxGaugeID uint64 = math.MaxUint64 + + MinStreamID uint64 = 0 + MinGaugeID uint64 = 0 +) + +func NewEpochPointer(epochIdentifier string, epochDuration time.Duration) EpochPointer { + return EpochPointer{ + StreamId: MinStreamID, + GaugeId: MinGaugeID, + EpochIdentifier: epochIdentifier, + EpochDuration: epochDuration, + } +} + +func (p *EpochPointer) Set(streamId uint64, gaugeId uint64) { + p.StreamId = streamId + p.GaugeId = gaugeId +} + +func (p *EpochPointer) SetToFirstGauge() { + p.Set(MinStreamID, MinGaugeID) +} + +func (p *EpochPointer) SetToLastGauge() { + p.Set(MaxStreamID, MaxGaugeID) +} + +func SortEpochPointers(ep []EpochPointer) { + slices.SortFunc(ep, func(a, b EpochPointer) int { + return cmpDurations(a.EpochDuration, b.EpochDuration) + }) +} + +func cmpDurations(a, b time.Duration) int { + return cmpInt64(int64(a), int64(b)) +} + 
+func cmpInt64(a, b int64) int { + switch { + case a < b: + return -1 + case a > b: + return 1 + default: + return 0 + } +} diff --git a/x/streamer/types/streamer.pb.go b/x/streamer/types/streamer.pb.go new file mode 100644 index 000000000..3560a610e --- /dev/null +++ b/x/streamer/types/streamer.pb.go @@ -0,0 +1,459 @@ +// Code generated by protoc-gen-gogo. DO NOT EDIT. +// source: dymensionxyz/dymension/streamer/streamer.proto + +package types + +import ( + fmt "fmt" + _ "github.com/cosmos/gogoproto/gogoproto" + proto "github.com/cosmos/gogoproto/proto" + github_com_cosmos_gogoproto_types "github.com/cosmos/gogoproto/types" + _ "google.golang.org/protobuf/types/known/durationpb" + io "io" + math "math" + math_bits "math/bits" + time "time" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf +var _ = time.Kitchen + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package + +// EpochPointer is a special object used for the streamer pagination. It helps iterate over +// streams with the specified epoch identifier within one epoch. Additionally, holds coins +// that must be distributed in this epoch. +type EpochPointer struct { + // StreamID is the ID of a stream. + StreamId uint64 `protobuf:"varint,1,opt,name=stream_id,json=streamId,proto3" json:"stream_id,omitempty"` + // GaugeID is the ID of a gauge. + GaugeId uint64 `protobuf:"varint,2,opt,name=gauge_id,json=gaugeId,proto3" json:"gauge_id,omitempty"` + // EpochIdentifier is a unique reference to this particular timer. 
+ EpochIdentifier string `protobuf:"bytes,3,opt,name=epoch_identifier,json=epochIdentifier,proto3" json:"epoch_identifier,omitempty"` + // EpochDuration is the time in between epoch ticks. It is stored in order to have + // an ability to sort the EpochPointer slice. + EpochDuration time.Duration `protobuf:"bytes,4,opt,name=epoch_duration,json=epochDuration,proto3,stdduration" json:"epoch_duration"` +} + +func (m *EpochPointer) Reset() { *m = EpochPointer{} } +func (m *EpochPointer) String() string { return proto.CompactTextString(m) } +func (*EpochPointer) ProtoMessage() {} +func (*EpochPointer) Descriptor() ([]byte, []int) { + return fileDescriptor_5216d7d357ab09b8, []int{0} +} +func (m *EpochPointer) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *EpochPointer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_EpochPointer.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *EpochPointer) XXX_Merge(src proto.Message) { + xxx_messageInfo_EpochPointer.Merge(m, src) +} +func (m *EpochPointer) XXX_Size() int { + return m.Size() +} +func (m *EpochPointer) XXX_DiscardUnknown() { + xxx_messageInfo_EpochPointer.DiscardUnknown(m) +} + +var xxx_messageInfo_EpochPointer proto.InternalMessageInfo + +func (m *EpochPointer) GetStreamId() uint64 { + if m != nil { + return m.StreamId + } + return 0 +} + +func (m *EpochPointer) GetGaugeId() uint64 { + if m != nil { + return m.GaugeId + } + return 0 +} + +func (m *EpochPointer) GetEpochIdentifier() string { + if m != nil { + return m.EpochIdentifier + } + return "" +} + +func (m *EpochPointer) GetEpochDuration() time.Duration { + if m != nil { + return m.EpochDuration + } + return 0 +} + +func init() { + proto.RegisterType((*EpochPointer)(nil), "dymensionxyz.dymension.streamer.EpochPointer") +} + +func init() { + 
proto.RegisterFile("dymensionxyz/dymension/streamer/streamer.proto", fileDescriptor_5216d7d357ab09b8) +} + +var fileDescriptor_5216d7d357ab09b8 = []byte{ + // 292 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xd2, 0x4b, 0xa9, 0xcc, 0x4d, + 0xcd, 0x2b, 0xce, 0xcc, 0xcf, 0xab, 0xa8, 0xac, 0xd2, 0x87, 0x73, 0xf4, 0x8b, 0x4b, 0x8a, 0x52, + 0x13, 0x73, 0x53, 0x8b, 0xe0, 0x0c, 0xbd, 0x82, 0xa2, 0xfc, 0x92, 0x7c, 0x21, 0x79, 0x64, 0xf5, + 0x08, 0xcd, 0x7a, 0x30, 0x65, 0x52, 0x22, 0xe9, 0xf9, 0xe9, 0xf9, 0x60, 0xb5, 0xfa, 0x20, 0x16, + 0x44, 0x9b, 0x94, 0x5c, 0x7a, 0x7e, 0x7e, 0x7a, 0x4e, 0xaa, 0x3e, 0x98, 0x97, 0x54, 0x9a, 0xa6, + 0x9f, 0x52, 0x5a, 0x94, 0x58, 0x02, 0xd2, 0x08, 0x16, 0x51, 0xda, 0xcb, 0xc8, 0xc5, 0xe3, 0x5a, + 0x90, 0x9f, 0x9c, 0x11, 0x90, 0x9f, 0x99, 0x57, 0x92, 0x5a, 0x24, 0x24, 0xcd, 0xc5, 0x09, 0x31, + 0x32, 0x3e, 0x33, 0x45, 0x82, 0x51, 0x81, 0x51, 0x83, 0x25, 0x88, 0x03, 0x22, 0xe0, 0x99, 0x22, + 0x24, 0xc9, 0xc5, 0x91, 0x9e, 0x58, 0x9a, 0x9e, 0x0a, 0x92, 0x63, 0x02, 0xcb, 0xb1, 0x83, 0xf9, + 0x9e, 0x29, 0x42, 0x9a, 0x5c, 0x02, 0xa9, 0x20, 0x73, 0xe2, 0x33, 0x53, 0x52, 0xf3, 0x4a, 0x32, + 0xd3, 0x32, 0x53, 0x8b, 0x24, 0x98, 0x15, 0x18, 0x35, 0x38, 0x83, 0xf8, 0xc1, 0xe2, 0x9e, 0x70, + 0x61, 0x21, 0x2f, 0x2e, 0x3e, 0x88, 0x52, 0x98, 0x5b, 0x24, 0x58, 0x14, 0x18, 0x35, 0xb8, 0x8d, + 0x24, 0xf5, 0x20, 0x8e, 0xd5, 0x83, 0x39, 0x56, 0xcf, 0x05, 0xaa, 0xc0, 0x89, 0xe3, 0xc4, 0x3d, + 0x79, 0x86, 0x19, 0xf7, 0xe5, 0x19, 0x83, 0x78, 0xc1, 0x5a, 0xe1, 0x12, 0xfe, 0x27, 0x1e, 0xc9, + 0x31, 0x5e, 0x78, 0x24, 0xc7, 0xf8, 0xe0, 0x91, 0x1c, 0xe3, 0x84, 0xc7, 0x72, 0x0c, 0x17, 0x1e, + 0xcb, 0x31, 0xdc, 0x78, 0x2c, 0xc7, 0x10, 0x65, 0x9a, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, + 0x9c, 0x9f, 0xab, 0x8f, 0x23, 0xac, 0xcb, 0x8c, 0xf5, 0x2b, 0x10, 0x01, 0x5e, 0x52, 0x59, 0x90, + 0x5a, 0x9c, 0xc4, 0x06, 0xb6, 0xdc, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0x96, 0x00, 0x50, 0x36, + 0xa0, 0x01, 0x00, 0x00, +} + 
+func (m *EpochPointer) Marshal() (dAtA []byte, err error) { + size := m.Size() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *EpochPointer) MarshalTo(dAtA []byte) (int, error) { + size := m.Size() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *EpochPointer) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + n1, err1 := github_com_cosmos_gogoproto_types.StdDurationMarshalTo(m.EpochDuration, dAtA[i-github_com_cosmos_gogoproto_types.SizeOfStdDuration(m.EpochDuration):]) + if err1 != nil { + return 0, err1 + } + i -= n1 + i = encodeVarintStreamer(dAtA, i, uint64(n1)) + i-- + dAtA[i] = 0x22 + if len(m.EpochIdentifier) > 0 { + i -= len(m.EpochIdentifier) + copy(dAtA[i:], m.EpochIdentifier) + i = encodeVarintStreamer(dAtA, i, uint64(len(m.EpochIdentifier))) + i-- + dAtA[i] = 0x1a + } + if m.GaugeId != 0 { + i = encodeVarintStreamer(dAtA, i, uint64(m.GaugeId)) + i-- + dAtA[i] = 0x10 + } + if m.StreamId != 0 { + i = encodeVarintStreamer(dAtA, i, uint64(m.StreamId)) + i-- + dAtA[i] = 0x8 + } + return len(dAtA) - i, nil +} + +func encodeVarintStreamer(dAtA []byte, offset int, v uint64) int { + offset -= sovStreamer(v) + base := offset + for v >= 1<<7 { + dAtA[offset] = uint8(v&0x7f | 0x80) + v >>= 7 + offset++ + } + dAtA[offset] = uint8(v) + return base +} +func (m *EpochPointer) Size() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.StreamId != 0 { + n += 1 + sovStreamer(uint64(m.StreamId)) + } + if m.GaugeId != 0 { + n += 1 + sovStreamer(uint64(m.GaugeId)) + } + l = len(m.EpochIdentifier) + if l > 0 { + n += 1 + l + sovStreamer(uint64(l)) + } + l = github_com_cosmos_gogoproto_types.SizeOfStdDuration(m.EpochDuration) + n += 1 + l + sovStreamer(uint64(l)) + return n +} + +func sovStreamer(x uint64) (n int) { + return (math_bits.Len64(x|1) + 6) / 7 +} +func sozStreamer(x uint64) (n int) { + 
return sovStreamer(uint64((x << 1) ^ uint64((int64(x) >> 63)))) +} +func (m *EpochPointer) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStreamer + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: EpochPointer: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: EpochPointer: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field StreamId", wireType) + } + m.StreamId = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStreamer + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.StreamId |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 2: + if wireType != 0 { + return fmt.Errorf("proto: wrong wireType = %d for field GaugeId", wireType) + } + m.GaugeId = 0 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStreamer + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + m.GaugeId |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field EpochIdentifier", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStreamer + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthStreamer + } + postIndex := 
iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthStreamer + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.EpochIdentifier = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field EpochDuration", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowStreamer + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthStreamer + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthStreamer + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if err := github_com_cosmos_gogoproto_types.StdDurationUnmarshal(&m.EpochDuration, dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipStreamer(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthStreamer + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func skipStreamer(dAtA []byte) (n int, err error) { + l := len(dAtA) + iNdEx := 0 + depth := 0 + for iNdEx < l { + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowStreamer + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= (uint64(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + wireType := int(wire & 0x7) + switch wireType { + case 0: + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowStreamer + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + iNdEx++ + if dAtA[iNdEx-1] < 0x80 { + break + } + } + case 1: + iNdEx += 8 + case 2: + var length int + for shift := 
uint(0); ; shift += 7 { + if shift >= 64 { + return 0, ErrIntOverflowStreamer + } + if iNdEx >= l { + return 0, io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + length |= (int(b) & 0x7F) << shift + if b < 0x80 { + break + } + } + if length < 0 { + return 0, ErrInvalidLengthStreamer + } + iNdEx += length + case 3: + depth++ + case 4: + if depth == 0 { + return 0, ErrUnexpectedEndOfGroupStreamer + } + depth-- + case 5: + iNdEx += 4 + default: + return 0, fmt.Errorf("proto: illegal wireType %d", wireType) + } + if iNdEx < 0 { + return 0, ErrInvalidLengthStreamer + } + if depth == 0 { + return iNdEx, nil + } + } + return 0, io.ErrUnexpectedEOF +} + +var ( + ErrInvalidLengthStreamer = fmt.Errorf("proto: negative length found during unmarshaling") + ErrIntOverflowStreamer = fmt.Errorf("proto: integer overflow") + ErrUnexpectedEndOfGroupStreamer = fmt.Errorf("proto: unexpected end of group") +)