diff --git a/README.md b/README.md index 5b1f42d..654d4a4 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,8 @@ Current status of features implemented in the SDK is listed in the table below: | Latest Releases | Conformance to spec version | | :---: | :---: | | [v1.0.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v1.0.0) | [v0.5](https://github.com/serverlessworkflow/specification/tree/0.5.x) | -| [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.0) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | +| [v2.0.1](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.0.1) | [v0.6](https://github.com/serverlessworkflow/specification/tree/0.6.x) | +| [v2.1.0](https://github.com/serverlessworkflow/sdk-go/releases/tag/v2.1.0) | [v0.7](https://github.com/serverlessworkflow/specification/tree/0.7.x) | ## How to use diff --git a/model/auth.go b/model/auth.go new file mode 100644 index 0000000..f9df23d --- /dev/null +++ b/model/auth.go @@ -0,0 +1,276 @@ +// Copyright 2021 The Serverless Workflow Specification Authors +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package model + +import ( + "encoding/json" + "fmt" +) + +// AuthType ... +type AuthType string + +const ( + // AuthTypeBasic ... + AuthTypeBasic AuthType = "basic" + // AuthTypeBearer ... + AuthTypeBearer AuthType = "bearer" + // AuthTypeOAuth2 ... + AuthTypeOAuth2 AuthType = "oauth2" +) + +// GrantType ... +type GrantType string + +const ( + // GrantTypePassword ... + GrantTypePassword GrantType = "password" + // GrantTypeClientCredentials ... + GrantTypeClientCredentials GrantType = "clientCredentials" + // GrantTypeTokenExchange ... + GrantTypeTokenExchange GrantType = "tokenExchange" +) + +// authTypesMapping map to support JSON unmarshalling when guessing the auth scheme +var authTypesMapping = map[AuthType]AuthProperties{ + AuthTypeBasic: &BasicAuthProperties{}, + AuthTypeBearer: &BearerAuthProperties{}, + AuthTypeOAuth2: &OAuth2AuthProperties{}, +} + +// Auth ... +type Auth struct { + // Name Unique auth definition name + Name string `json:"name" validate:"required"` + // Scheme Defines the auth type + Scheme AuthType `json:"scheme,omitempty" validate:"omitempty,min=1"` + // Properties ... + Properties AuthProperties `json:"properties" validate:"required"` +} + +// UnmarshalJSON ... 
+func (a *Auth) UnmarshalJSON(data []byte) error {
+	auth := make(map[string]json.RawMessage)
+	if err := json.Unmarshal(data, &auth); err != nil {
+		// it's a file
+		file, err := unmarshalFile(data)
+		if err != nil {
+			return err
+		}
+		// call us recursively
+		if err := json.Unmarshal(file, &a); err != nil {
+			return err
+		}
+		return nil
+	}
+	if err := unmarshalKey("scheme", auth, &a.Scheme); err != nil {
+		return err
+	}
+	if err := unmarshalKey("name", auth, &a.Name); err != nil {
+		return err
+	}
+
+	if len(a.Scheme) == 0 {
+		a.Scheme = AuthTypeBasic
+	}
+	if _, ok := authTypesMapping[a.Scheme]; !ok {
+		return fmt.Errorf("authentication scheme %s not supported", a.Scheme)
+	}
+	// we take the concrete type we want to unmarshal based on the scheme
+	a.Properties = authTypesMapping[a.Scheme]
+	if err := unmarshalKey("properties", auth, a.Properties); err != nil {
+		return err
+	}
+	return nil
+}
+
+// AuthProperties ...
+type AuthProperties interface {
+	// GetMetadata ...
+	GetMetadata() *Metadata
+	// GetSecret ...
+	GetSecret() string
+}
+
+// BaseAuthProperties ...
+type BaseAuthProperties struct {
+	Common
+	// Secret Expression referencing a workflow secret that contains all needed auth info
+	Secret string `json:"secret,omitempty"`
+}
+
+// UnmarshalJSON ...
+func (b *BaseAuthProperties) UnmarshalJSON(data []byte) error {
+	properties := make(map[string]json.RawMessage)
+	if err := json.Unmarshal(data, &properties); err != nil {
+		b.Secret, err = unmarshalString(data)
+		if err != nil {
+			return err
+		}
+		return nil
+	}
+	if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil {
+		return err
+	}
+	if err := unmarshalKey("secret", properties, &b.Secret); err != nil {
+		return err
+	}
+	return nil
+}
+
+// GetMetadata ...
+func (b *BaseAuthProperties) GetMetadata() *Metadata {
+	return &b.Metadata
+}
+
+// GetSecret ...
+func (b *BaseAuthProperties) GetSecret() string {
+	return b.Secret
+}
+
+// BasicAuthProperties Basic Auth Info
+type BasicAuthProperties struct {
+	BaseAuthProperties
+	// Username String or a workflow expression. Contains the username
+	Username string `json:"username" validate:"required"`
+	// Password String or a workflow expression. Contains the user password
+	Password string `json:"password" validate:"required"`
+}
+
+// UnmarshalJSON ...
+func (b *BasicAuthProperties) UnmarshalJSON(data []byte) error {
+	properties := make(map[string]json.RawMessage)
+	if err := json.Unmarshal(data, &properties); err != nil {
+		err = json.Unmarshal(data, &b.BaseAuthProperties)
+		if err != nil {
+			return err
+		}
+		return nil
+	}
+	if err := unmarshalKey("username", properties, &b.Username); err != nil {
+		return err
+	}
+	if err := unmarshalKey("password", properties, &b.Password); err != nil {
+		return err
+	}
+	if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil {
+		return err
+	}
+	return nil
+}
+
+// BearerAuthProperties Bearer auth information
+type BearerAuthProperties struct {
+	BaseAuthProperties
+	// Token String or a workflow expression. Contains the token
+	Token string `json:"token" validate:"required"`
+}
+
+// UnmarshalJSON ...
+func (b *BearerAuthProperties) UnmarshalJSON(data []byte) error { + properties := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &properties); err != nil { + err = json.Unmarshal(data, &b.BaseAuthProperties) + if err != nil { + return err + } + return nil + } + if err := unmarshalKey("token", properties, &b.Token); err != nil { + return err + } + if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { + return err + } + return nil +} + +// OAuth2AuthProperties OAuth2 information +type OAuth2AuthProperties struct { + BaseAuthProperties + // Authority String or a workflow expression. Contains the authority information + Authority string `json:"authority,omitempty" validate:"omitempty,min=1"` + // GrantType Defines the grant type + GrantType GrantType `json:"grantType" validate:"required"` + // ClientID String or a workflow expression. Contains the client identifier + ClientID string `json:"clientId" validate:"required"` + // ClientSecret Workflow secret or a workflow expression. Contains the client secret + ClientSecret string `json:"clientSecret,omitempty" validate:"omitempty,min=1"` + // Scopes Array containing strings or workflow expressions. Contains the OAuth2 scopes + Scopes []string `json:"scopes,omitempty" validate:"omitempty,min=1"` + // Username String or a workflow expression. Contains the username. Used only if grantType is 'resourceOwner' + Username string `json:"username,omitempty" validate:"omitempty,min=1"` + // Password String or a workflow expression. Contains the user password. Used only if grantType is 'resourceOwner' + Password string `json:"password,omitempty" validate:"omitempty,min=1"` + // Audiences Array containing strings or workflow expressions. Contains the OAuth2 audiences + Audiences []string `json:"audiences,omitempty" validate:"omitempty,min=1"` + // SubjectToken String or a workflow expression. Contains the subject token + SubjectToken string `json:"subjectToken,omitempty" validate:"omitempty,min=1"` + // RequestedSubject String or a workflow expression. Contains the requested subject + RequestedSubject string `json:"requestedSubject,omitempty" validate:"omitempty,min=1"` + // RequestedIssuer String or a workflow expression. Contains the requested issuer + RequestedIssuer string `json:"requestedIssuer,omitempty" validate:"omitempty,min=1"` +} + +// TODO: use reflection to unmarshal the keys and think on a generic approach to handle them + +// UnmarshalJSON ... 
+func (b *OAuth2AuthProperties) UnmarshalJSON(data []byte) error { + properties := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &properties); err != nil { + err = json.Unmarshal(data, &b.BaseAuthProperties) + if err != nil { + return err + } + return nil + } + if err := unmarshalKey("authority", properties, &b.Authority); err != nil { + return err + } + if err := unmarshalKey("grantType", properties, &b.GrantType); err != nil { + return err + } + if err := unmarshalKey("clientId", properties, &b.ClientID); err != nil { + return err + } + if err := unmarshalKey("clientSecret", properties, &b.ClientSecret); err != nil { + return err + } + if err := unmarshalKey("scopes", properties, &b.Scopes); err != nil { + return err + } + if err := unmarshalKey("username", properties, &b.Username); err != nil { + return err + } + if err := unmarshalKey("password", properties, &b.Password); err != nil { + return err + } + if err := unmarshalKey("audiences", properties, &b.Audiences); err != nil { + return err + } + if err := unmarshalKey("subjectToken", properties, &b.SubjectToken); err != nil { + return err + } + if err := unmarshalKey("requestedSubject", properties, &b.RequestedSubject); err != nil { + return err + } + if err := unmarshalKey("requestedIssuer", properties, &b.RequestedIssuer); err != nil { + return err + } + if err := unmarshalKey("metadata", properties, &b.Metadata); err != nil { + return err + } + return nil +} diff --git a/model/event.go b/model/event.go index 18606c2..0154428 100644 --- a/model/event.go +++ b/model/event.go @@ -55,6 +55,8 @@ type Event struct { Type string `json:"type" validate:"required"` // Defines the CloudEvent as either 'consumed' or 'produced' by the workflow. Default is 'consumed' Kind EventKind `json:"kind,omitempty"` + // If `true`, only the Event payload is accessible to consuming Workflow states. If `false`, both event payload and context attributes should be accessible" + DataOnly bool `json:"dataOnly,omitempty"` // CloudEvent correlation definitions Correlation []Correlation `json:"correlation,omitempty" validate:"omitempty,dive"` } @@ -79,11 +81,3 @@ type EventRef struct { // Add additional extension context attributes to the produced event ContextAttributes map[string]interface{} `json:"contextAttributes,omitempty"` } - -// SubFlowRef ... -type SubFlowRef struct { - // Sub-workflow unique id - WorkflowID string `json:"workflowId" validate:"required"` - // Sub-workflow version - Version string `json:"version,omitempty"` -} diff --git a/model/function.go b/model/function.go index 660099d..662b341 100644 --- a/model/function.go +++ b/model/function.go @@ -25,6 +25,10 @@ const ( FunctionTypeExpression FunctionType = "expression" // FunctionTypeGraphQL ... FunctionTypeGraphQL FunctionType = "graphql" + // FunctionTypeAsyncAPI ... + FunctionTypeAsyncAPI FunctionType = "asyncapi" + // FunctionTypeOData ... + FunctionTypeOData FunctionType = "odata" ) // FunctionType ... @@ -39,6 +43,8 @@ type Function struct { Operation string `json:"operation" validate:"required"` // Defines the function type. Is either `rest`, `rpc`, `expression` or `graphql`. Default is `rest` Type FunctionType `json:"type,omitempty"` + // References an auth definition name to be used to access to resource defined in the operation parameter + AuthRef string `json:"authRef,omitempty" validate:"omitempty,min=1"` } // FunctionRef ... 
diff --git a/model/retry.go b/model/retry.go index 7f80b38..7a3990a 100644 --- a/model/retry.go +++ b/model/retry.go @@ -32,7 +32,7 @@ type Retry struct { // Numeric value, if specified the delay between retries is multiplied by this value. Multiplier floatstr.Float32OrString `json:"multiplier,omitempty" validate:"omitempty,min=0"` // Maximum number of retry attempts. - MaxAttempts intstr.IntOrString `json:"maxAttempts" validate:"required,min=0"` + MaxAttempts intstr.IntOrString `json:"maxAttempts" validate:"required"` // If float type, maximum amount of random time added or subtracted from the delay between each retry relative to total delay (between 0 and 1). If string type, absolute maximum amount of random time added or subtracted from the delay between each retry (ISO 8601 duration format) Jitter floatstr.Float32OrString `json:"jitter,omitempty" validate:"omitempty,min=0,max=1"` } diff --git a/model/states.go b/model/states.go index 00adbfa..9ffd006 100644 --- a/model/states.go +++ b/model/states.go @@ -37,13 +37,18 @@ const ( StateTypeInject = "inject" // StateTypeCallback ... StateTypeCallback = "callback" - - // CompletionTypeAnd .. - CompletionTypeAnd = "and" - // CompletionTypeXor ... - CompletionTypeXor = "xor" - // CompletionTypeNOfM ... - CompletionTypeNOfM = "n_of_m" + // StateTypeSleep ... + StateTypeSleep = "sleep" + + // CompletionTypeAllOf ... + CompletionTypeAllOf CompletionType = "allOf" + // CompletionTypeAtLeast ... + CompletionTypeAtLeast CompletionType = "atLeast" + + // ForEachModeTypeSequential ... + ForEachModeTypeSequential ForEachModeType = "sequential" + // ForEachModeTypeParallel ... + ForEachModeTypeParallel ForEachModeType = "parallel" ) // StateType ... @@ -52,12 +57,15 @@ type StateType string // CompletionType Option types on how to complete branch execution. type CompletionType string +// ForEachModeType Specifies how iterations are to be performed (sequentially or in parallel) +type ForEachModeType string + // State definition for a Workflow state type State interface { GetID() string GetName() string GetType() StateType - GetOnErrors() []Error + GetOnErrors() []OnError GetTransition() *Transition GetStateDataFilter() *StateDataFilter GetCompensatedBy() string @@ -75,7 +83,7 @@ type BaseState struct { // State type Type StateType `json:"type" validate:"required"` // States error handling and retries definitions - OnErrors []Error `json:"onErrors,omitempty" validate:"omitempty,dive"` + OnErrors []OnError `json:"onErrors,omitempty" validate:"omitempty,dive"` // Next transition of the workflow after the time delay Transition *Transition `json:"transition,omitempty"` // State data filter @@ -90,7 +98,7 @@ type BaseState struct { } // GetOnErrors ... -func (s *BaseState) GetOnErrors() []Error { return s.OnErrors } +func (s *BaseState) GetOnErrors() []OnError { return s.OnErrors } // GetCompensatedBy ... func (s *BaseState) GetCompensatedBy() string { return s.CompensatedBy } @@ -130,11 +138,11 @@ type DelayState struct { type EventState struct { BaseState // If true consuming one of the defined events causes its associated actions to be performed. 
If false all of the defined events must be consumed in order for actions to be performed - Exclusive bool `json:"exclusive,omitempty"` + Exclusive *bool `json:"exclusive,omitempty"` // Define the events to be consumed and optional actions to be performed OnEvents []OnEvents `json:"onEvents" validate:"required,min=1,dive"` - // Time period to wait for incoming events (ISO 8601 format) - Timeout string `json:"timeout,omitempty"` + // State specific timeouts + Timeout EventStateTimeout `json:"timeouts,omitempty"` } // UnmarshalJSON ... @@ -149,11 +157,10 @@ func (e *EventState) UnmarshalJSON(data []byte) error { } if eventStateMap["exclusive"] == nil { - e.Exclusive = true + e.Exclusive = &TRUE } else { - e.Exclusive = eventStateMap["exclusive"].(bool) + e.Exclusive = eventStateMap["exclusive"].(*bool) } - e.Timeout = requiresNotNilOrEmpty(eventStateMap["timeout"]) eventStateRaw := make(map[string]json.RawMessage) if err := json.Unmarshal(data, &eventStateRaw); err != nil { @@ -162,10 +169,20 @@ func (e *EventState) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(eventStateRaw["onEvents"], &e.OnEvents); err != nil { return err } + if err := unmarshalKey("timeouts", eventStateRaw, &e.Timeout); err != nil { + return err + } return nil } +// EventStateTimeout ... +type EventStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty"` + EventTimeout string `json:"eventTimeout,omitempty"` +} + // OperationState Defines actions be performed. Does not wait for incoming events type OperationState struct { BaseState @@ -173,6 +190,14 @@ type OperationState struct { ActionMode ActionMode `json:"actionMode,omitempty"` // Actions to be performed Actions []Action `json:"actions" validate:"required,min=1,dive"` + // State specific timeouts + Timeouts OperationStateTimeout `json:"timeouts,omitempty"` +} + +// OperationStateTimeout ... +type OperationStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` } // ParallelState Consists of a number of states that are executed in parallel @@ -182,8 +207,16 @@ type ParallelState struct { Branches []Branch `json:"branches" validate:"required,min=1,dive"` // Option types on how to complete branch execution. CompletionType CompletionType `json:"completionType,omitempty"` - // Used when completionType is set to 'n_of_m' to specify the 'N' value - N intstr.IntOrString `json:"n,omitempty"` + // Used when completionType is set to 'atLeast' to specify the minimum number of branches that must complete before the state will transition." + NumCompleted intstr.IntOrString `json:"numCompleted,omitempty"` + // State specific timeouts + Timeouts ParallelStateTimeout `json:"timeouts,omitempty"` +} + +// ParallelStateTimeout ... +type ParallelStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` + BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` } // InjectState ... @@ -191,6 +224,13 @@ type InjectState struct { BaseState // JSON object which can be set as states data input and can be manipulated via filters Data map[string]interface{} `json:"data" validate:"required,min=1"` + // State specific timeouts + Timeouts InjectStateTimeout `json:"timeouts,omitempty"` +} + +// InjectStateTimeout ... 
+type InjectStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` } // ForEachState ... @@ -203,9 +243,19 @@ type ForEachState struct { // Name of the iteration parameter that can be referenced in actions/workflow. For each parallel iteration, this param should contain an unique element of the inputCollection array IterationParam string `json:"iterationParam" validate:"required"` // Specifies how upper bound on how many iterations may run in parallel - Max intstr.IntOrString `json:"max,omitempty"` + BatchSize intstr.IntOrString `json:"batchSize,omitempty"` // Actions to be executed for each of the elements of inputCollection Actions []Action `json:"actions,omitempty"` + // State specific timeout + Timeouts ForEachStateTimeout `json:"timeouts,omitempty"` + // Mode Specifies how iterations are to be performed (sequentially or in parallel) + Mode ForEachModeType `json:"mode,omitempty"` +} + +// ForEachStateTimeout ... +type ForEachStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty"` } // CallbackState ... @@ -213,19 +263,40 @@ type CallbackState struct { BaseState // Defines the action to be executed Action Action `json:"action" validate:"required"` - // References an unique callback event name in the defined workflow events + // References a unique callback event name in the defined workflow events EventRef string `json:"eventRef" validate:"required"` // Time period to wait for incoming events (ISO 8601 format) - Timeout string `json:"timeout" validate:"required"` + Timeouts CallbackStateTimeout `json:"timeouts" validate:"required"` // Event data filter EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` } +// CallbackStateTimeout ... +type CallbackStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` + ActionExecTimeout string `json:"actionExecTimeout,omitempty"` + EventTimeout string `json:"eventTimeout,omitempty"` +} + +// SleepState ... +type SleepState struct { + BaseState + // Duration (ISO 8601 duration format) to sleep + Duration string `json:"duration" validate:"required"` + // Timeouts State specific timeouts + Timeouts SleepStateTimeout `json:"timeouts,omitempty"` +} + +// SleepStateTimeout ... +type SleepStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` +} + // BaseSwitchState ... type BaseSwitchState struct { BaseState // Default transition of the workflow if there is no matching data conditions. 
Can include a transition or end definition - Default DefaultDef `json:"default,omitempty"` + DefaultCondition DefaultCondition `json:"defaultCondition,omitempty"` } // EventBasedSwitchState Permits transitions to other states based on events @@ -233,6 +304,8 @@ type EventBasedSwitchState struct { BaseSwitchState // Defines conditions evaluated against events EventConditions []EventCondition `json:"eventConditions" validate:"required,min=1,dive"` + // State specific timeouts + Timeouts EventBasedSwitchStateTimeout `json:"timeouts,omitempty"` } // UnmarshalJSON implementation for json Unmarshal function for the Eventbasedswitch type @@ -241,21 +314,21 @@ func (j *EventBasedSwitchState) UnmarshalJSON(data []byte) error { return err } eventBasedSwitch := make(map[string]json.RawMessage) - err := json.Unmarshal(data, &eventBasedSwitch) - if err != nil { + if err := json.Unmarshal(data, &eventBasedSwitch); err != nil { return err } var rawConditions []json.RawMessage - err = json.Unmarshal(eventBasedSwitch["eventConditions"], &rawConditions) - if err != nil { + if err := unmarshalKey("timeouts", eventBasedSwitch, &j.Timeouts); err != nil { + return err + } + if err := json.Unmarshal(eventBasedSwitch["eventConditions"], &rawConditions); err != nil { return err } j.EventConditions = make([]EventCondition, len(rawConditions)) var mapConditions map[string]interface{} for i, rawCondition := range rawConditions { - err = json.Unmarshal(rawCondition, &mapConditions) - if err != nil { + if err := json.Unmarshal(rawCondition, &mapConditions); err != nil { return err } var condition EventCondition @@ -264,8 +337,7 @@ func (j *EventBasedSwitchState) UnmarshalJSON(data []byte) error { } else { condition = &TransitionEventCondition{} } - err := json.Unmarshal(rawCondition, condition) - if err != nil { + if err := json.Unmarshal(rawCondition, condition); err != nil { return err } j.EventConditions[i] = condition @@ -273,6 +345,12 @@ func (j *EventBasedSwitchState) UnmarshalJSON(data []byte) error { return nil } +// EventBasedSwitchStateTimeout ... +type EventBasedSwitchStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` + EventTimeout string `json:"eventTimeout,omitempty"` +} + // EventCondition ... 
type EventCondition interface { GetName() string @@ -285,7 +363,7 @@ type EventCondition interface { type BaseEventCondition struct { // Event condition name Name string `json:"name,omitempty"` - // References an unique event name in the defined workflow events + // References a unique event name in the defined workflow events EventRef string `json:"eventRef" validate:"required"` // Event data filter definition EventDataFilter EventDataFilter `json:"eventDataFilter,omitempty"` @@ -321,7 +399,8 @@ type EndEventCondition struct { // DataBasedSwitchState Permits transitions to other states based on data conditions type DataBasedSwitchState struct { BaseSwitchState - DataConditions []DataCondition `json:"dataConditions" validate:"required,min=1,dive"` + DataConditions []DataCondition `json:"dataConditions" validate:"required,min=1,dive"` + Timeouts DataBasedSwitchStateTimeout `json:"timeouts,omitempty"` } // UnmarshalJSON implementation for json Unmarshal function for the Databasedswitch type @@ -330,21 +409,20 @@ func (j *DataBasedSwitchState) UnmarshalJSON(data []byte) error { return err } dataBasedSwitch := make(map[string]json.RawMessage) - err := json.Unmarshal(data, &dataBasedSwitch) - if err != nil { + if err := json.Unmarshal(data, &dataBasedSwitch); err != nil { return err } var rawConditions []json.RawMessage - err = json.Unmarshal(dataBasedSwitch["dataConditions"], &rawConditions) - if err != nil { + if err := unmarshalKey("timeouts", dataBasedSwitch, &j.Timeouts); err != nil { + return err + } + if err := json.Unmarshal(dataBasedSwitch["dataConditions"], &rawConditions); err != nil { return err } - j.DataConditions = make([]DataCondition, len(rawConditions)) var mapConditions map[string]interface{} for i, rawCondition := range rawConditions { - err = json.Unmarshal(rawCondition, &mapConditions) - if err != nil { + if err := json.Unmarshal(rawCondition, &mapConditions); err != nil { return err } var condition DataCondition @@ -353,8 +431,7 @@ func (j *DataBasedSwitchState) UnmarshalJSON(data []byte) error { } else { condition = &TransitionDataCondition{} } - err := json.Unmarshal(rawCondition, condition) - if err != nil { + if err := json.Unmarshal(rawCondition, condition); err != nil { return err } j.DataConditions[i] = condition @@ -362,6 +439,11 @@ func (j *DataBasedSwitchState) UnmarshalJSON(data []byte) error { return nil } +// DataBasedSwitchStateTimeout ... +type DataBasedSwitchStateTimeout struct { + StateExecTimeout StateExecTimeout `json:"stateExecTimeout,omitempty"` +} + // DataCondition ... type DataCondition interface { GetName() string diff --git a/model/util.go b/model/util.go index 692de86..edf12e9 100644 --- a/model/util.go +++ b/model/util.go @@ -25,6 +25,12 @@ import ( const prefix = "file:/" +// TRUE used by bool fields that needs a boolean pointer +var TRUE = true + +// FALSE used by bool fields that needs a boolean pointer +var FALSE = false + func getBytesFromFile(s string) (b []byte, err error) { // #nosec if resp, err := http.Get(s); err == nil { @@ -69,3 +75,18 @@ func unmarshalKey(key string, data map[string]json.RawMessage, output interface{ } return nil } + +// unmarshalFile same as calling unmarshalString following by getBytesFromFile. +// Assumes that the value inside `data` is a path to a known location. +// Returns the content of the file or a not nil error reference. 
+func unmarshalFile(data []byte) (b []byte, err error) { + filePath, err := unmarshalString(data) + if err != nil { + return nil, err + } + file, err := getBytesFromFile(filePath) + if err != nil { + return nil, err + } + return file, nil +} diff --git a/model/workflow.go b/model/workflow.go index b59cf7c..a00653b 100644 --- a/model/workflow.go +++ b/model/workflow.go @@ -26,6 +26,8 @@ const ( ActionModeSequential ActionMode = "sequential" // ActionModeParallel ... ActionModeParallel ActionMode = "parallel" + // UnlimitedTimeout description for unlimited timeouts + UnlimitedTimeout = "unlimited" ) var actionsModelMapping = map[string]func(state map[string]interface{}) State{ @@ -42,6 +44,7 @@ var actionsModelMapping = map[string]func(state map[string]interface{}) State{ StateTypeInject: func(map[string]interface{}) State { return &InjectState{} }, StateTypeForEach: func(map[string]interface{}) State { return &ForEachState{} }, StateTypeCallback: func(map[string]interface{}) State { return &CallbackState{} }, + StateTypeSleep: func(map[string]interface{}) State { return &SleepState{} }, } // ActionMode ... @@ -51,22 +54,42 @@ type ActionMode string // to make it easy for custom unmarshalers implementations to unmarshal the common data structure. type BaseWorkflow struct { // Workflow unique identifier - ID string `json:"id" validate:"required"` + ID string `json:"id" validate:"omitempty,min=1"` + // Key Domain-specific workflow identifier + Key string `json:"key,omitempty" validate:"omitempty,min=1"` // Workflow name Name string `json:"name" validate:"required"` // Workflow description Description string `json:"description,omitempty"` // Workflow version - Version string `json:"version" validate:"required"` + Version string `json:"version" validate:"omitempty,min=1"` Start *Start `json:"start" validate:"required"` + // Annotations List of helpful terms describing the workflows intended purpose, subject areas, or other important qualities + Annotations []string `json:"annotations,omitempty"` + // DataInputSchema URI of the JSON Schema used to validate the workflow data input + DataInputSchema *DataInputSchema `json:"dataInputSchema,omitempty"` // Serverless Workflow schema version - SchemaVersion string `json:"schemaVersion,omitempty" validate:"omitempty,min=1"` + SpecVersion string `json:"specVersion,omitempty" validate:"required"` + // Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your Workflow Expressions. + Secrets Secrets `json:"secrets,omitempty"` + // Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. + Constants *Constants `json:"constants,omitempty"` // Identifies the expression language used for workflow expressions. Default is 'jq' - ExpressionLang string `json:"expressionLang,omitempty" validate:"omitempty,min=1"` - ExecTimeout *ExecTimeout `json:"execTimeout,omitempty"` + ExpressionLang string `json:"expressionLang,omitempty" validate:"omitempty,min=1"` + // Timeouts definition for Workflow, State, Action, Branch, and Event consumption. + Timeouts *Timeouts `json:"timeouts,omitempty"` + // Errors declarations for this Workflow definition + Errors []Error `json:"errors,omitempty"` // If 'true', workflow instances is not terminated when there are no active execution paths. 
Instance can be terminated via 'terminate end definition' or reaching defined 'execTimeout' - KeepActive bool `json:"keepActive,omitempty"` - Metadata Metadata `json:"metadata,omitempty"` + KeepActive bool `json:"keepActive,omitempty"` + // Metadata custom information shared with the runtime + Metadata Metadata `json:"metadata,omitempty"` + // AutoRetries If set to true, actions should automatically be retried on unchecked errors. Default is false + AutoRetries bool `json:"autoRetries,omitempty"` + // Auth definitions can be used to define authentication information that should be applied to resources defined in the operation + // property of function definitions. It is not used as authentication information for the function invocation, + // but just to access the resource containing the function invocation information. + Auth *Auth `json:"auth,omitempty"` } // Workflow base definition @@ -159,19 +182,176 @@ func (w *Workflow) UnmarshalJSON(data []byte) error { w.Retries = m["retries"] } } + if _, ok := workflowMap["errors"]; ok { + if err := json.Unmarshal(workflowMap["errors"], &w.Errors); err != nil { + nestedData, err := unmarshalFile(workflowMap["errors"]) + if err != nil { + return err + } + m := make(map[string][]Error) + if err := json.Unmarshal(nestedData, &m); err != nil { + return err + } + w.Errors = m["errors"] + } + } + w.setDefaults() + return nil +} + +func (w *Workflow) setDefaults() { + if len(w.ExpressionLang) == 0 { + w.ExpressionLang = DefaultExpressionLang + } +} + +// WorkflowRef holds a reference for a workflow definition +type WorkflowRef struct { + // Sub-workflow unique id + WorkflowID string `json:"workflowId" validate:"required"` + // Sub-workflow version + Version string `json:"version,omitempty"` +} + +// UnmarshalJSON ... +func (s *WorkflowRef) UnmarshalJSON(data []byte) error { + subflowRef := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &subflowRef); err != nil { + s.WorkflowID, err = unmarshalString(data) + if err != nil { + return err + } + return nil + } + if err := unmarshalKey("version", subflowRef, &s.Version); err != nil { + return err + } + if err := unmarshalKey("workflowId", subflowRef, &s.WorkflowID); err != nil { + return err + } + + return nil +} + +// Timeouts ... +type Timeouts struct { + // WorkflowExecTimeout Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited' + WorkflowExecTimeout *WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` + // StateExecTimeout Total state execution timeout (including retries) (ISO 8601 duration format) + StateExecTimeout *StateExecTimeout `json:"stateExecTimeout,omitempty"` + // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format) + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` + // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format) + BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` + // EventTimeout Timeout duration to wait for consuming defined events (ISO 8601 duration format) + EventTimeout string `json:"eventTimeout,omitempty" validate:"omitempty,min=1"` +} + +// UnmarshalJSON ... 
+func (t *Timeouts) UnmarshalJSON(data []byte) error {
+	timeout := make(map[string]json.RawMessage)
+	if err := json.Unmarshal(data, &timeout); err != nil {
+		// assumes it's a reference to a file
+		file, err := unmarshalFile(data)
+		if err != nil {
+			return err
+		}
+		if err := json.Unmarshal(file, &t); err != nil {
+			return err
+		}
+		return nil
+	}
+	if err := unmarshalKey("workflowExecTimeout", timeout, &t.WorkflowExecTimeout); err != nil {
+		return err
+	}
+	if err := unmarshalKey("stateExecTimeout", timeout, &t.StateExecTimeout); err != nil {
+		return err
+	}
+	if err := unmarshalKey("actionExecTimeout", timeout, &t.ActionExecTimeout); err != nil {
+		return err
+	}
+	if err := unmarshalKey("branchExecTimeout", timeout, &t.BranchExecTimeout); err != nil {
+		return err
+	}
+	if err := unmarshalKey("eventTimeout", timeout, &t.EventTimeout); err != nil {
+		return err
+	}
+	return nil
 }
 
-// ExecTimeout ...
-type ExecTimeout struct {
-	// Timeout duration (ISO 8601 duration format)
-	Duration string `json:"duration" validate:"required"`
+// WorkflowExecTimeout ...
+type WorkflowExecTimeout struct {
+	// Duration Workflow execution timeout duration (ISO 8601 duration format). If not specified should be 'unlimited'
+	Duration string `json:"duration,omitempty" validate:"omitempty,min=1"`
 	// If `false`, workflow instance is allowed to finish current execution. If `true`, current workflow execution is abrupted.
 	Interrupt bool `json:"interrupt,omitempty"`
 	// Name of a workflow state to be executed before workflow instance is terminated
 	RunBefore string `json:"runBefore,omitempty" validate:"omitempty,min=1"`
 }
 
+// UnmarshalJSON ...
+func (w *WorkflowExecTimeout) UnmarshalJSON(data []byte) error {
+	execTimeout := make(map[string]json.RawMessage)
+	if err := json.Unmarshal(data, &execTimeout); err != nil {
+		w.Duration, err = unmarshalString(data)
+		if err != nil {
+			return err
+		}
+	} else {
+		if err := unmarshalKey("duration", execTimeout, &w.Duration); err != nil {
+			return err
+		}
+		if err := unmarshalKey("interrupt", execTimeout, &w.Interrupt); err != nil {
+			return err
+		}
+		if err := unmarshalKey("runBefore", execTimeout, &w.RunBefore); err != nil {
+			return err
+		}
+	}
+	if len(w.Duration) == 0 {
+		w.Duration = UnlimitedTimeout
+	}
+	return nil
+}
+
+// StateExecTimeout ...
+type StateExecTimeout struct {
+	// Single state execution timeout, not including retries (ISO 8601 duration format)
+	Single string `json:"single,omitempty" validate:"omitempty,min=1"`
+	// Total state execution timeout, including retries (ISO 8601 duration format)
+	Total string `json:"total" validate:"required"`
+}
+
+// UnmarshalJSON ...
+func (s *StateExecTimeout) UnmarshalJSON(data []byte) error {
+	stateTimeout := make(map[string]json.RawMessage)
+	if err := json.Unmarshal(data, &stateTimeout); err != nil {
+		s.Total, err = unmarshalString(data)
+		if err != nil {
+			return err
+		}
+		return nil
+	}
+	if err := unmarshalKey("total", stateTimeout, &s.Total); err != nil {
+		return err
+	}
+	if err := unmarshalKey("single", stateTimeout, &s.Single); err != nil {
+		return err
+	}
+	return nil
+}
+
+// Error declaration for workflow definitions
+type Error struct {
+	// Name Domain-specific error name
+	Name string `json:"name" validate:"required"`
+	// Code Error code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions.
Should not be defined if error is set to '*' + Code string `json:"code,omitempty" validate:"omitempty,min=1"` + // OnError description + Description string `json:"description,omitempty"` +} + // Start definition type Start struct { StateName string `json:"stateName" validate:"required"` @@ -198,8 +378,8 @@ func (s *Start) UnmarshalJSON(data []byte) error { return nil } -// DefaultDef Can be either a transition or end definition -type DefaultDef struct { +// DefaultCondition Can be either a transition or end definition +type DefaultCondition struct { Transition Transition `json:"transition,omitempty"` End End `json:"end,omitempty"` } @@ -296,14 +476,12 @@ func (t *Transition) UnmarshalJSON(data []byte) error { return nil } -// Error ... -type Error struct { - // Domain-specific error name, or '*' to indicate all possible errors - Error string `json:"error" validate:"required,min=1"` - // Error code. Can be used in addition to the name to help runtimes resolve to technical errors/exceptions. Should not be defined if error is set to '*' - Code string `json:"code,omitempty" validate:"omitempty,min=1"` - // References a unique name of a retry definition. - RetryRef string `json:"retryRef,omitempty" validate:"omitempty,min=1"` +// OnError ... +type OnError struct { + // ErrorRef Reference to a unique workflow error definition. Used of errorRefs is not used + ErrorRef string `json:"errorRef,omitempty"` + // ErrorRefs References one or more workflow error definitions. Used if errorRef is not used + ErrorRefs []string `json:"errorRefs,omitempty"` // Transition to next state to handle the error. If retryRef is defined, this transition is taken only if retries were unsuccessful. Transition Transition `json:"transition,omitempty"` // End workflow execution in case of this error. If retryRef is defined, this ends workflow only if retries were unsuccessful. @@ -330,9 +508,15 @@ type Action struct { // References a 'trigger' and 'result' reusable event definitions EventRef EventRef `json:"eventRef,omitempty"` // References a sub-workflow to be executed - SubFlowRef SubFlowRef `json:"subFlowRef,omitempty"` - // Time period to wait for function execution to complete - Timeout string `json:"timeout,omitempty"` + SubFlowRef WorkflowRef `json:"subFlowRef,omitempty"` + // Sleep Defines time period workflow execution should sleep before / after function execution + Sleep Sleep `json:"sleep,omitempty"` + // RetryRef References a defined workflow retry definition. If not defined the default retry policy is assumed + RetryRef string `json:"retryRef,omitempty"` + // List of unique references to defined workflow errors for which the action should not be retried. Used only when `autoRetries` is set to `true` + NonRetryableErrors []string `json:"nonRetryableErrors,omitempty" validate:"omitempty,min=1"` + // List of unique references to defined workflow errors for which the action should be retried. Used only when `autoRetries` is set to `false` + RetryableErrors []string `json:"retryableErrors,omitempty" validate:"omitempty,min=1"` // Action data filter ActionDataFilter ActionDataFilter `json:"actionDataFilter,omitempty"` } @@ -344,7 +528,8 @@ type End struct { // Defines events that should be produced ProduceEvents []ProduceEvent `json:"produceEvents,omitempty"` // If set to true, triggers workflow compensation. Default is false - Compensate bool `json:"compensate,omitempty"` + Compensate bool `json:"compensate,omitempty"` + ContinueAs ContinueAs `json:"continueAs,omitempty"` } // UnmarshalJSON ... 
@@ -365,10 +550,23 @@ func (e *End) UnmarshalJSON(data []byte) error { if err := unmarshalKey("produceEvents", endMap, &e.ProduceEvents); err != nil { return err } + if err := unmarshalKey("continueAs", endMap, &e.ContinueAs); err != nil { + return err + } return nil } +// ContinueAs ... +type ContinueAs struct { + WorkflowRef + // TODO: add object or string data type + // If string type, an expression which selects parts of the states data output to become the workflow data input of continued execution. If object type, a custom object to become the workflow data input of the continued execution + Data interface{} `json:"data,omitempty"` + // WorkflowExecTimeout Workflow execution timeout to be used by the workflow continuing execution. Overwrites any specific settings set by that workflow + WorkflowExecTimeout WorkflowExecTimeout `json:"workflowExecTimeout,omitempty"` +} + // ProduceEvent ... type ProduceEvent struct { // References a name of a defined event @@ -401,29 +599,102 @@ type Branch struct { // Branch name Name string `json:"name" validate:"required"` // Actions to be executed in this branch - Actions []Action `json:"actions,omitempty"` + Actions []Action `json:"actions" validate:"required,min=1"` + // Timeouts State specific timeouts + Timeouts BranchTimeouts `json:"timeouts,omitempty"` +} + +// BranchTimeouts ... +type BranchTimeouts struct { + // ActionExecTimeout Single actions definition execution timeout duration (ISO 8601 duration format) + ActionExecTimeout string `json:"actionExecTimeout,omitempty" validate:"omitempty,min=1"` + // BranchExecTimeout Single branch execution timeout duration (ISO 8601 duration format) + BranchExecTimeout string `json:"branchExecTimeout,omitempty" validate:"omitempty,min=1"` } // ActionDataFilter ... type ActionDataFilter struct { // Workflow expression that selects state data that the state action can use FromStateData string `json:"fromStateData,omitempty"` - // Workflow expression that filters the actions data results + // Workflow expression that filters the actions' data results Results string `json:"results,omitempty"` // Workflow expression that selects a state data element to which the action results should be added/merged into. If not specified, denote, the top-level state data element ToStateData string `json:"toStateData,omitempty"` } -// Repeat ... -type Repeat struct { - // Expression evaluated against SubFlow state data. SubFlow will repeat execution as long as this expression is true or until the max property count is reached - Expression string `json:"expression,omitempty"` - // If true, the expression is evaluated before each repeat execution, if false the expression is evaluated after each repeat execution - CheckBefore bool `json:"checkBefore,omitempty"` - // Sets the maximum amount of repeat executions - Max int `json:"max,omitempty"` - // If true, repeats executions in a case unhandled errors propagate from the sub-workflow to this state - ContinueOnError bool `json:"continueOnError,omitempty"` - // List referencing defined consumed workflow events. SubFlow will repeat execution until one of the defined events is consumed, or until the max property count is reached - StopOnEvents []string `json:"stopOnEvents,omitempty"` +// DataInputSchema ... +type DataInputSchema struct { + Schema string `json:"schema" validate:"required"` + FailOnValidationErrors *bool `json:"failOnValidationErrors" validate:"required"` +} + +// UnmarshalJSON ... 
+func (d *DataInputSchema) UnmarshalJSON(data []byte) error { + dataInSchema := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &dataInSchema); err != nil { + d.Schema, err = unmarshalString(data) + if err != nil { + return err + } + d.FailOnValidationErrors = &TRUE + return nil + } + if err := unmarshalKey("schema", dataInSchema, &d.Schema); err != nil { + return err + } + if err := unmarshalKey("failOnValidationErrors", dataInSchema, &d.FailOnValidationErrors); err != nil { + return err + } + + return nil +} + +// Secrets allow you to access sensitive information, such as passwords, OAuth tokens, ssh keys, etc inside your Workflow Expressions. +type Secrets []string + +// UnmarshalJSON ... +func (s *Secrets) UnmarshalJSON(data []byte) error { + var secretArray []string + if err := json.Unmarshal(data, &secretArray); err != nil { + file, err := unmarshalFile(data) + if err != nil { + return err + } + if err := json.Unmarshal(file, &secretArray); err != nil { + return err + } + } + *s = secretArray + return nil +} + +// Constants Workflow constants are used to define static, and immutable, data which is available to Workflow Expressions. +type Constants struct { + // Data represents the generic structure of the constants value + Data map[string]json.RawMessage `json:",omitempty"` +} + +// UnmarshalJSON ... +func (c *Constants) UnmarshalJSON(data []byte) error { + constantData := make(map[string]json.RawMessage) + if err := json.Unmarshal(data, &constantData); err != nil { + // assumes it's a reference to a file + file, err := unmarshalFile(data) + if err != nil { + return err + } + if err := json.Unmarshal(file, &constantData); err != nil { + return err + } + } + c.Data = constantData + return nil +} + +// Sleep ... +type Sleep struct { + // Before Amount of time (ISO 8601 duration format) to sleep before function/subflow invocation. Does not apply if 'eventRef' is defined. + Before string `json:"before,omitempty"` + // After Amount of time (ISO 8601 duration format) to sleep after function/subflow invocation. Does not apply if 'eventRef' is defined. 
+ After string `json:"after,omitempty"` } diff --git a/parser/parser_test.go b/parser/parser_test.go index a81b8f5..933936f 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -15,27 +15,44 @@ package parser import ( + "io/ioutil" + "path/filepath" "testing" "github.com/serverlessworkflow/sdk-go/v2/model" "github.com/stretchr/testify/assert" ) +func TestBasicValidation(t *testing.T) { + rootPath := "./testdata/workflows" + files, err := ioutil.ReadDir(rootPath) + assert.NoError(t, err) + for _, file := range files { + if !file.IsDir() { + workflow, err := FromFile(filepath.Join(rootPath, file.Name())) + assert.NoError(t, err) + assert.NotEmpty(t, workflow.Name) + assert.NotEmpty(t, workflow.ID) + assert.NotEmpty(t, workflow.States) + } + } +} + func TestFromFile(t *testing.T) { files := map[string]func(*testing.T, *model.Workflow){ - "./testdata/greetings.sw.json": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/greetings.sw.json": func(t *testing.T, w *model.Workflow) { assert.Equal(t, "greeting", w.ID) assert.IsType(t, &model.OperationState{}, w.States[0]) assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) }, - "./testdata/greetings.sw.yaml": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/greetings.sw.yaml": func(t *testing.T, w *model.Workflow) { assert.IsType(t, &model.OperationState{}, w.States[0]) assert.Equal(t, "greeting", w.ID) assert.NotEmpty(t, w.States[0].(*model.OperationState).Actions) assert.NotNil(t, w.States[0].(*model.OperationState).Actions[0].FunctionRef) assert.Equal(t, "greetingFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) }, - "./testdata/eventbasedgreeting.sw.json": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/eventbasedgreeting.sw.json": func(t *testing.T, w *model.Workflow) { assert.Equal(t, "GreetingEvent", w.Events[0].Name) assert.IsType(t, &model.EventState{}, w.States[0]) eventState := w.States[0].(*model.EventState) @@ -43,7 +60,7 @@ func TestFromFile(t *testing.T) { assert.NotEmpty(t, eventState.OnEvents) assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) }, - "./testdata/eventbasedgreeting.sw.p.json": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/eventbasedgreeting.sw.p.json": func(t *testing.T, w *model.Workflow) { assert.Equal(t, "GreetingEvent", w.Events[0].Name) assert.IsType(t, &model.EventState{}, w.States[0]) eventState := w.States[0].(*model.EventState) @@ -51,14 +68,15 @@ func TestFromFile(t *testing.T) { assert.NotEmpty(t, eventState.OnEvents) assert.Equal(t, "GreetingEvent", eventState.OnEvents[0].EventRefs[0]) }, - "./testdata/eventbasedswitch.sw.json": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/eventbasedswitch.sw.json": func(t *testing.T, w *model.Workflow) { assert.IsType(t, &model.EventBasedSwitchState{}, w.States[0]) eventState := w.States[0].(*model.EventBasedSwitchState) assert.NotNil(t, eventState) assert.NotEmpty(t, eventState.EventConditions) + assert.NotEmpty(t, eventState.Name) assert.IsType(t, &model.TransitionEventCondition{}, eventState.EventConditions[0]) }, - "./testdata/applicationrequest.json": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/applicationrequest.json": func(t *testing.T, w *model.Workflow) { assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) eventState := w.States[0].(*model.DataBasedSwitchState) assert.NotNil(t, eventState) @@ -72,7 +90,7 @@ func TestFromFile(t *testing.T) { 
assert.NotEmpty(t, operationState.Actions) assert.Equal(t, "startApplicationWorkflowId", operationState.Actions[0].SubFlowRef.WorkflowID) }, - "./testdata/applicationrequest.rp.json": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/applicationrequest.rp.json": func(t *testing.T, w *model.Workflow) { assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) eventState := w.States[0].(*model.DataBasedSwitchState) assert.NotNil(t, eventState) @@ -80,7 +98,7 @@ func TestFromFile(t *testing.T) { assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) }, - "./testdata/applicationrequest.url.json": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/applicationrequest.url.json": func(t *testing.T, w *model.Workflow) { assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) eventState := w.States[0].(*model.DataBasedSwitchState) assert.NotNil(t, eventState) @@ -88,7 +106,7 @@ func TestFromFile(t *testing.T) { assert.IsType(t, &model.TransitionDataCondition{}, eventState.DataConditions[0]) assert.Equal(t, "TimeoutRetryStrategy", w.Retries[0].Name) }, - "./testdata/checkinbox.sw.yaml": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/checkinbox.sw.yaml": func(t *testing.T, w *model.Workflow) { assert.IsType(t, &model.OperationState{}, w.States[0]) operationState := w.States[0].(*model.OperationState) assert.NotNil(t, operationState) @@ -96,21 +114,24 @@ func TestFromFile(t *testing.T) { assert.Len(t, w.States, 2) }, // validates: https://github.com/serverlessworkflow/specification/pull/175/ - "./testdata/provisionorders.sw.json": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/provisionorders.sw.json": func(t *testing.T, w *model.Workflow) { assert.IsType(t, &model.OperationState{}, w.States[0]) operationState := w.States[0].(*model.OperationState) assert.NotNil(t, operationState) assert.NotEmpty(t, operationState.Actions) assert.Len(t, operationState.OnErrors, 3) - assert.Equal(t, "Missing order id", operationState.OnErrors[0].Error) - assert.Equal(t, "Missing order item", operationState.OnErrors[1].Error) - assert.Equal(t, "Missing order quantity", operationState.OnErrors[2].Error) - }, "./testdata/checkinbox.cron-test.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Equal(t, "Missing order id", operationState.OnErrors[0].ErrorRef) + assert.Equal(t, "MissingId", operationState.OnErrors[0].Transition.NextState) + assert.Equal(t, "Missing order item", operationState.OnErrors[1].ErrorRef) + assert.Equal(t, "MissingItem", operationState.OnErrors[1].Transition.NextState) + assert.Equal(t, "Missing order quantity", operationState.OnErrors[2].ErrorRef) + assert.Equal(t, "MissingQuantity", operationState.OnErrors[2].Transition.NextState) + }, "./testdata/workflows/checkinbox.cron-test.sw.yaml": func(t *testing.T, w *model.Workflow) { assert.Equal(t, "0 0/15 * * * ?", w.Start.Schedule.Cron.Expression) assert.Equal(t, "checkInboxFunction", w.States[0].(*model.OperationState).Actions[0].FunctionRef.RefName) assert.Equal(t, "SendTextForHighPriority", w.States[0].GetTransition().NextState) assert.False(t, w.States[1].GetEnd().Terminate) - }, "./testdata/applicationrequest-issue16.sw.yaml": func(t *testing.T, w *model.Workflow) { + }, "./testdata/workflows/applicationrequest-issue16.sw.yaml": func(t *testing.T, w *model.Workflow) { assert.IsType(t, &model.DataBasedSwitchState{}, w.States[0]) dataBaseSwitchState := 
w.States[0].(*model.DataBasedSwitchState) assert.NotNil(t, dataBaseSwitchState) @@ -118,7 +139,7 @@ func TestFromFile(t *testing.T) { assert.Equal(t, "CheckApplication", w.States[0].GetName()) }, // validates: https://github.com/serverlessworkflow/sdk-go/issues/36 - "./testdata/patientonboarding.sw.yaml": func(t *testing.T, w *model.Workflow) { + "./testdata/workflows/patientonboarding.sw.yaml": func(t *testing.T, w *model.Workflow) { assert.IsType(t, &model.EventState{}, w.States[0]) eventState := w.States[0].(*model.EventState) assert.NotNil(t, eventState) @@ -127,6 +148,31 @@ func TestFromFile(t *testing.T) { assert.Equal(t, float32(0.0), w.Retries[0].Jitter.FloatVal) assert.Equal(t, float32(1.1), w.Retries[0].Multiplier.FloatVal) }, + "./testdata/workflows/greetings-secret.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Len(t, w.Secrets, 1) + }, + "./testdata/workflows/greetings-secret-file.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.Len(t, w.Secrets, 3) + }, + "./testdata/workflows/greetings-constants-file.sw.yaml": func(t *testing.T, w *model.Workflow) { + assert.NotEmpty(t, w.Constants) + assert.NotEmpty(t, w.Constants.Data["Translations"]) + }, + "./testdata/workflows/roomreadings.timeouts.sw.json": func(t *testing.T, w *model.Workflow) { + assert.NotNil(t, w.Timeouts) + assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) + assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) + }, + "./testdata/workflows/roomreadings.timeouts.file.sw.json": func(t *testing.T, w *model.Workflow) { + assert.NotNil(t, w.Timeouts) + assert.Equal(t, "PT1H", w.Timeouts.WorkflowExecTimeout.Duration) + assert.Equal(t, "GenerateReport", w.Timeouts.WorkflowExecTimeout.RunBefore) + }, + "./testdata/workflows/purchaseorderworkflow.sw.json": func(t *testing.T, w *model.Workflow) { + assert.NotNil(t, w.Timeouts) + assert.Equal(t, "PT30D", w.Timeouts.WorkflowExecTimeout.Duration) + assert.Equal(t, "CancelOrder", w.Timeouts.WorkflowExecTimeout.RunBefore) + }, } for file, f := range files { workflow, err := FromFile(file) diff --git a/parser/testdata/constantsDogs.json b/parser/testdata/constantsDogs.json new file mode 100644 index 0000000..cd3f101 --- /dev/null +++ b/parser/testdata/constantsDogs.json @@ -0,0 +1,9 @@ +{ + "Translations": { + "Dog": { + "Serbian": "pas", + "Spanish": "perro", + "French": "chien" + } + } +} \ No newline at end of file diff --git a/parser/testdata/errors.json b/parser/testdata/errors.json new file mode 100644 index 0000000..099e14d --- /dev/null +++ b/parser/testdata/errors.json @@ -0,0 +1,13 @@ +{ + "errors": [ + { + "name": "Missing order id" + }, + { + "name": "Missing order item" + }, + { + "name": "Missing order quantity" + } + ] +} \ No newline at end of file diff --git a/parser/testdata/provisionorders.sw.json b/parser/testdata/provisionorders.sw.json deleted file mode 100644 index 26275a9..0000000 --- a/parser/testdata/provisionorders.sw.json +++ /dev/null @@ -1,114 +0,0 @@ -{ - "id": "provisionorders", - "version": "1.0", - "name": "Provision Orders", - "description": "Provision Orders and handle errors thrown", - "start": { - "stateName": "ProvisionOrder" - }, - "functions": [ - { - "name": "provisionOrderFunction", - "operation": "http://myapis.org/provisioningapi.json#doProvision" - } - ], - "states": [ - { - "name": "ProvisionOrder", - "type": "operation", - "actionMode": "sequential", - "actions": [ - { - "functionRef": { - "refName": "provisionOrderFunction", - "parameters": { - "order": "{{ $.order }}" - } 
- } - } - ], - "stateDataFilter": { - "dataOutputPath": "{{ $.exceptions }}" - }, - "transition": { - "nextState": "ApplyOrder" - }, - "onErrors": [ - { - "error": "Missing order id", - "transition": { - "nextState": "MissingId" - } - }, - { - "error": "Missing order item", - "transition": { - "nextState": "MissingItem" - } - }, - { - "error": "Missing order quantity", - "transition": { - "nextState": "MissingQuantity" - } - } - ] - }, - { - "name": "MissingId", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleMissingIdExceptionWorkflow" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "MissingItem", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleMissingItemExceptionWorkflow" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "MissingQuantity", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "handleMissingQuantityExceptionWorkflow" - } - } - ], - "end": { - "terminate": true - } - }, - { - "name": "ApplyOrder", - "type": "operation", - "actions": [ - { - "subFlowRef": { - "workflowId": "applyOrderWorkflowId" - } - } - ], - "end": { - "terminate": true - } - } - ] -} \ No newline at end of file diff --git a/parser/testdata/secrets.json b/parser/testdata/secrets.json new file mode 100644 index 0000000..d5b17c7 --- /dev/null +++ b/parser/testdata/secrets.json @@ -0,0 +1,5 @@ +[ + "SECRET1", + "SECRET2", + "SECRET3" +] \ No newline at end of file diff --git a/parser/testdata/timeouts.json b/parser/testdata/timeouts.json new file mode 100644 index 0000000..c3586bd --- /dev/null +++ b/parser/testdata/timeouts.json @@ -0,0 +1,6 @@ +{ + "workflowExecTimeout": { + "duration": "PT1H", + "runBefore": "GenerateReport" + } +} \ No newline at end of file diff --git a/parser/testdata/applicationrequest-issue16.sw.yaml b/parser/testdata/workflows/applicationrequest-issue16.sw.yaml similarity index 97% rename from parser/testdata/applicationrequest-issue16.sw.yaml rename to parser/testdata/workflows/applicationrequest-issue16.sw.yaml index 1b9091d..a8e77ff 100644 --- a/parser/testdata/applicationrequest-issue16.sw.yaml +++ b/parser/testdata/workflows/applicationrequest-issue16.sw.yaml @@ -17,6 +17,7 @@ version: '1.0' name: Applicant Request Decision Workflow description: Determine if applicant request is valid start: CheckApplication +specVersion: "0.7" functions: - name: sendRejectionEmailFunction operation: http://myapis.org/applicationapi.json#emailRejection @@ -28,7 +29,7 @@ states: transition: StartApplication - condition: "{{ $.applicants[?(@.age < 18)] }}" transition: RejectApplication - default: + defaultCondition: transition: RejectApplication - name: StartApplication type: operation diff --git a/parser/testdata/applicationrequest.json b/parser/testdata/workflows/applicationrequest.json similarity index 98% rename from parser/testdata/applicationrequest.json rename to parser/testdata/workflows/applicationrequest.json index ec8986d..1e792a4 100644 --- a/parser/testdata/applicationrequest.json +++ b/parser/testdata/workflows/applicationrequest.json @@ -4,6 +4,7 @@ "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", "start": "CheckApplication", + "specVersion": "0.7", "functions": [ { "name": "sendRejectionEmailFunction", diff --git a/parser/testdata/applicationrequest.rp.json b/parser/testdata/workflows/applicationrequest.rp.json similarity index 96% rename from parser/testdata/applicationrequest.rp.json rename to 
parser/testdata/workflows/applicationrequest.rp.json index 60b6cb2..ad2acce 100644 --- a/parser/testdata/applicationrequest.rp.json +++ b/parser/testdata/workflows/applicationrequest.rp.json @@ -3,6 +3,7 @@ "version": "1.0", "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", + "specVersion": "0.7", "start": { "stateName": "CheckApplication" }, @@ -26,7 +27,7 @@ } } ], - "default": { + "defaultCondition": { "transition": { "nextState": "RejectApplication" } diff --git a/parser/testdata/applicationrequest.url.json b/parser/testdata/workflows/applicationrequest.url.json similarity index 98% rename from parser/testdata/applicationrequest.url.json rename to parser/testdata/workflows/applicationrequest.url.json index 3cc0e9c..64e62c6 100644 --- a/parser/testdata/applicationrequest.url.json +++ b/parser/testdata/workflows/applicationrequest.url.json @@ -3,6 +3,7 @@ "version": "1.0", "name": "Applicant Request Decision Workflow", "description": "Determine if applicant request is valid", + "specVersion": "0.7", "functions": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestfunctions.json", "retries": "https://raw.githubusercontent.com/serverlessworkflow/sdk-java/main/api/src/test/resources/features/applicantrequestretries.json", "start": { diff --git a/parser/testdata/checkinbox.cron-test.sw.yaml b/parser/testdata/workflows/checkinbox.cron-test.sw.yaml similarity index 98% rename from parser/testdata/checkinbox.cron-test.sw.yaml rename to parser/testdata/workflows/checkinbox.cron-test.sw.yaml index 54d42ac..cd548fe 100644 --- a/parser/testdata/checkinbox.cron-test.sw.yaml +++ b/parser/testdata/workflows/checkinbox.cron-test.sw.yaml @@ -16,6 +16,7 @@ id: checkInbox name: Check Inbox Workflow description: Periodically Check Inbox version: '1.0' +specVersion: "0.7" start: stateName: CheckInbox schedule: diff --git a/parser/testdata/checkinbox.sw.yaml b/parser/testdata/workflows/checkinbox.sw.yaml similarity index 98% rename from parser/testdata/checkinbox.sw.yaml rename to parser/testdata/workflows/checkinbox.sw.yaml index f50b172..cdcec1e 100644 --- a/parser/testdata/checkinbox.sw.yaml +++ b/parser/testdata/workflows/checkinbox.sw.yaml @@ -16,6 +16,7 @@ id: checkInbox name: Check Inbox Workflow description: Periodically Check Inbox version: '1.0' +specVersion: "0.7" start: stateName: CheckInbox schedule: diff --git a/parser/testdata/eventbasedgreeting.sw.json b/parser/testdata/workflows/eventbasedgreeting.sw.json similarity index 97% rename from parser/testdata/eventbasedgreeting.sw.json rename to parser/testdata/workflows/eventbasedgreeting.sw.json index 70b94f0..a62a865 100644 --- a/parser/testdata/eventbasedgreeting.sw.json +++ b/parser/testdata/workflows/eventbasedgreeting.sw.json @@ -3,6 +3,7 @@ "version": "1.0", "name": "Event Based Greeting Workflow", "description": "Event Based Greeting", + "specVersion": "0.7", "start": { "stateName": "Greet" }, diff --git a/parser/testdata/eventbasedgreeting.sw.p.json b/parser/testdata/workflows/eventbasedgreeting.sw.p.json similarity index 97% rename from parser/testdata/eventbasedgreeting.sw.p.json rename to parser/testdata/workflows/eventbasedgreeting.sw.p.json index 08e5932..f9beefa 100644 --- a/parser/testdata/eventbasedgreeting.sw.p.json +++ b/parser/testdata/workflows/eventbasedgreeting.sw.p.json @@ -3,6 +3,7 @@ "version": "1.0", "name": "Event Based Greeting Workflow", "description": "Event Based Greeting", + "specVersion": "0.7", 
"start": { "stateName": "Greet" }, diff --git a/parser/testdata/eventbasedswitch.sw.json b/parser/testdata/workflows/eventbasedswitch.sw.json similarity index 97% rename from parser/testdata/eventbasedswitch.sw.json rename to parser/testdata/workflows/eventbasedswitch.sw.json index 6577a6b..03563d9 100644 --- a/parser/testdata/eventbasedswitch.sw.json +++ b/parser/testdata/workflows/eventbasedswitch.sw.json @@ -3,6 +3,7 @@ "version": "1.0", "name": "Event Based Switch Transitions", "description": "Event Based Switch Transitions", + "specVersion": "0.7", "start": { "stateName": "CheckVisaStatus" }, @@ -37,7 +38,7 @@ } ], "eventTimeout": "PT1H", - "default": { + "defaultCondition": { "transition": { "nextState": "HandleNoVisaDecision" } diff --git a/parser/testdata/workflows/greetings-constants-file.sw.yaml b/parser/testdata/workflows/greetings-constants-file.sw.yaml new file mode 100644 index 0000000..060ace9 --- /dev/null +++ b/parser/testdata/workflows/greetings-constants-file.sw.yaml @@ -0,0 +1,40 @@ +# Copyright 2020 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +id: greeting +version: '1.0' +name: Greeting Workflow +description: Greet Someone +specVersion: "0.7" +start: + stateName: Greet +constants: "testdata/constantsDogs.json" +functions: + - name: greetingFunction + operation: file://myapis/greetingapis.json#greeting +states: + - name: Greet + type: operation + actionMode: sequential + actions: + - functionRef: + refName: greetingFunction + parameters: + name: "${ $SECRETS.SECRET1 }" + actionDataFilter: + dataResultsPath: "$.payload.greeting" + stateDataFilter: + dataOutputPath: "$.greeting" + end: + terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret-file.sw.yaml b/parser/testdata/workflows/greetings-secret-file.sw.yaml new file mode 100644 index 0000000..a7afbe9 --- /dev/null +++ b/parser/testdata/workflows/greetings-secret-file.sw.yaml @@ -0,0 +1,40 @@ +# Copyright 2020 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +id: greeting +version: '1.0' +name: Greeting Workflow +description: Greet Someone +specVersion: "0.7" +start: + stateName: Greet +secrets: "testdata/secrets.json" +functions: + - name: greetingFunction + operation: file://myapis/greetingapis.json#greeting +states: + - name: Greet + type: operation + actionMode: sequential + actions: + - functionRef: + refName: greetingFunction + parameters: + name: "${ $SECRETS.SECRET1 }" + actionDataFilter: + dataResultsPath: "$.payload.greeting" + stateDataFilter: + dataOutputPath: "$.greeting" + end: + terminate: true \ No newline at end of file diff --git a/parser/testdata/workflows/greetings-secret.sw.yaml b/parser/testdata/workflows/greetings-secret.sw.yaml new file mode 100644 index 0000000..1e6b71f --- /dev/null +++ b/parser/testdata/workflows/greetings-secret.sw.yaml @@ -0,0 +1,41 @@ +# Copyright 2020 The Serverless Workflow Specification Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +id: greeting +version: '1.0' +name: Greeting Workflow +description: Greet Someone +specVersion: "0.7" +start: + stateName: Greet +secrets: + - NAME +functions: + - name: greetingFunction + operation: file://myapis/greetingapis.json#greeting +states: + - name: Greet + type: operation + actionMode: sequential + actions: + - functionRef: + refName: greetingFunction + parameters: + name: "${ $SECRETS.NAME }" + actionDataFilter: + dataResultsPath: "$.payload.greeting" + stateDataFilter: + dataOutputPath: "$.greeting" + end: + terminate: true \ No newline at end of file diff --git a/parser/testdata/greetings.sw.json b/parser/testdata/workflows/greetings.sw.json similarity index 96% rename from parser/testdata/greetings.sw.json rename to parser/testdata/workflows/greetings.sw.json index acbbbe9..ba56384 100644 --- a/parser/testdata/greetings.sw.json +++ b/parser/testdata/workflows/greetings.sw.json @@ -3,6 +3,7 @@ "version": "1.0", "name": "Greeting Workflow", "description": "Greet Someone", + "specVersion": "0.7", "start": { "stateName": "Greet" }, diff --git a/parser/testdata/greetings.sw.yaml b/parser/testdata/workflows/greetings.sw.yaml similarity index 98% rename from parser/testdata/greetings.sw.yaml rename to parser/testdata/workflows/greetings.sw.yaml index 9ff0222..de92c77 100644 --- a/parser/testdata/greetings.sw.yaml +++ b/parser/testdata/workflows/greetings.sw.yaml @@ -16,6 +16,7 @@ id: greeting version: '1.0' name: Greeting Workflow description: Greet Someone +specVersion: "0.7" start: stateName: Greet functions: diff --git a/parser/testdata/patientonboarding.sw.yaml b/parser/testdata/workflows/patientonboarding.sw.yaml similarity index 98% rename from parser/testdata/patientonboarding.sw.yaml rename to parser/testdata/workflows/patientonboarding.sw.yaml index e071bd9..0841351 100644 --- a/parser/testdata/patientonboarding.sw.yaml +++ b/parser/testdata/workflows/patientonboarding.sw.yaml @@ -16,6 +16,7 @@ id: patientonboarding name: Patient Onboarding Workflow version: '1.0' start: Onboard +specVersion: "0.7" states: - name: Onboard type: event diff 
--git a/parser/testdata/workflows/provisionorders.sw.json b/parser/testdata/workflows/provisionorders.sw.json new file mode 100644 index 0000000..21119a2 --- /dev/null +++ b/parser/testdata/workflows/provisionorders.sw.json @@ -0,0 +1,100 @@ +{ + "id": "provisionorders", + "version": "1.0", + "specVersion": "0.7", + "name": "Provision Orders", + "description": "Provision Orders and handle errors thrown", + "start": "ProvisionOrder", + "functions": [ + { + "name": "provisionOrderFunction", + "operation": "http://myapis.org/provisioningapi.json#doProvision" + } + ], + "errors": [ + { + "name": "Missing order id" + }, + { + "name": "Missing order item" + }, + { + "name": "Missing order quantity" + } + ], + "states":[ + { + "name":"ProvisionOrder", + "type":"operation", + "actionMode":"sequential", + "actions":[ + { + "functionRef": { + "refName": "provisionOrderFunction", + "arguments": { + "order": "${ .order }" + } + } + } + ], + "stateDataFilter": { + "output": "${ .exceptions }" + }, + "transition": "ApplyOrder", + "onErrors": [ + { + "errorRef": "Missing order id", + "transition": "MissingId" + }, + { + "errorRef": "Missing order item", + "transition": "MissingItem" + }, + { + "errorRef": "Missing order quantity", + "transition": "MissingQuantity" + } + ] + }, + { + "name": "MissingId", + "type": "operation", + "actions": [ + { + "subFlowRef": "handleMissingIdExceptionWorkflow" + } + ], + "end": true + }, + { + "name": "MissingItem", + "type": "operation", + "actions": [ + { + "subFlowRef": "handleMissingItemExceptionWorkflow" + } + ], + "end": true + }, + { + "name": "MissingQuantity", + "type": "operation", + "actions": [ + { + "subFlowRef": "handleMissingQuantityExceptionWorkflow" + } + ], + "end": true + }, + { + "name": "ApplyOrder", + "type": "operation", + "actions": [ + { + "subFlowRef": "applyOrderWorkflowId" + } + ], + "end": true + } + ] +} diff --git a/parser/testdata/workflows/purchaseorderworkflow.sw.json b/parser/testdata/workflows/purchaseorderworkflow.sw.json new file mode 100644 index 0000000..998b974 --- /dev/null +++ b/parser/testdata/workflows/purchaseorderworkflow.sw.json @@ -0,0 +1,162 @@ +{ + "id": "order", + "name": "Purchase Order Workflow", + "version": "1.0", + "specVersion": "0.7", + "start": "StartNewOrder", + "timeouts": { + "workflowExecTimeout": { + "duration": "PT30D", + "runBefore": "CancelOrder" + } + }, + "states": [ + { + "name": "StartNewOrder", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "OrderCreatedEvent" + ], + "actions": [ + { + "functionRef": { + "refName": "LogNewOrderCreated" + } + } + ] + } + ], + "transition": { + "nextState": "WaitForOrderConfirmation" + } + }, + { + "name": "WaitForOrderConfirmation", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "OrderConfirmedEvent" + ], + "actions": [ + { + "functionRef": { + "refName": "LogOrderConfirmed" + } + } + ] + } + ], + "transition": { + "nextState": "WaitOrderShipped" + } + }, + { + "name": "WaitOrderShipped", + "type": "event", + "onEvents": [ + { + "eventRefs": [ + "ShipmentSentEvent" + ], + "actions": [ + { + "functionRef": { + "refName": "LogOrderShipped" + } + } + ] + } + ], + "end": { + "terminate": true, + "produceEvents": [ + { + "eventRef": "OrderFinishedEvent" + } + ] + } + }, + { + "name": "CancelOrder", + "type": "operation", + "actions": [ + { + "functionRef": { + "refName": "CancelOrder" + } + } + ], + "end": { + "terminate": true, + "produceEvents": [ + { + "eventRef": "OrderCancelledEvent" + } + ] + } + } + ], + "events": [ + { + "name": 
"OrderCreatedEvent", + "type": "my.company.orders", + "source": "/orders/new", + "correlation": [ + { + "contextAttributeName": "orderid" + } + ] + }, + { + "name": "OrderConfirmedEvent", + "type": "my.company.orders", + "source": "/orders/confirmed", + "correlation": [ + { + "contextAttributeName": "orderid" + } + ] + }, + { + "name": "ShipmentSentEvent", + "type": "my.company.orders", + "source": "/orders/shipped", + "correlation": [ + { + "contextAttributeName": "orderid" + } + ] + }, + { + "name": "OrderFinishedEvent", + "type": "my.company.orders", + "kind": "produced" + }, + { + "name": "OrderCancelledEvent", + "type": "my.company.orders", + "kind": "produced" + } + ], + "functions": [ + { + "name": "LogNewOrderCreated", + "operation": "http.myorg.io/ordersservices.json#logcreated" + }, + { + "name": "LogOrderConfirmed", + "operation": "http.myorg.io/ordersservices.json#logconfirmed" + }, + { + "name": "LogOrderShipped", + "operation": "http.myorg.io/ordersservices.json#logshipped" + }, + { + "name": "CancelOrder", + "operation": "http.myorg.io/ordersservices.json#calcelorder" + } + ] +} \ No newline at end of file diff --git a/parser/testdata/workflows/roomreadings.timeouts.file.sw.json b/parser/testdata/workflows/roomreadings.timeouts.file.sw.json new file mode 100644 index 0000000..c875863 --- /dev/null +++ b/parser/testdata/workflows/roomreadings.timeouts.file.sw.json @@ -0,0 +1,80 @@ +{ + "id": "roomreadings", + "name": "Room Temp and Humidity Workflow", + "version": "1.0", + "specVersion": "0.7", + "start": "ConsumeReading", + "timeouts": "testdata/timeouts.json", + "keepActive": true, + "states": [ + { + "name": "ConsumeReading", + "type": "event", + "onEvents": [ + { + "eventRefs": ["TemperatureEvent", "HumidityEvent"], + "actions": [ + { + "functionRef": { + "refName": "LogReading" + } + } + ], + "eventDataFilter": { + "toStateData": "${ .readings }" + } + } + ], + "end": true + }, + { + "name": "GenerateReport", + "type": "operation", + "actions": [ + { + "functionRef": { + "refName": "ProduceReport", + "arguments": { + "data": "${ .readings }" + } + } + } + ], + "end": { + "terminate": true + } + } + ], + "events": [ + { + "name": "TemperatureEvent", + "type": "my.home.sensors", + "source": "/home/rooms/+", + "correlation": [ + { + "contextAttributeName": "roomId" + } + ] + }, + { + "name": "HumidityEvent", + "type": "my.home.sensors", + "source": "/home/rooms/+", + "correlation": [ + { + "contextAttributeName": "roomId" + } + ] + } + ], + "functions": [ + { + "name": "LogReading", + "operation": "http.myorg.io/ordersservices.json#logreading" + }, + { + "name": "ProduceReport", + "operation": "http.myorg.io/ordersservices.json#produceReport" + } + ] +} diff --git a/parser/testdata/workflows/roomreadings.timeouts.sw.json b/parser/testdata/workflows/roomreadings.timeouts.sw.json new file mode 100644 index 0000000..b322f8c --- /dev/null +++ b/parser/testdata/workflows/roomreadings.timeouts.sw.json @@ -0,0 +1,85 @@ +{ + "id": "roomreadings", + "name": "Room Temp and Humidity Workflow", + "version": "1.0", + "specVersion": "0.7", + "start": "ConsumeReading", + "timeouts": { + "workflowExecTimeout": { + "duration": "PT1H", + "runBefore": "GenerateReport" + } + }, + "keepActive": true, + "states": [ + { + "name": "ConsumeReading", + "type": "event", + "onEvents": [ + { + "eventRefs": ["TemperatureEvent", "HumidityEvent"], + "actions": [ + { + "functionRef": { + "refName": "LogReading" + } + } + ], + "eventDataFilter": { + "toStateData": "${ .readings }" + } + } + ], + "end": true 
+ }, + { + "name": "GenerateReport", + "type": "operation", + "actions": [ + { + "functionRef": { + "refName": "ProduceReport", + "arguments": { + "data": "${ .readings }" + } + } + } + ], + "end": { + "terminate": true + } + } + ], + "events": [ + { + "name": "TemperatureEvent", + "type": "my.home.sensors", + "source": "/home/rooms/+", + "correlation": [ + { + "contextAttributeName": "roomId" + } + ] + }, + { + "name": "HumidityEvent", + "type": "my.home.sensors", + "source": "/home/rooms/+", + "correlation": [ + { + "contextAttributeName": "roomId" + } + ] + } + ], + "functions": [ + { + "name": "LogReading", + "operation": "http.myorg.io/ordersservices.json#logreading" + }, + { + "name": "ProduceReport", + "operation": "http.myorg.io/ordersservices.json#produceReport" + } + ] +}
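The fixtures above exercise the spec v0.7 features picked up by this change: workflow-level `timeouts`, `secrets`, and `constants`, each accepted either inline or as a reference to an external resource file (`timeouts.json`, `secrets.json`, `constantsDogs.json`). As a minimal sketch of reading those fields outside the test suite, assuming the v2 module import path `github.com/serverlessworkflow/sdk-go/v2` and a working directory of the `parser` package (as in `TestFromFile`, so the relative fixture paths resolve):

```go
package main

import (
	"fmt"
	"log"

	// assumed import path for the v2 module; adjust to the release you depend on
	"github.com/serverlessworkflow/sdk-go/v2/parser"
)

func main() {
	// Workflow-level timeouts (spec v0.7), defined inline in this fixture.
	w, err := parser.FromFile("./testdata/workflows/roomreadings.timeouts.sw.json")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(w.Timeouts.WorkflowExecTimeout.Duration)  // "PT1H"
	fmt.Println(w.Timeouts.WorkflowExecTimeout.RunBefore) // "GenerateReport"

	// Secrets referenced from an external file (testdata/secrets.json).
	s, err := parser.FromFile("./testdata/workflows/greetings-secret-file.sw.yaml")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(s.Secrets) // three entries loaded from the external JSON array

	// Constants referenced from an external file (testdata/constantsDogs.json).
	c, err := parser.FromFile("./testdata/workflows/greetings-constants-file.sw.yaml")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(c.Constants.Data["Translations"])
}
```

The field accesses mirror the assertions added to `TestFromFile` above; the expected values in the comments come from those same assertions and fixture files rather than from running this sketch.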