diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..c09a9c1d --- /dev/null +++ b/.gitignore @@ -0,0 +1,50 @@ +out +result +result-* +dist +dist-* +cabal-dev +*.o +*.hi +*.chi +*.chs.h +*.dyn_o +*.dyn_hi +.hpc +.hsenv +.cabal-sandbox/ +cabal.sandbox.config +*.prof +*.aux +*.hp +*.eventlog +.stack-work/ +cabal.project.local +cabal.project.local~ +.HTF/ +.ghc.environment.* +dist-newstyle +.stack-work +client/ + +.ghci +.ghcid + +.idea/ +*.iml + +*.diff +!nix/packages/*.diff +*.patch +!nix/packages/*.patch + +tags + +.vscode/ +.nvimrc + +init/id_rsa + +*~ + +dump.rdb diff --git a/.stylish-haskell.yaml b/.stylish-haskell.yaml new file mode 100644 index 00000000..431d1e91 --- /dev/null +++ b/.stylish-haskell.yaml @@ -0,0 +1,278 @@ +# stylish-haskell configuration file +# ================================== + +# The stylish-haskell tool is mainly configured by specifying steps. These steps +# are a list, so they have an order, and one specific step may appear more than +# once (if needed). Each file is processed by these steps in the given order. +steps: + # Convert some ASCII sequences to their Unicode equivalents. This is disabled + # by default. + # - unicode_syntax: + # # In order to make this work, we also need to insert the UnicodeSyntax + # # language pragma. If this flag is set to true, we insert it when it's + # # not already present. You may want to disable it if you configure + # # language extensions using some other method than pragmas. Default: + # # true. + # add_language_pragma: true + + # Align the right hand side of some elements. This is quite conservative + # and only applies to statements where each element occupies a single + # line. All default to true. + - simple_align: + cases: true + top_level_patterns: true + records: true + + # Import cleanup + - imports: + # There are different ways we can align names and lists. + # + # - global: Align the import names and import list throughout the entire + # file. + # + # - file: Like global, but don't add padding when there are no qualified + # imports in the file. + # + # - group: Only align the imports per group (a group is formed by adjacent + # import lines). + # + # - none: Do not perform any alignment. + # + # Default: global. + align: global + + # The following options affect only import list alignment. + # + # List align has following options: + # + # - after_alias: Import list is aligned with end of import including + # 'as' and 'hiding' keywords. + # + # > import qualified Data.List as List (concat, foldl, foldr, head, + # > init, last, length) + # + # - with_alias: Import list is aligned with start of alias or hiding. + # + # > import qualified Data.List as List (concat, foldl, foldr, head, + # > init, last, length) + # + # - with_module_name: Import list is aligned `list_padding` spaces after + # the module name. + # + # > import qualified Data.List as List (concat, foldl, foldr, head, + # init, last, length) + # + # This is mainly intended for use with `pad_module_names: false`. + # + # > import qualified Data.List as List (concat, foldl, foldr, head, + # init, last, length, scanl, scanr, take, drop, + # sort, nub) + # + # - new_line: Import list starts always on new line. 
+ # + # > import qualified Data.List as List + # > (concat, foldl, foldr, head, init, last, length) + # + # Default: after_alias + list_align: after_alias + + # Right-pad the module names to align imports in a group: + # + # - true: a little more readable + # + # > import qualified Data.List as List (concat, foldl, foldr, + # > init, last, length) + # > import qualified Data.List.Extra as List (concat, foldl, foldr, + # > init, last, length) + # + # - false: diff-safe + # + # > import qualified Data.List as List (concat, foldl, foldr, init, + # > last, length) + # > import qualified Data.List.Extra as List (concat, foldl, foldr, + # > init, last, length) + # + # Default: true + pad_module_names: false + + # Long list align style takes effect when import is too long. This is + # determined by 'columns' setting. + # + # - inline: This option will put as much specs on same line as possible. + # + # - new_line: Import list will start on new line. + # + # - new_line_multiline: Import list will start on new line when it's + # short enough to fit to single line. Otherwise it'll be multiline. + # + # - multiline: One line per import list entry. + # Type with constructor list acts like single import. + # + # > import qualified Data.Map as M + # > ( empty + # > , singleton + # > , ... + # > , delete + # > ) + # + # Default: inline + long_list_align: inline + + # Align empty list (importing instances) + # + # Empty list align has following options + # + # - inherit: inherit list_align setting + # + # - right_after: () is right after the module name: + # + # > import Vector.Instances () + # + # Default: inherit + empty_list_align: inherit + + # List padding determines indentation of import list on lines after import. + # This option affects 'long_list_align'. + # + # - : constant value + # + # - module_name: align under start of module name. + # Useful for 'file' and 'group' align settings. + # + # Default: 4 + list_padding: 4 + + # Separate lists option affects formatting of import list for type + # or class. The only difference is single space between type and list + # of constructors, selectors and class functions. + # + # - true: There is single space between Foldable type and list of it's + # functions. + # + # > import Data.Foldable (Foldable (fold, foldl, foldMap)) + # + # - false: There is no space between Foldable type and list of it's + # functions. + # + # > import Data.Foldable (Foldable(fold, foldl, foldMap)) + # + # Default: true + separate_lists: true + + # Space surround option affects formatting of import lists on a single + # line. The only difference is single space after the initial + # parenthesis and a single space before the terminal parenthesis. + # + # - true: There is single space associated with the enclosing + # parenthesis. + # + # > import Data.Foo ( foo ) + # + # - false: There is no space associated with the enclosing parenthesis + # + # > import Data.Foo (foo) + # + # Default: false + space_surround: false + + # Language pragmas + - language_pragmas: + # We can generate different styles of language pragma lists. + # + # - vertical: Vertical-spaced language pragmas, one per line. + # + # - compact: A more compact style. + # + # - compact_line: Similar to compact, but wrap each line with + # `{-#LANGUAGE #-}'. + # + # Default: vertical. + style: vertical + + # Align affects alignment of closing pragma brackets. + # + # - true: Brackets are aligned in same column. + # + # - false: Brackets are not aligned together. 
There is only one space + # between actual import and closing bracket. + # + # Default: true + align: true + + # stylish-haskell can detect redundancy of some language pragmas. If this + # is set to true, it will remove those redundant pragmas. Default: true. + remove_redundant: true + + # Replace tabs by spaces. This is disabled by default. + # - tabs: + # # Number of spaces to use for each tab. Default: 8, as specified by the + # # Haskell report. + # spaces: 8 + + # Remove trailing whitespace + - trailing_whitespace: {} + + # Squash multiple spaces between the left and right hand sides of some + # elements into single spaces. Basically, this undoes the effect of + # simple_align but is a bit less conservative. + # - squash: {} + +# A common setting is the number of columns (parts of) code will be wrapped +# to. Different steps take this into account. Default: 80. +columns: 80 + +# By default, line endings are converted according to the OS. You can override +# preferred format here. +# +# - native: Native newline format. CRLF on Windows, LF on other OSes. +# +# - lf: Convert to LF ("\n"). +# +# - crlf: Convert to CRLF ("\r\n"). +# +# Default: native. +newline: native + +# Sometimes, language extensions are specified in a cabal file or from the +# command line instead of using language pragmas in the file. stylish-haskell +# needs to be aware of these, so it can parse the file correctly. +# +# No language extensions are enabled by default. +language_extensions: + - ConstraintKinds + - DataKinds + - DefaultSignatures + - DeriveFunctor + - DeriveGeneric + - DuplicateRecordFields + - ExplicitNamespaces + - FlexibleContexts + - FlexibleInstances + - FunctionalDependencies + - GADTs + - LambdaCase + - MultiParamTypeClasses + - MultiWayIf + - NamedFieldPuns + - NoImplicitPrelude + - OverloadedStrings + - PartialTypeSignatures + - PatternSynonyms + - PolyKinds + - RankNTypes + - RecordWildCards + - ScopedTypeVariables + - TupleSections + - TypeApplications + - TypeFamilies + - TypeOperators + - ViewPatterns + - BlockArguments + - AllowAmbiguousTypes + - NoMonomorphismRestriction + +# Attempt to find the cabal file in ancestors of the current directory, and +# parse options (currently only language extensions) from that. +# +# Default: true +cabal: true diff --git a/BEAM_NOTES.md b/BEAM_NOTES.md new file mode 100644 index 00000000..90265736 --- /dev/null +++ b/BEAM_NOTES.md @@ -0,0 +1,117 @@ +# What you should be aware of when using beam + +`euler-hs` uses `beam` library for database access. + +### SqlBool vs Bool (can have critical impact on performance) + +This is especially important for users of *MySQL*. +On Postgres the generated queries are somewhat better. + +For filtering where we have NULLABLE values we have to use different comparison operators, which operate on `SqlBool` type, for example instead of this: + +```haskell +import qualified Database.Beam as B +import qualified Database.Beam.Query as B + +... + +predicate orderId merchantId = + (orderId ==. B.just_ (B.val_ orderId')) &&. + (merchantId ==. B.just_ (B.val_ merchantId')) + +findOrder orderId merchantId + = findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ (predicate orderId merchantId) + $ B.all_ DBSchema.orders +``` + +do this: + +```haskell +predicate orderId merchantId = + (orderId ==?. B.just_ (B.val_ orderId')) &&?. -- Notice a change here + (merchantId ==?. 
B.just_ (B.val_ merchantId')) -- NULLABLE values

findOrder orderId merchantId
  = findRow
  $ B.select
  $ B.limit_ 1
  $ B.filter_' (predicate orderId merchantId) -- Notice a change here: filter_'
  $ B.all_ DBSchema.orders
```

Notice that there are not only operators like `&&?.` and `==?.`, but also a `filter_'` function which operates on `SqlBool` predicates.
If you need to mix `Bool` and `SqlBool` in a single predicate, there are functions that allow this (a small sketch is given at the end of these notes):

[Beam's SqlBool](https://hackage.haskell.org/package/beam-core-0.8.0.0/docs/Database-Beam-Query.html#t:SqlBool)

Without these, the generated SQL code consists of many `CASE WHEN ...` expressions which destroy performance.

The beam docs have an overview of this:

[Beam's SQL-like comparisons](https://github.com/haskell-beam/beam/blob/master/docs/user-guide/expressions.md#sql-like-comparisons)

With updates it's less straightforward, as there is no `update'` function which supports `SqlBool`.
What we can do is change:

```haskell
  updateRows
    $ B.update someDBTable someUpdateAssigments
        (\someEntity -> (someEntity ^. _id) ==. (B.just_ $ B.val_ neededEntityId))
```

like this:

```haskell
  updateRows
    $ B.update' someDBTable someUpdateAssigments
        (\someEntity -> B.unknownAs_ False $ (someEntity ^. _id) ==?. (B.just_ $ B.val_ neededEntityId))
```

If you do a change like this:

```haskell
  (\oRef -> B.unknownAs_ False $ (oRef ^. _id) ==?. (B.just_ $ B.val_ orderRefId))
```

the generated SQL code will contain `UPDATE ... WHERE (some_id == 1) IS TRUE`, which will also destroy performance.
The same applies to the `save` function, which uses a similar query internally. You can use `save''` instead.

We've added `update'` and `save'` functions to the [fork of beam](https://github.com/juspay/beam/tree/upstream-master).

An upstream PR [is merged](https://github.com/haskell-beam/beam/pull/464).

### Transactions

`beam` does not provide support for transactions.

The framework has two methods:
- `runDB` - this one doesn't use transactions,
- `runTransaction` - treats the `SqlDB` monad as a transactional scope.

### Connection pool management

`beam` does not provide connection management. If you want to use a connection pool, the framework supports pools out of the box: pass an appropriate config to `initSqlDBConnection`.

There is currently a possible issue with connection pools on Postgres which is under investigation.

### MySQL specific

There are several problems with the `beam-mysql` backend described below which are currently fixed or mitigated in [our fork](https://github.com/juspay/beam-mysql) of `beam-mysql`.

We'll attempt to upstream some of the changes, though the `beam-mysql` repo remains in the account of the original author of beam.

#### Insert returning

As MySQL does not support `INSERT ... RETURNING`, `beam-mysql` does not implement the `insertReturning*` functions.
`juspay/beam-mysql` adds an `INSERT RETURNING` emulation and support for these functions, but it uses a temporary table.

So we added an additional function, `insertRowReturningMySQL`, which inserts a single row and returns its value together with the generated autoincrement id column.

#### Changes to parsing

`juspay/beam-mysql` also allows parsing `Blob` as `Text` and `Bit(1)` as `Bool`.
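Coming back to the `SqlBool` section above, here is a minimal sketch of a predicate that mixes `Bool` and `SqlBool` comparisons. The `status` and `dateCreated` columns and the `someDate` argument are hypothetical; `B.sqlBool_` is the beam combinator that lifts a plain `Bool` expression into `SqlBool`:

```haskell
-- Hypothetical columns: 'status' is NOT NULL, 'dateCreated' is NULLABLE.
mixedPredicate someDate order =
  B.sqlBool_ (status order ==. B.val_ "SUCCESS") &&?.   -- Bool comparison lifted into SqlBool
  (dateCreated order ==?. B.just_ (B.val_ someDate))    -- SqlBool comparison on a NULLABLE column

findSuccessfulOrder someDate
  = findRow
  $ B.select
  $ B.limit_ 1
  $ B.filter_' (mixedPredicate someDate)   -- filter_' accepts a SqlBool predicate
  $ B.all_ DBSchema.orders
```

`B.unknownAs_` goes the other way: it collapses a `SqlBool` back into a `Bool`, treating `NULL` (unknown) as the supplied default, as in the `update'` example above.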
diff --git a/BRANCHING-POLICY.md b/BRANCHING-POLICY.md
new file mode 100644
index 00000000..999b4904
--- /dev/null
+++ b/BRANCHING-POLICY.md
@@ -0,0 +1,21 @@
We use a kind of gitflow for our development process and a certain branching policy.

General rule: avoid polluting the repository. It's a common workplace for many people, and a lot of branches make it very hard to track the process.

- `master` is the main development branch.
- The `master` branch should always be valid: compilation is ok, the tests pass, and the latest actual code is there. (A broken `master` branch should be fixed immediately.)
- Create feature branches for active development.
- It's allowed for feature branches to be in a broken state.
- Avoid creating branches without a real need. Don't forget to delete your temporary branches from the repository.
- Consider the following subfolders:
  - `feature` for a new feature
  - `fix` for immediate fixes of something
- It's also allowed to have a thematic subfolder.
- Avoid merging to `master`; prefer to rebase your feature branch instead.
- It's allowed to do merges within collective feature branches (otherwise it will be hard for participants to work together).
- WIP commits in personal / feature branches are allowed as long as you squash them before pushing to `master`.
- The PR for merging should be made before moving `master` to the upcoming changes (to trigger our CI and enable Code Reviews).
- Code Reviews are mandatory for most changes.
- Old merged branches should be deleted ASAP.
- Branches that are too old should not be kept in the repository.
- Abandoned branches should be deleted.
diff --git a/BUILD.md b/BUILD.md
new file mode 100644
index 00000000..db979192
--- /dev/null
+++ b/BUILD.md
@@ -0,0 +1,57 @@
# Building EulerHS Framework

## Prerequisites

The EulerHS framework uses several external libraries which you need to install first:

- binutils
- libssl-dev
- libpq-dev
- libmysqlclient-dev
- libsqlite-dev
- postgresql
- postgresql-server-dev-all
- mysql-server
- ... maybe some others

### Linux

On Ubuntu, the libraries can be installed from the general repo:

`sudo apt install binutils`
`sudo apt install libssl-dev`
`sudo apt install libpq-dev`
`sudo apt install libmysqlclient-dev`
`sudo apt install libsqlite-dev`
`sudo apt install postgresql`
`sudo apt install postgresql-server-dev-all`
`sudo apt install mysql-server`

### MacOS

Packages for MacOS can be installed using brew.

N.B. You might encounter a build failure on MacOS:
`> Configuring mysql-0.1.7...`
`> setup: Missing dependencies on foreign libraries:`
`> * Missing (or bad) C libraries: ssl, crypto`

[Possible solution](https://github.com/depressed-pho/HsOpenSSL/issues/41)

## Building the framework

### Stack

Building with stack is straightforward. Run in the euler-hs dir:

- `stack build` - build the framework
- `stack test` - build and test the framework
- `stack build --fast -j4` - build the framework without optimisations

### cabal

> TODO

### nix

> TODO
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..16e35385
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,160 @@
# Changelog for euler-hs

## [2.5.0.0] - 2020-12-01
* EulerHS is prepared for open sourcing.
+ - New documentation + - Improvements and updates in public interface + - Warning fixes + - Logger rework + - License changed to Apache 2.0 + - All builds except stack are removed + +## [2.0.0.0] - 2020-07-01 + +* Use `beam-mysql` based on `mysql-haskell` instead of `mysql` + +## [1.10.0.0] - 2020-06-16 +* EulerHS 1.10.0.0: a significant update with new features and fixes. + - Rework of repository structure, each library has its own repo now. + - Compatibility with GHC 8.8. + - Updated stack resolver to 15.15. + - Added `run[Update/Delete]ReturningList` for Postgres. + - Added `delOption`. + - Added `runUntracedIO` for reading/writing sensitive data. + - Added untyped HTTP calls to `Flow`. + - Lots of various fixes not listed here. + - Added `insertRowReturningMySql` function which does not use temporary tables internally. + - Beware of `SqlBool` vs `Bool` when writing `beam` queries, and other gotchas: [see BEAM_NOTES.md](BEAM_NOTES.md), + read this if you use database at all via `euler-hs`. + Some of this can have a critical effect on performance, especially on MySQL. + +## [1.9.5.0] - 2020-04-13 +* EulerHS 1.9.5.0: fixes + - Async logger mem consumption fixed. + - Integration tests with MySQL disabled + - Improved documentation [see README.md](README.md) + +## [1.9.0.0] - 2020-04-13 +* EulerHS 1.9.0.0: a significant update with new features and fixes. + - RunSafeFlow method: ability to catch exceptions thrown by throwException + (breaking change for recordings) + - Exceptions in forked flows are now handled (breaking change) + - Options made a bit more effective + - Raw SQL now is printed into recordings (breaking change for recordings) + +## [1.8.0.0] - 2020-04-03 +* EulerHS 1.8.0.0: new features and fixes. + - Redis cluster support (switched to other hedis fork). + - Framework and Logger performance tuned. + +## [1.7.0.0] - 2020-03-30 +* EulerHS 1.7.0.0: new features. + - Granular DB errors added (breaking change) + - Test framework added + +## [1.6.0.0] - 2020-03-27 +* EulerHS 1.6.0.0: a significant update with new features and fixes. + - beam-mysql updated: temporary tables fix, autocommit fix, bytestrings encoding fix + - MySQL transactions bug fixed + - New feature: awaiting for results from forked flows added + - runIO' with description added + - KV DB hardcorded DB name fixed (breaking change) + - More documentation on SQL subsystem usage added (see README.md) + +## [1.4.0.0] - 2020-03-12 +* Euler-hs 1.4.0.0: + - Performance analysed and tuned + N.B. Async logger has a lazy mem leak. Will be fixed in the next version. + Use sync logger for now. + - Pub-Sub mechanism added + - Beam-MySQL updated (support of the `Day` type added) + - Small fixes and additions + +## [1.3.0.0] - 2020-02-17 +- Breaking changes. Options reworked. + Interface modified (Typeable instance required now). + Fixed a bug with identical encoding of different keys. 
+- Added GHC options: Wcompat Widentities fhide-source-paths +- Added wrappers for kvdb actions +- Added callServantApi request | response logging +- Changed `Serializable` instances for ByteStrings +- Fixed recording forked flow with exception +- Fixed throwException method entry record/replay + +## [1.2.0.0] - 2019-12-20 + +- Added beam-mysql support of insertReturning +- Added beam-mysql BIT / TEXT problem solved +- Added transactions for SQL DB subsystem made right +- Added `getOrInit` methods for SQL DB & KV DB +- Improvements in business logic and tests + +## [1.1.0.0] - 2019-12-16 + +- Added InitKVDB method +- Added Shared connections +- Added `getOrInitKVDB` method + +## [1.0.0.0] - 2019-12-09 + +- Added shared connections `getSqlDBConnection` and `getOrInitSqlConn` for SQL DB + +## [0.9.0.0] - 2019-12-02 + +### 2019-12 + +- Added branching policy guide + +### 2019-11 + +- Added metrics base +- Added strictness annotations to reduce memory consumption under heavy logger load +- Tune sqlite to wait for resource in case of concurrent access +- Added Load tester base app +- Agnostic ART record|replay Optimized +- Added ART player/recorder & art.sh script +- Added CODESTYLE guide +- Added redis tests mocked with ART +- Added descriptions. Imports refactored +- Rollback to the BeamRunner instances +- Fixed bugs with DB initialization + +### 2019-10 + +- Add art in KVDB layer +- Fixed fork recording race +- Added own Serializable class +- Added JSONEx as union of Serializable ans To/FromJSON +- Art tests improved +- Added art entries and art support for new methods. +- Added connection pool +- Added deinitSqlDBConn method +- Fork Flow logic fixes and improvements +- Add art rec/rep for InitSqlDBConnection method +- Added ART +- Added mysql tests +- Postgres support added +- MySQL support added +- Introducing kvdb mock +- SQL DB support reworked and improved +- Added KVDB transactions support + +### 2019-09 + +- Added kvdb methods wrappers +- Added KVDB sub-language +- Test DB clearing added +- Tests for SqlDB added +- Run db in transaction +- Added custom ConnectInfo +- SQL DB Support Beam +- Added SQL DB Support +- Added mocked values in test interpreter +- Added additional language methods `GenerateGUID`, `runSysCmd`, `forkFlow`, `throwException` +- Test app implemented +- Added logger logic, language and interpreter +- Added `runIO` and get/setOptions tests +- ServantClient types added +- `runIO`, `getOption`, `setOption` methods added +- `interpretFlowMethodL` and `runFlowMethodL` methods added +- Added basic project layout, test app layout, initial CallAPI facilities, sample API diff --git a/CODESTYLE.md b/CODESTYLE.md new file mode 100644 index 00000000..d8817e5e --- /dev/null +++ b/CODESTYLE.md @@ -0,0 +1,437 @@ + +# Haskell Style Guide + +This is a Style Guide for all the projects within the euler-hs repository. +This is a nice-to-follow guide, still it's not pushed hardly to avoid +unnecessary conflicts and friction. You should decide on your own +about how to structure your code. Still remember that your code will be +read by other people. + +### Line Length + +- Recommended line length is *80 characters*. +- Maximum line length is *120 characters*. + +Comments should be wrapped accordingly. +There should be no trailing whitespace anywhere in your code. +This makes git diff output look ugly and causes spurious merge +conflicts. + +### Indentation + +- Tabs are illegal. Use spaces for indenting. +- Recommended to use *2 spaces* for each indentation level. 
+- Allowed to use *4 spaces* if this seems more appropriate + for certain situations. + +The only exception is for code blocks inside +a definition, which should be indented with *4 spaces*. Indent the +`where` keyword two spaces to set it apart from the rest of the code +and indent the definitions in a `where` clause 2 spaces. Guards are +usually indented 2 spaces. Some examples: + +```Haskell +sayHello :: IO () +sayHello = do + name <- getLine + putStrLn $ greeting name + where + greeting name = "Hello, " ++ name ++ "!" + +filter :: (a -> Bool) -> [a] -> [a] +filter _ [] = [] +filter p (x:xs) + | p x = x : filter p xs + | otherwise = filter p xs +``` + +As a general rule, *indentation of a line should not depend on the +length of any identifier in preceding lines*, only on layout +constraints. + +### Hanging let-expressions + +Avoid hanging let-expressions in do blocks. This makes it harder to refactor +and brings more unstructured noise into the code, as well as makes resolving +conflicts in git harder. + +Avoid: + +``` +someFunc = do + let a = calcA 10 + b = calcB 20 + pure (a + b) + +someFunc = do + let + a = calcA 10 + b = calcB 20 + pure (a + b) +``` +Prefer: +``` +someFunc = do + let a = calcA 10 + let b = calcB 20 + pure (a + b) +``` + +### Blank Lines + +Two blank lines between top-level definitions, and a line of 78 "-" characters +to delineate top-level definitions from each other. No blank lines between +type signatures and function definitions. Add one blank line between functions +in a type class instance declaration if the functions bodies are large. +Use your judgment. + +### Whitespace + +Surround binary operators with a single space on either side. Use your +better judgement for the insertion of spaces around arithmetic +operators but always be consistent about whitespace on either side of +a binary operator. Don't insert a space after a lambda. Don't insert +space inside a parenthesized expression. + +### Applications + +If an application must spawn multiple lines to fit within the maximum +line length, then write one argument on each line following the head, +indented by one level: + +```Haskell +let'sFold xs = do + foldr + (\x y -> ...) + Nothing + xs +``` + +But consider naming the arguments instead to avoid multi-line +expressions. + +### Data Declarations + +Align the constructors in a data type definition. Example: + +```Haskell +data Tree a + = Branch !a !(Tree a) !(Tree a) + | Leaf +``` + +Format records as follows: + +```Haskell +data Person = Person + { firstName :: !String -- ^ First name + , lastName :: !String -- ^ Last name + , age :: !Int -- ^ Age + } deriving (Eq, Show) + +data Person + = Person + { firstName :: !String -- ^ First name + , lastName :: !String -- ^ Last name + , age :: !Int -- ^ Age + } deriving (Eq, Show) + | SomeThing + { foo :: !String -- ^ Foo + , bar :: !String -- ^ Bar + , baz :: !String -- ^ Baz + } +``` + +### List Declarations + +Align the elements in the list. Example: +```Haskell +exceptions = + [ InvalidStatusCode + , MissingContentHeader + , InternalServerError + ] +``` + +### Pragmas + +Put pragmas immediately following the function they apply to. Example: + +```Haskell +id :: a -> a +id x = x +{-# INLINE id #-} +``` + +In the case of data type definitions you must put the pragma before +the type it applies to. Example: + +```Haskell +data Array e = Array {-# UNPACK #-} !Int !ByteArray +``` + +`LANGUAGE` pragmas should enable a single language extension per line, +for easy addition and deletion. 
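For example, a module header following this rule might look like this (the extensions here are only placeholders):

```Haskell
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
```

rather than packing several extensions into a single `{-# LANGUAGE ... #-}` pragma.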
+ +### Hanging Lambdas + +You may or may not indent the code following a "hanging" lambda. Use +your judgement. Some examples: + +```Haskell +bar :: IO () +bar = + forM_ [1, 2, 3] $ \n -> do + putStrLn "Here comes a number!" + print n + +foo :: IO () +foo = + alloca 10 $ \a -> + alloca 20 $ \b -> + cFunction a b +``` + +### Export Lists + +Format export lists as follows: + +```Haskell +module Data.Set + ( -- * The @Set@ type + Set + , empty + , singleton + -- * Querying + , member + ) where +``` + +### If-then-else expressions + +Generally, guards should be preferred over if-then-else expressions, +where possible. if-then-else is preferred to case analysis on +a boolean. Short cases should usually be put on a single line (when +line length allows it). + +When writing non-monadic code (i.e. when not using `do`) and guards +can't be used, you can align if-then-else expressions like you would +normal expressions: + +```Haskell +foo = + if ... + then ... + else ... +``` + +In monadic code, so long as you use the Haskell 2010 dialect and +above, you can use the same alignment as above. A different alignment +rule for monadic code is no longer necessary. + +### Case expressions + +The alternatives in a case expression can be indented using either of +the two following styles: + +```Haskell +foobar = case something of + Nothing -> foo + Just j -> bar +``` + +or as + +```Haskell +foobar = + case something of + Nothing -> foo + Just j -> bar +``` +Right-hand side of `->` can be moved to the next line + +```Haskell +foobar = + case something of + Nothing -> foo + Just j -> + someLongLongLong $ bar $ foo $ baz [1,2,3] +``` + +`->` should be aligned + + +## Alignment in do, case, guards +`->` in case should be aligned + +```Haskell +... + case foo of + Bar -> () + BazBaz -> () +``` + + +`<-` in do should be aligned +```Haskell +... + do + a <- Bar + bar <- BarBazaz + pure () +``` + + +`=` in guards should be aligned + +```Haskell +foo a b + | a < b = () + | otherwise = () +``` + +## Imports + +Imports should be listed in alphabetical order with no intervening +blank lines, except for any explicit `Prelude` import, which must +always come first. The reason for this exception is that some redundant +import warnings are sensitive to the order of the `Prelude` import. + +Always use explicit import lists or `qualified` imports for standard +and third party libraries. This makes the code more robust against +changes in these libraries. Exception: the Prelude. + +Use your judgement when it comes to local application/library specific +imports. On the one hand, they make for more maintainable code because +identifiers that are removed from the imported module will be caught +early and identifiers added to the imported module do not risk +clashing with local identifiers. They also serve as documentation as +to which parts of a module are actually required. + +However, explicit import lists are also much more verbose, and slow +down development. Moreover, in a collaborative environment, explicit +import lists can cause spurious conflicts, since two otherwise +unrelated changes to a file may both require changes to the same +import list. + +The qualifier for well known modules, such as `ByteString` can be +shortened further, eg `BS`. But in general, prefer descriptive +qualifiers rather than one letter ones. 
For example + +```Haskell +import qualified Data.Map as Map -- good +import qualified Data.Map as M -- not so good +``` + +## Comments + +Comments should be placed immediately *before* the line(s) of code +they pertain to. + +### End-of-line comments + +End-of-line comments should be separated from code by at least two +spaces. + +### Top-Level Definitions + +Comment every top-level function (particularly exported functions), +and provide a type signature; use Haddock syntax in the comments. +Comment every exported data type. Function example: + +```Haskell +-- | Send a message on a socket. The socket must be in a connected +-- state. Returns the number of bytes sent. Applications are +-- responsible for ensuring that all data has been sent. +send + :: Socket -- ^ Connected socket + -> ByteString -- ^ Data to send + -> IO Int -- ^ Bytes sent +``` + +For functions the documentation should give enough information to +apply the function without looking at the function's definition. + +Record example: +```Haskell +-- | Bla bla bla. +data Person = Person + { age :: !Int -- ^ Age + , name :: !String -- ^ First name + } +``` +For fields that require longer comments format them like so: + +```Haskell +data Record = Record + { -- | This is a very very very long comment that is split over + -- multiple lines. + field1 :: !Text + -- | This is a second very very very long comment that is split + -- over multiple lines. + , field2 :: !Int + } +``` + +## Naming + +Use camel-case when naming values (`fooBar`) and data +types (`FooBar`). + +For readability reasons, don't capitalize all letters when using an +abbreviation. For example, write `HttpServer` instead of `HTTPServer`. +Exception: Two letter abbreviations, e.g. `IO`. + +### Records + +Where appropriate, add an unabbreviated prefix to the name of record +fields. Example: + +```Haskell +-- | Messages consist of their typeRep fingerprint and their encoding +data Message = Message + { messageFingerprint :: !Fingerprint + , messageEncoding :: !BSL.ByteString + } +``` + +This is not necessary for modules that export only one data type *and* +are meant to be imported qualified. + +### Modules + +Use singular when naming modules e.g. use `Data.Map` and +`Data.ByteString.Internal` instead of `Data.Maps` and +`Data.ByteString.Internals`. + +### Lenses + +**N.B. This section is under construction.** + +Where appropriate, define lenses for all fields in a record. Use the +`_` prefix to name fields. When using the [lens package][lens], use +`makeClassy` [where possible][lens-makeClassy] to generate lenses +rather than `makeLenses`. This is to make it easy to export all lenses +for a record all at once. + +```Haskell +module Person + ( Person(..) + , HasPerson(..) + ) where + +data Person = Person + { _firstName :: !String -- ^ First name + , _lastName :: !String -- ^ Last name + , _age :: !Int -- ^ Age + } deriving (Eq, Show) + +makeClassy ''Person +``` + +For consistency, if a record has lenses defined, always use the lens +to get or set the field of a record (rather than `_fieldName`). Field +names should only be used to initialize the record. 
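As a small illustration with the `Person` record above (assuming `Control.Lens` is imported; `john`, `fullName` and `rename` are made-up names):

```Haskell
-- Field names are used only to construct the record:
john :: Person
john = Person { _firstName = "John", _lastName = "Doe", _age = 30 }

-- Reads and updates go through the generated lenses:
fullName :: Person -> String
fullName p = p ^. firstName ++ " " ++ p ^. lastName

rename :: String -> Person -> Person
rename newName = firstName .~ newName
```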
+ +[lens]: http://hackage.haskell.org/package/lens +[lens-makeClassy]: http://hackage.haskell.org/package/lens-4.3.3/docs/Control-Lens-TH.html#v:makeClassy diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..39684a13 --- /dev/null +++ b/LICENSE @@ -0,0 +1,190 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. 
Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + +Copyright 2021 Juspay Technologies Pvt Ltd + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/README.md b/README.md index 21d1fe94..e067fd84 100644 --- a/README.md +++ b/README.md @@ -1,451 +1,509 @@ -# EulerHS Project +# Juspay's EulerHS Framework + +***EulerHS*** is a free monadic framework for easy building backend and console applications in Haskell. This framework provides you the most important subsystems already integrated, such as SQL DBs, logging, KV DBs and other. + +The framework represents a safe layer with its own philosophy of exception safety, so you don't need to think about those difficult Haskell questions. + +The framework also provides two testing mechanisms: integration testing facilities and automatic whitebox testing mechanism. The code you'll be writing will be simple and testable. See the related materials for more info. + +Framework is successfully used in production in Juspay and shows impressive results. + +* [Framework features](#Framework-features) +* [Business logic code sample](#The-Flow-monad) +* [Important note on forked external libs](#Important-note-on-forked-external-libs) + - [hedis library for Redis](#hedis-library for Redis) + - [beam-mysql library for MySql](#beam-mysql-library-for-MySql) + - [beam](#beam) +* [EulerHS application architecture](#EulerHS-application-architecture) +* [Flow monad sample usages](#Flow-monad-sample-usages) + - [Logging](#Logging) + - [Typed options](#Typed-options) + - [SQL subsystem](#SQL-subsystem) + - [mtl style support](#mtl-style-support) + - [KV DB subsystem](#KV-DB-subsystem) + - [Forking and awaiting child flows](#Forking-and-awaiting-child-flows) +* [State handling](#State-handling) +* [Building the framework](#Building-the-framework) +* [Testing the framework](#Testing-the-framework) +* [EulerHS tutorials and template projects](#EulerHS-tutorials-and-template-projects) +* [Background-materials](#Background-materials) + +## Framework features + +The framework exports the `Flow` monad which provides the following facilities: + + - Custom Prelude (universum-based) + - SQL DB interaction using the `beam` library. Supported SQL backends: + * Postgres + * MySQL + * SQLite + - KV DB interaction. Supported KV DBs: + * Redis + - Async and parallel flows evaluation + - Client HTTP in two forms: + * Servant-based HTTP client interaction + * Low-level HTTP client + - Logging (tiny-logger inside) + - Typed mutable options + - Safe IO actions + - Exception-handling mechanism + - Running OS system commands + - ART (Automatic Regression Testing) - white box testing facilities + - Integration testing framework + - Experimental Pub/Sub mechanism (using Redis pub sub subsystem) + +## Business logic code sample + +A sample scenario you may find [here](./test/EulerHS/TestData/Scenarios/Scenario1.hs). -### EulerHS Framework +```haskell +import EulerHS.Prelude +import qualified EulerHS.Language as L +import EulerHS.TestData.API.Client +import EulerHS.TestData.Types +import Servant.Client (BaseUrl (..), Scheme (..)) + +testScenario1 :: L.Flow User +testScenario1 = do + logDebug "testScenario1" "Running sys cmd whoami..." + localUserName <- L.runSysCmd "whoami" + + logDebug "testScenario1" "Reading a guid from file..." + localGUID <- L.runIO (readFile "my_guid.txt") + + logDebug "testScenario1" "Generating new guid..." + guid <- L.generateGUID + + logDebug "testScenario1" "Obtaining URL..." + url <- maybe (mkUrl "127.0.0.1") mkUrl <$> L.getOption UrlKey + + logDebug "testScenario1" "Calling some HTTP API..." + res <- L.callServantAPI Nothing url getUser + + logDebug "testScenario1" "Finished." 
+ pure $ case res of + Right u | localGUID /= userGUID u -> u + Right u | otherwise -> User localUserName "" $ toString guid + _ -> User localUserName "Smith" $ toString guid + where + mkUrl :: String -> BaseUrl + mkUrl host = BaseUrl Http host port "" +``` -***euler-hs/Flow*** is a free monadic framework for building backend and console applications in Haskell. +## Important note on forked external libs -The framework exports the Flow monad which provides the following facilities: +Juspay made forks of several libraries to fix their problems and to support +specific cases. We didn't meant to release these features, and we have a plan +to fix it. You might want to be aware of these Juspay-specific fixes. If they are not suitable for your needs, it might need to avoid them till EulerHS v3.0. - - SQL DB interaction (using the `beam` library). Postgres, MySQL and SQLite DBs supported. - - KV DB interaction. Redis is supported. - - Forking flows in separate threads (green threads are used). - - HTTP services interaction (using servant-client facilities). - - Logging (tiny-logger inside). - - Typed mutable options. - - Pub/Sub mechanism (using Redis pub sub subsystem). - - Safe call to IO actions. - - Running system commands. - - ART (Automatic Regression Testing) - white box testing facilities. - - Integration testing framework. +### hedis library for Redis -# Installation +[hedis](https://github.com/juspay/hedis) is stapled to a Juspay-specific fork as current. This fork is multiple releases behind the current mainline, incompatibly. -### Build tools +### beam-mysql library for MySql -You can use any of the two building tools supported: +The [beam-mysql](https://github.com/juspay/beam-mysql) is rewritten almost completely. The original version doesn't have protection from SQL injections, and also is written with some internal problems. The updated version fixes that. -- **Stack**, install from [https://docs.haskellstack.org/en/stable/README/](https://docs.haskellstack.org/en/stable/README/) +Additionally, the fork is working around the fact that we can receive `UTF-8` encoded content, but have to munge it into `Latin-1` (due to internal's Juspay practice). This is the default behaviour of our fork, which euler-hs thus silently inherits whenever it deals with `MySQL` databases (or indeed, `MariaDB` databases, since the back-end of `beam-mysql` works for both). -### Dependencies +This behaviour as a __silent__, irreplaceable default may not be desirable for general purposes. Will be fixed in the next releases of the forked `beam-mysql` library. -**Install development tools and libraries with your distro package manager:** +### beam -- binutils -- libssl-dev -- libpq-dev -- postgresql-server-dev-all -- libmysqlclient-dev -- libsqlite -- postgresql -- mysql-server -- ... maybe some others +We made several minor improvements of the original `beam` library in [our fork](https://github.com/juspay/beam). These changes do not have anything Juspay-specific, but yet to be pushed to the upstream. -~~git clone [git@bitbucket.org](mailto:git@bitbucket.org):juspay/euler-hs.git~~ +## EulerHS application architecture -or +The EulerHS framework slices your application into several layers preventing the implementation details to leak into the business logic. This helps to keep the business logic code simple, maintainable and more reliable. It also helps to tweak the implementation without affecting the business logic. 
The further development of the framework will be towards improving its safety and performance, but this won't require updating of the business logic code due to this clear layering. -~~git clone [https://user_name@bitbucket.org/juspay/euler-hs.git](https://user_name@bitbucket.org/juspay/euler-hs.git)~~ +Layers of the typical web backend application with Servant: -### Building and testing +**Application layer:** -#### Stack +* Application (`IO` monad) + - Processes app configs and command line arguments + - Manages `FlowRuntime` + - Runs Servant HTTP server +* API types model + - Types used in Servant to handle queries and API methods +* Servant method handler (Servant monad stack: `ReaderT FlowRuntime (ExceptT ServerError IO)`) + - Handles errors and exceptions + - Validates requests + - Converts API types into domain types and back + - Uses API types model + - Runs business logic scenarios using `FlowRuntime` and interpreters -**Build** +**Business domain layer:** -- `stack build` (Will build all the projects) -- `stack build --fast -j4` (Will skip some optimisations) -- `stack build euler-hs` -- `stack build euler-backend` +* EulerHS language (the `Flow` monad and its derivables) + - Provides an abstracted, purified set of eDSLs which are free from implementation details + - Helps to organize buisiness logic code +* Domain model (ADT types mostly) + - Represents types and functions directly related to the domain +* DB model (`beam`-powered schema) + - DB representation of domain +* Business logic scenarios (`Flow` monad and its derivables) + - Interacts with domain model and DB model + - Isolated from the other layers -**Tests:** +**Implementation layer:** -- All tests: +* EulerHS runtime (`FlowRuntime`) + - Keeps operational data of the framework + - Handles resources, manages connections +* EulerHS interpreters (`IO`-based monadic stack) + - Connects `Flow` scenarios to real subsytems and libraries + - Handles exceptions, threads, ART system + - Uses `FlowRuntime` - `stack test` +Checkout [Background-materials](#Background materials) to know more about this layering. -- backend dsl language tests: - - `stack test euler-hs:language` -- SQL DB sub set of backend dsl (by default enabled only sqlite tests, if you want to test MySQL and Postgres then uncomment corresponding specs in `lib/euler-hs/testSqlDB/Main.hs` , you need an appropriate DB and tables) - - `stack test euler-hs:sql` -- euler-backend tests: - - `stack test euler-backend` -- ART tests: - - `cd ./app/euler-backend` - - `art.sh` +## Flow monad sample usages -**Run:** +### Logging -- `stack run euler-backend` -- Alternatively, you can use the shell script `./runEulerBackend.sh` that will set environment variables and run the program with some RTS options. +Framework provides logging mechanism out-of-the-box. It provides 4 logging functions: -# Usage guidelines +```haskell +logInfo :: Show tag => tag -> T.Message -> Flow () +logError :: Show tag => tag -> T.Message -> Flow () +logDebug :: Show tag => tag -> T.Message -> Flow () +logWarning :: Show tag => tag -> T.Message -> Flow () +``` -***See also:*** +Usage is quite simple. -* [Tutorial](./TUTORIAL.md) -* [Architecture diagram](./docs/Architecture.png) -* [Beam query examples](./lib/euler-hs/testDB/SQLDB/Tests/QueryExamplesSpec.hs) +```haskell +import qualified EulerHS.Language as L -### State handling +myFlow :: L.Flow () +myFlow = L.logInfo "myFlow" "Hello world!" +``` -Sometimes you need to handle some (possibly mutable) state in your flows. 
-The framework doesn't support the state right as is, -but there are several ways to do this with other tools. +Notice there is no anything related to a specific logging library. It's all hidden behind the `Flow` interface. -***Simple argument passing state*** +The logging subsystem can be configured on the start of the application. You should specify what logger you want passing a logger creation function into `withFlowRuntime` or `createFlowRuntime` functions. -This is the simplest way. You pass some values as arguments across the flow functions. +- No logger (`createVoidLoggerRuntime`) +- Memory logger (`createMemoryLoggerRuntime`). Can eat your memory very fast! +- File logger (a special flag in `LoggerConfig`). Will flush logs into a file (the action is not immediate). +- Console logger (a special flag in `LoggerConfig`). Will show logs in console. -```haskell -orderCreate :: OrderCreateRequest -> MerchantAccount -> Flow OrderCreateResponse -orderCreate req mAccnt = do - order <- validateOrderCreateRequest req - orderCreate' order mAccnt +You can also choose should your file and console logger be sync or async (a special flag in `LoggerConfig`). N.B. Async logger is not properly tested yet, it can have performance implications. -orderCreate' :: Order -> MerchantAccount -> Flow AOrderCreateResponse -orderCreate' order mAccnt = ... +```haskell +import qualified EulerHS.Types as T +import qualified EulerHS.Runtime as R +import qualified EulerHS.Interpreters as I + +loggerConfig :: T.LoggerConfig +loggerConfig = T.LoggerConfig + { T._isAsync = False + , T._logLevel = Debug + , T._logFilePath = "/tmp/logs/myFlow.log" + , T._logToConsole = True + , T._logToFile = True + , T._maxQueueSize = 1000 + , T._logRawSql = False + } + +runApp :: IO () +runApp = do + let mkLoggerRt = R.createLoggerRuntime T.defaultFlowFormatter loggerConfig + R.withFlowRuntime (Just mkLoggerRt) + $ \flowRt -> I.runFlow flowRt myFlow ``` -Here, all the arguments can be treated as immutable state. -You can't probably do that much with this kind of state. +The framework also supports different logger formatters for different flows. See documentation on `FlowFormatter` for more info. -***StateT state*** +On `FlowRuntime` disposal, the rest of the queue of logs will be flushed gracefully. -You can use StateT for handling immutable state in flows. +Log entries are considered app-wide, but there is a `LogCounter` that is counting entries and helping to see their ordering. -```haskell +Ordering itself is not guaranteed. -type FlowT a = StateT Order a +### Typed options -orderCreate :: OrderCreateRequest -> MerchantAccount -> Flow OrderCreateResponse -orderCreate req mAccnt = do - order <- validateOrderCreateRequest req - runStateT (orderCreate' mAccnt) order +Just typed key-value options. -orderCreate' :: MerchantAccount -> FlowT AOrderCreateResponse -orderCreate' mAccnt = do - order <- get - lift someFlowMethod - put order - ... - -someFlowMethod :: Flow () -someFlowMethod = ... +```haskell +getOption :: (OptionEntity k v) => k -> Flow (Maybe v) +setOption :: (OptionEntity k v) => k -> v -> Flow () +delOption :: (OptionEntity k v) => k -> Flow () ``` -You handle your state with the StateT monad transformer wrapped around the Flow monad. -It allows to do 'mutability' but you'll have to lift the Flow methods. - -This state will be thread safe. +Options work as a shared concurrent yet mutable state, +so be careful to not produce data races. 
-***Options like a mutable state*** - -Typically, options are not intended to be used as user defined state. -But it's not prohibited somehow. Do it if you know what are you doing. -The only restriction here is that the state should be serializable (ToJSON / FromJSON) -because options should be like this. +Avoid using it as the operational state of your app; it's better to use `StateT` on top of `Flow`. See [State handling](#State-handling) for more info. ```haskell - -data Order = Order - { someFiled :: Int - } +data TestIntKey = TestIntKey deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) -data OrderKey = OrderKey +data TestStringKey = TestStringKey deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) -instance OptionEntity OrderKey Order +instance T.OptionEntity TestStringKey String +instance T.OptionEntity TestIntKey Int -orderCreate :: OrderCreateRequest -> MerchantAccount -> Flow OrderCreateResponse -orderCreate req mAccnt = do - order :: Order <- validateOrderCreateRequest req - setOption OrderKey order - orderCreate' mAccnt +myFlow :: L.Flow (Maybe String, Maybe Int) +myFlow = do + _ <- L.setOption TestStringKey "lorem ipsum" + _ <- L.setOption TestIntKey 100 + v1 <- L.getOption TestStringKey + v2 <- L.getOption TestIntKey + pure (v1, v2) -orderCreate' :: MerchantAccount -> FlowT AOrderCreateResponse -orderCreate' mAccnt = do - order <- getOption OrderKey - ... +-- (Just "lorem ipsum", Just 100) ``` -This state will be thread safe itself but with forked flows it's possible -to have race conditions anyway. +### SQL subsystem + +This subsystem supports several SQL backends: -***Mutable impure state*** +- MySQL +- Postgres +- SQLite -The idea is to have IORef, MVar or TVar defined outside the flows and use it -in flows to store data. +The goal was to provide a unified interface for all those backends, so that different projects can write relatively similar code. -It is preferable to create any of these outside of the Flow monad -as the `runIO` method can't return a created variable. Let's elaborate. +Unfortunately, there are a few drawbacks here. -This code will work, but it's kinda useless because doesn't allow to expose -the internal IORef state: +- We had to fix many problems in `beam` and related libraries, but were unable to push all the changes to the upstream. This is especially true of `beam-mysql`, which we had to rewrite almost completely. +- The framework cannot provide things specific to any particular SQL backend, only a common subset of features. +- `beam` itself is a sophisticated library, and its usage is quite difficult. You may want to consult our [cheatsheet](./BEAM_NOTES.md) on `beam` usage. + +There is a test suite [QueryExamplesSpec](testDB/SQLDB/Tests/QueryExamplesSpec.hs) with `beam` samples; +check it out to get an idea of how to compose queries and how it's all integrated with the `Flow` monad. + +In general, you need to define your DB model with tables and relations.
```haskell -someFlow :: Flow () -someFlow = do - n :: Int <- runIO $ do - ref <- newIORef 100 - readIORef ref - doSomethingWithN n +import qualified Database.Beam as B + +-- Description of the @member@ table + +data MemberT f = Member + { memberId :: B.C f Int + , surName :: B.C f Text + , firstName :: B.C f Text + , address :: B.C f Text + , zipCode :: B.C f Int + , telephone :: B.C f Text + , recommendedBy :: B.C f (Maybe Int) + , joinDate :: B.C f LocalTime + } deriving (Generic, B.Beamable) + +-- Description of the primary key type: + +instance B.Table MemberT where + data PrimaryKey MemberT f = MemberId (B.C f Int) + deriving (Generic, B.Beamable) + primaryKey = MemberId . memberId + +type Member = MemberT Identity +type MemberId = B.PrimaryKey MemberT Identity + +-- Field names can be mapped 1:1 to names of the ADT, but it's possible +-- to alter them, like this: + +membersEMod + :: B.EntityModification (B.DatabaseEntity be db) be (B.TableEntity MemberT) +membersEMod = B.modifyTableFields + B.tableModification + { memberId = B.fieldNamed "memid" + , surName = B.fieldNamed "surname" + , firstName = B.fieldNamed "firstname" + , address = B.fieldNamed "address" + , zipCode = B.fieldNamed "zipcode" + , telephone = B.fieldNamed "telephone" + , recommendedBy = B.fieldNamed "recommendedby" + , joinDate = B.fieldNamed "joindate" + } + +-- The Schema itself: + +data ClubDB f = ClubDB + { members :: f (B.TableEntity MemberT) -- members table + , facilities :: f (B.TableEntity FacilityT) -- facilities table + , bookings :: f (B.TableEntity BookingT) -- bookings table + } deriving (Generic, B.Database be) + +-- DB Schema representation. Use this value to reference to any of tables. +clubDB :: B.DatabaseSettings be ClubDB +clubDB = B.defaultDbSettings `B.withDbModification` B.dbModification + { facilities = facilitiesEMod -- Alter field names of tables. + , members = membersEMod + , bookings = bookingsEMod + } ``` -This flow won't work because the runIO method is not able to return IORef. -IORef is not serializable: +There can be conversion functions between your domain model and DB models, but in general, it's possible to have only the latter. + +Simple SELECT query for the `members` table which requests all members joined after this date: ```haskell -someFlow :: Flow () -someFlow = do - ref :: IORef Int <- runIO $ newIORef 100 -- won't compile - n <- runIO $ readIORef ref - doSomethingWithN n +searchByDate :: LocalTime -> L.Flow (T.DBResult [Member]) +searchByDate startDate = do + conn <- connectOrFail sqliteCfg -- obtain SQLite config somehow + L.runDB conn -- run a query + $ L.findRows -- SELECT query returning rows + $ B.select + $ B.filter_ (\m -> joinDate m >=. B.val_ startDate) + $ B.all_ (members clubDB) ``` -The only way to work with IORef (or MVar which is better) is to pre-create it -before the Flow scenario. Sample: +Notice that we use `runDB` for running SQL queries expressed in `beam` and wrapped into the `SqlDB` language. This function does not imply transactionality, but if you need it, use `runTransaction` instead. In this case, any query packed into an `SqlDB` monadic block, will be scoped by a single transaction. + +Framework allows you to create either permanent, application-wide SQL connections, or immediate single-usage connections in place. + +For permanent connections, you need to create them on the __Application Layer__ and pass them into your `Flow` scenarios. 
One of the possible solutions will be to wrap your `Flow` scenarios into a `ReaderT` stack (so called `ReaderT` pattern), and provide an environment with permanent connections: ```haskell --- API method for the Servant server -orderCreate - :: OrderCreateRequest -> Handler ApiOrder.OrderCreateResponse -orderCreate req = do - - mVar :: MVar Order <- liftIO newEmptyMVar - ref :: IORef (Maybe Order) <- liftIO $ newIORef Nothing - - runFlow $ Flows.orderCreate mVar ref req - -orderCreate - :: MVar Order - -> IORef (Maybe Order) - -> OrderCreateRequest - -> Flow OrderCreateResponse -orderCreate mVar ref req = do - order :: Order <- validateOrderCreateRequest req - runIO $ writeIORef ref $ Just order -- works - runIO $ putMVar mVar order -- works - orderCreate' mAccnt - -orderCreate' - :: MVar Order - -> IORef (Maybe Order) - -> FlowT AOrderCreateResponse -orderCreate' mVar ref = do - order <- runIO $ readIORef ref -- works - order <- runIO $ readMVar mVar -- works - ... -``` +import qualified Database.Beam.Sqlite as BS -MVar and STM is thread safe, IORef is not thread safe. -Still, race coniditions are possible even with MVars and STM. +data FlowEnv = FlowEnv + { sqliteConn1 :: T.SqlConn BS.SqliteM + , sqliteConn2 :: T.SqlConn BS.SqliteM + } + +type MyFlow a = ReaderT FlowEnv L.Flow a +-- The same as +-- type MyFlow r a = L.ReaderFlow FlowEnv a + +searchByDate :: LocalTime -> MyFlow (T.DBResult [Member]) +searchByDate startDate = do + FlowEnv conn1 _ <- ask + L.runDB conn1 -- run a query + $ L.findRows -- SELECT query returning rows + $ B.select + $ B.filter_ (\m -> joinDate m >=. B.val_ startDate) + $ B.all_ (members clubDB) +``` -***Untraced IO and STM*** -It is possible to run IO actions outside of the ART tracing system, however -it should be used with extreme caution as this means the following: +Here, `DBResult` is an `Either` type that carries either success or a failure: - 1. no trace will be collected - 2. replay is not possible; instead, untraced IO-actions are re-executed on playback +```haskell +data DBError = DBError DBErrorType Text + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) -Such functionality only really makes sense for two scenarios: +type DBResult a = Either DBError a +``` - 1. mutation of in-memory data structures using `STM` -- in particular, the use of `atomically` and `newTVarIO`. - 2. reading of sensitive data, such as API keys +Where `DBErrorType` is an enum with various reasons of DB failure. -For example: +`Flow` has methods for connecting and disconnecting: ```haskell -countStuff :: Flow Int - countVar <- runUntracedIO $ newTVarIO (0 :: Int) - forkFlow "counter1" $ void $ runUntracedIO $ countTo100 countVar - forkFlow "counter2" $ void $ runUntracedIO $ countTo100 countVar - count <- runUntracedIO $ atomically $ readTVar countVar - return count - -countTo100 :: TVar Int -> IO Int -countTo100 countVar = do - count <- atomically $ updateCount countVar - if count < 100 - then countTo100 countVar - else return count - -updateCount :: TVar Int -> STM Int -updateCount countVar = do - count <- readTVar countVar - when (count < 100) (writeTVar countVar (count + 1)) - readTVar countVar +initSqlDBConnection :: DBConfig beM -> Flow (DBResult (SqlConn beM)) +deinitSqlDBConnection :: SqlConn beM -> Flow () +getOrInitSqlConn :: DBConfig beM -> Flow (DBResult (SqlConn beM)) ``` -Although such `TVar`s can be allocated outside of `Flow`, this may make -use of local and composable abstractions difficult. 
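The `connectOrFail` helper used in the SELECT example above is not a framework function. Below is a minimal sketch of how it could be written on top of `getOrInitSqlConn`; the error handling is just a placeholder, adapt it to your application:

```haskell
import qualified Database.Beam.Sqlite as BS
import qualified EulerHS.Language as L
import qualified EulerHS.Types as T

-- One possible implementation of the connectOrFail helper used in the
-- examples above: reuse an application-wide connection if it already
-- exists, otherwise initialise a new one from the given config.
connectOrFail :: T.DBConfig BS.SqliteM -> L.Flow (T.SqlConn BS.SqliteM)
connectOrFail cfg = do
  eConn <- L.getOrInitSqlConn cfg
  case eConn of
    Right conn -> pure conn
    Left err   -> error $ show err  -- placeholder; handle the DBError properly in a real app
```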
+Of course, you should be careful not to produce race conditions when using these methods from different threads. -***Untraced IO and Sensitive Data*** -Another good use case is reading sensitive data which should not be collected -by the ART system, such as e.g. API keys stored in a config inside of an `IORef`. +For more info on SQL DB subsystem usage, see the tutorials and background materials. -Arguably the best way to deal with this is as follows: +### mtl style support - 1. store sensitive data as separate configuration state, for example in an `IORef` - 2. read and write to this `IORef` using `runUntracedIO` - 3. use `runIO` for any non-sensitive data +The framework exposes a type class `MonadFlow` with instances for `Flow`, `ReaderT r Flow`, `StateT s Flow`, `WriterT w Flow`, `ExceptT e Flow` and some other transformers. This makes it possible to write your `Flow` scenarios in the `mtl` style. -This way the ART traces will never collect sensitive data, and replay/mocking of ART -traces will still work in different execution environments with e.g. test API keys. +Let's see what the `searchByDate` scenario looks like: -***KV DB and SQL DB based state*** +```haskell +searchByDate + :: MonadFlow m + => MonadReader FlowEnv m + => LocalTime + -> m (T.DBResult [Member]) +searchByDate startDate = do + FlowEnv conn1 _ <- ask + L.runDB conn1 -- run a query + $ L.findRows -- SELECT query returning rows + $ B.select + $ B.filter_ (\m -> joinDate m >=. B.val_ startDate) + $ B.all_ (members clubDB) +``` -***KV DB and SQL DB based state*** +### KV DB subsystem -You can use KV DB and SQL DB as an external state storage which is significantly -less performant and less convenient. +The framework supports KV DBs in the form of Redis. -### Methods for connection management: +The way we work with KV DB connections differs from the SQL DB subsystem. This time, we specify not the connection instance but its name. The connection should be pre-created, otherwise you'll get the error `KVDBConnectionDoesNotExist`. -*Takes SQL DB config and create connection that can be used in queries.* ```haskell -initSqlDBConnection :: T.DBConfig beM -> Flow (T.DBResult (T.SqlConn beM)) -``` -*Deinit the given connection if you want to deny access over that connection.* -```haskell -deinitSqlDBConnection :: T.SqlConn beM -> Flow () -``` +myFlow :: T.KVDBKey -> L.Flow (T.TxResult (Maybe T.KVDBValue)) +myFlow k = L.runKVDB "redis" $ L.multiExec $ do + L.setTx k "bbb" + res <- L.getTx k + L.delTx [k] + pure res -*Get existing connection. If there is no such connection, returns error.* -```haskell -getSqlDBConnection ::T.DBConfig beM -> Flow (T.DBResult (T.SqlConn beM)) +-- returns T.TxSuccess (Just "bbb") ``` -*Get existing SQL connection, or init a new connection.* -```haskell -getOrInitSqlConn :: T.DBConfig beM -> L.Flow (T.DBResult (T.SqlConn beM)) -``` +See tests for more info: +- [KVDBSpec](./testDB/KVDB/KVDBSpec.hs) +- [KVDBArtSpec](./test/EulerHS/Tests/Framework/KVDBArtSpec.hs) + +### Forking and awaiting child flows -It's possible to fork controllable flows and await for their results. This subsystem can be used to compose async-like flows, but the main case is parallel execution.
+ +`Flow` has the following methods to work with forked flows: -*Takes connection, sql query (described using BEAM syntax) and make request.* ```haskell -runDB - :: - ( T.JSONEx a - , T.BeamRunner beM - , T.BeamRuntime be beM - ) - => T.SqlConn beM - -> L.SqlDB beM a - -> Flow (T.DBResult a) +forkFlow :: Description -> Flow () -> Flow () +forkFlow' :: Description -> Flow a -> Flow (Awaitable (Either Text a)) +await :: Maybe Microseconds -> Awaitable (Either Text a) -> m (Either AwaitingError a) ``` -Runs outside of a transaction. For transactions you can use `runTransaction`. +Notice that in the current framework there are no methods for killing a forked flow. -*Extracting existing connection from FlowRuntime by given db config and runs sql query (described using BEAM syntax). Acts like 'getSqlDBConnection' + 'runDB'* ```haskell -withDB :: - ( T.JSONEx a - , T.BeamRunner beM - , T.BeamRuntime be beM - ) - => T.DBConfig beM -> L.SqlDB beM a -> Flow a +myFlow :: L.Flow (Either T.AwaitingError String, Either T.AwaitingError String) +myFlow = do + awaitable1 <- L.forkFlow' "Child Flow 1" (L.runIO (threadDelay 10000) >> pure "1") + awaitable2 <- L.forkFlow' "Child Flow 2" (L.runIO (threadDelay 100000) >> pure "2") + eRes1 <- L.await Nothing awaitable1 + eRes2 <- L.await Nothing awaitable2 + pure (eRes1, eRes2) + + -- Returns (Right "1", Right "2") after approximately 100000 microseconds (0.1 seconds). ``` -When you start the application, you can initialize all the connections that you plan to use. -```haskell -keepConnsAliveForSecs :: NominalDiffTime -keepConnsAliveForSecs = 60 * 10 -- 10 mins - -maxTotalConns :: Int -maxTotalConns = 8 - -mySQLCfg :: MySQLConfig -mySQLCfg = MySQLConfig - { connectHost = "localhost" - , connectPort = 3306 - , connectUser = "username" - , connectPassword = "password" - , connectDatabase = "dbname" - , connectOptions = [T.CharsetName "utf8"] - , connectPath = "" - , connectSSL = Nothing - } +## State handling -sqlDBcfg = mkMySQLPoolConfig "eulerMysqlDB" mySQLCfg - $ PoolConfig 1 keepConnsAliveForSecs maxTotalConns +TODO -prepareDBConnections :: Flow () -prepareDBConnections = do - ePool <- initSqlDBConnection sqlDBcfg - throwOnFailedWithLog ePool SqlDBConnectionFailedException "Failed to connect to SQL DB." +## Building the framework -``` +See [BUILD.md](./BUILD.md) -And then run flow methods with it -```haskell -endpointHandler :: RequestType -> Flow (Maybe Int) -endpointHandler req = do - logInfo @String "endpointHandler" "endpointHandler started" - validReq <- validateRequest req - -- ... - -- some other actions - -- ... - res <- withDB sqlDBcfg $ do - let predicate DBTableType {idField} = - (idField ==. B.val_ (validReq ^. reqIdField)) - findRow - $ B.select - $ B.limit_ 1 - $ B.filter_ predicate - $ B.all_ (dbTableName dbSchema) - pure $ (^. intField) <$> res -``` +## Testing the framework -Also, you can put your dbConfig in Options and take it back later in specialized `withDB` wrappers. Maybe helpful when you should create config on startup, so config can't be hardcoded as constant and easily passed in methods (e.g. read DB password from env var and decode it with some IO operation). You can manage many different db configs +You can run `stack test` to check whether your system is ready and the framework can be used. -At first define keys for DBs: -```haskell -data DB1Cfg = DB1Cfg - deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) +### Integration tests for SQL backends -instance OptionEntity DB1Cfg (DBConfig MySQLM) +There are disabled tests for MySQL and Postgres DB backends.
To run them: +- Create DB tables and fill with the appropriate data using these sql queries: + * [MySQLDBSpec.sql](./testDB/SQLDB/TestData/MySQLDBSpec.sql) + * [PostgresDBSpec.sql](./testDB/SQLDB/TestData/PostgresDBSpec.sql) +- Uncomment the corresponding specs in `lib/euler-hs/testSqlDB/Main.hs` +- Run tests: + `stack test euler-hs:sql` -data DB2Cfg = DB2Cfg - deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) +## EulerHS tutorials and template projects -instance OptionEntity DB2Cfg (DBConfig Pg) -``` +* [Tutorial](./TUTORIAL.md) +* [Beam query examples](./lib/euler-hs/testDB/SQLDB/Tests/QueryExamplesSpec.hs) +* Demo project (**coming soon**) -Then you can define a specialized wrapper for each db: -```haskell -withDB1 :: JSONEx a => SqlDB MySQLM a -> Flow a -withDB1 act = do - dbcfg <- getOption DB1Cfg - case dbcfg of - Just cfg -> withDB cfg act - Nothing -> do - logError @String "MissingDB identifier" "Can't find DB1 identifier in options" - throwException YourException - -withDB2 :: JSONEx a => SqlDB Pg a -> Flow a -withDB2 act = do - dbcfg <- getOption DB2Cfg - case dbcfg of - Just cfg -> withDB cfg act - Nothing -> do - logError @String "MissingDB identifier" "Can't find DB2 identifier in options" - throwException YourException -``` -On startup initialization just put configs in Options +## Background materials -```haskell -prepareDBConnections :: Flow () -prepareDBConnections = do - sqlDBcfg1 <- runIO getFromEnvAndDecodeMySqlDbCfg - ePool1 <- initSqlDBConnection sqlDBcfg1 - setOption DB1Cfg sqlDBcfg1 - throwOnFailedWithLog ePool SqlDBConnectionFailedException "Failed to connect to SQL DB1." - sqlDBcfg2 <- runIO getFromEnvAndDecodePostgresDbCfg - ePool2 <- initSqlDBConnection sqlDBcfg2 - setOption DB2Cfg sqlDBcfg2 - throwOnFailedWithLog ePool SqlDBConnectionFailedException "Failed to connect to SQL DB2." -``` +* [Hierarchical Free Monads](https://github.com/graninas/hierarchical-free-monads-the-most-developed-approach-in-haskell) +* [Automatic whitebox testing with Free monads](https://github.com/graninas/automatic-whitebox-testing-showcase) diff --git a/Setup.hs b/Setup.hs new file mode 100644 index 00000000..9a994af6 --- /dev/null +++ b/Setup.hs @@ -0,0 +1,2 @@ +import Distribution.Simple +main = defaultMain diff --git a/TUTORIAL.md b/TUTORIAL.md new file mode 100644 index 00000000..ba4fda02 --- /dev/null +++ b/TUTORIAL.md @@ -0,0 +1,102 @@ +# EulerHS usage tutorial + +**N.B. This is a draft of the tutorial. Updated tutorial will be uploaded soon.** + +* [Your first flow](#Your-first-flow) +* [Running your flows](#Running-your-flows) +* [Servant based web server (coming soon)](Servant-based-web-server) +* [Working with SQL subsystem (coming soon)](Working-with-SQL-subsystem) +* [State handling (coming soon)](#State-handling) +* [Automatic regression testing (coming soon)](#Automatic-regression-testing) +* [See also](#See-also) + +## Your first flow + +You typically want to write some business logic (BL) code that is not encrusted by implementation details. Such code is much simpler to write and maintain; and it becomes possible to update implementation details without affecting the BL code. The framework provides you the `Flow` monad and its derivables for defining pure, safe abstractions suitable for most backend apps. + +See [the documentation](./src/EulerHS/Framework/Flow/Language.hs) on the `Flow` language for more insights on the provided API. + +Sample `Flow` monadic scenario follows here. 
It greets the user, logs a message and makes an HTTP call via the integrated servant-client subsystem: + +```haskell +import EulerHS.Prelude +import qualified EulerHS.Types as T +import qualified EulerHS.Language as L +import qualified Servant.Client as S + +myFlow :: L.Flow (Either T.ClientError User) +myFlow = do + L.runIO $ putStrLn @String "Hello there!" + L.logInfo "myFlow" "This is a message from myFlow." + + let url = S.BaseUrl S.Http "127.0.0.1" 8081 "" + L.callAPI Nothing url getUser + +-- HTTP API powered by Servant +type API = "user" :> Get '[JSON] User + +getUser :: T.EulerClient User +getUser = client api +``` + +It's recommended to import the modules of the framework qualified to avoid name clashes and to keep your code in order. + +The method returns `Either T.ClientError User`. Normally, you can treat this code as safe and exception-free. None of the framework's methods throw (sync) exceptions, so this code should be safe. + + **N.B. There is a mechanism for working with exceptions in the framework, and it should be used carefully.** + + **N.B. Generally, the framework is free of sync exceptions. All the exceptions from the lower implementation level are guarded by the interpreters, converted into error types and returned as `Either`s.** + + **N.B. The `Flow` code should also be considered free of async exceptions. The framework is not yet polished to protect against async exceptions, but current usage shows that such problems have a near-zero chance of happening. Further development of the framework will move towards even more exception safety.** + +## Running your flows + +Flows are just declarative descriptions of what your logic should do. You need to run them to take real action. To do that, you obtain a `FlowRuntime` instance and pass both the runtime and your flow into the `runFlow` function: + +```haskell +import qualified EulerHS.Runtime as R +import qualified EulerHS.Interpreters as I + +runMyFlow :: R.FlowRuntime -> IO () +runMyFlow flowRt = I.runFlow flowRt myFlow +``` + +`FlowRuntime` is a structure in which all the operational data is managed by the framework. This data is needed for the framework to keep connections, bookkeep threads, and handle external libraries. You can create an instance using various functions from the `EulerHS.Runtime` module. + +```haskell +import qualified EulerHS.Types as T +import qualified EulerHS.Runtime as R + +runApp :: IO () +runApp = do + -- Default logger creation function + let mkLoggerRt = R.createLoggerRuntime T.defaultFlowFormatter T.defaultLoggerConfig + + -- Bracket-like helper which will free your FlowRuntime gracefully when the flow finishes: + R.withFlowRuntime (Just mkLoggerRt) $ \flowRt -> runMyFlow flowRt +``` + +You typically need only a single `FlowRuntime` structure for the whole backend app. +You can create this structure before starting an HTTP server, and use it in all the handlers to run your business logic. +`FlowRuntime` is a thread-safe structure, and it's okay to run several `Flows` in parallel. Just don't try to change the internals of the runtime yourself. + +## Servant based web server + +**Coming soon...** + +## Working with SQL subsystem + +**Coming soon...** + +## State handling + +**Coming soon...** + +# Automatic regression testing + +**Coming soon...** + +## See also + +The `EulerHS` framework is built using the Hierarchical Free Monads approach. This approach was developed by Alexander Granin, and there are many different materials showing its usage and philosophy.
For more info, consider the `Hydra` framework which is a "lesser brother" of the `EulerHS` framework. It's a showcase framework for demonstrating the approach, but they have a lot of things in common, including the design of several subsystems. In the `Hydra` repo, you can find showcase projects which architecture and structure can be directly derived for your `EulerHS` apps. + +* [The Hydra framework](https://github.com/graninas/Hydra) diff --git a/euler-hs.cabal b/euler-hs.cabal new file mode 100644 index 00000000..740c84ae --- /dev/null +++ b/euler-hs.cabal @@ -0,0 +1,315 @@ +cabal-version: 3.0 +name: euler-hs +version: 2.5.0.0 +synopsis: The Flow framework for web backends +license: Apache-2.0 +author: Juspay Technologies Pvt Ltd +maintainer: opensource@juspay.in +copyright: (C) Juspay Technologies Pvt Ltd 2019-2021 +category: Euler +build-type: Simple +tested-with: GHC ==8.8.3 + +common common-lang + ghc-options: + -Wall -Wcompat -Wincomplete-record-updates + -Wincomplete-uni-patterns -Wredundant-constraints + + build-depends: base >=4.13 && <5 + default-extensions: + NoImplicitPrelude + ConstraintKinds + DataKinds + DefaultSignatures + DeriveFunctor + DeriveGeneric + DuplicateRecordFields + ExplicitNamespaces + FlexibleContexts + FlexibleInstances + FunctionalDependencies + GADTs + LambdaCase + MultiParamTypeClasses + MultiWayIf + NamedFieldPuns + OverloadedLabels + OverloadedStrings + PatternSynonyms + RankNTypes + RecordWildCards + ScopedTypeVariables + TupleSections + TypeApplications + TypeFamilies + TypeOperators + ViewPatterns + + default-language: Haskell2010 + +library + import: common-lang + exposed-modules: + EulerHS.Extra.AltValidation + EulerHS.Extra.Test + EulerHS.Extra.Validation + EulerHS.Interpreters + EulerHS.Language + EulerHS.Prelude + EulerHS.Runtime + EulerHS.Types + + other-modules: + EulerHS.Core.Api + EulerHS.Core.Interpreters + EulerHS.Core.KVDB.Entries + EulerHS.Core.KVDB.Interpreter + EulerHS.Core.KVDB.Language + EulerHS.Core.Language + EulerHS.Core.Logger.Entries + EulerHS.Core.Logger.Impl.TinyLogger + EulerHS.Core.Logger.Interpreter + EulerHS.Core.Logger.Language + EulerHS.Core.Playback.Entries + EulerHS.Core.Playback.Machine + EulerHS.Core.PubSub.Entries + EulerHS.Core.PubSub.Interpreter + EulerHS.Core.PubSub.Language + EulerHS.Core.Runtime + EulerHS.Core.SqlDB.Interpreter + EulerHS.Core.SqlDB.Language + EulerHS.Core.Types + EulerHS.Core.Types.BinaryString + EulerHS.Core.Types.Common + EulerHS.Core.Types.DB + EulerHS.Core.Types.Exceptions + EulerHS.Core.Types.HttpAPI + EulerHS.Core.Types.KVDB + EulerHS.Core.Types.Logger + EulerHS.Core.Types.MySQL + EulerHS.Core.Types.Options + EulerHS.Core.Types.Playback + EulerHS.Core.Types.Postgres + EulerHS.Core.Types.Serializable + EulerHS.Extra.Aeson + EulerHS.Extra.Language + EulerHS.Framework.Flow.Interpreter + EulerHS.Framework.Flow.Language + EulerHS.Framework.Interpreters + EulerHS.Framework.Language + EulerHS.Framework.Runtime + + build-depends: + , aeson + , base64-bytestring + , base64-bytestring-type + , beam-core ^>=0.9.0.0 + , beam-mysql ^>=1.2.1.0 + , beam-postgres ^>=0.5.0.0 + , beam-sqlite ^>=0.5.0.0 + , binary + , bytestring + , case-insensitive + , cereal + , connection + , containers + , data-default + , dlist + , exceptions + , extra + , fmt + , free + , generic-lens + , hedis ^>=0.12.8.1 + , http-client + , http-client-tls + , http-media + , http-types + , lens + , mysql-haskell ^>=0.8.4.2 + , newtype-generics + , postgresql-simple + , process + , profunctors + , resource-pool + , servant-client + 
, servant-client-core + -- , servant-client ^>=0.18.1 + -- , servant-client-core ^>=0.18.1 + , sqlite-simple + , stm + , string-conversions + , text + , time + , tinylog + , tls + , transformers + , unagi-chan + , universum + , unordered-containers + , utf8-string + , uuid + , validation + , vector + + hs-source-dirs: src + +test-suite language + import: common-lang + type: exitcode-stdio-1.0 + main-is: Main.hs + other-modules: + EulerHS.Core.Api + EulerHS.Core.Interpreters + EulerHS.Core.KVDB.Entries + EulerHS.Core.KVDB.Interpreter + EulerHS.Core.KVDB.Language + EulerHS.Core.Language + EulerHS.Core.Logger.Entries + EulerHS.Core.Logger.Impl.TinyLogger + EulerHS.Core.Logger.Interpreter + EulerHS.Core.Logger.Language + EulerHS.Core.Playback.Entries + EulerHS.Core.Playback.Machine + EulerHS.Core.PubSub.Entries + EulerHS.Core.PubSub.Interpreter + EulerHS.Core.PubSub.Language + EulerHS.Core.Runtime + EulerHS.Core.SqlDB.Interpreter + EulerHS.Core.SqlDB.Language + EulerHS.Core.Types + EulerHS.Core.Types.BinaryString + EulerHS.Core.Types.Common + EulerHS.Core.Types.DB + EulerHS.Core.Types.Exceptions + EulerHS.Core.Types.HttpAPI + EulerHS.Core.Types.KVDB + EulerHS.Core.Types.Logger + EulerHS.Core.Types.MySQL + EulerHS.Core.Types.Options + EulerHS.Core.Types.Playback + EulerHS.Core.Types.Postgres + EulerHS.Core.Types.Serializable + EulerHS.Extra.Aeson + EulerHS.Extra.Language + EulerHS.Framework.Flow.Interpreter + EulerHS.Framework.Flow.Language + EulerHS.Framework.Interpreters + EulerHS.Framework.Language + EulerHS.Framework.Runtime + EulerHS.Interpreters + EulerHS.Language + EulerHS.Prelude + EulerHS.Runtime + EulerHS.TestData.API.Client + EulerHS.TestData.Scenarios.Scenario1 + EulerHS.TestData.Types + EulerHS.Testing.Flow.Interpreter + EulerHS.Testing.Types + EulerHS.Tests.Framework.ArtSpec + EulerHS.Tests.Framework.Common + EulerHS.Tests.Framework.DBSetup + EulerHS.Tests.Framework.FlowSpec + EulerHS.Tests.Framework.KVDBArtSpec + EulerHS.Tests.Framework.PubSubSpec + EulerHS.Tests.Framework.SQLArtSpec + EulerHS.Types + + build-depends: + , aeson + , aeson-pretty + , base64-bytestring + , base64-bytestring-type + , beam-core ^>=0.9.0.0 + , beam-mysql ^>=1.2.1.0 + , beam-postgres ^>=0.5.0.0 + , beam-sqlite ^>=0.5.0.0 + , binary + , bytestring + , case-insensitive + , cereal + , connection + , containers + , data-default + , dlist + , euler-hs + , exceptions + , extra + , fmt + , free + , generic-lens + , hedis ^>=0.12.8.1 + , hspec + , http-client + , http-client-tls + , http-media + , http-types + , lens + , mysql-haskell ^>=0.8.4.2 + , newtype-generics + , postgresql-simple + , process + , profunctors + , QuickCheck + , resource-pool + , servant + , servant-client + , servant-client-core + , servant-mock + , servant-server + -- , servant ^>=0.18.1 + -- , servant-client ^>=0.18.1 + -- , servant-client-core ^>=0.18.1 + -- , servant-mock ^>=0.8.7 + -- , servant-server ^>=0.18.1 + , sqlite-simple + , stm + , string-conversions + , text + , time + , tinylog + , tls + , transformers + , unagi-chan + , universum + , unordered-containers + , utf8-string + , uuid + , vector + , warp + + hs-source-dirs: test src + +test-suite db + import: common-lang + type: exitcode-stdio-1.0 + main-is: Main.hs + other-modules: + KVDB.KVDBSpec + SQLDB.TestData.Connections + SQLDB.TestData.Scenarios.MySQL + SQLDB.TestData.Scenarios.Postgres + SQLDB.TestData.Scenarios.SQLite + SQLDB.TestData.Types + SQLDB.Tests.MySQLDBSpec + SQLDB.Tests.PostgresDBSpec + SQLDB.Tests.QueryExamplesSpec + SQLDB.Tests.SQLiteDBSpec + + 
build-depends: + , aeson + , beam-core + , beam-mysql + , beam-postgres + , beam-sqlite + , bytestring + , euler-hs + , hspec + , mysql-haskell + , postgresql-simple + , process + , servant + , servant-mock + , time + + hs-source-dirs: testDB diff --git a/src/EulerHS/Core/Api.hs b/src/EulerHS/Core/Api.hs new file mode 100644 index 00000000..0bb44d60 --- /dev/null +++ b/src/EulerHS/Core/Api.hs @@ -0,0 +1,82 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} + +{- | +Module : EulerHS.Core.Api +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module contains implementation of the low-level HTTP client subsystem. + +This is an internal module. Import EulerHS.Types instead. +-} + + +module EulerHS.Core.Api where + +import EulerHS.Prelude +import qualified Servant.Client as SC +import qualified Servant.Client.Core as SCC +import Servant.Client.Core.RunClient (RunClient) +import qualified Servant.Client.Free as SCF +import qualified Servant.Client.Internal.HttpClient as SCIHC +import qualified Network.HTTP.Types as HTTP + +data LogServantRequest = LogServantRequest + { url :: SCF.BaseUrl + , method :: HTTP.Method + , body :: String + , headers :: Seq HTTP.Header + , queryString :: Seq HTTP.QueryItem + } + deriving (Show) + +newtype EulerClient a = EulerClient (Free SCF.ClientF a) + deriving newtype (Functor, Applicative, Monad, RunClient) + +client :: SC.HasClient EulerClient api => Proxy api -> SC.Client EulerClient api +client api = SCC.clientIn api $ Proxy @EulerClient + +-- Servant >=0.18.1 changes +-- interpretClientF :: (String -> IO ()) -> SCC.BaseUrl -> SCF.ClientF a -> SC.ClientM a +-- interpretClientF _ _ (SCF.Throw e) = throwM e +-- interpretClientF logMsg bUrl (SCF.RunRequest req next) = do +-- liftIO $ logServantRequest logMsg bUrl req +-- res <- SCC.runRequestAcceptStatus Nothing req +-- liftIO . logMsg $ show res +-- pure $ next res + +interpretClientF :: (String -> IO ()) -> SCC.BaseUrl -> SCF.ClientF a -> SC.ClientM a +interpretClientF _ _ (SCF.Throw e) = throwM e +interpretClientF log bUrl (SCF.RunRequest req next) = do + case SCC.requestBody req of + Just (body, _) -> liftIO . log $ show body + Nothing -> pure () + liftIO . log $ show (SCIHC.requestToClientRequest bUrl req) + res <- SCC.runRequest req + liftIO . log $ show (res) + pure $ next res + +logServantRequest :: (String -> IO ()) -> SCC.BaseUrl -> SCC.Request -> IO () +logServantRequest log url req = do + log $ show $ LogServantRequest + { url = url + , method = method + , body = body + , headers = headers + , queryString = queryString + } + where + body = case SCC.requestBody req of + Just (b, _) -> show b + Nothing -> "body = (empty)" + method = SCC.requestMethod req + headers = SCC.requestHeaders req + queryString = SCC.requestQueryString req + -- liftIO . 
log $ show (SCIHC.requestToClientRequest bUrl req) + +runEulerClient :: (String -> IO()) -> SCC.BaseUrl -> EulerClient a -> SCIHC.ClientM a +runEulerClient log bUrl (EulerClient f) = foldFree (interpretClientF log bUrl) f diff --git a/src/EulerHS/Core/Interpreters.hs b/src/EulerHS/Core/Interpreters.hs new file mode 100644 index 00000000..9eaf214f --- /dev/null +++ b/src/EulerHS/Core/Interpreters.hs @@ -0,0 +1,21 @@ +{- | +Module : EulerHS.Core.Interpreters +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module reexports interpreters of the core subsystems. + +This is an internal module. Import EulerHS.Interpreters instead. +-} + +module EulerHS.Core.Interpreters + ( module X + ) where + +import EulerHS.Core.KVDB.Interpreter as X +import EulerHS.Core.Logger.Interpreter as X +import EulerHS.Core.PubSub.Interpreter as X +import EulerHS.Core.SqlDB.Interpreter as X diff --git a/src/EulerHS/Core/KVDB/Entries.hs b/src/EulerHS/Core/KVDB/Entries.hs new file mode 100644 index 00000000..0861b25c --- /dev/null +++ b/src/EulerHS/Core/KVDB/Entries.hs @@ -0,0 +1,311 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE UndecidableInstances #-} + +module EulerHS.Core.KVDB.Entries where + + +import qualified Data.Aeson as A +import EulerHS.Prelude +import EulerHS.Types (MockedResult (..), RRItem (..)) +import qualified EulerHS.Types as T +import qualified EulerHS.Core.KVDB.Language as L + +data SetEntry = SetEntry + { jsonKey :: A.Value + , jsonValue :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem SetEntry where + getTag _ = "SetEntry" + +instance MockedResult SetEntry (Either T.KVDBReply T.KVDBStatus) where + getMock SetEntry {jsonResult} = T.jsonDecode jsonResult + +mkSetEntry :: ByteString -> ByteString -> Either T.KVDBReply T.KVDBStatus -> SetEntry +mkSetEntry k v r = SetEntry + (T.jsonEncode k) + (T.jsonEncode v) + (T.jsonEncode r) + +---------------------------------------------------------------------- + +data SetExEntry = SetExEntry + { jsonKey :: A.Value + , jsonTtl :: A.Value + , jsonValue :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem SetExEntry where + getTag _ = "SetExEntry" + +instance MockedResult SetExEntry (Either T.KVDBReply T.KVDBStatus) where + getMock SetExEntry {jsonResult} = T.jsonDecode jsonResult + +mkSetExEntry :: ByteString -> Integer -> ByteString -> Either T.KVDBReply T.KVDBStatus -> SetExEntry +mkSetExEntry k e v r = SetExEntry + (T.jsonEncode k) + (toJSON e) + (T.jsonEncode v) + (T.jsonEncode r) + +---------------------------------------------------------------------- + +data SetOptsEntry = SetOptsEntry + { jsonKey :: A.Value + , jsonValue :: A.Value + , jsonTTL :: A.Value + , jsonCond :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem SetOptsEntry where + getTag _ = "SetOptsEntry" + +instance MockedResult SetOptsEntry (Either T.KVDBReply Bool) where + getMock SetOptsEntry {jsonResult} = T.jsonDecode jsonResult + +mkSetOptsEntry :: ByteString -> ByteString -> L.KVDBSetTTLOption -> L.KVDBSetConditionOption -> Either T.KVDBReply Bool -> SetOptsEntry +mkSetOptsEntry k v ttl cond r = SetOptsEntry + (T.jsonEncode k) + (T.jsonEncode v) + (toJSON ttl) + (toJSON cond) + (T.jsonEncode r) + 
+---------------------------------------------------------------------- + +data GetEntry = GetEntry + { jsonKey :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem GetEntry where + getTag _ = "GetEntry" + +instance MockedResult GetEntry (Either T.KVDBReply (Maybe ByteString)) where + getMock GetEntry {jsonResult} = T.jsonDecode jsonResult + + +mkGetEntry :: ByteString -> Either T.KVDBReply (Maybe ByteString) -> GetEntry +mkGetEntry k r = GetEntry + (T.jsonEncode k) + (T.jsonEncode r) + +---------------------------------------------------------------------- + +data ExistsEntry = ExistsEntry + { jsonKey :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem ExistsEntry where + getTag _ = "ExistsEntry" + +instance MockedResult ExistsEntry (Either T.KVDBReply Bool) where + getMock ExistsEntry {jsonResult} = T.jsonDecode jsonResult + +mkExistsEntry :: ByteString -> Either T.KVDBReply Bool -> ExistsEntry +mkExistsEntry k r = ExistsEntry + (T.jsonEncode k) + (T.jsonEncode r) + +-- ---------------------------------------------------------------------- + +data DelEntry = DelEntry + { jsonKeys :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem DelEntry where + getTag _ = "DelEntry" + +instance MockedResult DelEntry (Either T.KVDBReply Integer) where + getMock DelEntry {jsonResult} = T.jsonDecode jsonResult + +mkDelEntry :: [ByteString] -> Either T.KVDBReply Integer -> DelEntry +mkDelEntry k r = DelEntry + (T.jsonEncode k) + (T.jsonEncode r) + + +-- ---------------------------------------------------------------------- + +data ExpireEntry = ExpireEntry + { jsonKey :: A.Value + , duration :: Integer + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem ExpireEntry where + getTag _ = "ExpireEntry" + +instance MockedResult ExpireEntry (Either T.KVDBReply Bool) where + getMock ExpireEntry {jsonResult} = T.jsonDecode jsonResult + +mkExpireEntry :: ByteString -> Integer -> Either T.KVDBReply Bool -> ExpireEntry +mkExpireEntry k d r = ExpireEntry + (T.jsonEncode k) + d + (T.jsonEncode r) + +-- ---------------------------------------------------------------------- + +data IncrEntry = IncrEntry + { jsonKey :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem IncrEntry where + getTag _ = "IncrEntry" + +instance MockedResult IncrEntry (Either T.KVDBReply Integer) where + getMock IncrEntry {jsonResult} = T.jsonDecode jsonResult + +mkIncrEntry :: ByteString -> Either T.KVDBReply Integer -> IncrEntry +mkIncrEntry k r = IncrEntry + (T.jsonEncode k) + (T.jsonEncode r) + +-- ---------------------------------------------------------------------- + +data HSetEntry = HSetEntry + { jsonKey :: A.Value + , jsonField :: A.Value + , jsonValue :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem HSetEntry where + getTag _ = "HSetEntry" + +instance MockedResult HSetEntry (Either T.KVDBReply Bool) where + getMock HSetEntry {jsonResult} = T.jsonDecode jsonResult + +mkHSetEntry :: ByteString -> ByteString -> ByteString -> Either T.KVDBReply Bool -> HSetEntry +mkHSetEntry k f v r = HSetEntry + (T.jsonEncode k) + (T.jsonEncode f) + (T.jsonEncode v) + (T.jsonEncode r) + +-- ---------------------------------------------------------------------- + +data HGetEntry = HGetEntry + { jsonKey :: A.Value + , jsonField :: A.Value + , 
jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem HGetEntry where + getTag _ = "HGetEntry" + +instance MockedResult HGetEntry (Either T.KVDBReply (Maybe ByteString)) where + getMock HGetEntry {jsonResult} = T.jsonDecode jsonResult + +mkHGetEntry :: ByteString -> ByteString -> Either T.KVDBReply (Maybe ByteString) -> HGetEntry +mkHGetEntry k f r = HGetEntry + (T.jsonEncode k) + (T.jsonEncode f) + (T.jsonEncode r) + +-- ---------------------------------------------------------------------- + +data XAddEntry = XAddEntry + { jsonStream :: A.Value + , jsonEntryId :: A.Value + , jsonItems :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem XAddEntry where + getTag _ = "XAddEntry" + +instance MockedResult XAddEntry (Either T.KVDBReply L.KVDBStreamEntryID) where + getMock XAddEntry {jsonResult} = T.jsonDecode jsonResult + +mkXAddEntry :: ByteString -> L.KVDBStreamEntryIDInput -> [L.KVDBStreamItem] -> Either T.KVDBReply L.KVDBStreamEntryID -> XAddEntry +mkXAddEntry s e i r = XAddEntry + (T.jsonEncode s) + (toJSON e) + (T.jsonEncode i) + (T.jsonEncode r) + +-- ---------------------------------------------------------------------- + +data XLenEntry = XLenEntry + { jsonStream :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem XLenEntry where + getTag _ = "XLenEntry" + +instance MockedResult XLenEntry (Either T.KVDBReply Integer) where + getMock XLenEntry {jsonResult} = T.jsonDecode jsonResult + +mkXLenEntry :: ByteString -> Either T.KVDBReply Integer -> XLenEntry +mkXLenEntry s r = XLenEntry + (T.jsonEncode s) + (T.jsonEncode r) + +-- ---------------------------------------------------------------------- + +jsonExDecode :: forall a . T.JSONEx a => A.Value -> Maybe a +jsonExDecode = T.resolveJSONEx @a T.jsonDecode T.fromJSONMaybe + +data MultiExecEntry = MultiExecEntry + { jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem MultiExecEntry where + getTag _ = "MultiExecEntry" + +instance T.JSONEx a => MockedResult MultiExecEntry (Either T.KVDBReply (T.TxResult a)) where + getMock MultiExecEntry {jsonResult} = + case temp of + Nothing -> Nothing + Just (Left e) -> Just $ Left e + Just (Right (T.TxSuccess Nothing )) -> Nothing + Just (Right (T.TxSuccess (Just a))) -> Just $ Right $ T.TxSuccess a + Just (Right (T.TxAborted )) -> Just $ Right $ T.TxAborted + Just (Right (T.TxError s )) -> Just $ Right $ T.TxError s + where + temp :: Maybe (Either T.KVDBReply (T.TxResult (Maybe a))) + temp = fmap (fmap (fmap jsonExDecode)) $ jsonExDecode jsonResult + + +mkMultiExecEntry :: forall a . T.JSONEx a => Either T.KVDBReply (T.TxResult a) -> MultiExecEntry +mkMultiExecEntry r = MultiExecEntry $ + A.toJSON $ fmap (A.toJSON1 . 
fmap (T.resolveJSONEx @a T.jsonEncode toJSON)) r + + +-- MultiExecWithHash + +data MultiExecWithHashEntry = MultiExecWithHashEntry + { hashValue :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance RRItem MultiExecWithHashEntry where + getTag _ = "MultiExecWithHashEntry" + +instance T.JSONEx a => MockedResult MultiExecWithHashEntry (Either T.KVDBReply (T.TxResult a)) where + getMock MultiExecWithHashEntry {jsonResult} = + case temp of + Nothing -> Nothing + Just (Left e) -> Just $ Left e + Just (Right (T.TxSuccess Nothing )) -> Nothing + Just (Right (T.TxSuccess (Just a))) -> Just $ Right $ T.TxSuccess a + Just (Right (T.TxAborted )) -> Just $ Right $ T.TxAborted + Just (Right (T.TxError s )) -> Just $ Right $ T.TxError s + where + temp :: Maybe (Either T.KVDBReply (T.TxResult (Maybe a))) + temp = fmap (fmap (fmap jsonExDecode)) $ jsonExDecode jsonResult + + +mkMultiExecWithHashEntry :: forall a . T.JSONEx a => ByteString -> Either T.KVDBReply (T.TxResult a) -> MultiExecWithHashEntry +mkMultiExecWithHashEntry h r = MultiExecWithHashEntry (T.jsonEncode h) $ + A.toJSON $ fmap (A.toJSON1 . fmap (T.resolveJSONEx @a T.jsonEncode toJSON)) r \ No newline at end of file diff --git a/src/EulerHS/Core/KVDB/Interpreter.hs b/src/EulerHS/Core/KVDB/Interpreter.hs new file mode 100644 index 00000000..9b8d976a --- /dev/null +++ b/src/EulerHS/Core/KVDB/Interpreter.hs @@ -0,0 +1,209 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE TypeFamilies #-} +module EulerHS.Core.KVDB.Interpreter + ( + -- * KVDB Interpreter + runKVDB + ) where + +import EulerHS.Prelude + +import qualified Data.Map as Map +import qualified Database.Redis as R +import qualified Data.Text as T +import qualified Data.Text.Encoding as TE +import qualified EulerHS.Core.KVDB.Language as L +import EulerHS.Core.Types.KVDB + +import qualified EulerHS.Core.KVDB.Entries as E +import qualified EulerHS.Core.Playback.Machine as P +import qualified EulerHS.Core.Types as D + + +interpretKeyValueF + :: (forall b . 
R.Redis (Either R.Reply b) -> IO (Either KVDBReply b)) + -> D.RunMode + -> L.KeyValueF (Either KVDBReply) a + -> IO a +interpretKeyValueF runRedis runMode (L.Set k v next) = + fmap next $ P.withRunMode runMode (E.mkSetEntry k v) $ + fmap (second fromRdStatus) $ runRedis $ R.set k v + +interpretKeyValueF runRedis runMode (L.SetEx k e v next) = + fmap next $ P.withRunMode runMode (E.mkSetExEntry k e v) $ + fmap (second fromRdStatus) $ runRedis $ R.setex k e v + +interpretKeyValueF runRedis runMode (L.SetOpts k v ttl cond next) = + fmap next $ P.withRunMode runMode (E.mkSetOptsEntry k v ttl cond) $ do + result <- runRedis $ R.setOpts k v (makeSetOpts ttl cond) + pure $ case result of + Right _ -> Right True + -- (nil) is ok, app should not fail + Left (Bulk Nothing) -> Right False + Left reply -> Left reply + +interpretKeyValueF runRedis runMode (L.Get k next) = + fmap next $ P.withRunMode runMode (E.mkGetEntry k) $ + runRedis $ R.get k + +interpretKeyValueF runRedis runMode (L.Exists k next) = + fmap next $ P.withRunMode runMode (E.mkExistsEntry k) $ + runRedis $ R.exists k + +interpretKeyValueF _ runMode (L.Del [] next) = + fmap next $ P.withRunMode runMode (E.mkDelEntry []) $ + pure $ pure 0 + +interpretKeyValueF runRedis runMode (L.Del ks next) = + fmap next $ P.withRunMode runMode (E.mkDelEntry ks) $ + runRedis $ R.del ks + +interpretKeyValueF runRedis runMode (L.Expire k sec next) = + fmap next $ P.withRunMode runMode (E.mkExpireEntry k sec) $ + runRedis $ R.expire k sec + +interpretKeyValueF runRedis runMode (L.Incr k next) = + fmap next $ P.withRunMode runMode (E.mkIncrEntry k) $ + runRedis $ R.incr k + +interpretKeyValueF runRedis runMode (L.HSet k field value next) = + fmap next $ P.withRunMode runMode (E.mkHSetEntry k field value) $ + runRedis $ R.hset k field value + +interpretKeyValueF runRedis runMode (L.HGet k field next) = + fmap next $ P.withRunMode runMode (E.mkHGetEntry k field) $ + runRedis $ R.hget k field + +interpretKeyValueF runRedis runMode (L.XAdd stream entryId items next) = + fmap next $ P.withRunMode runMode (E.mkXAddEntry stream entryId items) $ + runRedis $ do + result <- R.xadd stream (makeStreamEntryId entryId) items + pure $ parseStreamEntryId <$> result + where + makeStreamEntryId (L.EntryID (L.KVDBStreamEntryID ms ss)) + = show ms <> "-" <> show ss + makeStreamEntryId L.AutoID = "*" + + -- FIXME: this is a very dirty code! + unpackHelper bs = read . 
T.unpack <$> T.splitOn "-" (TE.decodeUtf8 bs) + parseStreamEntryId bs = case unpackHelper bs of + [ms, ss] -> L.KVDBStreamEntryID ms ss + _ -> error "Failed to unpack " + +interpretKeyValueF runRedis runMode (L.XLen stream next) = + fmap next $ P.withRunMode runMode (E.mkXLenEntry stream) $ + runRedis $ R.xlen stream + + +interpretKeyValueTxF :: L.KeyValueF R.Queued a -> R.RedisTx a +interpretKeyValueTxF (L.Set k v next) = + fmap next $ fmap (fmap D.fromRdStatus) $ R.set k v + +interpretKeyValueTxF (L.SetEx k e v next) = + fmap next $ fmap (fmap D.fromRdStatus) $ R.setex k e v + +interpretKeyValueTxF (L.SetOpts k v ttl cond next) = + fmap next $ fmap (fmap rdStatusToBool) $ R.setOpts k v (makeSetOpts ttl cond) + where + rdStatusToBool R.Ok = True + rdStatusToBool _ = False + +interpretKeyValueTxF (L.Get k next) = + fmap next $ R.get k + +interpretKeyValueTxF (L.Exists k next) = + fmap next $ R.exists k + +interpretKeyValueTxF (L.Del [] next) = + fmap next $ return $ pure 0 + +interpretKeyValueTxF (L.Del ks next) = + fmap next $ R.del ks + +interpretKeyValueTxF (L.Expire k sec next) = + fmap next $ R.expire k sec + +interpretKeyValueTxF (L.Incr k next) = + fmap next $ R.incr k + +interpretKeyValueTxF (L.HSet k field value next) = + fmap next $ R.hset k field value + +interpretKeyValueTxF (L.HGet k field next) = + fmap next $ R.hget k field + +interpretKeyValueTxF (L.XLen stream next) = + fmap next $ R.xlen stream + +interpretKeyValueTxF (L.XAdd stream entryId items next) = + fmap next $ fmap (fmap parseStreamEntryId) $ R.xadd stream (makeStreamEntryId entryId) items + where + makeStreamEntryId (L.EntryID (L.KVDBStreamEntryID ms ss)) = show ms <> "-" <> show ss + makeStreamEntryId L.AutoID = "*" + + -- FIXME: this is a very dirty code! + unpackHelper bs = read . T.unpack <$> T.splitOn "-" (TE.decodeUtf8 bs) + parseStreamEntryId bs = case unpackHelper bs of + [ms, ss] -> L.KVDBStreamEntryID ms ss + _ -> error "Failed to unpack " + + +interpretTransactionF + :: (forall b. R.Redis (Either R.Reply b) -> IO (Either KVDBReply b)) + -> D.RunMode + -> L.TransactionF a + -> IO a +interpretTransactionF runRedis runMode (L.MultiExec dsl next) = + fmap next $ P.withRunMode runMode E.mkMultiExecEntry $ + runRedis $ fmap (Right . fromRdTxResult) $ R.multiExec $ foldF interpretKeyValueTxF dsl + +interpretTransactionF runRedis runMode (L.MultiExecWithHash h dsl next) = + fmap next $ P.withRunMode runMode (E.mkMultiExecWithHashEntry h) $ + runRedis $ fmap (Right . fromRdTxResult) $ R.multiExecWithHash h $ foldF interpretKeyValueTxF dsl + + +interpretDbF + :: (forall b. R.Redis (Either R.Reply b) -> IO (Either KVDBReply b)) + -> D.RunMode + -> L.KVDBF a + -> IO a +interpretDbF runRedis runMode (L.KV f) = interpretKeyValueF runRedis runMode f +interpretDbF runRedis runMode (L.TX f) = interpretTransactionF runRedis runMode f + + +runKVDB :: Text -> D.RunMode -> MVar (Map Text NativeKVDBConn) -> L.KVDB a -> IO (Either KVDBReply a) +runKVDB cName runMode kvdbConnMapMVar = + fmap (join . first exceptionToKVDBReply) . try @_ @SomeException . + foldF (interpretDbF runRedis runMode) . 
runExceptT + where + runRedis :: R.Redis (Either R.Reply a) -> IO (Either KVDBReply a) + runRedis redisDsl = do + connections <- readMVar kvdbConnMapMVar + case Map.lookup cName connections of + Nothing -> pure $ Left $ KVDBError KVDBConnectionDoesNotExist "Can't find redis connection" + Just conn -> + case conn of + NativeKVDB c -> fmap (first hedisReplyToKVDBReply) $ R.runRedis c redisDsl + NativeKVDBMockedConn -> pure $ Right $ + error "Result of runRedis with mocked connection should not ever be evaluated" + + +makeSetOpts :: L.KVDBSetTTLOption -> L.KVDBSetConditionOption -> R.SetOpts +makeSetOpts ttl cond = + R.SetOpts + { setSeconds = + case ttl of + L.Seconds s -> Just s + _ -> Nothing + , setMilliseconds = + case ttl of + L.Milliseconds ms -> Just ms + _ -> Nothing + , setCondition = + case cond of + L.SetAlways -> Nothing + L.SetIfExist -> Just R.Xx + L.SetIfNotExist -> Just R.Nx + } diff --git a/src/EulerHS/Core/KVDB/Language.hs b/src/EulerHS/Core/KVDB/Language.hs new file mode 100644 index 00000000..bbd44e3e --- /dev/null +++ b/src/EulerHS/Core/KVDB/Language.hs @@ -0,0 +1,271 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DeriveAnyClass #-} + +{- | +Module : EulerHS.Core.KVDB.Language +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Language of the KV DB subsystem. + +Currently, highly resembles the `hedis` library interface to Redis. +Other KV DBs are not yet supported. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Language' instead. +-} + +module EulerHS.Core.KVDB.Language + ( + -- * KVDB language + -- ** Types + KVDB, KVDBTx, KVDBKey, KVDBValue, KVDBDuration + , KVDBSetTTLOption(..), KVDBSetConditionOption(..) + , KVDBField, KVDBChannel, KVDBMessage + , KVDBStream, KVDBStreamItem, KVDBStreamEntryID (..), KVDBStreamEntryIDInput (..) + , KVDBF(..), KeyValueF(..), TransactionF(..) 
+ -- ** Methods
+ -- *** Regular
+ -- **** For simple values
+ , set, get, incr, setex, setOpts
+ -- **** For hash values
+ , hset, hget
+ -- **** For streams
+ , xadd, xlen
+ -- **** For both
+ , exists, del, expire
+ -- *** Transactional
+ -- | Used inside 'multiExec' instead of the regular methods
+ , multiExec, multiExecWithHash
+ , setTx, getTx, delTx, setexTx
+ , hsetTx, hgetTx
+ , xaddTx, xlenTx
+ , expireTx
+ ) where
+
+import qualified Data.Aeson as A
+import qualified Database.Redis as R
+import qualified EulerHS.Core.Types as T
+import EulerHS.Prelude hiding (get)
+
+-- | TTL options for the `set` operation
+data KVDBSetTTLOption
+ = NoTTL
+ -- ^ No TTL
+ | Seconds Integer
+ -- ^ TTL in seconds
+ | Milliseconds Integer
+ -- ^ TTL in milliseconds
+ deriving stock Generic
+ deriving anyclass A.ToJSON
+
+-- | Options for the `set` operation
+data KVDBSetConditionOption
+ = SetAlways
+ -- ^ Set value no matter what
+ | SetIfExist
+ -- ^ Set only if the key already exists
+ | SetIfNotExist
+ -- ^ Set only if the key does not exist
+ deriving stock Generic
+ deriving anyclass A.ToJSON
+
+-- | Raw key value (ByteString)
+type KVDBKey = ByteString
+
+-- | Raw value (ByteString)
+type KVDBValue = ByteString
+
+-- | Duration (seconds)
+type KVDBDuration = Integer
+
+-- | Field
+type KVDBField = ByteString
+
+-- | Channel
+type KVDBChannel = ByteString
+
+-- | Message
+type KVDBMessage = ByteString
+
+-- | Stream
+type KVDBStream = ByteString
+
+-- | ID of a stream entry
+data KVDBStreamEntryID = KVDBStreamEntryID Integer Integer
+ deriving stock Generic
+ deriving anyclass (A.ToJSON, A.FromJSON)
+
+-- | Stream entry ID input: either an explicit ID or an auto-generated one
+data KVDBStreamEntryIDInput
+ = EntryID KVDBStreamEntryID
+ | AutoID
+ deriving stock Generic
+ deriving anyclass A.ToJSON
+
+-- | Stream item
+type KVDBStreamItem = (ByteString, ByteString)
+
+----------------------------------------------------------------------
+
+-- | Algebra of the KV DB language
+data KeyValueF f next where
+ Set :: KVDBKey -> KVDBValue -> (f T.KVDBStatus -> next) -> KeyValueF f next
+ SetEx :: KVDBKey -> KVDBDuration -> KVDBValue -> (f T.KVDBStatus -> next) -> KeyValueF f next
+ SetOpts :: KVDBKey -> KVDBValue -> KVDBSetTTLOption -> KVDBSetConditionOption -> (f Bool -> next) -> KeyValueF f next
+ Get :: KVDBKey -> (f (Maybe ByteString) -> next) -> KeyValueF f next
+ Exists :: KVDBKey -> (f Bool -> next) -> KeyValueF f next
+ Del :: [KVDBKey] -> (f Integer -> next) -> KeyValueF f next
+ Expire :: KVDBKey -> KVDBDuration -> (f Bool -> next) -> KeyValueF f next
+ Incr :: KVDBKey -> (f Integer -> next) -> KeyValueF f next
+ HSet :: KVDBKey -> KVDBField -> KVDBValue -> (f Bool -> next) -> KeyValueF f next
+ HGet :: KVDBKey -> KVDBField -> (f (Maybe ByteString) -> next) -> KeyValueF f next
+ XAdd :: KVDBStream -> KVDBStreamEntryIDInput -> [KVDBStreamItem] -> (f KVDBStreamEntryID -> next) -> KeyValueF f next
+ XLen :: KVDBStream -> (f Integer -> next) -> KeyValueF f next
+
+instance Functor (KeyValueF f) where
+ fmap f (Set k value next) = Set k value (f . next)
+ fmap f (SetEx k ex value next) = SetEx k ex value (f . next)
+ fmap f (SetOpts k value ttl cond next) = SetOpts k value ttl cond (f . next)
+ fmap f (Get k next) = Get k (f . next)
+ fmap f (Exists k next) = Exists k (f . next)
+ fmap f (Del ks next) = Del ks (f . next)
+ fmap f (Expire k sec next) = Expire k sec (f . next)
+ fmap f (Incr k next) = Incr k (f . next)
+ fmap f (HSet k field value next) = HSet k field value (f . next)
+ fmap f (HGet k field next) = HGet k field (f . next)
+ fmap f (XAdd s entryId items next) = XAdd s entryId items (f . next)
+ fmap f (XLen s next) = XLen s (f . next)
+
+-- | KV DB transactional monadic language
+type KVDBTx = F (KeyValueF R.Queued)
+
+----------------------------------------------------------------------
+
+-- | Algebra of the transactional evaluation
+-- ('Exec' in hedis notation)
+data TransactionF next where
+ MultiExec
+ :: T.JSONEx a
+ => KVDBTx (R.Queued a)
+ -> (T.KVDBAnswer (T.TxResult a) -> next)
+ -> TransactionF next
+ MultiExecWithHash
+ :: T.JSONEx a
+ => ByteString
+ -> KVDBTx (R.Queued a)
+ -> (T.KVDBAnswer (T.TxResult a) -> next)
+ -> TransactionF next
+
+instance Functor TransactionF where
+ fmap f (MultiExec dsl next) = MultiExec dsl (f . next)
+ fmap f (MultiExecWithHash h dsl next) = MultiExecWithHash h dsl (f . next)
+
+----------------------------------------------------------------------
+
+-- | Top-level algebra combining either a transactional or regular language method
+data KVDBF next
+ = KV (KeyValueF T.KVDBAnswer next)
+ | TX (TransactionF next)
+ deriving Functor
+
+-- | Main KV DB language
+type KVDB next = ExceptT T.KVDBReply (F KVDBF) next
+
+----------------------------------------------------------------------
+-- | Set the value of a key. Transaction version.
+setTx :: KVDBKey -> KVDBValue -> KVDBTx (R.Queued T.KVDBStatus)
+setTx key value = liftFC $ Set key value id
+
+-- | Set the value and TTL of a key. Transaction version.
+setexTx :: KVDBKey -> KVDBDuration -> KVDBValue -> KVDBTx (R.Queued T.KVDBStatus)
+setexTx key ex value = liftFC $ SetEx key ex value id
+
+-- | Set the value of a hash field. Transaction version.
+hsetTx :: KVDBKey -> KVDBField -> KVDBValue -> KVDBTx (R.Queued Bool)
+hsetTx key field value = liftFC $ HSet key field value id
+
+-- | Get the value of a key. Transaction version.
+getTx :: KVDBKey -> KVDBTx (R.Queued (Maybe ByteString))
+getTx key = liftFC $ Get key id
+
+-- | Get the value of a hash field. Transaction version.
+hgetTx :: KVDBKey -> KVDBField -> KVDBTx (R.Queued (Maybe ByteString))
+hgetTx key field = liftFC $ HGet key field id
+
+-- | Delete the given keys. Transaction version.
+delTx :: [KVDBKey] -> KVDBTx (R.Queued Integer)
+delTx ks = liftFC $ Del ks id
+
+-- | Set a key's time to live in seconds. Transaction version.
+expireTx :: KVDBKey -> KVDBDuration -> KVDBTx (R.Queued Bool)
+expireTx key sec = liftFC $ Expire key sec id
+
+-- | Add entries to a stream. Transaction version.
+xaddTx :: KVDBStream -> KVDBStreamEntryIDInput -> [KVDBStreamItem] -> KVDBTx (R.Queued KVDBStreamEntryID)
+xaddTx stream entryId items = liftFC $ XAdd stream entryId items id
+
+-- | Get the length of a stream. Transaction version.
+xlenTx :: KVDBStream -> KVDBTx (R.Queued Integer)
+xlenTx stream = liftFC $ XLen stream id
+
+-- | Set the value of a key
+set :: KVDBKey -> KVDBValue -> KVDB T.KVDBStatus
+set key value = ExceptT $ liftFC $ KV $ Set key value id
+
+-- | Set the value and TTL of a key.
+setex :: KVDBKey -> KVDBDuration -> KVDBValue -> KVDB T.KVDBStatus
+setex key ex value = ExceptT $ liftFC $ KV $ SetEx key ex value id
+
+-- | Specify set operation options
+setOpts :: KVDBKey -> KVDBValue -> KVDBSetTTLOption -> KVDBSetConditionOption -> KVDB Bool
+setOpts key value ttl cond = ExceptT $ liftFC $ KV $ SetOpts key value ttl cond id
+
+-- | Get the value of a key
+get :: KVDBKey -> KVDB (Maybe ByteString)
+get key = ExceptT $ liftFC $ KV $ Get key id
+
+-- | Determine if a key exists
+exists :: KVDBKey -> KVDB Bool
+exists key = ExceptT $ liftFC $ KV $ Exists key id
+
+-- | Delete the given keys
+del :: [KVDBKey] -> KVDB Integer
+del ks = ExceptT $ liftFC $ KV $ Del ks id
+
+-- | Set a key's time to live in seconds
+expire :: KVDBKey -> KVDBDuration -> KVDB Bool
+expire key sec = ExceptT $ liftFC $ KV $ Expire key sec id
+
+-- | Increment the integer value of a key by one
+incr :: KVDBKey -> KVDB Integer
+incr key = ExceptT $ liftFC $ KV $ Incr key id
+
+-- | Set the value of a hash field
+hset :: KVDBKey -> KVDBField -> KVDBValue -> KVDB Bool
+hset key field value = ExceptT $ liftFC $ KV $ HSet key field value id
+
+-- | Get the value of a hash field
+hget :: KVDBKey -> KVDBField -> KVDB (Maybe ByteString)
+hget key field = ExceptT $ liftFC $ KV $ HGet key field id
+
+-- | Add entries to a stream
+xadd :: KVDBStream -> KVDBStreamEntryIDInput -> [KVDBStreamItem] -> KVDB KVDBStreamEntryID
+xadd stream entryId items = ExceptT $ liftFC $ KV $ XAdd stream entryId items id
+
+-- | Get the length of a stream
+xlen :: KVDBStream -> KVDB Integer
+xlen stream = ExceptT $ liftFC $ KV $ XLen stream id
+
+-- | Run commands inside a transaction (suited only for a standalone Redis setup).
+multiExec :: T.JSONEx a => KVDBTx (R.Queued a) -> KVDB (T.TxResult a)
+multiExec kvtx = ExceptT $ liftFC $ TX $ MultiExec kvtx id
+
+-- | Run commands inside a transaction (suited only for a cluster Redis setup).
+multiExecWithHash :: T.JSONEx a => ByteString -> KVDBTx (R.Queued a) -> KVDB (T.TxResult a)
+multiExecWithHash h kvtx = ExceptT $ liftFC $ TX $ MultiExecWithHash h kvtx id
diff --git a/src/EulerHS/Core/Language.hs b/src/EulerHS/Core/Language.hs
new file mode 100644
index 00000000..0456e499
--- /dev/null
+++ b/src/EulerHS/Core/Language.hs
@@ -0,0 +1,23 @@
+{- |
+Module : EulerHS.Core.Language
+Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021
+License : Apache 2.0 (see the file LICENSE)
+Maintainer : opensource@juspay.in
+Stability : experimental
+Portability : non-portable
+
+This module reexports the language of the core subsystems.
+
+This is an internal module. Import EulerHS.Language instead.
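+
+As an illustration, here is a small sketch of how one of the re-exported
+sub-languages (the KV DB language) composes. It is only a sketch: it assumes
+OverloadedStrings for the ByteString literals and a JSONEx instance for the
+transaction result; the interpreters that actually execute these programs
+live in the corresponding Interpreter modules.
+
+> {-# LANGUAGE OverloadedStrings #-}
+> import EulerHS.Core.KVDB.Language
+> import EulerHS.Prelude hiding (get)
+> import qualified EulerHS.Core.Types as T
+>
+> bumpVisits :: KVDB (Maybe ByteString)
+> bumpVisits = do
+>   _ <- set "counter:visits" "0"
+>   _ <- incr "counter:visits"
+>   get "counter:visits"
+>
+> bumpVisitsTx :: KVDB (T.TxResult (Maybe ByteString))
+> bumpVisitsTx = multiExec $ do
+>   _ <- setTx "counter:visits" "42"
+>   getTx "counter:visits"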
+-} + + +module EulerHS.Core.Language + ( module X + ) where + +import EulerHS.Core.KVDB.Language as X +import EulerHS.Core.Logger.Language as X +import EulerHS.Core.PubSub.Language as X hiding (psubscribe, publish, + subscribe) +import EulerHS.Core.SqlDB.Language as X diff --git a/src/EulerHS/Core/Logger/Entries.hs b/src/EulerHS/Core/Logger/Entries.hs new file mode 100644 index 00000000..69c153bd --- /dev/null +++ b/src/EulerHS/Core/Logger/Entries.hs @@ -0,0 +1,31 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE ScopedTypeVariables #-} + +module EulerHS.Core.Logger.Entries where + + +import EulerHS.Prelude +import EulerHS.Types (MockedResult (..), RRItem (..)) +import qualified EulerHS.Types as T + + +data LogMessageEntry = LogMessageEntry + { level :: T.LogLevel + , tag :: T.Tag + , msg :: T.Message + } deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +mkLogMessageEntry + :: T.LogLevel + -> T.Tag + -> T.Message + -> a + -> LogMessageEntry +mkLogMessageEntry level tag msg _ = LogMessageEntry level tag msg + +instance RRItem LogMessageEntry where + getTag _ = "LogMessageEntry" + +instance MockedResult LogMessageEntry () where + getMock _ = Just () + diff --git a/src/EulerHS/Core/Logger/Impl/TinyLogger.hs b/src/EulerHS/Core/Logger/Impl/TinyLogger.hs new file mode 100644 index 00000000..3a136cc4 --- /dev/null +++ b/src/EulerHS/Core/Logger/Impl/TinyLogger.hs @@ -0,0 +1,152 @@ +module EulerHS.Core.Logger.Impl.TinyLogger + ( + -- * TinyLogger Implementation + -- ** Types + LoggerHandle + -- ** Methods + , sendPendingMsg + , createLogger + , createLogger' + , createVoidLogger + , disposeLogger + , withLogger + , withLogger' + , defaultDateFormat + , defaultRenderer + , defaultBufferSize + ) where + +import EulerHS.Prelude hiding ((.=)) + +import Control.Concurrent (forkOn, getNumCapabilities) +import qualified Control.Concurrent.Chan.Unagi.Bounded as Chan +import qualified System.Logger as Log +import qualified EulerHS.Core.Types as T + +type LogQueue = (Chan.InChan T.PendingMsg, Chan.OutChan T.PendingMsg) + +type Loggers = [Log.Logger] + +data LoggerHandle + = AsyncLoggerHandle [ThreadId] LogQueue Loggers + | SyncLoggerHandle Loggers + | VoidLoggerHandle + +dispatchLogLevel :: T.LogLevel -> Log.Level +dispatchLogLevel T.Debug = Log.Debug +dispatchLogLevel T.Info = Log.Info +dispatchLogLevel T.Warning = Log.Warn +dispatchLogLevel T.Error = Log.Error + +logPendingMsg :: T.FlowFormatter -> Loggers -> T.PendingMsg -> IO () +logPendingMsg flowFormatter loggers pendingMsg@(T.PendingMsg mbFlowGuid lvl _ _ _) = do + formatter <- flowFormatter mbFlowGuid + let lvl' = dispatchLogLevel lvl + let msg' = Log.msg $ formatter pendingMsg + mapM_ (\logger -> Log.log logger lvl' msg') loggers + +loggerWorker :: T.FlowFormatter -> Chan.OutChan T.PendingMsg -> Loggers -> IO () +loggerWorker flowFormatter outChan loggers = do + pendingMsg <- Chan.readChan outChan + logPendingMsg flowFormatter loggers pendingMsg + +sendPendingMsg :: T.FlowFormatter -> LoggerHandle -> T.PendingMsg -> IO () +sendPendingMsg _ VoidLoggerHandle = const (pure ()) +sendPendingMsg flowFormatter (SyncLoggerHandle loggers) = logPendingMsg flowFormatter loggers +sendPendingMsg _ (AsyncLoggerHandle _ (inChan, _) _) = Chan.writeChan inChan + +createVoidLogger :: IO LoggerHandle +createVoidLogger = pure VoidLoggerHandle + +createLogger :: T.FlowFormatter -> T.LoggerConfig -> IO LoggerHandle +createLogger = createLogger' defaultDateFormat defaultRenderer defaultBufferSize + +createLogger' + :: Maybe Log.DateFormat + -> Maybe Log.Renderer + 
-> T.BufferSize + -> T.FlowFormatter + -> T.LoggerConfig + -> IO LoggerHandle +createLogger' + mbDateFormat + mbRenderer + bufferSize + flowFormatter + (T.LoggerConfig isAsync _ logFileName isConsoleLog isFileLog maxQueueSize _) = do + + let fileSettings + = Log.setFormat mbDateFormat + $ maybe id Log.setRenderer mbRenderer + $ Log.setBufSize bufferSize + $ Log.setOutput (Log.Path logFileName) + $ Log.defSettings + + let consoleSettings + = Log.setFormat mbDateFormat + $ maybe id Log.setRenderer mbRenderer + $ Log.setBufSize bufferSize + $ Log.setOutput Log.StdOut + $ Log.defSettings + + let fileH = [Log.new fileSettings | isFileLog] + let consoleH = [Log.new consoleSettings | isConsoleLog] + let loggersH = fileH ++ consoleH + + when (not $ null loggersH) $ + if isAsync then putStrLn @String "Creating async loggers..." + else putStrLn @String "Creating sync loggers..." + when isFileLog $ putStrLn @String $ "Creating file logger (" +| logFileName |+ ")..." + when isConsoleLog $ putStrLn @String "Creating console logger..." + + loggers <- sequence loggersH + startLogger isAsync loggers + where + startLogger :: Bool -> Loggers -> IO LoggerHandle + startLogger _ [] = pure VoidLoggerHandle + startLogger False loggers = pure $ SyncLoggerHandle loggers + startLogger True loggers = do + caps <- getNumCapabilities + chan@(_, outChan) <- Chan.newChan (fromIntegral maxQueueSize) + threadIds <- traverse ((flip forkOn) (forever $ loggerWorker flowFormatter outChan loggers)) [1..caps] + pure $ AsyncLoggerHandle threadIds chan loggers + +disposeLogger :: T.FlowFormatter -> LoggerHandle -> IO () +disposeLogger _ VoidLoggerHandle = pure () +disposeLogger _ (SyncLoggerHandle loggers) = do + putStrLn @String "Disposing sync logger..." + mapM_ Log.flush loggers + mapM_ Log.close loggers +disposeLogger flowFormatter (AsyncLoggerHandle threadIds (_, outChan) loggers) = do + putStrLn @String "Disposing async logger..." 
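+ -- Kill the worker threads first, then log whatever is still queued in
+ -- the channel before flushing and closing the underlying loggers.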
+ traverse_ killThread threadIds + Chan.getChanContents outChan >>= mapM_ (logPendingMsg flowFormatter loggers) + mapM_ Log.flush loggers + mapM_ Log.close loggers + +withLogger' + :: Maybe Log.DateFormat + -> Maybe Log.Renderer + -> T.BufferSize + -> T.FlowFormatter + -> T.LoggerConfig + -> (LoggerHandle -> IO a) + -> IO a +withLogger' mbDateFormat mbRenderer bufSize flowFormatter cfg = + bracket (createLogger' mbDateFormat mbRenderer bufSize flowFormatter cfg) (disposeLogger flowFormatter) + +withLogger + :: T.FlowFormatter + -> T.LoggerConfig + -> (LoggerHandle -> IO a) + -> IO a +withLogger flowFormatter cfg = bracket (createLogger flowFormatter cfg) (disposeLogger flowFormatter) + +defaultBufferSize :: T.BufferSize +defaultBufferSize = 4096 + +defaultDateFormat :: Maybe Log.DateFormat +defaultDateFormat = Nothing + +defaultRenderer :: Maybe Log.Renderer +defaultRenderer = Nothing diff --git a/src/EulerHS/Core/Logger/Interpreter.hs b/src/EulerHS/Core/Logger/Interpreter.hs new file mode 100644 index 00000000..5ae3d516 --- /dev/null +++ b/src/EulerHS/Core/Logger/Interpreter.hs @@ -0,0 +1,52 @@ +{-# LANGUAGE BangPatterns #-} + +module EulerHS.Core.Logger.Interpreter + ( + -- * Core Logger Interpreter + runLogger + ) +where + +import EulerHS.Prelude + +import qualified EulerHS.Core.Language as L +import qualified EulerHS.Core.Logger.Entries as E +import qualified EulerHS.Core.Logger.Impl.TinyLogger as Impl +import qualified EulerHS.Core.Playback.Machine as P +import qualified EulerHS.Core.Runtime as R +import qualified EulerHS.Core.Types as T +import qualified Control.Concurrent.MVar as MVar +import qualified Data.Text as T + + +interpretLogger :: Maybe T.FlowGUID -> T.RunMode -> R.LoggerRuntime -> L.LoggerMethod a -> IO a +interpretLogger + mbFlowGuid + runMode + (R.MemoryLoggerRuntime flowFormatter logLevel logsVar cntVar) + (L.LogMessage msgLogLvl tag msg next) = + + fmap next $ P.withRunMode runMode (E.mkLogMessageEntry msgLogLvl tag msg) $ + case compare logLevel msgLogLvl of + GT -> pure () + _ -> do + formatter <- flowFormatter mbFlowGuid + !msgNum <- R.incLogCounter cntVar + let !m = T.pack $ formatter $ T.PendingMsg mbFlowGuid msgLogLvl tag msg msgNum + MVar.modifyMVar logsVar $ \(!lgs) -> pure (m : lgs, ()) + +interpretLogger + mbFlowGuid + runMode + (R.LoggerRuntime flowFormatter logLevel _ cntVar handle) + (L.LogMessage msgLogLevel tag msg next) = + + fmap next $ P.withRunMode runMode (E.mkLogMessageEntry msgLogLevel tag msg) $ + case compare logLevel msgLogLevel of + GT -> pure () + _ -> do + msgNum <- R.incLogCounter cntVar + Impl.sendPendingMsg flowFormatter handle $ T.PendingMsg mbFlowGuid msgLogLevel tag msg msgNum + +runLogger :: Maybe T.FlowGUID -> T.RunMode -> R.LoggerRuntime -> L.Logger a -> IO a +runLogger mbFlowGuid runMode loggerRt = foldF (interpretLogger mbFlowGuid runMode loggerRt) diff --git a/src/EulerHS/Core/Logger/Language.hs b/src/EulerHS/Core/Logger/Language.hs new file mode 100644 index 00000000..0e7f10d6 --- /dev/null +++ b/src/EulerHS/Core/Logger/Language.hs @@ -0,0 +1,35 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE TemplateHaskell #-} + +module EulerHS.Core.Logger.Language + ( + Logger + , LoggerMethod(..) + , logMessage' + ) where + +import EulerHS.Prelude + +import qualified EulerHS.Core.Types.Logger as T + +-- | Language for logging. +data LoggerMethod next where + -- | Log message with a predefined level. 
+ LogMessage :: T.LogLevel -> !T.Tag -> !T.Message -> (() -> next) -> LoggerMethod next + +instance Functor LoggerMethod where + fmap f (LogMessage lvl tag msg next) = LogMessage lvl tag msg $ f . next + +type Logger = F LoggerMethod + +logMessage' :: Show tag => T.LogLevel -> tag -> T.Message -> Logger () +logMessage' lvl tag msg = liftFC $ LogMessage lvl (show tag) msg id +-- {-# NOINLINE logMessage' #-} +-- {-# RULES +-- +-- "Specialise Text Tag logMessage'" forall (tag :: Text) (lvl :: T.LogLevel) (msg :: T.Message) . +-- logMessage' lvl tag msg = liftFC $ LogMessage lvl tag msg id ; +-- +-- "Specialise String Tag logMessage'" forall (tag :: String) (lvl :: T.LogLevel) (msg :: T.Message) . +-- logMessage' lvl tag msg = liftFC $ LogMessage lvl (toText tag) msg id +-- #-} diff --git a/src/EulerHS/Core/Playback/Entries.hs b/src/EulerHS/Core/Playback/Entries.hs new file mode 100644 index 00000000..dbacc92d --- /dev/null +++ b/src/EulerHS/Core/Playback/Entries.hs @@ -0,0 +1,354 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE UndecidableInstances #-} + +module EulerHS.Core.Playback.Entries where + +import qualified Data.Aeson as A +import Data.Generics.Product.Positions (getPosition) +import qualified Data.Text as Text +import EulerHS.Core.Types.Playback (MockedResult (..), RRItem (..)) +import EulerHS.Prelude +import qualified EulerHS.Types as T +import qualified Servant.Client as S + +---------------------------------------------------------------------- + +data RunDBEntry = RunDBEntry + { jsonResult :: A.Value + , rawSql :: [Text] + } + deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkRunDBEntry :: T.JSONEx a => (T.DBResult a, [Text]) -> RunDBEntry +mkRunDBEntry (res, sql) = RunDBEntry (T.jsonEncode res) sql + +instance RRItem RunDBEntry where + getTag _ = "RunDBEntry" + +instance T.JSONEx a => MockedResult RunDBEntry (T.DBResult a, [Text]) where + getMock RunDBEntry {jsonResult, rawSql} = fmap (\x -> (x,rawSql)) (T.jsonDecode jsonResult) + +data ThrowExceptionEntry = ThrowExceptionEntry + { exMessage :: String + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkThrowExceptionEntry :: Exception e => e -> a -> ThrowExceptionEntry +mkThrowExceptionEntry e _ = ThrowExceptionEntry $ show e + +instance RRItem ThrowExceptionEntry where + getTag _ = "ThrowExceptionEntry" + +instance MockedResult ThrowExceptionEntry a where + getMock _ = Just $ error "This shold not be evaluated: throw exception result" + +---------------------------------------------------------------------- + +data CallServantAPIEntry = CallServantAPIEntry + { baseUrl :: S.BaseUrl + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkCallServantAPIEntry + :: T.JSONEx a + => S.BaseUrl + -> Either S.ClientError a + -> CallServantAPIEntry +mkCallServantAPIEntry burl = CallServantAPIEntry burl . 
T.jsonEncode + +instance RRItem CallServantAPIEntry where + getTag _ = "CallServantAPIEntry" + +instance T.JSONEx a => MockedResult CallServantAPIEntry (Either S.ClientError a) where + getMock CallServantAPIEntry {jsonResult} = T.jsonDecode jsonResult + +---------------------------------------------------------------------- + +data CallHttpAPIEntry = CallHttpAPIEntry + { request :: T.HTTPRequest + , eResponse :: Either Text.Text T.HTTPResponse + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkCallHttpAPIEntry + :: T.HTTPRequest + -> Either Text.Text T.HTTPResponse + -> CallHttpAPIEntry +mkCallHttpAPIEntry = CallHttpAPIEntry + +instance RRItem CallHttpAPIEntry where + getTag _ = "CallHttpAPIEntry" + +instance MockedResult CallHttpAPIEntry (Either Text.Text T.HTTPResponse) where + getMock (CallHttpAPIEntry {eResponse}) = Just eResponse + +-- ---------------------------------------------------------------------- + +data SetOptionEntry = SetOptionEntry + { key :: Text + , value :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkSetOptionEntry :: ToJSON v => Text -> v -> () -> SetOptionEntry +mkSetOptionEntry k v _ = SetOptionEntry k (toJSON v) + +instance RRItem SetOptionEntry where + getTag _ = "SetOptionEntry" + +instance MockedResult SetOptionEntry () where + getMock _ = Just () +------------------------------------------------------------------------- + +data DelOptionEntry = DelOptionEntry { key :: Text } + deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkDelOptionEntry :: Text -> () -> DelOptionEntry +mkDelOptionEntry k _ = DelOptionEntry k + +instance RRItem DelOptionEntry where + getTag _ = "DelOptionEntry" + +instance MockedResult DelOptionEntry () where + getMock _ = Just () + +-- ---------------------------------------------------------------------- + +data GetOptionEntry = GetOptionEntry + { key :: Text + , value :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkGetOptionEntry :: ToJSON v => Text -> Maybe v -> GetOptionEntry +mkGetOptionEntry k mv = GetOptionEntry k (toJSON mv) + +instance RRItem GetOptionEntry where + getTag _ = "GetOptionEntry" + +instance FromJSON v => MockedResult GetOptionEntry v where + getMock GetOptionEntry{value} = T.fromJSONMaybe value + +-- ---------------------------------------------------------------------- + +data RunSysCmdEntry = RunSysCmdEntry + { cmd :: String + , result :: String + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkRunSysCmdEntry :: String -> String -> RunSysCmdEntry +mkRunSysCmdEntry cmd result = RunSysCmdEntry cmd result + +instance RRItem RunSysCmdEntry where + getTag _ = "RunSysCmdEntry" + +instance MockedResult RunSysCmdEntry String where + getMock RunSysCmdEntry {..} = Just result + +-- ---------------------------------------------------------------------- + +data ForkEntry = ForkEntry + { description :: Text + , guid :: Text + } deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +mkForkEntry :: Text -> Text -> () -> ForkEntry +mkForkEntry desc guid _ = ForkEntry desc guid + +instance RRItem ForkEntry where + getTag _ = "ForkEntry" + +instance MockedResult ForkEntry () where + getMock _ = Just () + +-- ---------------------------------------------------------------------- + +data GenerateGUIDEntry = GenerateGUIDEntry + { guid :: Text + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkGenerateGUIDEntry :: Text -> GenerateGUIDEntry +mkGenerateGUIDEntry = GenerateGUIDEntry + +instance RRItem GenerateGUIDEntry where + getTag _ = "GenerateGUIDEntry" + +instance MockedResult 
GenerateGUIDEntry Text where + getMock (GenerateGUIDEntry g) = Just g + +-- ---------------------------------------------------------------------- + +data RunIOEntry = RunIOEntry + { description :: Text + , jsonResult :: A.Value + } + deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkRunIOEntry + :: forall a + . T.JSONEx a + => Text + -> a + -> RunIOEntry +mkRunIOEntry descr a = RunIOEntry descr $ + (T.resolveJSONEx @a T.jsonEncode toJSON) a + +instance RRItem RunIOEntry where + getTag _ = "RunIOEntry" + +instance T.JSONEx a => MockedResult RunIOEntry a where + getMock (RunIOEntry _ r) = + T.resolveJSONEx @a T.jsonDecode T.fromJSONMaybe r + + +-- ---------------------------------------------------------------------- + +data RunUntracedIOEntry = RunUntracedIOEntry + { description :: Text + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkRunUntracedIOEntry :: Text -> a -> RunUntracedIOEntry +mkRunUntracedIOEntry descr _ = RunUntracedIOEntry descr + +instance RRItem RunUntracedIOEntry where + getTag _ = "RunUntracedIOEntry" + +-- Not possible to mock these values, you have to re-run the IO action +-- instance MockedResult RunUntracedIOEntry () where +-- getMock (RunUntracedIOEntry _) = Just () + +------------------------------------------------------------------------ + +data InitSqlDBConnectionEntry beM = InitSqlDBConnectionEntry + { dBConfig :: T.DBConfig beM + , initConnResult :: Either T.DBError () + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkInitSqlDBConnectionEntry :: T.DBConfig beM -> Either T.DBError a -> InitSqlDBConnectionEntry beM +mkInitSqlDBConnectionEntry dbcfg res = case res of + Left err -> InitSqlDBConnectionEntry dbcfg (Left err) + Right _ -> InitSqlDBConnectionEntry dbcfg (Right ()) + +instance RRItem (InitSqlDBConnectionEntry beM) where + getTag _ = "InitSqlDBConnectionEntry" + +instance MockedResult (InitSqlDBConnectionEntry beM) (T.DBResult (T.SqlConn beM)) where + getMock (InitSqlDBConnectionEntry _ res) = + case res of + Left err -> Just $ Left err + Right _ -> Just $ Right $ T.MockedPool "" + + +---------------------------------------------------------------------- + +data DeInitSqlDBConnectionEntry (beM :: Type -> Type) = DeInitSqlDBConnectionEntry + { connTag :: Text + } deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +mkDeInitSqlDBConnectionEntry :: T.SqlConn beM -> a -> DeInitSqlDBConnectionEntry beM +mkDeInitSqlDBConnectionEntry cfg _ = DeInitSqlDBConnectionEntry (getPosition @1 cfg) + +instance RRItem (DeInitSqlDBConnectionEntry beM) where + getTag _ = "DeInitSqlDBConnectionEntry" + +instance MockedResult (DeInitSqlDBConnectionEntry beM) () where + getMock (DeInitSqlDBConnectionEntry _) = Just () + +------------------------------------------------------------------------ + +data GetSqlDBConnectionEntry beM = GetSqlDBConnectionEntry + { dBConfig :: T.DBConfig beM + , getConnResult :: Either T.DBError () + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkGetSqlDBConnectionEntry :: T.DBConfig beM -> Either T.DBError a -> GetSqlDBConnectionEntry beM +mkGetSqlDBConnectionEntry dbcfg res = case res of + Left err -> GetSqlDBConnectionEntry dbcfg (Left err) + Right _ -> GetSqlDBConnectionEntry dbcfg (Right ()) + +instance RRItem (GetSqlDBConnectionEntry beM) where + getTag _ = "GetSqlDBConnectionEntry" + +instance MockedResult (GetSqlDBConnectionEntry beM) (T.DBResult (T.SqlConn beM)) where + getMock (GetSqlDBConnectionEntry _ res) = + case res of + Left err -> Just $ Left err + Right _ -> Just $ Right $ T.MockedPool "" + 
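+-- Every entry in this module follows the same recipe: a serialisable record,
+-- a mk*Entry smart constructor that captures the method's arguments and
+-- result, an RRItem instance providing a unique tag, and a MockedResult
+-- instance used during replay. A hypothetical new entry would look like this
+-- (illustrative names only, not part of the framework):
+--
+-- > data PingEntry = PingEntry { host :: Text }
+-- >   deriving (Show, Eq, Generic, ToJSON, FromJSON)
+-- >
+-- > mkPingEntry :: Text -> () -> PingEntry
+-- > mkPingEntry h _ = PingEntry h
+-- >
+-- > instance RRItem PingEntry where
+-- >   getTag _ = "PingEntry"
+-- >
+-- > instance MockedResult PingEntry () where
+-- >   getMock _ = Just ()
+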
+------------------------------------------------------------------------- + +data InitKVDBConnectionEntry = InitKVDBConnectionEntry + { kvdbConfig :: T.KVDBConfig + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkInitKVDBConnectionEntry :: T.KVDBConfig -> a -> InitKVDBConnectionEntry +mkInitKVDBConnectionEntry dbcfg _ = InitKVDBConnectionEntry dbcfg + +instance RRItem InitKVDBConnectionEntry where + getTag _ = "InitKVDBConnectionEntry" + +instance MockedResult InitKVDBConnectionEntry (T.KVDBAnswer T.KVDBConn) where + getMock (InitKVDBConnectionEntry _) = Just $ Right $ T.Mocked "" + +------------------------------------------------------------------------- + +data DeInitKVDBConnectionEntry = DeInitKVDBConnectionEntry + { connTag :: Text + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkDeInitKVDBConnectionEntry :: T.KVDBConn -> a -> DeInitKVDBConnectionEntry +mkDeInitKVDBConnectionEntry conn _ = DeInitKVDBConnectionEntry (getPosition @1 conn) + +instance RRItem DeInitKVDBConnectionEntry where + getTag _ = "DeInitKVDBConnectionEntry" + +instance MockedResult DeInitKVDBConnectionEntry () where + getMock (DeInitKVDBConnectionEntry _) = Just () + +------------------------------------------------------------------------- + +data GetKVDBConnectionEntry = GetKVDBConnectionEntry + { kvdbConfig :: T.KVDBConfig + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkGetKVDBConnectionEntry :: T.KVDBConfig -> a -> GetKVDBConnectionEntry +mkGetKVDBConnectionEntry dbcfg _ = GetKVDBConnectionEntry dbcfg + +instance RRItem GetKVDBConnectionEntry where + getTag _ = "GetKVDBConnectionEntry" + +instance MockedResult GetKVDBConnectionEntry (T.KVDBAnswer T.KVDBConn) where + getMock (GetKVDBConnectionEntry _) = Just $ Right $ T.Mocked "" + +---------------------------------------------------------------------- + +data AwaitEntry = AwaitEntry + { timeout :: Maybe Int + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkAwaitEntry :: ToJSON v => Maybe T.Microseconds -> Either T.AwaitingError v -> AwaitEntry +mkAwaitEntry mbMcs val = AwaitEntry (unwrapMcs <$> mbMcs) (toJSON val) + where + unwrapMcs (T.Microseconds mcs) = fromIntegral mcs + +instance RRItem AwaitEntry where + getTag _ = "AwaitEntry" + +instance FromJSON v => MockedResult AwaitEntry v where + getMock (AwaitEntry _ jsonValue) = T.fromJSONMaybe jsonValue + +------------------------------------------------------------------------------------- + +data RunSafeFlowEntry = RunSafeFlowEntry + { guid :: Text + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +mkRunSafeFlowEntry :: ToJSON v => Text -> Either Text v -> RunSafeFlowEntry +mkRunSafeFlowEntry guid val = RunSafeFlowEntry guid (toJSON val) + +instance RRItem RunSafeFlowEntry where + getTag _ = "RunSafeFlowEntry" + +instance (FromJSON v) => MockedResult RunSafeFlowEntry v where + getMock (RunSafeFlowEntry _ jsonValue) = T.fromJSONMaybe jsonValue diff --git a/src/EulerHS/Core/Playback/Machine.hs b/src/EulerHS/Core/Playback/Machine.hs new file mode 100644 index 00000000..f5c4058e --- /dev/null +++ b/src/EulerHS/Core/Playback/Machine.hs @@ -0,0 +1,221 @@ +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} + +{- | +Module : EulerHS.Core.Playback.Machine +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Automatic Regression Testing (ART) system. 
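+
+The machine works in three modes. In RegularMode the wrapped action runs as
+is. In RecordingMode every step is converted into an RRItem and appended to
+the recording. In ReplayingMode the next entry is popped from the recording
+and, depending on the per-entry and global replay configuration, the step is
+mocked from the recorded value, verified against it, or re-executed.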
+ +You typically don't need to import this module. +-} + +module EulerHS.Core.Playback.Machine + ( + -- * Playback Machine + record, + withRunMode + ) where + +import Control.Exception (throwIO) +import Data.Vector as V ((!?)) +import qualified Data.Vector as V +import EulerHS.Prelude hiding (note) +import EulerHS.Types + + +showInfo :: String -> String -> String +showInfo flowStep recordingEntry = + "\n>>>>Recording entry: \n" ++ recordingEntry + ++ "\n>>>>Flow step: \n" ++ flowStep + +unexpectedRecordingEnd :: Text -> String -> PlaybackError +unexpectedRecordingEnd errFlowGuid flowStep + = PlaybackError UnexpectedRecordingEnd + ("\n>>>>Flow step: " ++ flowStep) + errFlowGuid + +unknownRRItem :: Text -> String -> String -> PlaybackError +unknownRRItem errFlowGuid flowStep recordingEntry + = PlaybackError UnknownRRItem + (showInfo flowStep recordingEntry) + errFlowGuid + +mockDecodingFailed :: Text -> String -> String -> PlaybackError +mockDecodingFailed errFlowGuid flowStep recordingEntry + = PlaybackError MockDecodingFailed + (showInfo flowStep recordingEntry) + errFlowGuid + +itemMismatch :: Text -> String -> String -> PlaybackError +itemMismatch errFlowGuid flowStep recordingEntry + = PlaybackError ItemMismatch + (showInfo flowStep recordingEntry) + errFlowGuid + +setReplayingError :: MonadIO m => PlayerRuntime -> PlaybackError -> m e +setReplayingError playerRt err = do + let PlayerRuntime{rerror = ReplayErrors{errorMVar}} = playerRt + + void $ takeMVar errorMVar + putMVar errorMVar $ Just err + liftIO $ throwIO $ ReplayingException err + +pushRecordingEntry + :: MonadIO m + => RecorderRuntime + -> RecordingEntry + -> m () +pushRecordingEntry RecorderRuntime{recording} (RecordingEntry _ mode n p) = do + let recMVar = recordingMVar recording + entries <- takeMVar recMVar + let idx = V.length entries + let re = RecordingEntry idx mode n p + + putMVar recMVar $ V.snoc entries re + +popNextRecordingEntry :: MonadIO m => PlayerRuntime -> m (Maybe RecordingEntry) +popNextRecordingEntry PlayerRuntime{resRecording = ResultRecording{..}, ..} = do + cur <- takeMVar stepMVar + let mbItem = recording !? cur + when (isJust mbItem) $ putMVar stepMVar (cur + 1) + pure mbItem + +popNextRRItem + :: forall rrItem m + . MonadIO m + => Show rrItem + => RRItem rrItem + => PlayerRuntime + -> m (Either PlaybackError (RecordingEntry, rrItem)) +popNextRRItem playerRt@PlayerRuntime{..} = do + mbRecordingEntry <- popNextRecordingEntry playerRt + let flowStep = getTag $ Proxy @rrItem + pure $ do + recordingEntry <- note (unexpectedRecordingEnd flowGUID flowStep) mbRecordingEntry + let unknownErr = unknownRRItem flowGUID flowStep $ showRecEntry @rrItem recordingEntry -- show recordingEntry + rrItem <- note unknownErr $ fromRecordingEntry recordingEntry + pure (recordingEntry, rrItem) + +popNextRRItemAndResult + :: forall rrItem native m + . MonadIO m + => Show rrItem + => MockedResult rrItem native + => PlayerRuntime + -> m (Either PlaybackError (RecordingEntry, rrItem, native)) +popNextRRItemAndResult playerRt@PlayerRuntime{..} = do + let flowStep = getTag $ Proxy @rrItem + eNextRRItem <- popNextRRItem playerRt + pure $ do + (recordingEntry, rrItem) <- eNextRRItem + let mbNative = getMock rrItem + nextResult <- note (mockDecodingFailed flowGUID flowStep (show recordingEntry)) mbNative + pure (recordingEntry, rrItem, nextResult) + +compareRRItems + :: forall rrItem m native + . 
RRItem rrItem + => Show rrItem + => MonadIO m + => PlayerRuntime + -> (RecordingEntry, rrItem, native) + -> rrItem + -> m () +compareRRItems playerRt@PlayerRuntime{..} (recordingEntry, rrItem, _) flowRRItem = do + when (rrItem /= flowRRItem) $ do + let flowStep = show flowRRItem + setReplayingError playerRt $ itemMismatch flowGUID flowStep (showRecEntry @rrItem recordingEntry) -- show recordingEntry) + +getCurrentEntryReplayMode :: MonadIO m => PlayerRuntime -> m EntryReplayingMode +getCurrentEntryReplayMode PlayerRuntime{resRecording = ResultRecording{..}, ..} = do + cur <- readMVar stepMVar + pure $ fromMaybe Normal $ do + (RecordingEntry _ mode _ _) <- recording !? cur + pure mode + +replayWithGlobalConfig + :: forall rrItem native m + . MonadIO m + => Show rrItem + => MockedResult rrItem native + => PlayerRuntime + -> m native + -> (native -> rrItem) + -> Either PlaybackError (RecordingEntry, rrItem, native) + -> m native +replayWithGlobalConfig playerRt ioAct mkRRItem eNextRRItemRes = do + let tag = getTag $ Proxy @rrItem + let config = checkForReplayConfig playerRt tag + case config of + GlobalNoVerify -> case eNextRRItemRes of + Left err -> setReplayingError playerRt err + Right (_, _, r) -> pure r + GlobalNormal -> case eNextRRItemRes of + Left err -> setReplayingError playerRt err + Right stepInfo@(_, _, r) -> do + compareRRItems playerRt stepInfo $ mkRRItem r + pure r + GlobalNoMocking -> ioAct + GlobalSkip -> ioAct + +checkForReplayConfig :: PlayerRuntime -> String -> GlobalReplayingMode +checkForReplayConfig PlayerRuntime{..} tag | tag `elem` disableMocking = GlobalNoMocking + | tag `elem` disableVerify = GlobalNoVerify + | otherwise = GlobalNormal + +replay + :: forall rrItem native m + . MonadIO m + => Show rrItem + => MockedResult rrItem native + => PlayerRuntime + -> (native -> rrItem) + -> m native + -> m native +replay playerRt@PlayerRuntime{..} mkRRItem ioAct + | getTag (Proxy @rrItem) `elem` skipEntries = ioAct + | otherwise = do + entryReplayMode <- getCurrentEntryReplayMode playerRt + eNextRRItemRes <- popNextRRItemAndResult playerRt + case entryReplayMode of + Normal -> do + replayWithGlobalConfig playerRt ioAct mkRRItem eNextRRItemRes + NoVerify -> case eNextRRItemRes of + Left err -> setReplayingError playerRt err + Right (_, _, r) -> pure r + NoMock -> ioAct + +record + :: forall rrItem native m + . 
MonadIO m + => RRItem rrItem + => RecorderRuntime + -> (native -> rrItem) + -> m native + -> m native +record recorderRt@RecorderRuntime{..} mkRRItem ioAct = do + native <- ioAct + let tag = getTag $ Proxy @rrItem + when (tag `notElem` disableEntries) + $ pushRecordingEntry recorderRt $ toRecordingEntry (mkRRItem native) 0 Normal + pure native + + +withRunMode + :: MonadIO m + => Show rrItem + => MockedResult rrItem native + => RunMode + -> (native -> rrItem) + -> m native + -> m native +withRunMode RegularMode _ act = act +withRunMode (RecordingMode recorderRt) mkRRItem act = + record recorderRt mkRRItem act +withRunMode (ReplayingMode playerRt) mkRRItem act = + replay playerRt mkRRItem act diff --git a/src/EulerHS/Core/PubSub/Entries.hs b/src/EulerHS/Core/PubSub/Entries.hs new file mode 100644 index 00000000..6c11db33 --- /dev/null +++ b/src/EulerHS/Core/PubSub/Entries.hs @@ -0,0 +1,61 @@ +{-# LANGUAGE DeriveAnyClass #-} + +module EulerHS.Core.PubSub.Entries where + + +import EulerHS.Prelude + +import qualified Data.Aeson as A +import qualified EulerHS.Types as T + +---------------------------------------------------------------------- + +data PublishEntry = PublishEntry + { jsonChannel :: A.Value + , jsonPayload :: A.Value + , jsonResult :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance T.RRItem PublishEntry where + getTag _ = "PublishEntry" + +instance T.MockedResult PublishEntry (Either T.KVDBReply Integer) where + getMock PublishEntry {jsonResult} = T.jsonDecode jsonResult + +mkPublishEntry :: ByteString -> ByteString -> Either T.KVDBReply Integer -> PublishEntry +mkPublishEntry c p r = PublishEntry + (T.jsonEncode c) + (T.jsonEncode p) + (T.jsonEncode r) + +---------------------------------------------------------------------- + +data SubscribeEntry = SubscribeEntry + { jsonChannels :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance T.RRItem SubscribeEntry where + getTag _ = "SubscribeEntry" + +instance T.MockedResult SubscribeEntry (IO ()) where + getMock _ = Just $ pure () + +mkSubscribeEntry :: [ByteString] -> IO () -> SubscribeEntry +mkSubscribeEntry c _ = SubscribeEntry $ T.jsonEncode c + +---------------------------------------------------------------------- + +data PSubscribeEntry = PSubscribeEntry + { jsonPatterns :: A.Value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +instance T.RRItem PSubscribeEntry where + getTag _ = "PSubscribeEntry" + +instance T.MockedResult PSubscribeEntry (IO ()) where + getMock _ = Just $ pure () + +mkPSubscribeEntry :: [ByteString] -> IO () -> PSubscribeEntry +mkPSubscribeEntry p _ = PSubscribeEntry $ T.jsonEncode p + +---------------------------------------------------------------------- diff --git a/src/EulerHS/Core/PubSub/Interpreter.hs b/src/EulerHS/Core/PubSub/Interpreter.hs new file mode 100644 index 00000000..b5a05504 --- /dev/null +++ b/src/EulerHS/Core/PubSub/Interpreter.hs @@ -0,0 +1,49 @@ +module EulerHS.Core.PubSub.Interpreter where + +import EulerHS.Prelude + +import Data.Coerce +import qualified Database.Redis as R +import qualified EulerHS.Core.Playback.Machine as P +import qualified EulerHS.Types as T + +import EulerHS.Core.PubSub.Entries +import EulerHS.Core.PubSub.Language + + +interpretPubSubF + :: T.RunMode + -> R.PubSubController + -> R.Connection + -> PubSubF a + -> IO a +interpretPubSubF runMode _ conn (Publish ch pl next) = + fmap next $ + P.withRunMode runMode (mkPublishEntry bsch bspl) $ + fmap (first T.hedisReplyToKVDBReply) $ R.runRedis conn $ R.publish bsch bspl + 
where
+ bsch = coerce ch
+ bspl = coerce pl
+
+interpretPubSubF runMode pubSubController _ (Subscribe chs cb next) =
+ fmap next $
+ P.withRunMode runMode (mkSubscribeEntry bsChs) $
+ R.addChannelsAndWait pubSubController (zip bsChs $ repeat cb) []
+ where
+ bsChs = coerce chs
+
+interpretPubSubF runMode pubSubController _ (PSubscribe patts cb next) =
+ fmap next $
+ P.withRunMode runMode (mkPSubscribeEntry bsPatts) $
+ R.addChannelsAndWait pubSubController [] (zip bsPatts $ repeat cb)
+ where
+ bsPatts = coerce patts
+
+runPubSub :: T.RunMode -> R.PubSubController -> R.Connection -> PubSub a -> IO a
+runPubSub runMode pubSubController conn =
+ foldF (interpretPubSubF runMode pubSubController conn)
+
+
+
+
+
diff --git a/src/EulerHS/Core/PubSub/Language.hs b/src/EulerHS/Core/PubSub/Language.hs
new file mode 100644
index 00000000..12c131c6
--- /dev/null
+++ b/src/EulerHS/Core/PubSub/Language.hs
@@ -0,0 +1,53 @@
+{-# LANGUAGE DeriveFunctor #-}
+
+{- |
+Module : EulerHS.Core.PubSub.Language
+Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021
+License : Apache 2.0 (see the file LICENSE)
+Maintainer : opensource@juspay.in
+Stability : experimental
+Portability : non-portable
+
+Experimental PubSub subsystem (Redis-based).
+
+This module is internal and should not be imported in projects.
+Import 'EulerHS.Language' instead.
+-}
+
+module EulerHS.Core.PubSub.Language where
+
+import EulerHS.Prelude
+
+import qualified Database.Redis as R
+import qualified EulerHS.Types as T
+
+-- | Channel
+newtype Channel = Channel ByteString
+
+-- | Channel pattern
+newtype ChannelPattern = ChannelPattern ByteString
+
+-- | Payload
+newtype Payload = Payload ByteString
+
+-- | Algebra for the PubSub mechanism
+data PubSubF next
+ = Publish Channel Payload (Either T.KVDBReply Integer -> next)
+ | Subscribe [Channel] R.MessageCallback (IO () -> next)
+ | PSubscribe [ChannelPattern] R.PMessageCallback (IO () -> next)
+ deriving Functor
+
+-- | PubSub language
+type PubSub = F PubSubF
+
+-- | Publish a payload to a channel
+publish :: Channel -> Payload -> PubSub (Either T.KVDBReply Integer)
+publish channel payload = liftFC $ Publish channel payload id
+
+-- | Subscribe to the given channels
+subscribe :: [Channel] -> R.MessageCallback -> PubSub (IO ())
+subscribe channels cb = liftFC $ Subscribe channels cb id
+
+-- | Subscribe to channels matching the given patterns
+psubscribe :: [ChannelPattern] -> R.PMessageCallback -> PubSub (IO ())
+psubscribe channels cb = liftFC $ PSubscribe channels cb id
diff --git a/src/EulerHS/Core/Runtime.hs b/src/EulerHS/Core/Runtime.hs
new file mode 100644
index 00000000..afada9fb
--- /dev/null
+++ b/src/EulerHS/Core/Runtime.hs
@@ -0,0 +1,137 @@
+{- |
+Module : EulerHS.Core.Runtime
+Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021
+License : Apache 2.0 (see the file LICENSE)
+Maintainer : opensource@juspay.in
+Stability : experimental
+Portability : non-portable
+
+This module contains functions and types to work with `CoreRuntime`.
+
+This is an internal module. Import EulerHS.Runtime instead.
+-}
+
+module EulerHS.Core.Runtime
+ (
+ -- * Core Runtime
+ CoreRuntime(..)
+ , LoggerRuntime(..)
+ , shouldLogRawSql
+ , incLogCounter
+ , createCoreRuntime
+ , createVoidLoggerRuntime
+ , createMemoryLoggerRuntime
+ , createLoggerRuntime
+ , createLoggerRuntime'
+ , clearCoreRuntime
+ , clearLoggerRuntime
+ , module X
+ ) where
+
+import EulerHS.Prelude
+
+-- Currently, TinyLogger is highly coupled with the Runtime.
+-- Fix this if interchangeable implementations are needed.
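+--
+-- A minimal usage sketch (a hypothetical wrapper, shown only for illustration;
+-- the full flow runtime is assembled in EulerHS.Runtime):
+--
+-- > withCoreRuntime :: (CoreRuntime -> IO a) -> IO a
+-- > withCoreRuntime act =
+-- >   bracket createVoidLoggerRuntime clearLoggerRuntime $ \loggerRt ->
+-- >     createCoreRuntime loggerRt >>= act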
+import qualified EulerHS.Core.Logger.Impl.TinyLogger as Impl +import qualified EulerHS.Core.Types as T +import EulerHS.Core.Types.DB as X (withTransaction) +import qualified System.Logger as Log + +-- | A counter of log messages sent since the `LoggerRuntime` creation. +type LogCounter = IORef Int -- No race condition: atomicModifyIORef' is used. + +-- | Runtime sturcture holding all the necessary operational information +-- for the logging subsystem. +data LoggerRuntime + -- | Runtime structure of a regular logger. + = LoggerRuntime + { _flowFormatter :: T.FlowFormatter + -- ^ A callback for obtaining a flow-specific formatter. + , _logLevel :: T.LogLevel + -- ^ Log level + , _logRawSql :: !Bool + -- ^ Whether to log raw SQL as Debug messages. + , _logCounter :: !LogCounter + -- ^ Log messages counter variable. + , _logLoggerHandle :: Impl.LoggerHandle + -- ^ Internal logging subsystem handler. + } + -- | Runtime structure for a memory logger. + | MemoryLoggerRuntime !T.FlowFormatter !T.LogLevel !(MVar [Text]) !LogCounter + +-- | Runtime that keeps all the operational info for the core subsystems. +data CoreRuntime = CoreRuntime + { _loggerRuntime :: LoggerRuntime + -- ^ Logger runtime + } + +-- | Create a memory logger runtime. +-- +-- This function can be passed to `createFlowRuntime'`. +createMemoryLoggerRuntime :: T.FlowFormatter -> T.LogLevel -> IO LoggerRuntime +createMemoryLoggerRuntime flowFormatter logLevel = + MemoryLoggerRuntime flowFormatter logLevel <$> newMVar [] <*> initLogCounter + +-- | Create a regular logger runtime according to the config passed. +-- +-- This function can be passed to `createFlowRuntime'`. +createLoggerRuntime :: T.FlowFormatter -> T.LoggerConfig -> IO LoggerRuntime +createLoggerRuntime flowFormatter cfg = do + counter <- initLogCounter + LoggerRuntime flowFormatter (T._logLevel cfg) (T._logRawSql cfg) counter + <$> Impl.createLogger flowFormatter cfg + +-- | The same as `createLoggerRuntime` but allows to setup different tweaks +-- of the specific tiny-logger subsystem. +-- +-- This function can be passed to `createFlowRuntime'`. +createLoggerRuntime' + :: Maybe Log.DateFormat + -> Maybe Log.Renderer + -> T.BufferSize + -> T.FlowFormatter + -> T.LoggerConfig + -> IO LoggerRuntime +createLoggerRuntime' mbDateFormat mbRenderer bufferSize flowFormatter cfg = do + counter <- initLogCounter + loggerHandle <- Impl.createLogger' mbDateFormat mbRenderer bufferSize flowFormatter cfg + pure $ LoggerRuntime flowFormatter (T._logLevel cfg) (T._logRawSql cfg) counter loggerHandle + +-- | Create a void logger: nothing will be logged. +createVoidLoggerRuntime :: IO LoggerRuntime +createVoidLoggerRuntime = do + counter <- initLogCounter + LoggerRuntime (const $ pure show) T.Debug True counter <$> Impl.createVoidLogger + +-- | Clear a logger runtime and dispose the logger. +-- +-- This function flushes the last log messages existing in the log queue. +clearLoggerRuntime :: LoggerRuntime -> IO () +clearLoggerRuntime (LoggerRuntime flowFormatter _ _ _ handle) = Impl.disposeLogger flowFormatter handle +clearLoggerRuntime (MemoryLoggerRuntime _ _ msgsVar _) = void $ swapMVar msgsVar [] + +-- | Creates a core runtime. +createCoreRuntime :: LoggerRuntime -> IO CoreRuntime +createCoreRuntime = pure . CoreRuntime + +-- | Clears the core runtime. +clearCoreRuntime :: CoreRuntime -> IO () +clearCoreRuntime _ = pure () + +-- | Returns True if debug logging of raw SQL queries was set. 
+shouldLogRawSql :: LoggerRuntime -> Bool +shouldLogRawSql = \case + (LoggerRuntime _ _ logRawSql _ _) -> logRawSql + _ -> True + +-- | Init log messages counter. +-- +-- Internal function, should not be used in the BL. +initLogCounter :: IO LogCounter +initLogCounter = newIORef 0 + +-- | Incremenet log messages counter. +-- +-- Internal function, should not be used in the BL. +incLogCounter :: LogCounter -> IO Int +incLogCounter = flip atomicModifyIORef' (\cnt -> (cnt + 1, cnt)) diff --git a/src/EulerHS/Core/SqlDB/Interpreter.hs b/src/EulerHS/Core/SqlDB/Interpreter.hs new file mode 100644 index 00000000..24ad6ea1 --- /dev/null +++ b/src/EulerHS/Core/SqlDB/Interpreter.hs @@ -0,0 +1,22 @@ +module EulerHS.Core.SqlDB.Interpreter + ( + -- * SQL DB Interpreter + runSqlDB + ) where + +import EulerHS.Prelude + +import qualified EulerHS.Core.Language as L +import qualified EulerHS.Core.Types as T + + +interpretSqlDBMethod + :: T.NativeSqlConn + -> (Text -> IO ()) + -> L.SqlDBMethodF beM a + -> IO a +interpretSqlDBMethod conn logger (L.SqlDBMethod runner next) = + next <$> runner conn logger + +runSqlDB :: T.NativeSqlConn -> (Text -> IO ()) -> L.SqlDB beM a -> IO a +runSqlDB sqlConn logger = foldF (interpretSqlDBMethod sqlConn logger) diff --git a/src/EulerHS/Core/SqlDB/Language.hs b/src/EulerHS/Core/SqlDB/Language.hs new file mode 100644 index 00000000..65365bf6 --- /dev/null +++ b/src/EulerHS/Core/SqlDB/Language.hs @@ -0,0 +1,143 @@ +{-# LANGUAGE GADTs #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TemplateHaskell #-} + +{- | +Module : EulerHS.Core.SqlDB.Language +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Language of the SQL DB subsystem. + +Uses `beam` as relational DBs connector. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Language' instead. +-} + +module EulerHS.Core.SqlDB.Language + ( + -- * SQLDB language + -- ** Types + SqlDB + , SqlDBMethodF(..) + -- ** Methods + , findRow + , findRows + , insertRows + , insertRowsReturningList + , updateRows + , updateRowsReturningList + , deleteRows + , deleteRowsReturningListPG + , updateRowsReturningListPG + , insertRowReturningMySQL + ) where + +import qualified Database.Beam as B +import qualified Database.Beam.MySQL as BM +import qualified Database.Beam.Postgres as BP +import qualified EulerHS.Core.Types as T +import EulerHS.Prelude + +-- | Language of the SQL DB subsytem. +type SqlDB beM = F (SqlDBMethodF beM) + +-- | Algebra of the SQL DB subsytem. +data SqlDBMethodF (beM :: Type -> Type) next where + SqlDBMethod :: HasCallStack => (T.NativeSqlConn -> (Text -> IO ()) -> IO a) -> (a -> next) -> SqlDBMethodF beM next + +instance Functor (SqlDBMethodF beM) where + fmap f (SqlDBMethod runner next) = SqlDBMethod runner (f . next) + +-- | Wrapping helper +sqlDBMethod + :: (HasCallStack, T.BeamRunner beM) + => beM a + -> SqlDB beM a +sqlDBMethod act = liftFC $ SqlDBMethod (flip T.getBeamDebugRunner act) id + +-- Convenience interface + +-- | Select many rows query +findRows + :: (HasCallStack, T.BeamRunner beM, T.BeamRuntime be beM, B.FromBackendRow be a) + => B.SqlSelect be a + -> SqlDB beM [a] +findRows = sqlDBMethod . 
T.rtSelectReturningList + +-- | Select one row query +findRow + :: (HasCallStack, T.BeamRunner beM, T.BeamRuntime be beM, B.FromBackendRow be a) + => B.SqlSelect be a + -> SqlDB beM (Maybe a) +findRow = sqlDBMethod . T.rtSelectReturningOne + +-- | Insert query +insertRows + :: (HasCallStack, T.BeamRunner beM, T.BeamRuntime be beM) + => B.SqlInsert be table + -> SqlDB beM () +insertRows = sqlDBMethod . T.rtInsert + +-- | Insert returning list query +insertRowsReturningList + :: (HasCallStack, B.Beamable table, B.FromBackendRow be (table Identity), T.BeamRuntime be beM, T.BeamRunner beM) + => B.SqlInsert be table + -> SqlDB beM [table Identity] +insertRowsReturningList = sqlDBMethod . T.rtInsertReturningList + +-- | Update query +updateRows + :: (HasCallStack, T.BeamRunner beM, T.BeamRuntime be beM) + => B.SqlUpdate be table + -> SqlDB beM () +updateRows = sqlDBMethod . T.rtUpdate + +-- | Update returning list query +updateRowsReturningList + :: (HasCallStack, T.BeamRunner beM, T.BeamRuntime be beM, + B.Beamable table, B.FromBackendRow be (table Identity)) + => B.SqlUpdate be table + -> SqlDB beM [table Identity] +updateRowsReturningList = sqlDBMethod . T.rtUpdateReturningList + +-- | Delete query +deleteRows + :: (HasCallStack, T.BeamRunner beM, T.BeamRuntime be beM) + => B.SqlDelete be table + -> SqlDB beM () +deleteRows = sqlDBMethod . T.rtDelete + + +-- Postgres-only extra methods + +-- | Postgres-only DELETE query (returning list) +deleteRowsReturningListPG + :: (HasCallStack, B.Beamable table, B.FromBackendRow BP.Postgres (table Identity)) + => B.SqlDelete BP.Postgres table + -> SqlDB BP.Pg [table Identity] +deleteRowsReturningListPG = sqlDBMethod . T.deleteReturningListPG + +-- | Postgres-only UPDATE query (returning list) +updateRowsReturningListPG + :: (HasCallStack, B.Beamable table, B.FromBackendRow BP.Postgres (table Identity)) + => B.SqlUpdate BP.Postgres table + -> SqlDB BP.Pg [table Identity] +updateRowsReturningListPG = sqlDBMethod . T.updateReturningListPG + +-- MySQL only extra methods +-- NOTE: This should be run inside a SQL transaction! + +-- | MySQL-only INSERT query (returning list) +-- +-- NOTE: This should be run inside a SQL transaction! +insertRowReturningMySQL :: (HasCallStack, B.FromBackendRow BM.MySQL (table Identity)) + => B.SqlInsert BM.MySQL table + -> SqlDB BM.MySQLM (Maybe (table Identity)) +insertRowReturningMySQL = + sqlDBMethod . BM.runInsertRowReturning diff --git a/src/EulerHS/Core/Types.hs b/src/EulerHS/Core/Types.hs new file mode 100644 index 00000000..1ef5296e --- /dev/null +++ b/src/EulerHS/Core/Types.hs @@ -0,0 +1,30 @@ +{- | +Module : EulerHS.Core.Types +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module reexports general functions and types of the framework. + +This is an internal module. Import EulerHS.Types instead. 
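+
+A typical consumer imports the public interface qualified, e.g.:
+
+> import qualified EulerHS.Types as T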
+-} + +module EulerHS.Core.Types + ( module X + ) where + +import EulerHS.Core.Api as X +import EulerHS.Core.Types.BinaryString as X +import EulerHS.Core.Types.Common as X +import EulerHS.Core.Types.DB as X hiding (withTransaction) +import EulerHS.Core.Types.Exceptions as X +import EulerHS.Core.Types.HttpAPI as X +import EulerHS.Core.Types.KVDB as X +import EulerHS.Core.Types.Logger as X +import EulerHS.Core.Types.MySQL as X +import EulerHS.Core.Types.Options as X +import EulerHS.Core.Types.Playback as X +import EulerHS.Core.Types.Postgres as X +import EulerHS.Core.Types.Serializable as X diff --git a/src/EulerHS/Core/Types/BinaryString.hs b/src/EulerHS/Core/Types/BinaryString.hs new file mode 100644 index 00000000..95f17096 --- /dev/null +++ b/src/EulerHS/Core/Types/BinaryString.hs @@ -0,0 +1,94 @@ +module EulerHS.Core.Types.BinaryString +( BinaryString(..) +, LBinaryString(..) +, base64Encode +, base64Decode +) where + +import EulerHS.Prelude + +import qualified Control.Monad.Fail as MonadFail +import qualified Data.ByteString as Strict +import qualified Data.ByteString.Base64 as B64 +import qualified Data.ByteString.Lazy as Lazy +import qualified Data.String.Conversions as Conversions +import qualified Data.Text as Text +import qualified Data.Text.Encoding as Encoding + +-------------------------------------------------------------------------- +-- Base64 encoding/decoding helpers +-------------------------------------------------------------------------- + +newtype BinaryString + = BinaryString + { getBinaryString :: Strict.ByteString } + deriving (Show, Eq, Ord) + +instance ToJSON BinaryString where + toJSON val = toJSON ((show $ getBinaryString val) :: Text) + +instance FromJSON BinaryString where + parseJSON val = do + bs <- parseJSON val + pure $ BinaryString $ read bs + +instance Conversions.ConvertibleStrings Strict.ByteString BinaryString where + convertString = BinaryString + +instance Conversions.ConvertibleStrings BinaryString Strict.ByteString where + convertString = getBinaryString + +instance Conversions.ConvertibleStrings Lazy.ByteString BinaryString where + convertString = BinaryString . Conversions.convertString + +instance Conversions.ConvertibleStrings BinaryString Lazy.ByteString where + convertString = Conversions.convertString . getBinaryString + +-------------------------------------------------------------------------- +-- Lazy BinaryString +-------------------------------------------------------------------------- + +newtype LBinaryString + = LBinaryString + { getLBinaryString :: Lazy.ByteString } + deriving (Show, Eq, Ord) + +instance ToJSON LBinaryString where + toJSON val = toJSON (show (getLBinaryString val) :: Text) + +instance FromJSON LBinaryString where + parseJSON val = do + lbs <- parseJSON val + pure $ LBinaryString $ read lbs + +instance Conversions.ConvertibleStrings Lazy.ByteString LBinaryString where + convertString = LBinaryString + +instance Conversions.ConvertibleStrings LBinaryString Lazy.ByteString where + convertString = getLBinaryString + +instance Conversions.ConvertibleStrings Strict.ByteString LBinaryString where + convertString = LBinaryString . Conversions.convertString + +instance Conversions.ConvertibleStrings LBinaryString Strict.ByteString where + convertString = Conversions.convertString . 
getLBinaryString + +-------------------------------------------------------------------------- +-- Base64 encoding/decoding helpers +-------------------------------------------------------------------------- + +-- | Base64 encode a bytestring +-- +-- NOTE: Decoding to UTF-8 cannot fail so is safe +-- +base64Encode :: Strict.ByteString -> Text.Text +base64Encode = Encoding.decodeUtf8 . B64.encode + +-- | Base64 decode a Base64-encoded string +-- +-- NOTE: This may fail if the string is malformed using MonadFail +-- +base64Decode :: MonadFail.MonadFail m => Text.Text -> m Strict.ByteString +base64Decode s = case B64.decode (Encoding.encodeUtf8 s) of + Left err -> fail err + Right res -> return res diff --git a/src/EulerHS/Core/Types/Common.hs b/src/EulerHS/Core/Types/Common.hs new file mode 100644 index 00000000..3494ab90 --- /dev/null +++ b/src/EulerHS/Core/Types/Common.hs @@ -0,0 +1,56 @@ +{- | +Module : EulerHS.Core.Types.Common +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Common types and helper functions. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Types' instead. +-} + +module EulerHS.Core.Types.Common + ( + FlowGUID + , ForkGUID + , SafeFlowGUID + , ManagerSelector + , Description + , Awaitable (..) + , Microseconds (..) + ) where + +import qualified Data.Word as W +import EulerHS.Prelude + + -- | Guid for any flow. + -- This type can be used to specify a separate logger formatting + -- for each flow. +type FlowGUID = Text + +-- | Guid for a forked flow. +-- Service type, rarely needed in the business logic. +type ForkGUID = Text + + -- | Guid for a safe flow. + -- Service type, rarely needed in business logic. +type SafeFlowGUID = Text + + -- | Network manager selector. + -- Allows to have a set of named managers with own configs + -- when the default one is not enough. +type ManagerSelector = String + + -- | Description type +type Description = Text + + -- | Awaitable object. Ask it for results from forked flow. +data Awaitable s = Awaitable (MVar s) + + -- | Wrapper for microseconds. +newtype Microseconds + = Microseconds W.Word32 + -- ^ Max timeout ~71 minutes with Word32 diff --git a/src/EulerHS/Core/Types/DB.hs b/src/EulerHS/Core/Types/DB.hs new file mode 100644 index 00000000..3b56f2a1 --- /dev/null +++ b/src/EulerHS/Core/Types/DB.hs @@ -0,0 +1,599 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE FunctionalDependencies #-} +{-# LANGUAGE RecordWildCards #-} + +{- | +Module : EulerHS.Core.Types.DB +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module contains general DB-related types and helper functions. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Types' instead. + +Types and helpers for specific databases can be found in separate modules: + +'EulerHS.Core.Types.MySQL' +'EulerHS.Core.Types.Postgres' +-} + +-- TODO: separate runtime, implementation and public interface. +module EulerHS.Core.Types.DB + ( + -- * Core DB + -- ** Public types + ConnTag + , SQliteDBname + , SqlConn(..) + , DBConfig + , PoolConfig(..) + , DBErrorType(..) + , DBError(..) + , DBResult + , PostgresSqlError(..) + , PostgresExecStatus(..) + , MysqlSqlError(..) + , SqliteSqlError(..) 
+ , SqliteError(..) + , SQLError(..) + + -- ** Private types + , BeamRuntime(..) + , BeamRunner(..) + , NativeSqlPool(..) + , NativeSqlConn(..) + + -- ** Public helpers + , mkSqlConn + , mkSQLiteConfig + , mkSQLitePoolConfig + , mkPostgresConfig + , mkPostgresPoolConfig + , mkMySQLConfig + , mkMySQLPoolConfig + , getDBName + , deleteReturningListPG + , updateReturningListPG + , defaultPoolConfig + + -- ** Private helpers + , bemToNative + , nativeToBem + , withTransaction + , mysqlErrorToDbError + , sqliteErrorToDbError + , postgresErrorToDbError + ) where + +import EulerHS.Prelude + +import qualified Data.Pool as DP +import Data.Time.Clock (NominalDiffTime) +import qualified Data.Text as T +import qualified Database.Beam as B +import qualified Database.Beam.Backend.SQL as B +import qualified Database.Beam.Backend.SQL.BeamExtensions as B +import qualified Database.Beam.MySQL as BM +import qualified Database.Beam.Postgres as BP +import qualified Database.Beam.Sqlite as BS +import qualified Database.Beam.Sqlite.Connection as SQLite +import qualified Database.MySQL.Base as MySQL +import qualified Database.PostgreSQL.Simple as PGS +import qualified Database.SQLite.Simple as SQLite + +import EulerHS.Core.Types.MySQL (MySQLConfig(..), createMySQLConn) +import EulerHS.Core.Types.Postgres (PostgresConfig(..), + createPostgresConn) + +-- * Public types and helpers + +-- | Creates 'SqlConn' from 'DBConfig'. +-- +-- You can use this function to prepare your DB connections before running a flow. +-- It's also possible to call this function during the flow evaluation +-- (using 'runIO' or 'runUntracedIO'). +mkSqlConn :: DBConfig beM -> IO (SqlConn beM) +mkSqlConn (PostgresPoolConf connTag cfg PoolConfig {..}) = PostgresPool connTag + <$> DP.createPool (createPostgresConn cfg) BP.close stripes keepAlive resourcesPerStripe + +mkSqlConn (MySQLPoolConf connTag cfg PoolConfig {..}) = MySQLPool connTag + <$> DP.createPool (createMySQLConn cfg) MySQL.close stripes keepAlive resourcesPerStripe + +mkSqlConn (SQLitePoolConf connTag dbname PoolConfig {..}) = SQLitePool connTag + <$> DP.createPool (SQLite.open dbname) SQLite.close stripes keepAlive resourcesPerStripe + +mkSqlConn (MockConfig connTag) = pure $ MockedPool connTag + +-- | Special version of DELETE query specified for Postgres. +-- TODO: unify this with other backends. +deleteReturningListPG + :: (B.Beamable table, B.FromBackendRow BP.Postgres (table Identity)) + => B.SqlDelete BP.Postgres table + -> BP.Pg [table Identity] +deleteReturningListPG = B.runDeleteReturningList + +-- | Tag for SQL connections +type ConnTag = Text + +-- | Represents path to the SQLite DB +type SQliteDBname = String + +-- | Represents SQL connection that we use in flow. +-- Parametrised by BEAM monad corresponding to the certain DB (MySQL, Postgres, SQLite) +data SqlConn (beM :: Type -> Type) + = MockedPool ConnTag + -- ^ This mocked connection is not related to any DBs. Used in the ART system and tests. + | PostgresPool ConnTag (DP.Pool BP.Connection) + -- ^ 'Pool' with Postgres connections. + | MySQLPool ConnTag (DP.Pool MySQL.MySQLConn) + -- ^ 'Pool' with MySQL connections. + | SQLitePool ConnTag (DP.Pool SQLite.Connection) + -- ^ 'Pool' with SQLite connections. + deriving (Generic) + +-- | Represents DB configurations +data DBConfig (beM :: Type -> Type) + = MockConfig ConnTag + -- ^ This mocked configs is not related to any DBs. Used in the ART system and tests. 
+ | PostgresPoolConf ConnTag PostgresConfig PoolConfig + -- ^ Config for 'Pool' with Postgres connections + | MySQLPoolConf ConnTag MySQLConfig PoolConfig + -- ^ Config for 'Pool' with MySQL connections + | SQLitePoolConf ConnTag SQliteDBname PoolConfig + -- ^ Config for 'Pool' with SQlite connections + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Represents 'Pool' parameters. +-- +-- All the DB connections use a pool internally. +-- Configure pools according to your needs. +data PoolConfig = PoolConfig + { stripes :: Int + -- ^ The number of stripes (distinct sub-pools) to maintain. The smallest acceptable value is 1. + , keepAlive :: NominalDiffTime + -- ^ Amount of time for which an unused resource is kept open. The smallest acceptable value is 0.5 seconds. + -- + -- The elapsed time before destroying a resource may be a little longer than requested, as the reaper thread wakes at 1-second intervals. + -- + -- Conversion functions will treat it as seconds. + -- For example, (0.010 :: NominalDiffTime) corresponds to 10 milliseconds. + , resourcesPerStripe :: Int + -- ^ Maximum number of resources to keep open per stripe. The smallest acceptable value is 1. + } + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Default pool config. +-- +-- stripes = 1 +-- keepAlive = 100 (seconds) +-- resourcesPerStripe = 1 +-- +defaultPoolConfig :: PoolConfig +defaultPoolConfig = PoolConfig + { stripes = 1 + , keepAlive = 100 + , resourcesPerStripe = 1 + } + +-- | Create SQLite 'DBConfig' +mkSQLiteConfig :: ConnTag -> SQliteDBname -> DBConfig BS.SqliteM +mkSQLiteConfig connTag dbName = SQLitePoolConf connTag dbName defaultPoolConfig + +-- | Create SQLite 'Pool' 'DBConfig' +mkSQLitePoolConfig :: ConnTag -> SQliteDBname -> PoolConfig -> DBConfig BS.SqliteM +mkSQLitePoolConfig = SQLitePoolConf + +-- | Create Postgres 'DBConfig' +mkPostgresConfig :: ConnTag -> PostgresConfig -> DBConfig BP.Pg +mkPostgresConfig connTag dbName = PostgresPoolConf connTag dbName defaultPoolConfig + +-- | Create Postgres 'Pool' 'DBConfig' +mkPostgresPoolConfig :: ConnTag -> PostgresConfig -> PoolConfig -> DBConfig BP.Pg +mkPostgresPoolConfig = PostgresPoolConf + +-- | Create MySQL 'DBConfig' +mkMySQLConfig :: ConnTag -> MySQLConfig -> DBConfig BM.MySQLM +mkMySQLConfig connTag dbName = MySQLPoolConf connTag dbName defaultPoolConfig + +-- | Create MySQL 'Pool' 'DBConfig' +mkMySQLPoolConfig :: ConnTag -> MySQLConfig -> PoolConfig -> DBConfig BM.MySQLM +mkMySQLPoolConfig = MySQLPoolConf + +-- | Obtains a DB name from 'DBConfig'. +-- +-- For a mocked config, returns ConnTag as a DB name. +getDBName :: DBConfig beM -> String +getDBName (PostgresPoolConf _ (PostgresConfig{..}) _) = connectDatabase +getDBName (MySQLPoolConf _ (MySQLConfig{..}) _) = connectDatabase +getDBName (SQLitePoolConf _ dbName _) = dbName +getDBName (MockConfig tag) = T.unpack tag + +---------------------------------------------------------------------- + +-- | Abstracted type for SQLite error types. 
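+--
+-- The constructors mirror the native 'SQLite.Error' codes from
+-- @sqlite-simple@; see 'toSqliteError' below for the exact mapping.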
+data SqliteError + = SqliteErrorOK + | SqliteErrorError + | SqliteErrorInternal + | SqliteErrorPermission + | SqliteErrorAbort + | SqliteErrorBusy + | SqliteErrorLocked + | SqliteErrorNoMemory + | SqliteErrorReadOnly + | SqliteErrorInterrupt + | SqliteErrorIO + | SqliteErrorCorrupt + | SqliteErrorNotFound + | SqliteErrorFull + | SqliteErrorCantOpen + | SqliteErrorProtocol + | SqliteErrorEmpty + | SqliteErrorSchema + | SqliteErrorTooBig + | SqliteErrorConstraint + | SqliteErrorMismatch + | SqliteErrorMisuse + | SqliteErrorNoLargeFileSupport + | SqliteErrorAuthorization + | SqliteErrorFormat + | SqliteErrorRange + | SqliteErrorNotADatabase + | SqliteErrorNotice + | SqliteErrorWarning + | SqliteErrorRow + | SqliteErrorDone + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Abstracted type for SQLite error. +data SqliteSqlError + = SqliteSqlError + { sqlError :: !SqliteError + -- ^ Error type + , sqlErrorDetails :: Text + -- ^ Additional error details + , sqlErrorContext :: Text + -- ^ Error context + } + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Abstracted type for any errors occurring when dealing with the SQL DB subsystem. +data SQLError + = PostgresError PostgresSqlError + -- ^ Contains a Postgres abstracted error + | MysqlError MysqlSqlError + -- ^ Contains a MySQL abstracted error + | SqliteError SqliteSqlError + -- ^ Contains a SQLite abstracted error + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +---------------------------------------------------------------------- + +-- | Abstracted type for MySQL error. +data MysqlSqlError = + MysqlSqlError + { errCode :: {-# UNPACK #-} !Word16, + errMsg :: {-# UNPACK #-} !Text + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +---------------------------------------------------------------------- + +-- | Abstracted type for Postgress exec status. +data PostgresExecStatus + = PostgresEmptyQuery + | PostgresCommandOk + | PostgresTuplesOk + | PostgresCopyOut + | PostgresCopyIn + | PostgresCopyBoth + | PostgresBadResponse + | PostgresNonfatalError + | PostgresFatalError + | PostgresSingleTuple + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Abstracted type for Postgress SQL error. +data PostgresSqlError = + PostgresSqlError + { sqlState :: Text + , sqlExecStatus :: PostgresExecStatus + , sqlErrorMsg :: Text + , sqlErrorDetail :: Text + , sqlErrorHint :: Text + } + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +---------------------------------------------------------------------- + +-- | Represents different failures that the SQL subsystem may return +data DBErrorType + = ConnectionFailed + -- ^ Connection problem. Can be anything that causes the connection to break. + | ConnectionAlreadyExists + -- ^ This error indicates that the connection for this particular config already exist. + | ConnectionDoesNotExist + -- ^ This error indicates that the connection for this particular config is not found. + | TransactionRollbacked + -- ^ This error indicates about a rollbacked transaction. + -- + -- (Not supported yet) + | SQLError SQLError + -- ^ Some specific error the DB backend has returned. + | UnexpectedResult + -- ^ An unexpected error happened in the SQL DB subsystem. + -- + -- (Not supported yet) + | UnrecognizedError + -- ^ Unknown error from a native DB backend or from the SQL DB subsystem. 
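+
+  -- How these surface to callers (a hypothetical sketch; 'DBError' and
+  -- 'DBResult' are defined just below):
+  --
+  -- > describe :: DBResult a -> Either Text a
+  -- > describe (Left (DBError ConnectionFailed msg)) = Left ("reconnect: " <> msg)
+  -- > describe (Left (DBError _ msg))                = Left msg
+  -- > describe (Right a)                             = Right a
+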
+ deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Represents DB error +data DBError + = DBError DBErrorType Text + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Represents resulting type for DB actions +type DBResult a = Either DBError a + +-- * Internal types and functions + +-- | This type class ties internal beam-related type classes +-- and implementation details. +-- +-- In typical scenarios, you won't be needing this type class or its methods, +-- because the 'SqlDB' language provides a more high level interface to the +-- SQL DB subsystem. +-- +-- It's not guaranteed that this type class will remain public. +-- +-- This type class helps to support multiple DB backends. +-- 3 different backends are supported out of the box: +-- +-- - SQLite +-- - MySQL +-- - Postgres +class (B.BeamSqlBackend be, B.MonadBeam be beM) => BeamRuntime be beM + | be -> beM, beM -> be where + rtSelectReturningList :: B.FromBackendRow be a => B.SqlSelect be a -> beM [a] + rtSelectReturningOne :: B.FromBackendRow be a => B.SqlSelect be a -> beM (Maybe a) + rtInsert :: B.SqlInsert be table -> beM () + rtInsertReturningList :: forall table . (B.Beamable table, B.FromBackendRow be (table Identity)) => B.SqlInsert be table -> beM [table Identity] + rtUpdate :: B.SqlUpdate be table -> beM () + rtUpdateReturningList :: forall table. (B.Beamable table, B.FromBackendRow be (table Identity)) => B.SqlUpdate be table -> beM [table Identity] + rtDelete :: B.SqlDelete be table -> beM () + +-- | Implements 'BeamRuntime' for SQLite. +instance BeamRuntime BS.Sqlite BS.SqliteM where + rtSelectReturningList = B.runSelectReturningList + rtSelectReturningOne = B.runSelectReturningOne + rtInsert = B.runInsert + rtInsertReturningList = B.runInsertReturningList + rtUpdate = B.runUpdate + rtUpdateReturningList = error "Not implemented" + rtDelete = B.runDelete + +-- | Implements 'BeamRuntime' for Postgres. +instance BeamRuntime BP.Postgres BP.Pg where + rtSelectReturningList = B.runSelectReturningList + rtSelectReturningOne = B.runSelectReturningOne + rtInsert = B.runInsert + rtInsertReturningList = B.runInsertReturningList + rtUpdate = B.runUpdate + rtUpdateReturningList = updateReturningListPG + rtDelete = B.runDelete + +-- | Implements 'BeamRuntime' for MySQL. +instance BeamRuntime BM.MySQL BM.MySQLM where + rtSelectReturningList = B.runSelectReturningList + rtSelectReturningOne = B.runSelectReturningOne + rtInsert = B.runInsert + rtInsertReturningList = error "Not implemented" + rtUpdate = B.runUpdate + rtUpdateReturningList = error "Not implemented" + rtDelete = B.runDelete + +-- | Special version of UPDATE query specified for Postgres. +-- TODO: unify this with other backends. +updateReturningListPG + :: (B.Beamable table, B.FromBackendRow BP.Postgres (table Identity)) + => B.SqlUpdate BP.Postgres table + -> BP.Pg [table Identity] +updateReturningListPG = B.runUpdateReturningList + +-- | This type class ties native connections, beam and native SQL backends. +-- +-- In typical scenarios, you won't be needing this type class or its methods, +-- because the 'SqlDB' language provides a more high level interface to the +-- SQL DB subsystem. +-- +-- It's not guaranteed that this type class will remain public. +-- +-- This type class helps to support multiple SQL backends. 
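+-- Its single method, 'getBeamDebugRunner', pairs a 'NativeSqlConn' with a
+-- beam action and returns a runner parametrised by a logging callback
+-- (used to trace the generated SQL).
+--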
+-- 3 different backends are supported out of the box: +-- +-- - SQLite +-- - MySQL +-- - Postgres +class BeamRunner beM where + getBeamDebugRunner :: NativeSqlConn -> beM a -> ((Text -> IO ()) -> IO a) + +-- | Implements 'BeamRunner' for SQLite. +instance BeamRunner BS.SqliteM where + getBeamDebugRunner (NativeSQLiteConn conn) beM = + \logger -> SQLite.runBeamSqliteDebug logger conn beM + getBeamDebugRunner _ _ = \_ -> error "Not a SQLite connection" + +-- | Implements 'BeamRunner' for Postgres. +instance BeamRunner BP.Pg where + getBeamDebugRunner (NativePGConn conn) beM = + \logger -> BP.runBeamPostgresDebug logger conn beM + getBeamDebugRunner _ _ = \_ -> error "Not a Postgres connection" + +-- | Implements 'BeamRunner' for MySQL. +instance BeamRunner BM.MySQLM where + getBeamDebugRunner (NativeMySQLConn conn) beM = + \logger -> BM.runBeamMySQLDebug logger conn beM + getBeamDebugRunner _ _ = \_ -> error "Not a MySQL connection" + +-- | Evaluates an action over a native connection within a native transaction. +-- All the backends have this support of transactions: +-- +-- - SQLite +-- - MySQL +-- - Postgres +-- +-- This is an internal function. Don't use it in the BL code. +withTransaction :: forall beM a . + SqlConn beM -> (NativeSqlConn -> IO a) -> IO (Either SomeException a) +withTransaction conn f = case conn of + MockedPool _ -> error "Mocked pool connections are not supported." + PostgresPool _ pool -> DP.withResource pool (go PGS.withTransaction NativePGConn) + MySQLPool _ pool -> DP.withResource pool (go MySQL.withTransaction NativeMySQLConn) + SQLitePool _ pool -> DP.withResource pool (go SQLite.withTransaction NativeSQLiteConn) + where + go :: forall b . (b -> IO a -> IO a) -> (b -> NativeSqlConn) -> b -> IO (Either SomeException a) + go hof wrap conn' = tryAny (hof conn' (f . wrap $ conn')) + +-- | Representation of native DB pools that we store in FlowRuntime. +-- +-- This is an internal type. Don't use it in the BL code. +data NativeSqlPool + = NativePGPool (DP.Pool BP.Connection) -- ^ 'Pool' with Postgres connections + | NativeMySQLPool (DP.Pool MySQL.MySQLConn) -- ^ 'Pool' with MySQL connections + | NativeSQLitePool (DP.Pool SQLite.Connection) -- ^ 'Pool' with SQLite connections + | NativeMockedPool + deriving Show + +-- | Representation of native DB connections that we use in the implementation. +-- +-- This is an internal type. Don't use it in the BL code. +data NativeSqlConn + = NativePGConn BP.Connection + | NativeMySQLConn MySQL.MySQLConn + | NativeSQLiteConn SQLite.Connection + +-- | Transform 'SqlConn' to 'NativeSqlPool'. +-- +-- This is an internal function. Don't use it in the BL code. +bemToNative :: SqlConn beM -> NativeSqlPool +bemToNative (MockedPool _) = NativeMockedPool +bemToNative (PostgresPool _ pool) = NativePGPool pool +bemToNative (MySQLPool _ pool) = NativeMySQLPool pool +bemToNative (SQLitePool _ pool) = NativeSQLitePool pool + +-- | Transforms 'NativeSqlPool' to 'SqlConn'. +-- +-- This is an internal function. Don't use it in the BL code. +nativeToBem :: ConnTag -> NativeSqlPool -> SqlConn beM +nativeToBem connTag NativeMockedPool = MockedPool connTag +nativeToBem connTag (NativePGPool conn) = PostgresPool connTag conn +nativeToBem connTag (NativeMySQLPool conn) = MySQLPool connTag conn +nativeToBem connTag (NativeSQLitePool conn) = SQLitePool connTag conn + +---- + +-- | Transforms a native Postgres SQL error into an abstracted error type 'PostgresSqlError'. +-- +-- This is an internal function. Don't use it in the BL code. 
+toPostgresSqlError :: PGS.SqlError -> PostgresSqlError +toPostgresSqlError e = PostgresSqlError + { sqlState = decodeUtf8 $ PGS.sqlState e + , sqlExecStatus = toPostgresExecStatus $ PGS.sqlExecStatus e + , sqlErrorMsg = decodeUtf8 $ PGS.sqlErrorMsg e + , sqlErrorDetail = decodeUtf8 $ PGS.sqlErrorDetail e + , sqlErrorHint = decodeUtf8 $ PGS.sqlErrorHint e + } + +-- | Transforms a native Postgres SQL error into the general DB error type 'DBError'. +-- +-- This is an internal function. Don't use it in the BL code. +postgresErrorToDbError :: Text -> PGS.SqlError -> DBError +postgresErrorToDbError descr e = DBError (SQLError $ PostgresError $ toPostgresSqlError e) descr + +-- | Transforms a native Postgres exec status into an abstracted type 'PostgresExecStatus'. +-- +-- This is an internal function. Don't use it in the BL code. +toPostgresExecStatus :: PGS.ExecStatus -> PostgresExecStatus +toPostgresExecStatus PGS.EmptyQuery = PostgresEmptyQuery +toPostgresExecStatus PGS.CommandOk = PostgresCommandOk +toPostgresExecStatus PGS.TuplesOk = PostgresTuplesOk +toPostgresExecStatus PGS.CopyOut = PostgresCopyOut +toPostgresExecStatus PGS.CopyIn = PostgresCopyIn +toPostgresExecStatus PGS.CopyBoth = PostgresCopyBoth +toPostgresExecStatus PGS.BadResponse = PostgresBadResponse +toPostgresExecStatus PGS.NonfatalError = PostgresNonfatalError +toPostgresExecStatus PGS.FatalError = PostgresFatalError +toPostgresExecStatus PGS.SingleTuple = PostgresSingleTuple + +-- | Transforms a native SQLite SQL error type into an abstracted error type 'SqliteError'. +-- +-- This is an internal function. Don't use it in the BL code. +toSqliteError :: SQLite.Error -> SqliteError +toSqliteError SQLite.ErrorOK = SqliteErrorOK +toSqliteError SQLite.ErrorError = SqliteErrorError +toSqliteError SQLite.ErrorInternal = SqliteErrorInternal +toSqliteError SQLite.ErrorPermission = SqliteErrorPermission +toSqliteError SQLite.ErrorAbort = SqliteErrorAbort +toSqliteError SQLite.ErrorBusy = SqliteErrorBusy +toSqliteError SQLite.ErrorLocked = SqliteErrorLocked +toSqliteError SQLite.ErrorNoMemory = SqliteErrorNoMemory +toSqliteError SQLite.ErrorReadOnly = SqliteErrorReadOnly +toSqliteError SQLite.ErrorInterrupt = SqliteErrorInterrupt +toSqliteError SQLite.ErrorIO = SqliteErrorIO +toSqliteError SQLite.ErrorCorrupt = SqliteErrorCorrupt +toSqliteError SQLite.ErrorNotFound = SqliteErrorNotFound +toSqliteError SQLite.ErrorFull = SqliteErrorFull +toSqliteError SQLite.ErrorCan'tOpen = SqliteErrorCantOpen +toSqliteError SQLite.ErrorProtocol = SqliteErrorProtocol +toSqliteError SQLite.ErrorEmpty = SqliteErrorEmpty +toSqliteError SQLite.ErrorSchema = SqliteErrorSchema +toSqliteError SQLite.ErrorTooBig = SqliteErrorTooBig +toSqliteError SQLite.ErrorConstraint = SqliteErrorConstraint +toSqliteError SQLite.ErrorMismatch = SqliteErrorMismatch +toSqliteError SQLite.ErrorMisuse = SqliteErrorMisuse +toSqliteError SQLite.ErrorNoLargeFileSupport = SqliteErrorNoLargeFileSupport +toSqliteError SQLite.ErrorAuthorization = SqliteErrorAuthorization +toSqliteError SQLite.ErrorFormat = SqliteErrorFormat +toSqliteError SQLite.ErrorRange = SqliteErrorRange +toSqliteError SQLite.ErrorNotADatabase = SqliteErrorNotADatabase +toSqliteError SQLite.ErrorNotice = SqliteErrorNotice +toSqliteError SQLite.ErrorWarning = SqliteErrorWarning +toSqliteError SQLite.ErrorRow = SqliteErrorRow +toSqliteError SQLite.ErrorDone = SqliteErrorDone + +-- | Transforms a native SQLite SQL error into an abstracted error type 'SqliteSqlError'. +-- +-- This is an internal function. 
Don't use it in the BL code. +toSqliteSqlError :: SQLite.SQLError -> SqliteSqlError +toSqliteSqlError sqlErr = SqliteSqlError + { sqlError = toSqliteError $ SQLite.sqlError sqlErr + , sqlErrorDetails = SQLite.sqlErrorDetails sqlErr + , sqlErrorContext = SQLite.sqlErrorContext sqlErr + } + +-- | Transforms a native SQLite SQL error into the general DB error type 'DBError'. +-- +-- This is an internal function. Don't use it in the BL code. +sqliteErrorToDbError :: Text -> SQLite.SQLError -> DBError +sqliteErrorToDbError descr e = DBError (SQLError $ SqliteError $ toSqliteSqlError e) descr + +-- | Transforms a native MySQL error into an abstracted error type 'MysqlSqlError'. +-- +-- This is an internal function. Don't use it in the BL code. +toMysqlSqlError :: MySQL.ERR -> MysqlSqlError +toMysqlSqlError err = MysqlSqlError { errCode = MySQL.errCode err, + errMsg = decodeUtf8 . MySQL.errMsg $ err } + +-- | Transforms a native MySQL error into the general DB error type 'DBError'. +-- +-- This is an internal function. Don't use it in the BL code. +mysqlErrorToDbError :: Text -> MySQL.ERRException -> DBError +mysqlErrorToDbError desc (MySQL.ERRException e) = + DBError (SQLError . MysqlError . toMysqlSqlError $ e) desc diff --git a/src/EulerHS/Core/Types/Exceptions.hs b/src/EulerHS/Core/Types/Exceptions.hs new file mode 100644 index 00000000..fc0757ba --- /dev/null +++ b/src/EulerHS/Core/Types/Exceptions.hs @@ -0,0 +1,36 @@ +{-# LANGUAGE DeriveAnyClass #-} + +{- | +Module : EulerHS.Core.Types.Exceptions +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module contains some exceptions and error types used in the framework. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Types' instead. +-} + +module EulerHS.Core.Types.Exceptions + ( -- * Exceptions + HttpManagerNotFound(..) + , AwaitingError (..) + ) where + +import EulerHS.Prelude + +-- | Exception type for indicating that a named Http manager is not set. +data HttpManagerNotFound = HttpManagerNotFound String + deriving (Show, Eq, Exception) + + +-- | This error may be returned on some problem with an awaitable value. +data AwaitingError + = AwaitingTimeout + -- ^ Awaiting period has expired. + | ForkedFlowError Text + -- ^ A forked flow has finished with this error. + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) diff --git a/src/EulerHS/Core/Types/HttpAPI.hs b/src/EulerHS/Core/Types/HttpAPI.hs new file mode 100644 index 00000000..9b170105 --- /dev/null +++ b/src/EulerHS/Core/Types/HttpAPI.hs @@ -0,0 +1,223 @@ +{-# LANGUAGE DeriveAnyClass #-} + +{- | +Module : EulerHS.Core.Types.HttpAPI +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module contains types and helper functions for the low-level +client HTTP subsystem (the `CallHTTP` method; not servant-based `CallServantAPI` method). + +This module is internal and should not imported in the projects. +Import 'EulerHS.Types' instead. +-} + +module EulerHS.Core.Types.HttpAPI + ( + -- * Core Logger + -- ** Types + HTTPRequest(..) + , HTTPResponse(..) + , HTTPMethod(..) + , HTTPCert(..) 
+ , HTTPRequestResponse(HTTPRequestResponse) + , HTTPIOException(HTTPIOException) + , defaultRequest + , defaultTimeout + , extractBody + , httpGet + , httpPut + , httpPost + , httpDelete + , httpHead + , withHeader + , withOptionalHeader + , withBody + , withTimeout + , withRedirects + ) where + +import EulerHS.Prelude hiding ((.=), ord) +import qualified EulerHS.Core.Types.BinaryString as T + +import qualified Data.ByteString as B +import qualified Data.ByteString.Lazy as LB +import Data.ByteString.Lazy.Builder (Builder) +import qualified Data.ByteString.Lazy.Builder as Builder +import qualified Data.Char as Char +import qualified Data.Map as Map +import Data.String.Conversions (convertString) +import qualified Data.Text.Encoding as Text + +data HTTPRequest + = HTTPRequest + { getRequestMethod :: HTTPMethod + , getRequestHeaders :: Map.Map HeaderName HeaderValue + , getRequestBody :: Maybe T.LBinaryString + , getRequestURL :: Text + , getRequestTimeout :: Maybe Int -- ^ timeout, in microseconds + , getRequestRedirects :: Maybe Int + } + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +data HTTPResponse + = HTTPResponse + { getResponseBody :: T.LBinaryString + , getResponseCode :: Int + , getResponseHeaders :: Map.Map HeaderName HeaderValue + , getResponseStatus :: Text + } + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +data HTTPCert + = HTTPCert + { getCert :: B.ByteString + , getCertChain :: [B.ByteString] + , getCertHost :: String + , getCertKey :: B.ByteString + } + +data HTTPMethod + = Get + | Put + | Post + | Delete + | Head + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +type HeaderName = Text +type HeaderValue = Text + +data HTTPRequestResponse + = HTTPRequestResponse + { request :: HTTPRequest + , response :: HTTPResponse + } + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Used when some IO (or other) exception ocurred during a request +data HTTPIOException + = HTTPIOException + { errorMessage :: Text + , request :: HTTPRequest + } + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + + +-------------------------------------------------------------------------- +-- Convenience functions +-------------------------------------------------------------------------- + +-- | HTTP GET request. 
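+--
+-- A request built this way is usually refined further with the @with*@
+-- helpers defined below, for example (hypothetical endpoint and values):
+--
+-- > httpGet "https://example.com/health"
+-- >   & withHeader "Accept" "application/json"
+-- >   & withTimeout 5000000
+--
+-- In its simplest form: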
+-- +-- > httpGet "https://google.com" +httpGet :: Text -> HTTPRequest +httpGet = defaultRequest Get + +httpPut :: Text -> HTTPRequest +httpPut = defaultRequest Put + +httpPost :: Text -> HTTPRequest +httpPost = defaultRequest Post + +httpDelete :: Text -> HTTPRequest +httpDelete = defaultRequest Delete + +httpHead :: Text -> HTTPRequest +httpHead = defaultRequest Head + +defaultRequest :: HTTPMethod -> Text -> HTTPRequest +defaultRequest method url + = HTTPRequest + { getRequestMethod = method + , getRequestHeaders = Map.empty + , getRequestBody = Nothing + , getRequestURL = url + , getRequestTimeout = Just defaultTimeout + , getRequestRedirects = Just 10 + } + +defaultTimeout :: Int +defaultTimeout = 9000000 + +-- | Add a header to an HTTPRequest +-- +-- > httpGet "https://google.com" +-- > & withHeader "Content-Type" "application/json" +-- +withHeader :: HeaderName -> HeaderValue -> HTTPRequest -> HTTPRequest +withHeader headerName headerValue (request@HTTPRequest {getRequestHeaders}) = + let headers = Map.insert headerName headerValue getRequestHeaders + in request { getRequestHeaders = headers } + +withOptionalHeader :: HeaderName -> Maybe HeaderValue -> HTTPRequest -> HTTPRequest +withOptionalHeader headerName (Just headerValue) = withHeader headerName headerValue +withOptionalHeader _ Nothing = id + +-- | Sets timeout, in microseconds +withTimeout :: Int -> HTTPRequest -> HTTPRequest +withTimeout timeout request = + request {getRequestTimeout = Just timeout} + +withRedirects :: Int -> HTTPRequest -> HTTPRequest +withRedirects redirects request = + request {getRequestRedirects = Just redirects} + +withBody :: [(Text, Text)] -> HTTPRequest -> HTTPRequest +withBody pairs request = request {getRequestBody = Just body} + where + body = T.LBinaryString $ formUrlEncode pairs + +extractBody :: HTTPResponse -> Text +extractBody HTTPResponse{getResponseBody} = decodeUtf8With lenientDecode $ convertString getResponseBody + +formUrlEncode :: [(Text, Text)] -> LB.ByteString +formUrlEncode = Builder.toLazyByteString . mconcat . intersperse amp . map encodePair + where + equals = Builder.word8 (ord '=') + amp = Builder.word8 (ord '&') + percent = Builder.word8 (ord '%') + plus = Builder.word8 (ord '+') + + encodePair :: (Text, Text) -> Builder + encodePair (key, value) = encode key <> equals <> encode value + + encode :: Text -> Builder + encode = escape . Text.encodeUtf8 + + escape :: ByteString -> Builder + escape = mconcat . map f . B.unpack + where + f :: Word8 -> Builder + f c + | p c = Builder.word8 c + | c == ord ' ' = plus + | otherwise = percentEncode c + + p :: Word8 -> Bool + p c = + ord 'a' <= c && c <= ord 'z' + || c == ord '_' + || c == ord '*' + || c == ord '-' + || c == ord '.' + || ord '0' <= c && c <= ord '9' + || ord 'A' <= c && c <= ord 'Z' + + ord :: Char -> Word8 + ord = fromIntegral . 
Char.ord + + percentEncode :: Word8 -> Builder + percentEncode n = percent <> hex hi <> hex lo + where + (hi, lo) = n `divMod` 16 + + hex :: Word8 -> Builder + hex n = Builder.word8 (offset + n) + where + offset + | n < 10 = 48 + | otherwise = 55 diff --git a/src/EulerHS/Core/Types/KVDB.hs b/src/EulerHS/Core/Types/KVDB.hs new file mode 100644 index 00000000..b2e5ee3d --- /dev/null +++ b/src/EulerHS/Core/Types/KVDB.hs @@ -0,0 +1,303 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE RecordWildCards #-} + +{- | +Module : EulerHS.Core.Types.KVDB +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Types and helper functions of the KV DB subsystem. + +Currently, highly resembles the `hedis` library interface to Redis. +Other KV DBs are not yet supported. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Types' instead. +-} + +module EulerHS.Core.Types.KVDB + ( + -- * Core KVDB + -- ** Types + KVDBKey + , KVDBConn(..) + , KVDBAnswer + , KVDBReply + , TxResult(..) + , KVDBStatus + , KVDBStatusF(..) + , KVDBReplyF(..) + , NativeKVDBConn (..) + , KVDBConfig (..) + , RedisConfig (..) + , KVDBError (..) + -- ** Methods + , defaultKVDBConnConfig + , exceptionToKVDBReply + , fromRdStatus + , fromRdTxResult + , hedisReplyToKVDBReply + , mkKVDBConfig + , mkKVDBClusterConfig + , mkRedisConn + , nativeToKVDB + , kvdbToNative + ) where + +import qualified Data.Aeson as A +import Data.Time (NominalDiffTime) +import qualified Database.Redis as RD +import EulerHS.Core.Types.Serializable +import EulerHS.Prelude +import qualified GHC.Generics as G + +-- | Alias for denoting a raw key type. +type KVDBKey = Text + +-- | Key-value database connection +data KVDBConn + = Mocked Text -- TODO swap Text with ConnTag type + -- ^ Mocked connection. Used for ART and tests. + | Redis Text RD.Connection + -- ^ Real Redis connection. + deriving (Generic) + +---------------------------------------------------------------------- + +-- | Error that may occur when initializing / deinitializing a KV DB connection. +data KVDBError + = KVDBConnectionAlreadyExists + -- ^ Connection for a particular config already exist. + | KVDBConnectionDoesNotExist + -- ^ Connection for a particular config is not found. + | KVDBConnectionFailed + -- ^ Connection procedure failed. + deriving (Eq, Show, Generic, ToJSON, FromJSON) + +-- | A unified parametrizable type for return values of the KV DB subsystem. +-- +-- Mostly duplicates the @hedis@ library interface. +data KVDBReplyF bs + = SingleLine bs + | Err bs + | Integer Integer + | Bulk (Maybe bs) + | MultiBulk (Maybe [KVDBReplyF bs]) + | ExceptionMessage String + | KVDBError KVDBError String + deriving (Eq, Show, Generic, Functor) + +instance ToJSON (KVDBReplyF ByteStringS) +instance FromJSON (KVDBReplyF ByteStringS) + +-- | A unified type for return values of the KV DB subsystem. +-- +-- Mostly duplicates the @hedis@ library interface. +type KVDBReply = KVDBReplyF ByteString + +instance ToJSON KVDBReply where + toJSON = toJSON . fromKVDBReply + +fromKVDBReply :: KVDBReply -> KVDBReplyF ByteStringS +fromKVDBReply = fmap fromByteString + +instance FromJSON KVDBReply where + parseJSON = fmap toKVDBReply . 
parseJSON + +toKVDBReply :: KVDBReplyF ByteStringS -> KVDBReply +toKVDBReply = fmap toByteString + +---------------------------------------------------------------------- + +-- | Status that may be returned by the methods of the KVDB language. +data KVDBStatusF bs + = Ok + | Pong + | Status bs + deriving (Eq, Show, Generic, Functor) + +instance ToJSON (KVDBStatusF ByteStringS) +instance FromJSON (KVDBStatusF ByteStringS) + +-- | Status that may be returned by the methods of the KVDB language. +type KVDBStatus = KVDBStatusF ByteString + +instance ToJSON KVDBStatus where + toJSON = toJSON . fromStatus + +fromStatus :: KVDBStatus -> KVDBStatusF ByteStringS +fromStatus Ok = Ok +fromStatus Pong = Pong +fromStatus (Status bs) = Status $ fromByteString bs + +instance FromJSON KVDBStatus where + parseJSON = fmap toStatus . parseJSON + +toStatus :: KVDBStatusF ByteStringS -> KVDBStatus +toStatus Ok = Ok +toStatus Pong = Pong +toStatus (Status bs) = Status $ toByteString bs + +-- | Result of a transactional evaluation of KV DB methods. +data TxResult a + = TxSuccess a + -- ^ Transaction is successful + | TxAborted + -- ^ Transaction is aborted + | TxError String + -- ^ Some error happened + deriving (Eq, Show, Functor, Generic, G.Generic1, A.ToJSON1, A.FromJSON1, ToJSON, FromJSON) + +-- | A type that contains either a valid result or a detailed info +-- about the response from the KV DB subsystem. +type KVDBAnswer = Either KVDBReply + +-- | A config type used to create a connection with a KV DB. +data KVDBConfig + = KVDBConfig Text RedisConfig + -- ^ Regular Redis config + | KVDBClusterConfig Text RedisConfig + -- ^ KV DB config for a Redis cluster + | KVDBMockedConfig Text + -- ^ Mocked config. Used in ART and tests. + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Redis config +data RedisConfig = RedisConfig + { connectHost :: String + -- ^ Host + , connectPort :: Word16 + -- ^ Port + , connectAuth :: Maybe Text + -- ^ Auth credentials + , connectDatabase :: Integer + -- ^ Database number + , connectMaxConnections :: Int + -- ^ Max number of connections + , connectMaxIdleTime :: NominalDiffTime + -- ^ Max connection idle time + , connectTimeout :: Maybe NominalDiffTime + -- ^ Timeout for a connection + } deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Default Redis config. +-- connectHost = "127.0.0.1" +-- connectPort = 6379 +-- connectAuth = Nothing +-- connectDatabase = 0 +-- connectMaxConnections = 50 +-- connectMaxIdleTime = 30 +-- connectTimeout = Nothing + +defaultKVDBConnConfig :: RedisConfig +defaultKVDBConnConfig = RedisConfig + { connectHost = "127.0.0.1" + , connectPort = 6379 + , connectAuth = Nothing + , connectDatabase = 0 + , connectMaxConnections = 50 + , connectMaxIdleTime = 30 + , connectTimeout = Nothing + } + +-- | Create configuration KVDBConfig for Redis +mkKVDBConfig :: Text -> RedisConfig -> KVDBConfig +mkKVDBConfig = KVDBConfig + +-- | Create cluster configuration KVDBConfig for Redis +mkKVDBClusterConfig :: Text -> RedisConfig -> KVDBConfig +mkKVDBClusterConfig = KVDBClusterConfig + +-- * Internal types and functions + +-- | Native connection of a particular KV DB. +-- +-- Internal type, should not be used in the BL. +data NativeKVDBConn + = NativeKVDB (RD.Connection) + | NativeKVDBMockedConn + +-- | Transform 'KVDBConn' to 'NativeKVDBConn' +-- +-- Internal function, should not be used in the BL. 
+kvdbToNative :: KVDBConn -> NativeKVDBConn +kvdbToNative (Mocked _) = NativeKVDBMockedConn +kvdbToNative (Redis _ conn) = NativeKVDB conn + +-- | Transforms 'NativeKVDBConn' to 'KVDBConn' +-- +-- Internal function, should not be used in the BL. +nativeToKVDB :: Text -> NativeKVDBConn -> KVDBConn +nativeToKVDB connTag NativeKVDBMockedConn = Mocked connTag +nativeToKVDB connTag (NativeKVDB conn) = Redis connTag conn + +-- | Transforms a Redis-related @Status@ to an abstracted 'KVDBStatus' +-- +-- Internal function, should not be used in the BL. +fromRdStatus :: RD.Status -> KVDBStatus +fromRdStatus RD.Ok = Ok +fromRdStatus RD.Pong = Pong +fromRdStatus (RD.Status bs) = Status $ bs + +-- | Transforms a Redis-related @TxResult@ to an abstracted 'TxResult' +-- +-- Internal function, should not be used in the BL. +fromRdTxResult :: RD.TxResult a -> TxResult a +fromRdTxResult (RD.TxSuccess a) = TxSuccess a +fromRdTxResult RD.TxAborted = TxAborted +fromRdTxResult (RD.TxError s) = TxError s + +-- | Transforms a Redis-related @Reply@ to an abstracted 'KVDBReply' +-- +-- Internal function, should not be used in the BL. +hedisReplyToKVDBReply :: RD.Reply -> KVDBReply +hedisReplyToKVDBReply (RD.SingleLine s) = SingleLine s +hedisReplyToKVDBReply (RD.Error s) = Err s +hedisReplyToKVDBReply (RD.Integer s) = Integer s +hedisReplyToKVDBReply (RD.Bulk s) = Bulk s +hedisReplyToKVDBReply (RD.MultiBulk s) = MultiBulk (map (hedisReplyToKVDBReply <$>) s) + +-- | Wraps an exception into 'KVDBReply' +-- +-- Internal function, should not be used in the BL. +exceptionToKVDBReply :: Exception e => e -> KVDBReply +exceptionToKVDBReply e = ExceptionMessage $ displayException e + +-- | Transform `RedisConfig` to the Redis-related @ConnectInfo@. +-- +-- Internal function, should not be used in the BL. +toRedisConnectInfo :: RedisConfig -> RD.ConnectInfo +toRedisConnectInfo RedisConfig {..} = RD.ConnInfo + { RD.connectHost = connectHost + , RD.connectPort = RD.PortNumber $ toEnum $ fromEnum connectPort + , RD.connectAuth = encodeUtf8 <$> connectAuth + , RD.connectDatabase = connectDatabase + , RD.connectMaxConnections = connectMaxConnections + , RD.connectMaxIdleTime = connectMaxIdleTime + , RD.connectTimeout = connectTimeout + , RD.connectTLSParams = Nothing + } + +-- | Create 'KVDBConn' from 'KVDBConfig' +-- +-- Internal function, should not be used in the BL. +mkRedisConn :: KVDBConfig -> IO KVDBConn +mkRedisConn (KVDBMockedConfig connTag) = pure $ Mocked connTag +mkRedisConn (KVDBConfig connTag cfg) = Redis connTag <$> createRedisConn cfg +mkRedisConn (KVDBClusterConfig connTag cfg) = Redis connTag <$> createClusterRedisConn cfg + +-- | Connect with the given config to the database. +-- +-- Internal function, should not be used in the BL. +createRedisConn :: RedisConfig -> IO RD.Connection +createRedisConn = RD.connect . toRedisConnectInfo + +-- | Connect with the given cluster config to the database. +-- +-- Internal function, should not be used in the BL. +createClusterRedisConn :: RedisConfig -> IO RD.Connection +createClusterRedisConn = RD.connectCluster . 
toRedisConnectInfo diff --git a/src/EulerHS/Core/Types/Logger.hs b/src/EulerHS/Core/Types/Logger.hs new file mode 100644 index 00000000..4b69f7df --- /dev/null +++ b/src/EulerHS/Core/Types/Logger.hs @@ -0,0 +1,198 @@ +{-# LANGUAGE DeriveAnyClass #-} + +{- | +Module : EulerHS.Core.Types.Logger +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Types and helper functions of the Logging subsystem. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Types' instead. +-} + +module EulerHS.Core.Types.Logger + ( + -- * Core Logger + -- ** Types + LogLevel(..) + , BufferSize + , MessageFormatter + , FlowFormatter + , LoggerConfig(..) + , Message + , Tag + , PendingMsg(..) + , LogEntry (..) + , Log + -- ** defaults + , defaultLoggerConfig + , defaultMessageFormatter + , defaultFlowFormatter + ) where + +import EulerHS.Prelude + +import qualified EulerHS.Core.Types.Common as T + +-- | Logging level. +data LogLevel + = Debug + | Info + | Warning + | Error + deriving (Generic, Eq, Ord, Show, Read, Enum, ToJSON, FromJSON) + +-- | Message type +type Message = Text + +-- | Tag that accompanies every call of `logMessage`, `logInfo` and other functions. +type Tag = Text + +-- | The number of a log message in the this run. +-- +-- It's 0 for a fresh `LoggerRuntime`, and increases on each logging call. +type MessageNumber = Int + +-- | Buffer size of a logger. Can be important in some cases. +type BufferSize = Int + +{- | Formatter of a message. + +Can be used to format a particular logging message (wrapped into `PendingMsg`). + +The simplest formatter is just `show`. +@ +import qualified EulerHS.Types as T + +simplestFormatter :: T.MessageFormatter +simplestFormatter = show +@ +-} + +type MessageFormatter = PendingMsg -> String + +{- | A flow-specific message formatter. + +It's a callback that should return a `MessageFormatter` before a message +goes to the underlying logging library. + +In the simplest case, you return the same message formatter for any flow. +The @FlowGUID@ argument then has no effect. + +@ +-- flowFormatter :: T.FlowFormatter +flowFormatter :: Maybe T.FlowGUID -> IO T.MessageFormatter +flowFormatter _ = pure simplestFormatter +@ + +In fact, you can setup your own message formatter for each new flow. +To do this, you define a callback which is able to track your flows +and return a particular message formatter. + +This logic should be thread-safe. + +@ +type FlowsFormatters = MVar (Map T.FlowGUID T.MessageFormatter) + +flowsFormatter + :: FlowsFormatters + -> Maybe T.FlowGUID + -> IO T.MessageFormatter +flowsFormatter flowFsVar Nothing = pure simplestFormatter +flowsFormatter flowFsVar (Just flowGuid) = do + flowFs <- readMVar flowFsVar + case Map.lookup flowGuid flowFS of + Nothing -> pure simplestFormatter -- You should decide on what to return here + Just formatter -> pure formatter +@ + +You can update your formatters map right before and after running a flow. +There is a special function `runFlow'` to run a flow with a particular GUID. + +GUID string can be anything unique across your flows. +-} + +type FlowFormatter = Maybe T.FlowGUID -> IO MessageFormatter + +-- | Config of a logger +data LoggerConfig + = LoggerConfig + { _isAsync :: Bool + -- ^ Is it async. + -- + -- N.B. The async logger feature is not well-tested. 
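+
+  -- A config is usually obtained by overriding 'defaultLoggerConfig' (defined
+  -- below), e.g. @defaultLoggerConfig { _logToFile = True, _logFilePath = "app.log" }@.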
+ , _logLevel :: LogLevel + -- ^ System log level + , _logFilePath :: FilePath + -- ^ Log file path if a file logger is enabled + , _logToConsole :: Bool + -- ^ Enable / disable a console logging. + , _logToFile :: Bool + -- ^ Enable / disable a file logging + , _maxQueueSize :: Word + -- ^ Allows to configure a logging queue. + , _logRawSql :: Bool + -- ^ Enable / disable logging of SQL queries in the SQL DB subsystem. + -- + -- SQL queries will be written as Debug messages. + -- + -- N.B. Enabling this feature slows down the performance of the SQL DB subsystem. + } deriving (Generic, Show, Read) + +-- | A message to send to the underlying logger subsystem. +-- +-- Can be formatted with `MessageFormatter`. +data PendingMsg = PendingMsg + !(Maybe T.FlowGUID) + !LogLevel + !Tag + !Message + !MessageNumber + deriving (Show) + +{- | Default message formatter: +@ +defaultMessageFormatter (PendingMsg _ lvl tag msg _) = + "[" +|| lvl ||+ "] <" +| tag |+ "> " +| msg |+ "" +@ +-} +defaultMessageFormatter :: MessageFormatter +defaultMessageFormatter (PendingMsg _ lvl tag msg _) = + "[" +|| lvl ||+ "] <" +| tag |+ "> " +| msg |+ "" + +{- | Default logger config: + isAsync = False + logLevel = Debug + logFilePath = "" + logToConsole = True + logToFile = False + maxQueueSize = 1000 + logRawSql = True +-} +defaultLoggerConfig :: LoggerConfig +defaultLoggerConfig = LoggerConfig + { _isAsync = False + , _logLevel = Debug + , _logFilePath = "" + , _logToConsole = True + , _logToFile = False + , _maxQueueSize = 1000 + , _logRawSql = True + } + +-- | Default flow formatter. +-- Ignores the flow GUID and just returns `defaultMessageFormatter`. +defaultFlowFormatter :: FlowFormatter +defaultFlowFormatter _ = pure defaultMessageFormatter + +-- * Internal types + +-- | Service type for tracking log entries +data LogEntry = LogEntry !LogLevel !Message + +-- | Service type for tracking log entries +type Log = [LogEntry] diff --git a/src/EulerHS/Core/Types/MySQL.hs b/src/EulerHS/Core/Types/MySQL.hs new file mode 100644 index 00000000..91303e6b --- /dev/null +++ b/src/EulerHS/Core/Types/MySQL.hs @@ -0,0 +1,144 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE DeriveAnyClass #-} + +{- | +Module : EulerHS.Core.Types.MySQL +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Types and helper functions to wrap a MySQL-related stuff. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Types' instead. +-} + +module EulerHS.Core.Types.MySQL + ( + -- * Core MySQL + -- ** Types + MySQLConfig(..) + , MySqlOption(..) 
+ -- ** Methods + , createMySQLConn + , closeMySQLConn + -- ** Defaults + , defaultMySQLConfig + ) where + +import Prelude +import Data.Word (Word16) +import Data.Aeson (ToJSON, FromJSON) +import GHC.Generics (Generic) +import Database.MySQL.Base (MySQLConn, close, ConnectInfo (..), connect, defaultConnectInfoMB4) +import Data.ByteString.UTF8 (fromString) + +-- | MySQL connection protocol +data MySqlProtocol + = TCP + | Socket + | Pipe + | Memory + deriving stock (Show, Eq, Ord, Enum, Bounded, Generic) + deriving anyclass (ToJSON, FromJSON) + +-- | MySQL options +data MySqlOption + = ConnectTimeout Word + | Compress + | NamedPipe + -- | InitCommand ByteString -- TODO + | ReadDefaultFile FilePath + -- | ReadDefaultGroup ByteString -- TODO + | CharsetDir FilePath + | CharsetName String + | LocalInFile Bool + | Protocol MySqlProtocol + -- | SharedMemoryBaseName ByteString -- TODO + | ReadTimeout Word + | WriteTimeout Word + -- | UseRemoteConnection + -- | UseEmbeddedConnection + -- | GuessConnection + -- | ClientIP ByteString + | SecureAuth Bool + | ReportDataTruncation Bool + | Reconnect Bool + -- | SSLVerifyServerCert Bool + | FoundRows + | IgnoreSIGPIPE + | IgnoreSpace + | Interactive + | LocalFiles + | MultiResults + | MultiStatements + | NoSchema + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +-- | Auth credentials +data SSLInfo = SSLInfo + { sslKey :: FilePath + , sslCert :: FilePath + , sslCA :: FilePath + , sslCAPath :: FilePath + , sslCiphers :: String + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +-- | MySQL config +data MySQLConfig = MySQLConfig + { connectHost :: String + , connectPort :: Word16 + , connectUser :: String + , connectPassword :: String + , connectDatabase :: String + , connectOptions :: [MySqlOption] + , connectPath :: FilePath + , connectSSL :: Maybe SSLInfo + } + deriving stock (Show, Eq, Ord, Generic) + deriving anyclass (ToJSON, FromJSON) + +{- | Default MySQL config. + +connectHost = "127.0.0.1" +connectPort = 3306 +connectUser = "root" +connectPassword = "" +connectDatabase = "test" +connectOptions = [CharsetName "utf8"] +connectPath = "" +connectSSL = Nothing +-} +defaultMySQLConfig :: MySQLConfig +defaultMySQLConfig = MySQLConfig + { connectHost = "127.0.0.1" + , connectPort = 3306 + , connectUser = "root" + , connectPassword = "" + , connectDatabase = "test" + , connectOptions = [CharsetName "utf8"] + , connectPath = "" + , connectSSL = Nothing + } + +-- | Connect with the given config to the database. +createMySQLConn :: MySQLConfig -> IO MySQLConn +createMySQLConn conf = do + let dbConf = ConnectInfo + { ciHost = connectHost conf + , ciPort = fromIntegral . connectPort $ conf + , ciDatabase = fromString . connectDatabase $ conf + , ciUser = fromString . connectUser $ conf + , ciPassword = fromString . connectPassword $ conf + , ciCharset = ciCharset defaultConnectInfoMB4 + } + connect dbConf + +-- | Close the given connection. +closeMySQLConn :: MySQLConn -> IO () +closeMySQLConn = close diff --git a/src/EulerHS/Core/Types/Options.hs b/src/EulerHS/Core/Types/Options.hs new file mode 100644 index 00000000..3afd62d5 --- /dev/null +++ b/src/EulerHS/Core/Types/Options.hs @@ -0,0 +1,39 @@ +{- | +Module : EulerHS.Core.Types.Options +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Options can be used as a stateful mutable KV storage. 
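+
+For example, a key type can be tied to its value type roughly like this
+(a hypothetical sketch; any key with ToJSON/FromJSON instances works):
+
+@
+data ApiToken = ApiToken
+  deriving (Generic, Show, ToJSON, FromJSON)
+
+instance OptionEntity ApiToken Text
+@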
+ +One should be careful in mutating it from different threads. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Types' instead. +-} + +module EulerHS.Core.Types.Options + ( + -- * Options + -- | Determine the relationship between key & value + OptionEntity + -- * Make option key + , mkOptionKey + ) where + +import Data.Aeson (encode) +import qualified Data.ByteString.Lazy as BSL +import EulerHS.Prelude +import Type.Reflection (typeRep) + +-- | This type class helps to tie a key to a value. +-- +-- You can't have different values for the same key. +class (Typeable k, FromJSON k, ToJSON k, FromJSON v, ToJSON v) + => OptionEntity k v | k -> v + +-- | Converts a value-like key into a string. +mkOptionKey :: forall k v. OptionEntity k v => k -> Text +mkOptionKey k = show (typeRep @k) <> (decodeUtf8 $ BSL.toStrict $ encode k) diff --git a/src/EulerHS/Core/Types/Playback.hs b/src/EulerHS/Core/Types/Playback.hs new file mode 100644 index 00000000..7da16f86 --- /dev/null +++ b/src/EulerHS/Core/Types/Playback.hs @@ -0,0 +1,246 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE ScopedTypeVariables #-} + +{- | +Module : EulerHS.Core.Types.Playback +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Types and helper functions of the ART subsystem. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Types' instead. +-} + +module EulerHS.Core.Types.Playback + ( + -- * Core Playback + -- ** Types + RRItem(..) + , MockedResult(..) + , RecordingEntry(..) + , RecordingEntries + , GlobalReplayingMode(..) + , EntryReplayingMode(..) + , PlaybackErrorType(..) + , PlaybackError(..) + , ReplayingException(..) + , ResultRecording(..) + , Recording(..) + , ReplayErrors(..) + , ResultReplayError + , RecorderRuntime(..) + , PlayerRuntime(..) + , RunMode (..) + -- ** Methods + , awaitRecording + , awaitErrors + , flattenErrors + , note + , encodeToStr + , decodeFromStr + , showRecEntry + ) where + + +import qualified Data.Aeson as A +import qualified Data.ByteString.Char8 as BS +import qualified Data.ByteString.Lazy as BSL +import qualified Data.Map as Map +import qualified EulerHS.Core.Types.Serializable as S +import EulerHS.Prelude hiding (note) +import qualified Prelude as P (show) + + +-- Represents ART single entry, saved in recordings +data RecordingEntry = RecordingEntry + { _entryIndex :: Int + -- ^ Index in entries array + , _entryReplayMode :: EntryReplayingMode + -- ^ entry replay mode, could be one of 'Normal' 'NoVerify' 'NoMock' + , _entryName :: String + -- ^ name of the method that this entry represents + , _entryPayload :: A.Value + -- ^ method result value + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +-- | Shows RecordingEntry with decoded _entryPayload +showRecEntry :: forall a. 
(FromJSON a, Show a) => RecordingEntry -> String +showRecEntry RecordingEntry{..} = + "RecordingEntry {_entryIndex = " + <> show _entryIndex + <> ", _entryReplayMode = " + <> show _entryReplayMode + <> ", _entryName = " + <> _entryName + <> ", _entryPayload = \n" + <> payload <> "\n}" + where + (payload :: String) = case A.fromJSON @a _entryPayload of + A.Success a -> P.show a + A.Error err -> err + +-- | Represents method entries from the flow +type RecordingEntries = Vector RecordingEntry + +-- | Global replaying mode to be applied to all entries +data GlobalReplayingMode = GlobalNormal | GlobalNoVerify | GlobalNoMocking | GlobalSkip + +-- | Entry individual replaying mode settings +data EntryReplayingMode + = Normal + -- ^ (default) Verifying enabled. Mocking enabled. + | NoVerify + -- ^ Verifying disabled. Mocking enabled. + | NoMock + -- ^ Verifying disabled. Mocking disabled (real effect will be evaluated). + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + + + +class (Eq rrItem, ToJSON rrItem, FromJSON rrItem) => RRItem rrItem where + toRecordingEntry :: rrItem -> Int -> EntryReplayingMode -> RecordingEntry + toRecordingEntry rrItem idx mode = RecordingEntry idx mode (getTag (Proxy :: Proxy rrItem)) $ + toJSON rrItem + + fromRecordingEntry :: RecordingEntry -> Maybe rrItem + fromRecordingEntry (RecordingEntry _ _ _ payload) = S.fromJSONMaybe payload + + getTag :: Proxy rrItem -> String + {-# MINIMAL getTag #-} + +class RRItem rrItem => MockedResult rrItem native where + getMock :: rrItem -> Maybe native + +-- | Playback errors +data PlaybackErrorType + -- | Player successfully replayed all recordings, but the current flow has + -- some additional steps. + = UnexpectedRecordingEnd + -- | Current flow step and recorded step is different. Signals about changes in the code + -- (removed or added new steps, changed logic/behavior of a function - flow go to another branch, etc.) + | UnknownRRItem + -- | Mistakes in Encode/Decode instances, changes in types (another fields, different types of fields, etc.) + | MockDecodingFailed + -- | Results of execution of current flow step and recorded step is different. Something in code was changed + -- (output format, order of values in result, etc), compare results to see what exactly is different. + | ItemMismatch + -- | Something went wrong. + | UnknownPlaybackError + -- | Flow is forked on this step, but there are no forked flow recordings. Check difference in the code. + | ForkedFlowRecordingsMissed + -- | Flow is placed to safe flow on this step, but there are no safe flow recordings. Check difference in the code. + | SafeFlowRecordingsMissed + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON, Exception) + +-- | Playback error +data PlaybackError = PlaybackError + { errorType :: PlaybackErrorType + , errorMessage :: String + , errorFlowGUID :: Text + } deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Playback exception +data ReplayingException = ReplayingException PlaybackError + deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +instance Exception ReplayingException + + +---------------------------------------------------------------------- +-- | Final recordings from main flow, forked and safe flows. 
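+--
+-- Obtained from a live 'Recording' via 'awaitRecording' (defined below).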
+data ResultRecording = ResultRecording + { recording :: RecordingEntries + , safeRecordings :: Map Text RecordingEntries + , forkedRecordings :: Map Text ResultRecording + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +-- | Thread safe Recording representation that used in record process +data Recording = Recording + { recordingMVar :: MVar RecordingEntries + , safeRecordingsVar :: MVar (Map Text RecordingEntries) + , forkedRecordingsVar :: MVar (Map Text Recording) + } + +-- | Transform 'Recording' to 'ResultRecording' in safe way +awaitRecording :: Recording -> IO ResultRecording +awaitRecording Recording{..}= do + recording <- readMVar recordingMVar + safeRecordings <- readMVar safeRecordingsVar + forkedRecordings <- traverse awaitRecording =<< readMVar forkedRecordingsVar + pure ResultRecording{..} + +---------------------------------------------------------------------- +-- | Thread safe ReplayErrors representation used in replay process +data ReplayErrors = ReplayErrors + { errorMVar :: MVar (Maybe PlaybackError) + , safeFlowErrorsVar :: MVar (Map Text PlaybackError) + , forkedFlowErrorsVar :: MVar (Map Text ReplayErrors) + } + +-- | Final player errors representation +data ResultReplayError = ResultReplayError + { rerror :: Maybe PlaybackError + , safeError :: Map Text PlaybackError + , forkedError :: Map Text ResultReplayError + } deriving (Show, Eq, Generic, ToJSON, FromJSON) + +-- | Transform 'ReplayErrors' to 'ResultReplayError' in safe way +awaitErrors :: ReplayErrors -> IO ResultReplayError +awaitErrors ReplayErrors{..}= do + rerror <- readMVar errorMVar + safeError <- readMVar safeFlowErrorsVar + forkedError <- traverse awaitErrors =<< readMVar forkedFlowErrorsVar + pure ResultReplayError{..} + +-- | Extracts all errors from 'ResultReplayError' structure and puts them in the list +flattenErrors :: ResultReplayError -> [PlaybackError] +flattenErrors = catMaybes . flattenErrors_ + where + flattenErrors_ ResultReplayError{..} = + rerror : (pure <$> Map.elems safeError) <> (Map.elems forkedError >>= flattenErrors_) + +---------------------------------------------------------------------- + +-- | Represents ART recorder state and parameters +data RecorderRuntime = RecorderRuntime + { flowGUID :: Text + , recording :: Recording + , disableEntries :: [String] + } + +-- | Represents ART player state and parameters +data PlayerRuntime = PlayerRuntime + { resRecording :: ResultRecording + , rerror :: ReplayErrors + , stepMVar :: MVar Int + , disableVerify :: [String] + , disableMocking :: [String] + , skipEntries :: [String] + , entriesFiltered :: Bool + , flowGUID :: Text + } + +-- | ART running mode +data RunMode + = RegularMode + -- ^ Flow executed as-is, ART has no impact + | RecordingMode RecorderRuntime + -- ^ ART collecting recordings for each backend method + | ReplayingMode PlayerRuntime + -- ^ ART replaying given recordings on corresponding flow scenario + + +encodeToStr :: ToJSON a => a -> String +encodeToStr = BS.unpack . BSL.toStrict . A.encode + +decodeFromStr :: FromJSON a => String -> Maybe a +decodeFromStr = A.decode . BSL.fromStrict . BS.pack + +note :: forall a b. 
a -> Maybe b -> Either a b +note a Nothing = Left a +note _ (Just b) = Right b diff --git a/src/EulerHS/Core/Types/Postgres.hs b/src/EulerHS/Core/Types/Postgres.hs new file mode 100644 index 00000000..a89bd6c8 --- /dev/null +++ b/src/EulerHS/Core/Types/Postgres.hs @@ -0,0 +1,51 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE GeneralisedNewtypeDeriving #-} + +{- | +Module : EulerHS.Core.Types.Postgres +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Types and helper functions to wrap a Postgres-related stuff. + +This module is internal and should not imported in the projects. +Import 'EulerHS.Types' instead. +-} + +module EulerHS.Core.Types.Postgres + ( + -- * Core Postgres + -- ** Types + PostgresConfig(..) + -- ** Methods + , createPostgresConn + , closePostgresConn + ) where + +import EulerHS.Prelude + +import qualified Database.Beam.Postgres as BP + +-- | Postgres config +data PostgresConfig = PostgresConfig + { connectHost :: String + , connectPort :: Word16 + , connectUser :: String + , connectPassword :: String + , connectDatabase :: String + } deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON) + +-- | Transform PostgresConfig to the Postgres ConnectInfo. +toBeamPostgresConnectInfo :: PostgresConfig -> BP.ConnectInfo +toBeamPostgresConnectInfo (PostgresConfig {..}) = BP.ConnectInfo {..} + +-- | Connect with the given config to the database. +createPostgresConn :: PostgresConfig -> IO BP.Connection +createPostgresConn = BP.connect . toBeamPostgresConnectInfo + +-- | Close the given connection. +closePostgresConn :: BP.Connection -> IO () +closePostgresConn = BP.close diff --git a/src/EulerHS/Core/Types/Serializable.hs b/src/EulerHS/Core/Types/Serializable.hs new file mode 100644 index 00000000..d5eff087 --- /dev/null +++ b/src/EulerHS/Core/Types/Serializable.hs @@ -0,0 +1,346 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveDataTypeable #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE UndecidableInstances #-} + + +module EulerHS.Core.Types.Serializable + ( + -- * Core Serializable + -- ** Class + Serializable(..) + -- ** Bytestrings + , ByteStringS + , fromByteString + , toByteString + -- ** JSONEx + , JSONEx + , resolveJSONEx + , fromJSONMaybe + ) where + + +import qualified Data.Aeson as A +import qualified Data.Aeson.Types as A +import qualified Data.ByteString as BS +import Data.ByteString.Base64.Type as B64 +import qualified Data.ByteString.Lazy as BSL +import qualified Data.CaseInsensitive as CI +import qualified Data.Map as Map +import qualified Data.Sequence as Seq +import EulerHS.Prelude +import qualified Network.HTTP.Media as HTTP +import qualified Network.HTTP.Types as HTTP +import qualified Servant.Client as S +import qualified Servant.Client.Core.Request as S + + + +---------------------------------------------------------------------- + +class EitherC c d where + resolve :: (c => r) -> (d => r) -> r + +instance {-# OVERLAPPABLE #-} d => EitherC c d where + resolve _ r = r + + +---------------------------------------------------------------------- + +class Serializable a where + jsonEncode :: a -> A.Value + jsonDecode :: A.Value -> Maybe a + +type JSONEx a = EitherC (Serializable a) (ToJSON a, FromJSON a) + +resolveJSONEx + :: forall a r + . 
JSONEx a + => (Serializable a => r) + -> ((ToJSON a, FromJSON a) => r) + -> r +resolveJSONEx sf jf = resolve @(Serializable a) @(ToJSON a, FromJSON a) sf jf + +---------------------------------------------------------------------- + +fromJSONMaybe :: FromJSON a => A.Value -> Maybe a +fromJSONMaybe = A.parseMaybe parseJSON + +---------------------------------------------------------------------- + +instance JSONEx a => Serializable (Maybe a) where + jsonEncode = resolveJSONEx @a + (encodeWith jsonEncode) + (encodeWith toJSON) + where + encodeWith el = toJSON . fmap el + + jsonDecode = resolveJSONEx @a + (decodeWith jsonDecode) + (decodeWith fromJSONMaybe) + where + decodeWith dl val = + case fmap (fmap dl) $ fromJSONMaybe val of + Just (Just (Just v)) -> Just (Just v) + Just (Nothing) -> Just (Nothing) + _ -> Nothing + + + + + +instance JSONEx a => EitherC (Serializable (Maybe a)) d where resolve r _ = r + +---------------------------------------------------------------------- + +instance (JSONEx a, JSONEx b) => Serializable (Either a b) where + jsonEncode = resolveJSONEx @a + (resolveJSONEx @b + (encodeWith jsonEncode jsonEncode) + (encodeWith jsonEncode toJSON)) + (resolveJSONEx @b + (encodeWith toJSON jsonEncode) + (encodeWith toJSON toJSON)) + where + encodeWith el er = toJSON . bimap el er + + jsonDecode = resolveJSONEx @a + (resolveJSONEx @b + (decodeWith jsonDecode jsonDecode) + (decodeWith jsonDecode (A.parseMaybe parseJSON))) + (resolveJSONEx @b + (decodeWith (A.parseMaybe parseJSON) jsonDecode) + (decodeWith (A.parseMaybe parseJSON) (A.parseMaybe parseJSON))) + where + decodeWith dl dr = A.parseMaybe parseJSON >=> either (fmap Left . dl) (fmap Right . dr) + + + +instance (JSONEx a, JSONEx b) => EitherC (Serializable (Either a b)) d where resolve r _ = r + +---------------------------------------------------------------------- + +instance (JSONEx a, JSONEx b) => Serializable (a, b) where + jsonEncode = resolveJSONEx @a + (resolveJSONEx @b + (encodeWith jsonEncode jsonEncode) + (encodeWith jsonEncode toJSON)) + (resolveJSONEx @b + (encodeWith toJSON jsonEncode) + (encodeWith toJSON toJSON)) + where + encodeWith el er = toJSON . bimap el er + + jsonDecode = resolveJSONEx @a + (resolveJSONEx @b + (decodeWith jsonDecode jsonDecode) + (decodeWith jsonDecode (A.parseMaybe parseJSON))) + (resolveJSONEx @b + (decodeWith (A.parseMaybe parseJSON) jsonDecode) + (decodeWith (A.parseMaybe parseJSON) (A.parseMaybe parseJSON))) + where + decodeWith dl dr = A.parseMaybe parseJSON >=> \(va, vb) -> (,) <$> dl va <*> dr vb + + +instance (JSONEx a, JSONEx b) => EitherC (Serializable (a, b)) d where resolve r _ = r + + +---------------------------------------------------------------------- + +instance JSONEx a => Serializable [a] where + jsonEncode = toJSON . fmap (resolveJSONEx @a jsonEncode toJSON) + jsonDecode = join . fmap (sequence . fmap (resolveJSONEx @a jsonDecode fromJSONMaybe)) . fromJSONMaybe + + +instance JSONEx a => EitherC (Serializable [a]) d where resolve r _ = r + +---------------------------------------------------------------------- + +instance Serializable ByteString where + jsonEncode bs = A.object ["b64" A..= mkBS64 bs, "utf8" A..= decodeUtf8 @Text bs] + jsonDecode = A.parseMaybe . 
A.withObject "bs" $ \o -> fmap getBS64 (o A..: "b64") + +instance Serializable ByteString64 where + jsonEncode = toJSON + jsonDecode = A.parseMaybe parseJSON + +instance EitherC (Serializable ByteString) d where resolve r _ = r + +---------------------------------------------------------------------- + +instance Serializable S.ClientError where + jsonEncode = toJSON . fromClientError + jsonDecode = fmap toClientError . A.parseMaybe parseJSON + +instance EitherC (Serializable S.ClientError) d where resolve r _ = r + +---------------------------------------------------------------------- + +newtype ByteStringS = ByteStringS [Word8] + deriving (Generic, ToJSON, FromJSON) + +fromByteString :: ByteString -> ByteStringS +fromByteString = ByteStringS . BS.unpack + +toByteString :: ByteStringS -> ByteString +toByteString (ByteStringS a)= BS.pack a + +---------------------------------------------------------------------- + +fromCIByteString :: CI.CI ByteString -> ByteStringS +fromCIByteString = fromByteString . CI.original + +toCIByteString :: ByteStringS -> CI.CI ByteString +toCIByteString = CI.mk . toByteString + + +---------------------------------------------------------------------- +-- ClientError serializer/deserializer +---------------------------------------------------------------------- + +data HttpVersionS + = HttpVersionS + { httpMajor :: !Int + , httpMinor :: !Int + } deriving (Generic, ToJSON, FromJSON) + +fromHttpVersion :: HTTP.HttpVersion -> HttpVersionS +fromHttpVersion HTTP.HttpVersion {..} = HttpVersionS {..} + +toHttpVersion :: HttpVersionS -> HTTP.HttpVersion +toHttpVersion HttpVersionS {..} = HTTP.HttpVersion {..} + +---------------------------------------------------------------------- + +data HTTPStatusS + = StatusS + { statusCode :: Int + , statusMessage :: ByteStringS + } deriving (Generic, ToJSON, FromJSON) + +fromHTTPStatus :: HTTP.Status -> HTTPStatusS +fromHTTPStatus HTTP.Status {..} = StatusS {statusMessage = fromByteString statusMessage, ..} + +toHTTPStatus :: HTTPStatusS -> HTTP.Status +toHTTPStatus StatusS {..} = HTTP.Status {statusMessage = toByteString statusMessage, ..} + +---------------------------------------------------------------------- + +data ResponseS = ResponseS + { responseStatusCode :: HTTPStatusS + , responseHeaders :: [(ByteStringS, ByteStringS)] + , responseHttpVersion :: HttpVersionS + , responseBody :: ByteStringS + } deriving (Generic, ToJSON, FromJSON) + +fromResponse :: S.Response -> ResponseS +fromResponse S.Response {..} = ResponseS + { responseStatusCode = fromHTTPStatus responseStatusCode + , responseHttpVersion = fromHttpVersion responseHttpVersion + , responseBody = fromByteString $ BSL.toStrict responseBody + , responseHeaders = toList $ fmap (bimap fromCIByteString fromByteString) $ responseHeaders + } + +toResponse :: ResponseS -> S.Response +toResponse ResponseS {..} = S.Response + { responseStatusCode = toHTTPStatus responseStatusCode + , responseHttpVersion = toHttpVersion responseHttpVersion + , responseBody = BSL.fromStrict $ toByteString responseBody + , responseHeaders = Seq.fromList $ fmap (bimap toCIByteString toByteString) $ responseHeaders + } + +---------------------------------------------------------------------- + +data ConnectionException = ConnectionException + deriving (Show, Generic, Exception, ToJSON, FromJSON) + +---------------------------------------------------------------------- + +data MediaTypeS = MediaTypeS + { mainType :: ByteStringS + , subType :: ByteStringS + , parameters :: [(ByteStringS, 
ByteStringS)] + } deriving (Generic, ToJSON, FromJSON) + +fromMediaType :: HTTP.MediaType -> MediaTypeS +fromMediaType mt = MediaTypeS + { mainType = fromCIByteString mainType' + , subType = fromCIByteString subType' + , parameters = bimap fromCIByteString fromCIByteString <$> Map.assocs parameters' + } + where + mainType' = HTTP.mainType mt + subType' = HTTP.subType mt + parameters' = HTTP.parameters mt + +toMediaType :: MediaTypeS -> HTTP.MediaType +toMediaType MediaTypeS {..} = + foldl' (HTTP./:) + (toByteString mainType HTTP.// toByteString subType) + (bimap toByteString toByteString <$> parameters) + +---------------------------------------------------------------------- + +data RequestS = RequestS + { requestPath :: (S.BaseUrl, ByteStringS) + , requestQueryString :: [(ByteStringS, Maybe ByteStringS)] + , requestBody :: Maybe ((), MediaTypeS) + , requestAccept :: [MediaTypeS] + , requestHeaders :: [(ByteStringS, ByteStringS)] + , requestHttpVersion :: HttpVersionS + , requestMethod :: ByteStringS + } deriving (Generic, Typeable, ToJSON, FromJSON) + + +fromRequest :: S.RequestF () (S.BaseUrl, ByteString) -> RequestS +fromRequest S.Request {..} = RequestS + { requestPath = second fromByteString requestPath + , requestQueryString = toList $ fmap (bimap fromByteString (fmap fromByteString)) requestQueryString + , requestBody = fmap (second fromMediaType) requestBody + , requestAccept = toList $ fmap fromMediaType requestAccept + , requestHeaders = toList $ fmap (bimap fromCIByteString fromByteString) requestHeaders + , requestHttpVersion = fromHttpVersion requestHttpVersion + , requestMethod = fromByteString requestMethod + } + +toRequest :: RequestS -> S.RequestF () (S.BaseUrl, ByteString) +toRequest RequestS {..} = S.Request + { requestPath = second toByteString requestPath + , requestQueryString = Seq.fromList $ fmap (bimap toByteString (fmap toByteString)) requestQueryString + , requestBody = fmap (second toMediaType) requestBody + , requestAccept = Seq.fromList $ fmap toMediaType requestAccept + , requestHeaders = Seq.fromList $ fmap (bimap toCIByteString toByteString) requestHeaders + , requestHttpVersion = toHttpVersion requestHttpVersion + , requestMethod = toByteString requestMethod + } + +---------------------------------------------------------------------- + +data ClientErrorS + = FailureResponse RequestS ResponseS + | DecodeFailure Text ResponseS + | UnsupportedContentType MediaTypeS ResponseS + | InvalidContentTypeHeader ResponseS + | ConnectionError ConnectionException + deriving (Generic, ToJSON, FromJSON) + +fromClientError :: S.ClientError -> ClientErrorS +fromClientError (S.FailureResponse req res) = FailureResponse (fromRequest req) (fromResponse res) +fromClientError (S.DecodeFailure i res) = DecodeFailure i (fromResponse res) +fromClientError (S.UnsupportedContentType t res) = UnsupportedContentType (fromMediaType t) (fromResponse res) +fromClientError (S.InvalidContentTypeHeader res) = InvalidContentTypeHeader (fromResponse res) +-- Note: We do not preserve actual error, for now +fromClientError (S.ConnectionError _) = ConnectionError ConnectionException + +toClientError :: ClientErrorS -> S.ClientError +toClientError (FailureResponse req res) = S.FailureResponse (toRequest req) (toResponse res) +toClientError (DecodeFailure i res) = S.DecodeFailure i (toResponse res) +toClientError (UnsupportedContentType t res) = S.UnsupportedContentType (toMediaType t) (toResponse res) +toClientError (InvalidContentTypeHeader res) = S.InvalidContentTypeHeader (toResponse res) 
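One property worth stating before the final clause: the ClientErrorS conversion round-trips every constructor except ConnectionError, which comes back wrapping the placeholder ConnectionException because the original exception is not serialized (see the note in fromClientError above). A minimal sketch of that round-trip, using only the definitions of this module; the helper name roundTripClientError is illustrative and not part of the change:

-- Illustrative sketch, not part of this module.
roundTripClientError :: S.ClientError -> S.ClientError
roundTripClientError = toClientError . fromClientError
-- A DecodeFailure is preserved exactly, while for any exception someEx
-- (someEx is a placeholder), roundTripClientError (S.ConnectionError someEx)
-- yields an S.ConnectionError wrapping ConnectionException instead.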
+toClientError (ConnectionError e) = S.ConnectionError $ toException e + + +---------------------------------------------------------------------- diff --git a/src/EulerHS/Extra/Aeson.hs b/src/EulerHS/Extra/Aeson.hs new file mode 100644 index 00000000..8d357fc2 --- /dev/null +++ b/src/EulerHS/Extra/Aeson.hs @@ -0,0 +1,49 @@ +module EulerHS.Extra.Aeson + ( stripLensPrefixOptions + , stripAllLensPrefixOptions + , jsonSetField + , encodeJSON + , decodeJSON + ) where + +import Prelude + +import Data.Aeson (FromJSON, ToJSON, Options, defaultOptions, fieldLabelModifier) +import qualified Data.Aeson as Aeson +import qualified Data.Aeson.Text as Aeson +import qualified Data.ByteString.Lazy as LazyByteString +import qualified Data.HashMap.Strict as HashMap +import Data.Text (Text) +import qualified Data.Text.Encoding as Text +import qualified Data.Text.Lazy as LazyText + +stripLensPrefixOptions :: Options +stripLensPrefixOptions = defaultOptions { fieldLabelModifier = drop 1 } + +stripAllLensPrefixOptions :: Options +stripAllLensPrefixOptions = defaultOptions { fieldLabelModifier = dropPrefix} + where + dropPrefix :: String -> String + dropPrefix field = if length field > 0 + then dropWhile (== head field) field + else field + +-- utility functions + +-- | Set a field inside a JSON Object +jsonSetField :: ToJSON a => Text -> a -> Aeson.Value -> Aeson.Value +jsonSetField fieldName fieldValue obj = case obj of + Aeson.Object fields -> + Aeson.Object $ HashMap.insert fieldName (Aeson.toJSON fieldValue) fields + _ -> + error $ "This should be an object... got " <> show obj + +-- | Encode a value to JSON Text +-- +-- Note: the name `jsonEncode` is already taken by Aeson +encodeJSON :: ToJSON a => a -> Text +encodeJSON = LazyText.toStrict . Aeson.encodeToLazyText + +-- | Parse JSON Text into a value +decodeJSON :: FromJSON a => Text -> Maybe a +decodeJSON = Aeson.decode . LazyByteString.fromStrict . Text.encodeUtf8 diff --git a/src/EulerHS/Extra/AltValidation.hs b/src/EulerHS/Extra/AltValidation.hs new file mode 100644 index 00000000..440c2020 --- /dev/null +++ b/src/EulerHS/Extra/AltValidation.hs @@ -0,0 +1,167 @@ +{-# OPTIONS -fno-warn-deprecations #-} +{-# LANGUAGE AllowAmbiguousTypes #-} + +module EulerHS.Extra.AltValidation + ( + -- * Extra Validation + Transform(..) + , mkValidator + , mkCustomValidator + , mkTransformer + , withCustomError + , Transformer + , Validator + , V + , Errors + , VErrorPayload(..) + , module X + , withField + , runParser + , extractJust + , extractMaybeWithDefault + , guarded + , guardedCustom + , decode + , decodeCustom + , insideJust + , parValidate + ) where + +import EulerHS.Prelude hiding (or, pred) +import qualified Prelude as P + +import Data.Either.Extra (mapLeft) +import Data.Generics.Product.Fields +import qualified Data.Text as T +import Data.Validation +import Data.Validation as X +import GHC.TypeLits +import Type.Reflection + + +data VErrorPayload = VErrorPayload + { status :: Text + , status_id :: Maybe Int + , error_code :: Maybe Text + , error_message :: Maybe Text + , error_field :: Maybe Text + } + deriving (Eq, Show, Generic) + +validationError :: VErrorPayload +validationError = VErrorPayload + { status = "invalid_request_error" + , status_id = Nothing + , error_code = Just "INVALID_REQUEST" + , error_message = Nothing + , error_field = Nothing + } + +-- | Context contains the name of validated field +type Ctx = Text + +type Errors = [VErrorPayload] +type V a = Validation [VErrorPayload] a + +-- | Represents Transformer from one type to another. 
+ +--- | This class represents transformation abilities between types. +class Transform a b where + transform :: a -> Validation Errors b + +type Transformer a b = a -> ReaderT Ctx (Either Errors) b + +-- | Represents the value parameter validator. +type Validator a = Transformer a a + +-- | Takes error message and predicate and returns validation function +-- using default 'VErrorPayload' +mkValidator :: Text -> (t -> Bool) -> Validator t +mkValidator msg pred v = ReaderT (\ctx -> if not $ pred v + then Left [validationError { error_message = Just msg, error_field = Just ctx }] + else pure v) + +-- | Make a validator using a particular error message, original +-- errors are ignored +withCustomError :: VErrorPayload -> Validator a -> Validator a +withCustomError err v a = ReaderT (\ctx -> mapLeft (\_ -> [err]) $ runReaderT (v a) ctx) + +-- | Takes error message and predicate and returns validation function +-- using custom error +mkCustomValidator :: VErrorPayload -> Text -> (t -> Bool) -> Validator t +mkCustomValidator err msg pred v = ReaderT (\ctx -> if not $ pred v + then Left [err { error_message = Just msg, error_field = Just ctx }] + else pure v) + +-- | Guards computations by a validation rule +guarded :: Text -> Bool -> ReaderT Ctx (Either Errors) () +guarded msg pred | pred = ReaderT (\_ -> pure ()) + | otherwise = ReaderT (\ctx -> Left [validationError {error_message = Just msg, error_field = Just ctx }]) + +-- | Guards computations by a validation rule with a custom error +guardedCustom :: VErrorPayload -> Bool -> ReaderT Ctx (Either Errors) () +guardedCustom err pred | pred = ReaderT (\_ -> pure ()) + | otherwise = ReaderT (\ctx -> Left [err {error_field = Just ctx }]) + +-- | Trying to decode 'Text' into a target type +decode :: forall t . (Read t) => Transformer Text t +decode v = ReaderT (\ctx -> case (readMaybe $ toString v) of + Just x -> Right x + _ -> Left [ validationError { error_message = Just ("Can't decode value: " <> v) + , error_field = Just ctx}]) + +-- | Trying to decode 'Text' into a target type, use custom error +decodeCustom :: forall t . (Read t) => VErrorPayload -> Transformer Text t +decodeCustom err v = ReaderT (\_ -> case (readMaybe $ toString v) of + Just x -> Right x + _ -> Left [ err ]) + +mkTransformer :: VErrorPayload -> (a -> Maybe b) -> Transformer a b +mkTransformer err f v = ReaderT (\_ -> case f v of + Just x -> Right x + Nothing -> Left [ err ]) + +insideJust :: Transformer a b -> Transformer (Maybe a) (Maybe b) +insideJust _ Nothing = pure Nothing +insideJust val (Just a) = Just <$> val a + +-- | Trying to extract the argument from Maybe type +-- if value is Nothing then raise Failure +extractJust :: Transformer (Maybe a) a +extractJust r = ReaderT (\ctx -> maybe (Left [err ctx]) Right r) + where + err ctx = validationError + { status = "Bad Request" + , error_message = Just "Mandatory fields are missing" + , error_code = Just "Mandatory fields are missing" + , error_field = Just ctx + } + +extractMaybeWithDefault :: a -> Transformer (Maybe a) a +extractMaybeWithDefault d r = ReaderT (\_ -> maybe (Right d) Right r) + +-- | Extract value and run validators on it +withField + :: forall (f :: Symbol) v r a + . (HasField' f r v, KnownSymbol f) + => r -> Transformer v a -> Validation Errors a +withField rec pav = fromEither $ runReaderT (pav $ getField @f rec) $ fieldName_ @f + +-- | Run a custom parser +runParser + :: forall a + . 
ReaderT Ctx (Either Errors) a + -> Text + -> Validation Errors a +runParser p err = fromEither $ runReaderT p err + +-- | Return the given 'Symbol' as 'Text' +-- >>> fieldName @"userId" +-- "userId" +fieldName_ :: forall (f :: Symbol) . KnownSymbol f => Text +fieldName_ = T.pack $ ((filter (/='"'))) $ P.show $ typeRep @f + +parValidate :: [Validator a] -> Validator a +parValidate vals a = ReaderT (\ctx -> toEither $ foldr (*>) (pure a) $ fmap (mapper ctx) vals) + where + mapper ctx val = fromEither $ runReaderT (val a) ctx diff --git a/src/EulerHS/Extra/Language.hs b/src/EulerHS/Extra/Language.hs new file mode 100644 index 00000000..502e1ad0 --- /dev/null +++ b/src/EulerHS/Extra/Language.hs @@ -0,0 +1,373 @@ +{- | +Module : EulerHS.Extra.Language +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module contains additional methods and functions providing extra functionality +over the stock ones. + +This is an internal module. Import `EulerHS.Language` instead. +-} + +module EulerHS.Extra.Language + ( getOrInitSqlConn + , getOrInitKVDBConn + , rExpire + , rExpireB + , rDel + , rDelB + , rExists + , rExistsB + , rExistsT -- alias for rExists (back compat) + , rHget + , rHgetB + , rHset + , rHsetB + , rIncr + , rIncrB + , rSet + , rSetT -- alias for rSet (back compat) + , rSetB + , rGet + , rGetB + , rGetT -- alias for rGet (back compat) + , rSetex + , rSetexB + , rSetexT -- alias for rSetex (back compat) + , keyToSlot + ) where + +import EulerHS.Prelude hiding (get, id) + +import qualified Data.Aeson as A +import qualified Data.ByteString.Lazy as BSL +import qualified Data.Text.Encoding as TE +import Database.Redis (keyToSlot) +import qualified EulerHS.Core.KVDB.Language as L +import qualified EulerHS.Core.Types as T +import qualified EulerHS.Framework.Language as L + +type RedisName = Text +type TextKey = Text +type TextField = Text +type ByteKey = ByteString +type ByteField = ByteString +type ByteValue = ByteString + +-- | Get existing SQL connection, or init a new connection. +getOrInitSqlConn :: (HasCallStack, L.MonadFlow m) => + T.DBConfig beM -> m (T.DBResult (T.SqlConn beM)) +getOrInitSqlConn cfg = do + eConn <- L.getSqlDBConnection cfg + case eConn of + Left (T.DBError T.ConnectionDoesNotExist _) -> L.initSqlDBConnection cfg + res -> pure res + +-- | Get existing Redis connection, or init a new connection. +getOrInitKVDBConn :: (HasCallStack, L.MonadFlow m) => T.KVDBConfig -> m (T.KVDBAnswer T.KVDBConn) +getOrInitKVDBConn cfg = do + conn <- L.getKVDBConnection cfg + case conn of + Left (T.KVDBError T.KVDBConnectionDoesNotExist _) -> L.initKVDBConnection cfg + res -> pure res + +-- ---------------------------------------------------------------------------- +-- KVDB convenience functions +-- ---------------------------------------------------------------------------- + +-- | Set a key's time to live in seconds. +-- Key is a text string. +-- +-- mtl version of the original function. +rExpire :: (HasCallStack, Integral t, L.MonadFlow m) => + RedisName -> TextKey -> t -> m (Either T.KVDBReply Bool) +rExpire cName k t = rExpireB cName (TE.encodeUtf8 k) t + +-- | Set a key's time to live in seconds. +-- Key is a byte string. +-- +-- mtl version of the original function. +-- Additionally, logs the error may happen. 
+rExpireB :: (HasCallStack, Integral t, L.MonadFlow m) => + RedisName -> ByteKey -> t -> m (Either T.KVDBReply Bool) +rExpireB cName k t = do + res <- L.runKVDB cName $ L.expire k $ toInteger t + case res of + Right _ -> pure res + Left err -> do + L.logError @Text "Redis expire" $ show err + pure res + +-- ---------------------------------------------------------------------------- + +-- | Delete a keys. +-- Key is a text string. +-- +-- mtl version of the original function. +rDel :: (HasCallStack, L.MonadFlow m) => + RedisName -> [TextKey] -> m (Either T.KVDBReply Integer) +rDel cName ks = rDelB cName (TE.encodeUtf8 <$> ks) + +-- | Delete a keys. +-- Key is a byte string. +-- +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rDelB :: (HasCallStack, L.MonadFlow m) => + RedisName -> [ByteKey] -> m (Either T.KVDBReply Integer) +rDelB cName ks = do + res <- L.runKVDB cName $ L.del ks + case res of + Right _ -> pure res + Left err -> do + L.logError @Text "Redis del" $ show err + pure res + +-- ---------------------------------------------------------------------------- + +-- | Determine if a key exists. +-- Key is a text string. +-- +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rExists :: (HasCallStack, L.MonadFlow m) => + RedisName -> TextKey -> m (Either T.KVDBReply Bool) +rExists cName k = rExistsB cName $ TE.encodeUtf8 k + +-- | Determine if a key exists. +-- Key is a byte string. +-- +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rExistsB :: (HasCallStack, L.MonadFlow m) => + RedisName -> ByteKey -> m (Either T.KVDBReply Bool) +rExistsB cName k = do + res <- L.runKVDB cName $ L.exists k + case res of + Right _ -> pure res + Left err -> do + L.logError @Text "Redis exists" $ show err + pure res + +-- | Determine if a key exists. +-- Key is a text string. +-- +-- mtl version of the original function. +rExistsT :: (HasCallStack, L.MonadFlow m) => + RedisName -> TextKey -> m (Either T.KVDBReply Bool) +rExistsT = rExists + +-- ---------------------------------------------------------------------------- + +-- | Get the value of a hash field. +-- Key is a text string. +-- +-- Performs decodings of the value. +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rHget :: (HasCallStack, FromJSON v, L.MonadFlow m) + => RedisName -> TextKey -> TextField -> m (Maybe v) +rHget cName k f = do + let k' = TE.encodeUtf8 k + let f' = TE.encodeUtf8 f + r <- L.runKVDB cName $ L.hget k' f' + case r of + Right (Just val) -> do + let v = A.eitherDecode $ BSL.fromStrict val + case v of + Left err -> do + L.logError @Text "Decoding error: " $ show err + pure Nothing + Right v' -> pure $ Just v' + Right Nothing -> pure Nothing + Left err -> do + L.logError @Text "Redis rHget" $ show err + pure Nothing + +-- | Get the value of a hash field. +-- Key is a byte string. +-- +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rHgetB :: (HasCallStack, L.MonadFlow m) => + Text -> ByteKey -> ByteField -> m (Maybe ByteValue) +rHgetB cName k f = do + res <- L.runKVDB cName $ L.hget k f + case res of + Right (Just val) -> pure $ Just val + Right Nothing -> pure Nothing + Left err -> do + L.logError @Text "Redis hget" $ show err + pure Nothing + +-- ---------------------------------------------------------------------------- + +-- | Set the value of a hash field. +-- Key is a text string. +-- +-- mtl version of the original function. 
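The hash helpers pair up naturally: a JSON-encoded value written with rHset (declared next) can be read back with rHget above. A small usage sketch follows, illustrative only: the connection name "redis", the key and the field are invented, and OverloadedStrings is assumed for the Text literals.

-- Illustrative sketch; the names used here are made up and not part of this change.
cacheUserEmail :: (HasCallStack, L.MonadFlow m) => Text -> m (Maybe Text)
cacheUserEmail email = do
  _ <- rHset "redis" "user:42" "email" email   -- writes the JSON-encoded value
  rHget "redis" "user:42" "email"              -- reads and decodes it back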
+rHset :: (HasCallStack, ToJSON v, L.MonadFlow m) + => RedisName -> TextKey -> TextField -> v -> m (Either T.KVDBReply Bool) +rHset cName k f v = rHsetB cName k' f' v' + where + k' = TE.encodeUtf8 k + f' = TE.encodeUtf8 f + v' = BSL.toStrict $ A.encode v + +-- | Set the value of a hash field. +-- Key is a byte string. +-- +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rHsetB :: (HasCallStack, L.MonadFlow m) + => RedisName -> ByteKey -> ByteField -> ByteValue -> m (Either T.KVDBReply Bool) +rHsetB cName k f v = do + res <- L.runKVDB cName $ L.hset k f v + case res of + Right _ -> pure res + Left err -> do + L.logError @Text "Redis hset" $ show err + pure res + +-- ---------------------------------------------------------------------------- + +-- | Increment the integer value of a key by one. +-- Key is a text string. +-- +-- mtl version of the original function. +rIncr :: (HasCallStack, L.MonadFlow m) => + RedisName -> TextKey -> m (Either T.KVDBReply Integer) +rIncr cName k = rIncrB cName (TE.encodeUtf8 k) + +-- | Increment the integer value of a key by one. +-- Key is a byte string. +-- +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rIncrB :: (HasCallStack, L.MonadFlow m) => + RedisName -> ByteKey -> m (Either T.KVDBReply Integer) +rIncrB cName k = do + res <- L.runKVDB cName $ L.incr k + case res of + Right _ -> pure res + Left err -> do + L.logError @Text "Redis incr" $ show err + pure res + +-- ---------------------------------------------------------------------------- + +-- | Set the value of a key. +-- Key is a text string. +-- +-- mtl version of the original function. +rSet :: (HasCallStack, ToJSON v, L.MonadFlow m) => + RedisName -> TextKey -> v -> m (Either T.KVDBReply T.KVDBStatus) +rSet cName k v = rSetB cName k' v' + where + k' = TE.encodeUtf8 k + v' = BSL.toStrict $ A.encode v + +-- | Set the value of a key. +-- Key is a byte string. +-- +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rSetB :: (HasCallStack, L.MonadFlow m) => + Text -> ByteKey -> ByteValue -> m (Either T.KVDBReply T.KVDBStatus) +rSetB cName k v = do + res <- L.runKVDB cName $ L.set k v + case res of + Right _ -> pure res + Left err -> do + L.logError @Text "Redis set" $ show err + pure res + +-- | Set the value of a key. +-- Key is a text string. +-- +-- mtl version of the original function. +rSetT :: (HasCallStack, ToJSON v, L.MonadFlow m) => + RedisName -> TextKey -> v -> m (Either T.KVDBReply T.KVDBStatus) +rSetT = rSet + +-- ---------------------------------------------------------------------------- + +-- | Get the value of a key. +-- Key is a text string. +-- +-- Performs encodings of the value. +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rGet :: (HasCallStack, FromJSON v, L.MonadFlow m) => + RedisName -> TextKey -> m (Maybe v) +rGet cName k = do + mv <- L.runKVDB cName $ L.get (TE.encodeUtf8 k) + case mv of + Right (Just val) -> pure $ A.decode $ BSL.fromStrict val + Right Nothing -> pure Nothing + Left err -> do + L.logError @Text "Redis get" $ show err + pure Nothing + +-- | Get the value of a key. +-- Key is a byte string. +-- +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rGetB :: (HasCallStack, L.MonadFlow m) => + RedisName -> ByteKey -> m (Maybe ByteValue) -- Binary.decode? 
+rGetB cName k = do + mv <- L.runKVDB cName $ L.get k + case mv of + Right mval -> pure mval + Left err -> do + L.logError @Text "Redis get" $ show err + pure Nothing + +-- | Get the value of a key. +-- Key is a text string. +-- +-- mtl version of the original function. +rGetT :: (HasCallStack, FromJSON v, L.MonadFlow m) => + Text -> Text -> m (Maybe v) +rGetT = rGet + +-- ---------------------------------------------------------------------------- + +-- | Set the value and ttl of a key. +-- Key is a text string. +-- +-- Performs encodings of the key and value. +-- mtl version of the original function. +rSetex :: (HasCallStack, ToJSON v, Integral t, L.MonadFlow m) => + RedisName -> TextKey -> v -> t -> m (Either T.KVDBReply T.KVDBStatus) +rSetex cName k v t = rSetexB cName k' v' t + where + k' = TE.encodeUtf8 k + v' = BSL.toStrict $ A.encode v + +-- | Set the value and ttl of a key. +-- Key is a byte string. +-- +-- mtl version of the original function. +-- Additionally, logs the error may happen. +rSetexB :: (HasCallStack, Integral t, L.MonadFlow m) => + RedisName -> ByteKey -> ByteValue -> t -> m (Either T.KVDBReply T.KVDBStatus) +rSetexB cName k v t = do + res <- L.runKVDB cName $ L.setex k (toInteger t) v + case res of + Right _ -> pure res + Left err -> do + L.logError @Text "Redis setex" $ show err + pure res + +-- | Set the value and ttl of a key. +-- Key is a text string. +-- +-- mtl version of the original function. +rSetexT :: (HasCallStack, ToJSON v, Integral t, L.MonadFlow m) => + RedisName -> TextKey -> v -> t -> m (Either T.KVDBReply T.KVDBStatus) +rSetexT = rSetex diff --git a/src/EulerHS/Extra/Test.hs b/src/EulerHS/Extra/Test.hs new file mode 100644 index 00000000..0c48c94f --- /dev/null +++ b/src/EulerHS/Extra/Test.hs @@ -0,0 +1,100 @@ +{-# LANGUAGE OverloadedStrings #-} + +module EulerHS.Extra.Test where + +import EulerHS.Prelude + +import qualified Database.Beam.Postgres as BP +import qualified Database.MySQL.Base as MySQL +import qualified Database.PostgreSQL.Simple as PG (execute_) +import EulerHS.Interpreters +import EulerHS.Language +import EulerHS.Runtime (FlowRuntime) +import EulerHS.Types +import qualified EulerHS.Types as T +import System.Process + + +mwhen :: Monoid m => Bool -> m -> m +mwhen True = id +mwhen False = const mempty + + +withMysqlDb :: String -> String -> MySQLConfig -> IO a -> IO a +withMysqlDb dbName filePath msRootCfg next = + bracket_ + (dropTestDbIfExist >> createTestDb) + (dropTestDbIfExist) + (loadMySQLDump >> next) + where + T.MySQLConfig + { connectPort + , connectHost + , connectUser + , connectPassword + } = msRootCfg + + loadMySQLDump :: IO () + loadMySQLDump = + void $ system $ + "mysql " <> options <> " " <> dbName <> " 2> /dev/null < " <> filePath + where + options = + intercalate " " + [ "--port=" <> show connectPort + , mwhen (not $ null connectHost ) $ "--host=" <> connectHost + , mwhen (not $ null connectUser ) $ "--user=" <> connectUser + , mwhen (not $ null connectPassword) $ "--password=" <> connectPassword + ] + + dropTestDbIfExist :: IO () + dropTestDbIfExist = + bracket (T.createMySQLConn msRootCfg) T.closeMySQLConn $ \rootConn -> do + void . MySQL.execute_ rootConn . MySQL.Query $ "drop database if exists " <> encodeUtf8 dbName + + createTestDb :: IO () + createTestDb = + bracket (T.createMySQLConn msRootCfg) T.closeMySQLConn $ \rootConn -> do + _ <- MySQL.execute_ rootConn . MySQL.Query $ "create database " <> encodeUtf8 dbName + void . MySQL.execute_ rootConn . 
MySQL.Query $ "grant all privileges on " <> encodeUtf8 dbName <> ".* to 'cloud'@'%'" + + +preparePostgresDB + :: FilePath + -> T.PostgresConfig + -> T.PostgresConfig + -> (T.PostgresConfig -> DBConfig BP.Pg) + -> (forall a . (FlowRuntime -> IO a) -> IO a) + -> (FlowRuntime -> IO ()) + -> IO() +preparePostgresDB filePath pgRootCfg pgCfg@T.PostgresConfig{..} pgCfgToDbCfg withRt next = + withRt $ \flowRt -> + bracket (T.createPostgresConn pgRootCfg) T.closePostgresConn $ \rootConn -> do + let + dropTestDbIfExist :: IO () + dropTestDbIfExist = do + void $ PG.execute_ rootConn "drop database if exists euler_test_db" + + createTestDb :: IO () + createTestDb = do + void $ PG.execute_ rootConn "create database euler_test_db" + + bracket_ + (dropTestDbIfExist >> createTestDb) + (dropTestDbIfExist) + (loadPgDump >> prepareDBConnections flowRt >> next flowRt) + where + prepareDBConnections :: FlowRuntime -> IO () + prepareDBConnections flowRuntime = runFlow flowRuntime $ do + ePool <- initSqlDBConnection $ pgCfgToDbCfg pgCfg + either (error "Failed to connect to PG") (const $ pure ()) ePool + + loadPgDump :: IO () + loadPgDump = + void $ system $ + "psql -q " <> uri <> " 1> /dev/null < " <> filePath + where + uri = "postgresql://" + <> connectUser <> ":" <> connectPassword <> "@" + <> connectHost <> ":" <> show connectPort <> "/" + <> connectDatabase diff --git a/src/EulerHS/Extra/Validation.hs b/src/EulerHS/Extra/Validation.hs new file mode 100644 index 00000000..cf5b4314 --- /dev/null +++ b/src/EulerHS/Extra/Validation.hs @@ -0,0 +1,108 @@ +{-# OPTIONS -fno-warn-deprecations #-} +{-# LANGUAGE AllowAmbiguousTypes #-} + +module EulerHS.Extra.Validation + ( + -- * Extra Validation + Transform(..) + , mkValidator + , mkTransformer + , Transformer + , Validator + , V + , Errors + , module X + , withField + , runParser + , extractJust + , extractMaybeWithDefault + , guarded + , decode + , insideJust + , parValidate + ) where + +import EulerHS.Prelude hiding (or, pred) +import qualified Prelude as P + +import Data.Generics.Product.Fields +import qualified Data.Text as T +import Data.Validation +import Data.Validation as X +import GHC.TypeLits +import Type.Reflection + + +type Ctx = Text +type Errors = [Text] +type V a = Validation [Text] a + +-- | Represents Transformer from one type to another. + +--- | This class represents transformation abilities between types. +class Transform a b where + transform :: a -> Validation Errors b + +type Transformer a b = a -> ReaderT Ctx (Either Errors) b + +-- | Represents the value parameter validator. +type Validator a = Transformer a a + +-- | Takes error message and predicate and return validation function +mkValidator :: Text -> (t -> Bool) -> Validator t +mkValidator err pred v = ReaderT (\ctx -> if not $ pred v + then Left [ctx <> " " <> err] + else pure v) + +guarded :: Text -> Bool -> ReaderT Ctx (Either Errors) () +guarded err pred | pred = ReaderT (\_ -> pure ()) + | otherwise = ReaderT (\ctx -> Left [ctx <> " " <> err]) + +-- | Trying to decode Text to target type +decode :: forall t . 
Read t => Transformer Text t +decode v = ReaderT (\ctx -> case (readMaybe $ toString v) of + Just x -> Right x + _ -> Left ["Can't decode " <> v <> " from field " <> ctx]) + +mkTransformer :: Text -> (a -> Maybe b) -> Transformer a b +mkTransformer err f v = ReaderT (\ctx -> case f v of + Just x -> Right x + Nothing -> Left [ctx <> " " <> err]) + +insideJust :: Transformer a b -> Transformer (Maybe a) (Maybe b) +insideJust _ Nothing = pure Nothing +insideJust val (Just a) = Just <$> val a + +-- | Trying to extract the argument from Maybe type +-- if value is Nothing then raise Failure +extractJust :: Transformer (Maybe a) a +extractJust r = ReaderT (\ctx -> maybe (Left [ctx <> " not present"]) Right r) + +extractMaybeWithDefault :: a -> Transformer (Maybe a) a +extractMaybeWithDefault d r = ReaderT (\_ -> maybe (Right d) Right r) + +-- | Extract value and run validators on it +withField + :: forall (f :: Symbol) v r a + . (HasField' f r v, KnownSymbol f) + => r -> Transformer v a -> Validation Errors a +withField rec pav = fromEither $ runReaderT (pav $ getField @f rec) $ fieldName_ @f + +-- | Run parser +runParser + :: forall a + . ReaderT Ctx (Either Errors) a + -> Text + -> Validation Errors a +runParser p msg = fromEither $ runReaderT p msg + +-- | Return given 'Symbol' as 'Text' +-- >>> fieldName @"userId" +-- "userId" +fieldName_ :: forall (f :: Symbol) . KnownSymbol f => Text +fieldName_ = T.pack $ ((filter (/='"'))) $ P.show $ typeRep @f + +parValidate :: [Validator a] -> Validator a +parValidate vals a = ReaderT (\ctx -> toEither $ foldr (*>) (pure a) $ fmap (mapper ctx) vals) + where + mapper ctx val = fromEither $ runReaderT (val a) ctx diff --git a/src/EulerHS/Framework/Flow/Interpreter.hs b/src/EulerHS/Framework/Flow/Interpreter.hs new file mode 100644 index 00000000..a6e777fa --- /dev/null +++ b/src/EulerHS/Framework/Flow/Interpreter.hs @@ -0,0 +1,617 @@ +module EulerHS.Framework.Flow.Interpreter + ( -- * Flow Interpreter + runFlow + , runFlow' + ) where + +import Control.Exception (throwIO) +import qualified Control.Exception as Exception +import qualified Data.ByteString as Strict +import qualified Data.ByteString.Lazy as Lazy +import qualified Data.CaseInsensitive as CI +import qualified Data.DList as DL +import Data.Default (def) +import Data.Either.Extra (mapLeft) +import Data.Generics.Product.Positions (getPosition) +import qualified Data.Map as Map +import qualified Data.Pool as DP +import qualified Data.Text as Text +import qualified Data.Text.Encoding as Encoding +import qualified Data.UUID as UUID (toText) +import qualified Data.UUID.V4 as UUID (nextRandom) +import qualified Data.Vector as V +import qualified EulerHS.Core.Interpreters as R +import qualified EulerHS.Core.Logger.Language as L +import qualified EulerHS.Core.Playback.Entries as P +import qualified EulerHS.Core.Playback.Machine as P +import qualified EulerHS.Core.Runtime as R +import qualified EulerHS.Core.Types as T +import EulerHS.Core.Types.KVDB +import qualified EulerHS.Framework.Flow.Language as L +import qualified EulerHS.Framework.Runtime as R +import EulerHS.Prelude +import qualified Network.Connection as Conn +import qualified Network.HTTP.Client as HTTP +import qualified Network.HTTP.Client.TLS as TLS +import qualified Network.HTTP.Types as HTTP +import qualified Network.TLS as TLS +import qualified Network.TLS.Extra.Cipher as TLS +import qualified Servant.Client as S +import System.Process (readCreateProcess, shell) +import Unsafe.Coerce (unsafeCoerce) + +connect :: T.DBConfig be -> IO 
(T.DBResult (T.SqlConn be)) +connect cfg = do + eConn <- try $ T.mkSqlConn cfg + case eConn of + Left (e :: SomeException) -> pure $ Left $ T.DBError T.ConnectionFailed $ show e + Right conn -> pure $ Right conn + +connectRedis :: T.KVDBConfig -> IO (T.KVDBAnswer T.KVDBConn) +connectRedis cfg = do + eConn <- try $ T.mkRedisConn cfg + case eConn of + Left (e :: SomeException) -> pure $ Left $ T.KVDBError T.KVDBConnectionFailed $ show e + Right conn -> pure $ Right conn + +disconnect :: T.SqlConn beM -> IO () +disconnect (T.MockedPool _) = pure () +disconnect (T.PostgresPool _ pool) = DP.destroyAllResources pool +disconnect (T.MySQLPool _ pool) = DP.destroyAllResources pool +disconnect (T.SQLitePool _ pool) = DP.destroyAllResources pool + +suppressErrors :: IO a -> IO () +suppressErrors = void . try @_ @SomeException + +awaitMVarWithTimeout :: MVar (Either Text a) -> Int -> IO (Either T.AwaitingError a) +awaitMVarWithTimeout mvar mcs | mcs <= 0 = go 0 + | otherwise = go mcs + where + portion = (mcs `div` 10) + 1 + go rest + | rest <= 0 = do + mValue <- tryReadMVar mvar + pure $ case mValue of + Nothing -> Left T.AwaitingTimeout + Just (Right val) -> Right val + Just (Left err) -> Left $ T.ForkedFlowError err + | otherwise = do + tryReadMVar mvar >>= \case + Just (Right val) -> pure $ Right val + Just (Left err) -> pure $ Left $ T.ForkedFlowError err + Nothing -> threadDelay portion >> go (rest - portion) + +-- TODO: update the code with timeouts when servant 0.18.1 is released +-- (see the code in the private downstream repo) + +-- | Utility function to convert HttpApi HTTPRequests to http-client HTTP +-- requests +getHttpLibRequest :: MonadThrow m => T.HTTPRequest -> m HTTP.Request +getHttpLibRequest request = do + let url = Text.unpack $ T.getRequestURL request + httpLibRequest <- HTTP.parseRequest url + let + requestMethod = case T.getRequestMethod request of + T.Get -> "GET" + T.Put -> "PUT" + T.Post -> "POST" + T.Delete -> "DELETE" + T.Head -> "HEAD" + let + setBody = case T.getRequestBody request of + Just body -> + let body' = T.getLBinaryString body + in \req -> req { HTTP.requestBody = HTTP.RequestBodyLBS body' } + Nothing -> id + + -- TODO: Respect "Content-Transfer-Encoding" header + let + headers :: HTTP.RequestHeaders = T.getRequestHeaders request + & Map.toList + & map (\(x, y) -> (CI.mk (Encoding.encodeUtf8 x), Encoding.encodeUtf8 y)) + + let + setTimeout = case T.getRequestTimeout request of + Just x -> \req -> req {HTTP.responseTimeout = HTTP.responseTimeoutMicro x} + Nothing -> id + + let + setRedirects = case T.getRequestRedirects request of + Just x -> \req -> req {HTTP.redirectCount = x} + Nothing -> id + + pure $ setRedirects . setTimeout . setBody $ + httpLibRequest + { HTTP.method = requestMethod + , HTTP.requestHeaders = headers + } + +-- | Utility function to translate http-client HTTP responses back to HttpAPI +-- responses +translateHttpResponse :: HTTP.Response Lazy.ByteString -> Either Text T.HTTPResponse +translateHttpResponse response = do + headers <- translateResponseHeaders $ HTTP.responseHeaders response + status <- translateResponseStatusMessage . HTTP.statusMessage . 
HTTP.responseStatus $ response + pure $ T.HTTPResponse + { getResponseBody = T.LBinaryString $ HTTP.responseBody response + , getResponseCode = HTTP.statusCode $ HTTP.responseStatus response + , getResponseHeaders = headers + , getResponseStatus = status + } + +translateResponseHeaders + :: [(CI.CI Strict.ByteString, Strict.ByteString)] + -> Either Text (Map.Map Text.Text Text.Text) +translateResponseHeaders httpLibHeaders = do + let + result = do + headerNames <- mapM (Encoding.decodeUtf8' . CI.original . fst) httpLibHeaders + headerValues <- mapM (Encoding.decodeUtf8' . snd) httpLibHeaders + return $ zip (map Text.toLower headerNames) headerValues + + -- TODO: Look up encoding and use some thread-safe unicode package to decode + -- headers + -- let encoding + -- = List.findIndex (\name -> name == "content-transfer-encoding") headerNames + headers <- displayEitherException "Error decoding HTTP response headers: " result + pure $ Map.fromList headers + +translateResponseStatusMessage :: Strict.ByteString -> Either Text Text +translateResponseStatusMessage = displayEitherException "Error decoding HTTP response status message: " . Encoding.decodeUtf8' + +displayEitherException :: Exception e => Text -> Either e a -> Either Text a +displayEitherException prefix = either (Left . (prefix <>) . Text.pack . Exception.displayException) Right + +-- | Utility function to create a manager from certificate data +mkManagerFromCert :: T.HTTPCert -> IO (Either String HTTP.Manager) +mkManagerFromCert T.HTTPCert {..} = do + case TLS.credentialLoadX509ChainFromMemory getCert getCertChain getCertKey of + Right creds -> do + let hooks = def { TLS.onCertificateRequest = \_ -> return $ Just creds } + let clientParams = (TLS.defaultParamsClient getCertHost "") + { TLS.clientHooks = hooks + , TLS.clientSupported = def { TLS.supportedCiphers = TLS.ciphersuite_default } + } + let tlsSettings = Conn.TLSSettings clientParams + fmap Right $ HTTP.newManager $ TLS.mkManagerSettings tlsSettings Nothing + Left err -> pure $ Left err + +-- translateHeaderName :: CI.CI Strict.ByteString -> Text.Text +-- translateHeaderName = Encoding.decodeUtf8' . CI.original + +interpretFlowMethod :: Maybe T.FlowGUID -> R.FlowRuntime -> L.FlowMethod a -> IO a +interpretFlowMethod mbFlowGuid flowRt@R.FlowRuntime {..} (L.CallServantAPI mbMgrSel bUrl clientAct next) = + fmap next $ P.withRunMode _runMode (P.mkCallServantAPIEntry bUrl) $ do + let mbClientMngr = case mbMgrSel of + Nothing -> Right _defaultHttpClientManager + Just mngrName -> maybeToRight mngrName $ Map.lookup mngrName _httpClientManagers + case mbClientMngr of + Right mngr -> do + eitherResult <- S.runClientM (T.runEulerClient (dbgLogger T.Debug) bUrl clientAct) (S.mkClientEnv mngr bUrl) + case eitherResult of + Left err -> do + dbgLogger T.Error $ show err + pure $ Left err + Right response -> + pure $ Right response + Left name -> do + let err = S.ConnectionError $ toException $ T.HttpManagerNotFound name + dbgLogger T.Error (show err) + pure $ Left err + where + dbgLogger debugLevel = + R.runLogger mbFlowGuid T.RegularMode (R._loggerRuntime . R._coreRuntime $ flowRt) + . L.logMessage' debugLevel ("CallServantAPI impl" :: String) + . 
show + +interpretFlowMethod mbFlowGuid flowRt@R.FlowRuntime {..} (L.CallHTTP request cert next) = + fmap next $ P.withRunMode _runMode (P.mkCallHttpAPIEntry request) $ do + httpLibRequest <- getHttpLibRequest request + _manager <- maybe (pure $ Right _defaultHttpClientManager) mkManagerFromCert cert + -- TODO: Refactor + case _manager of + Left err -> do + let errMsg = "Certificate failure: " <> Text.pack err + logJsonError errMsg request + pure $ Left errMsg + Right manager -> do + eResponse <- try $ HTTP.httpLbs httpLibRequest manager + case eResponse of + Left (err :: SomeException) -> do + let errMsg = Text.pack $ displayException err + logJsonError errMsg request + pure $ Left errMsg + Right httpResponse -> do + case translateHttpResponse httpResponse of + Left errMsg -> do + logJsonError errMsg request + pure $ Left errMsg + Right response -> do + logJson T.Debug $ T.HTTPRequestResponse request response + pure $ Right response + where + logJsonError :: Text -> T.HTTPRequest -> IO () + logJsonError err = logJson T.Error . T.HTTPIOException err + + logJson :: ToJSON a => T.LogLevel -> a -> IO () + logJson debugLevel = + R.runLogger mbFlowGuid T.RegularMode (R._loggerRuntime . R._coreRuntime $ flowRt) + . L.logMessage' debugLevel ("callHTTP failure" :: String) + . encodeJSON + +interpretFlowMethod mbFlowGuid R.FlowRuntime {..} (L.EvalLogger loggerAct next) = + next <$> R.runLogger mbFlowGuid _runMode (R._loggerRuntime _coreRuntime) loggerAct + +interpretFlowMethod _ R.FlowRuntime {..} (L.RunIO descr ioAct next) = + next <$> P.withRunMode _runMode (P.mkRunIOEntry descr) ioAct + +interpretFlowMethod _ R.FlowRuntime {..} (L.RunUntracedIO descr ioAct next) = + case _runMode of + (T.RecordingMode recorderRt) -> + next <$> P.record recorderRt (P.mkRunUntracedIOEntry descr) ioAct + _ -> + next <$> ioAct + +interpretFlowMethod _ R.FlowRuntime {..} (L.GetOption k next) = + fmap next $ P.withRunMode _runMode (P.mkGetOptionEntry k) $ do + m <- readMVar _options + pure $ do + valAny <- Map.lookup k m + pure $ unsafeCoerce valAny + +interpretFlowMethod _ R.FlowRuntime {..} (L.SetOption k v next) = + fmap next $ P.withRunMode _runMode (P.mkSetOptionEntry k v) $ do + m <- takeMVar _options + let newMap = Map.insert k (unsafeCoerce @_ @Any v) m + putMVar _options newMap + +interpretFlowMethod _ R.FlowRuntime {..} (L.DelOption k next) = + fmap next $ P.withRunMode _runMode (P.mkDelOptionEntry k) $ do + m <- takeMVar _options + let newMap = Map.delete k m + putMVar _options newMap + +interpretFlowMethod _ R.FlowRuntime {_runMode} (L.GenerateGUID next) = do + next <$> P.withRunMode _runMode P.mkGenerateGUIDEntry + (UUID.toText <$> UUID.nextRandom) + +interpretFlowMethod _ R.FlowRuntime {_runMode} (L.RunSysCmd cmd next) = + next <$> P.withRunMode _runMode + (P.mkRunSysCmdEntry cmd) + (readCreateProcess (shell cmd) "") + +---------------------------------------------------------------------- +interpretFlowMethod mbFlowGuid rt (L.Fork desc newFlowGUID flow next) = do + awaitableMVar <- newEmptyMVar + case R._runMode rt of + T.RegularMode -> void $ forkIO (suppressErrors (runFlow' mbFlowGuid rt (L.runSafeFlow flow) >>= putMVar awaitableMVar)) + + T.RecordingMode T.RecorderRuntime{recording = T.Recording{..}, ..} -> do + finalRecordingMVar <- newEmptyMVar + finalSafeRecordingVar <- newEmptyMVar + finalForkedRecordingsVar <- newEmptyMVar + + forkRecordingMVar <- newMVar V.empty + forkSafeRecordingVar <- newMVar Map.empty + forkForkedRecordingsVar <- newMVar Map.empty + + let freshRecording = T.Recording 
forkRecordingMVar forkSafeRecordingVar forkForkedRecordingsVar + let emptyRecording = T.Recording finalRecordingMVar finalSafeRecordingVar finalForkedRecordingsVar + + let forkRuntime = T.RecorderRuntime + { flowGUID = newFlowGUID + , recording = freshRecording + , .. + } + + forkedRecs <- takeMVar forkedRecordingsVar + putMVar forkedRecordingsVar $ + Map.insert newFlowGUID emptyRecording forkedRecs + + let newRt = rt {R._runMode = T.RecordingMode forkRuntime} + + void $ forkIO $ do + suppressErrors (runFlow' mbFlowGuid newRt (L.runSafeFlow flow) >>= putMVar awaitableMVar) + putMVar finalRecordingMVar =<< readMVar forkRecordingMVar + putMVar finalSafeRecordingVar =<< readMVar forkSafeRecordingVar + putMVar finalForkedRecordingsVar =<< readMVar forkForkedRecordingsVar + +---------------------------------------------------------------------- + + T.ReplayingMode playerRt -> do + let + T.PlayerRuntime + { rerror = T.ReplayErrors {..} + , resRecording = T.ResultRecording{ forkedRecordings } + , .. + } = playerRt + + case Map.lookup newFlowGUID forkedRecordings of + Nothing -> do + let + err = + T.PlaybackError + { errorType = T.ForkedFlowRecordingsMissed + , errorMessage = "No recordings found for forked flow: " <> Text.unpack newFlowGUID + , errorFlowGUID = flowGUID } + + takeMVar errorMVar *> putMVar errorMVar (Just err) + throwIO $ T.ReplayingException err + + Just recording -> do + stepVar <- newMVar 0 + + finalErrorMVar <- newEmptyMVar + finalSafeFlowErrorVar <- newEmptyMVar + finalForkedFlowErrorVar <- newEmptyMVar + + forkErrorMVar <- newMVar Nothing + forkSafeFlowErrorVar <- newMVar Map.empty + forkForkedFlowErrorVar <- newMVar Map.empty + + let freshReplayErrors = T.ReplayErrors forkErrorMVar forkSafeFlowErrorVar forkForkedFlowErrorVar + let finalReplayErrors = T.ReplayErrors finalErrorMVar finalSafeFlowErrorVar finalForkedFlowErrorVar + + let forkRuntime = T.PlayerRuntime + { flowGUID = newFlowGUID + , stepMVar = stepVar + , resRecording = recording + , rerror = freshReplayErrors + , .. 
+ } + + forkedFlowErrs <- takeMVar forkedFlowErrorsVar + + putMVar forkedFlowErrorsVar $ + Map.insert newFlowGUID finalReplayErrors forkedFlowErrs + + let newRt = rt {R._runMode = T.ReplayingMode forkRuntime} + void $ forkIO $ do + suppressErrors (runFlow' mbFlowGuid newRt (L.runSafeFlow flow) >>= putMVar awaitableMVar) + putMVar finalErrorMVar =<< readMVar forkErrorMVar + putMVar finalSafeFlowErrorVar =<< readMVar forkSafeFlowErrorVar + putMVar finalForkedFlowErrorVar =<< readMVar forkForkedFlowErrorVar + +---------------------------------------------------------------------- +---------------------------------------------------------------------- + + void $ P.withRunMode (R._runMode rt) (P.mkForkEntry desc newFlowGUID) (pure ()) + pure $ next $ T.Awaitable awaitableMVar + +---------------------------------------------------------------------- + +interpretFlowMethod _ R.FlowRuntime {..} (L.Await mbMcs (T.Awaitable awaitableMVar) next) = do + let act = case mbMcs of + Nothing -> do + val <- readMVar awaitableMVar + case val of + Left err -> pure $ Left $ T.ForkedFlowError err + Right res -> pure $ Right res + Just (T.Microseconds mcs) -> awaitMVarWithTimeout awaitableMVar $ fromIntegral mcs + next <$> P.withRunMode _runMode (P.mkAwaitEntry mbMcs) act + +interpretFlowMethod _ R.FlowRuntime {_runMode} (L.ThrowException ex _) = do + void $ P.withRunMode _runMode (P.mkThrowExceptionEntry ex) (pure ()) + throwIO ex + +interpretFlowMethod mbFlowGuid rt@R.FlowRuntime {_runMode} (L.RunSafeFlow newFlowGUID flow next) = fmap next $ do + fl <- case R._runMode rt of + T.RegularMode -> do + fl <- try @_ @SomeException $ runFlow' mbFlowGuid rt flow + pure $ mapLeft show fl + + T.RecordingMode T.RecorderRuntime{recording = T.Recording{..}, ..} -> do + freshRecordingMVar <- newMVar V.empty + + let freshRecording = T.Recording freshRecordingMVar safeRecordingsVar forkedRecordingsVar + + let safeRuntime = T.RecorderRuntime + { flowGUID = newFlowGUID + , recording = freshRecording + , .. + } + + let newRt = rt {R._runMode = T.RecordingMode safeRuntime} + + fl <- try @_ @SomeException $ runFlow' mbFlowGuid newRt flow + + freshRec <- readMVar freshRecordingMVar + + safeRecs <- takeMVar safeRecordingsVar + + putMVar safeRecordingsVar $ + Map.insert newFlowGUID freshRec safeRecs + + pure $ mapLeft show fl + +---------------------------------------------------------------------- + + T.ReplayingMode playerRt -> do + let + T.PlayerRuntime + { rerror = T.ReplayErrors {..} + , resRecording + , .. + } = playerRt + + T.ResultRecording{ safeRecordings } = resRecording + + case Map.lookup newFlowGUID safeRecordings of + Nothing -> do + let + err = + T.PlaybackError + { errorType = T.SafeFlowRecordingsMissed + , errorMessage = "No recordings found for safe flow " <> Text.unpack newFlowGUID + , errorFlowGUID = flowGUID } + + takeMVar errorMVar *> putMVar errorMVar (Just err) + throwIO $ T.ReplayingException err + + Just (newrecording :: T.RecordingEntries) -> do + stepVar <- newMVar 0 + freshErrorMVar <- newMVar Nothing + + let freshReplayErrors = T.ReplayErrors freshErrorMVar safeFlowErrorsVar forkedFlowErrorsVar + + let forkRuntime = T.PlayerRuntime + { flowGUID = newFlowGUID + , stepMVar = stepVar + , resRecording = resRecording { T.recording = newrecording } + , rerror = freshReplayErrors + , .. 
+ } + + let newRt = rt {R._runMode = T.ReplayingMode forkRuntime} + fl <- try @_ @SomeException $ runFlow' mbFlowGuid newRt flow + + safeFlowErrs <- takeMVar safeFlowErrorsVar + freshError <- takeMVar freshErrorMVar + + putMVar safeFlowErrorsVar $ + case freshError of + Just err -> Map.insert newFlowGUID err safeFlowErrs + Nothing -> safeFlowErrs + + pure $ mapLeft show fl + +---------------------------------------------------------------------- + + P.withRunMode (R._runMode rt) (P.mkRunSafeFlowEntry newFlowGUID) (pure fl) + + +interpretFlowMethod _ R.FlowRuntime {..} (L.InitSqlDBConnection cfg next) = + fmap next $ P.withRunMode _runMode (P.mkInitSqlDBConnectionEntry cfg) $ do + let connTag = getPosition @1 cfg + connMap <- takeMVar _sqldbConnections + res <- case Map.lookup connTag connMap of + Just _ -> pure $ Left $ T.DBError T.ConnectionAlreadyExists $ "Connection for " <> connTag <> " already created." + Nothing -> connect cfg + case res of + Right conn -> putMVar _sqldbConnections $ Map.insert connTag (T.bemToNative conn) connMap + Left _ -> putMVar _sqldbConnections connMap + pure res + +interpretFlowMethod _ R.FlowRuntime {..} (L.DeInitSqlDBConnection conn next) = + fmap next $ P.withRunMode _runMode (P.mkDeInitSqlDBConnectionEntry conn) $ do + let connTag = getPosition @1 conn + connMap <- takeMVar _sqldbConnections + case Map.lookup connTag connMap of + Nothing -> putMVar _sqldbConnections connMap + Just _ -> do + disconnect conn + putMVar _sqldbConnections $ Map.delete connTag connMap + +interpretFlowMethod _ R.FlowRuntime {..} (L.GetSqlDBConnection cfg next) = + fmap next $ P.withRunMode _runMode (P.mkGetSqlDBConnectionEntry cfg) $ do + let connTag = getPosition @1 cfg + connMap <- readMVar _sqldbConnections + pure $ case Map.lookup connTag connMap of + Just conn -> Right $ T.nativeToBem connTag conn + Nothing -> Left $ T.DBError T.ConnectionDoesNotExist $ "Connection for " <> connTag <> " does not exists." + +interpretFlowMethod _ R.FlowRuntime {..} (L.InitKVDBConnection cfg next) = + fmap next $ P.withRunMode _runMode (P.mkInitKVDBConnectionEntry cfg) $ do + let connTag = getPosition @1 cfg + connections <- takeMVar _kvdbConnections + res <- case Map.lookup connTag connections of + Just _ -> pure $ Left $ T.KVDBError T.KVDBConnectionAlreadyExists $ "Connection for " +|| connTag ||+ " already created." + Nothing -> connectRedis cfg + case res of + Left _ -> putMVar _kvdbConnections connections + Right conn -> putMVar _kvdbConnections + $ Map.insert connTag (kvdbToNative conn) connections + pure res + +interpretFlowMethod _ R.FlowRuntime {..} (L.DeInitKVDBConnection conn next) = + fmap next $ P.withRunMode _runMode (P.mkDeInitKVDBConnectionEntry conn) $ do + let connTag = getPosition @1 conn + connections <- takeMVar _kvdbConnections + case Map.lookup connTag connections of + Nothing -> putMVar _kvdbConnections connections + Just _ -> do + R.kvDisconnect $ kvdbToNative conn + putMVar _kvdbConnections $ Map.delete connTag connections + +interpretFlowMethod _ R.FlowRuntime {..} (L.GetKVDBConnection cfg next) = + fmap next $ P.withRunMode _runMode (P.mkGetKVDBConnectionEntry cfg) $ do + let connTag = getPosition @1 cfg + connMap <- readMVar _kvdbConnections + pure $ case Map.lookup connTag connMap of + Just conn -> Right $ T.nativeToKVDB connTag conn + Nothing -> Left $ KVDBError KVDBConnectionDoesNotExist $ "Connection for " +|| connTag ||+ " does not exists." 
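In practice, flows rarely issue GetSqlDBConnection or InitSqlDBConnection directly; they go through getOrInitSqlConn from EulerHS.Extra.Language, introduced earlier in this change. A sketch of how client code reaches the clauses above, assuming that helper is in scope; cfg and the "db" log tag are placeholders:

-- Illustrative sketch; 'cfg' stands for a real T.DBConfig value.
prepareConn :: T.DBConfig beM -> L.Flow (Maybe (T.SqlConn beM))
prepareConn cfg = do
  eConn <- getOrInitSqlConn cfg    -- ends up in the Get/Init clauses above
  case eConn of
    Left err   -> Nothing <$ L.logError @Text "db" (show err)
    Right conn -> pure (Just conn)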
+ +interpretFlowMethod mbFlowGuid flowRt (L.RunDB conn sqlDbMethod runInTransaction next) = do + let runMode = R._runMode flowRt + let dbgLogger = + if R.shouldFlowLogRawSql flowRt + then R.runLogger mbFlowGuid T.RegularMode (R._loggerRuntime . R._coreRuntime $ flowRt) + . L.logMessage' T.Debug ("RunDB Impl" :: String) + else const $ pure () + rawSqlTVar <- newTVarIO mempty + -- This function would be used inside beam and write raw sql, generated by beam backend, in TVar. + let dbgLogAction = \rawSqlStr -> atomically (modifyTVar' rawSqlTVar (`DL.snoc` rawSqlStr)) *> dbgLogger rawSqlStr + -- TODO: unify the below two branches + fmap (next . fst) $ P.withRunMode runMode P.mkRunDBEntry $ case runInTransaction of + True -> + case conn of + (T.MockedPool _) -> error "Mocked Pool not implemented" + _ -> do + eRes <- R.withTransaction conn $ \nativeConn -> + R.runSqlDB nativeConn dbgLogAction sqlDbMethod + eRes' <- case eRes of + Left exception -> Left <$> wrapException exception + Right x -> pure $ Right x + rawSql <- DL.toList <$> readTVarIO rawSqlTVar + pure (eRes', rawSql) + False -> + case conn of + (T.MockedPool _) -> error "Mocked Pool not implemented" + (T.PostgresPool _ pool) -> DP.withResource pool $ \conn' -> do + eRes <- try @_ @SomeException . R.runSqlDB (T.NativePGConn conn') dbgLogAction $ sqlDbMethod + wrapAndSend rawSqlTVar eRes + (T.MySQLPool _ pool) -> DP.withResource pool $ \conn' -> do + eRes <- try @_ @SomeException . R.runSqlDB (T.NativeMySQLConn conn') dbgLogAction $ sqlDbMethod + wrapAndSend rawSqlTVar eRes + (T.SQLitePool _ pool) -> DP.withResource pool $ \conn' -> do + eRes <- try @_ @SomeException . R.runSqlDB (T.NativeSQLiteConn conn') dbgLogAction $ sqlDbMethod + wrapAndSend rawSqlTVar eRes + where + wrapAndSend rawSqlLoc eResult = do + rawSql <- DL.toList <$> readTVarIO rawSqlLoc + eResult' <- case eResult of + Left exception -> Left <$> wrapException exception + Right x -> pure $ Right x + pure (eResult', rawSql) + + wrapException :: HasCallStack => SomeException -> IO T.DBError + wrapException exception = do + R.runLogger mbFlowGuid T.RegularMode (R._loggerRuntime . R._coreRuntime $ flowRt) + . L.logMessage' T.Debug ("CALLSTACK" :: String) $ Text.pack $ prettyCallStack callStack + pure (wrapException' exception) + + wrapException' :: SomeException -> T.DBError + wrapException' e = fromMaybe (T.DBError T.UnrecognizedError $ show e) + (T.sqliteErrorToDbError (show e) <$> fromException e <|> + T.mysqlErrorToDbError (show e) <$> fromException e <|> + T.postgresErrorToDbError (show e) <$> fromException e) + + +interpretFlowMethod _ R.FlowRuntime {..} (L.RunKVDB cName act next) = + next <$> R.runKVDB cName _runMode _kvdbConnections act + + +interpretFlowMethod mbFlowGuid rt@R.FlowRuntime {_runMode, _pubSubController, _pubSubConnection} (L.RunPubSub act next) = + case (_pubSubConnection, _runMode) of + (Nothing, T.ReplayingMode _) -> go $ error "Connection mock. 
Shold not ever be evaluated" + (Just cn, _ ) -> go cn + _ -> error "RunPubSub method called, while proper Redis connection has not been provided" + where + go conn = next <$> R.runPubSub _runMode _pubSubController conn + (L.unpackLanguagePubSub act $ runFlow' mbFlowGuid $ rt { R._runMode = T.RegularMode }) + +runFlow' :: Maybe T.FlowGUID -> R.FlowRuntime -> L.Flow a -> IO a +runFlow' mbFlowGuid flowRt (L.Flow comp) = foldF (interpretFlowMethod mbFlowGuid flowRt) comp + +runFlow :: R.FlowRuntime -> L.Flow a -> IO a +runFlow = runFlow' Nothing diff --git a/src/EulerHS/Framework/Flow/Language.hs b/src/EulerHS/Framework/Flow/Language.hs new file mode 100644 index 00000000..8895f0d2 --- /dev/null +++ b/src/EulerHS/Framework/Flow/Language.hs @@ -0,0 +1,1105 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE InstanceSigs #-} + +{- | +Module : EulerHS.Framework.Flow.Language +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +The `Flow` eDSL for building a pure, monadic business logic. + +This is an internal module. Import `EulerHS.Language` instead. +-} + + +module EulerHS.Framework.Flow.Language + ( + -- * Flow language + Flow(..) + , FlowMethod(..) + , MonadFlow(..) + , ReaderFlow + -- ** Extra methods + -- *** Logging + , logCallStack + , logExceptionCallStack + , logInfo + , logError + , logDebug + , logWarning + -- *** Other + , callAPI + , callAPI' + , callHTTP + , runIO + , runUntracedIO + , forkFlow + , forkFlow' + -- *** PublishSubscribe + , unpackLanguagePubSub + -- ** Interpretation + , foldFlow + ) where + +import Control.Monad.Catch (MonadThrow (throwM)) +import Control.Monad.Free.Church (MonadFree) +import Control.Monad.Trans.RWS.Strict (RWST) +import Control.Monad.Trans.Writer (WriterT) +import qualified Data.Text as Text +import EulerHS.Core.Language (KVDB, Logger, logMessage') +import qualified EulerHS.Core.Language as L +import qualified EulerHS.Core.PubSub.Language as PSL +import qualified EulerHS.Core.Types as T +import EulerHS.Prelude hiding (getOption, throwM) +import Servant.Client (BaseUrl, ClientError) + +-- | Algebra of the `Flow` language. 
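+--
+-- Each constructor carries its continuation, so an interpreter is essentially a
+-- natural transformation from 'FlowMethod' into a target monad (see 'foldFlow'
+-- below). A minimal sketch of a hypothetical stub interpreter, not part of the
+-- framework:
+--
+-- > interpretStub :: FlowMethod a -> IO a
+-- > interpretStub (GenerateGUID cont)  = pure $ cont "00000000-0000-0000-0000-000000000000"
+-- > interpretStub (RunSysCmd cmd cont) = pure $ cont ("stubbed: " <> cmd)
+-- > interpretStub _                    = error "not covered by this sketch"
+-- >
+-- > runStub :: Flow a -> IO a
+-- > runStub = foldFlow interpretStub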
+ +data FlowMethod (next :: Type) where + CallServantAPI + :: (HasCallStack, T.JSONEx a) + => Maybe T.ManagerSelector + -> BaseUrl + -> T.EulerClient a + -> (Either ClientError a -> next) + -> FlowMethod next + + CallHTTP + :: HasCallStack + => T.HTTPRequest + -> Maybe T.HTTPCert + -> (Either Text T.HTTPResponse -> next) + -> FlowMethod next + + EvalLogger + :: HasCallStack + => Logger a + -> (a -> next) + -> FlowMethod next + + RunIO + :: (HasCallStack, T.JSONEx a) + => Text + -> IO a + -> (a -> next) + -> FlowMethod next + + RunUntracedIO + :: HasCallStack + => Text + -> IO a + -> (a -> next) + -> FlowMethod next + + GetOption + :: (HasCallStack, ToJSON a, FromJSON a) + => T.KVDBKey + -> (Maybe a -> next) + -> FlowMethod next + + SetOption + :: (HasCallStack, ToJSON a, FromJSON a) + => T.KVDBKey + -> a + -> (() -> next) + -> FlowMethod next + + DelOption + :: HasCallStack + => T.KVDBKey + -> (() -> next) + -> FlowMethod next + + GenerateGUID + :: HasCallStack + => (Text -> next) + -> FlowMethod next + + RunSysCmd + :: HasCallStack + => String + -> (String -> next) + -> FlowMethod next + + Fork + :: (HasCallStack, FromJSON a, ToJSON a) + => T.Description + -> T.ForkGUID + -> Flow a + -> (T.Awaitable (Either Text a) -> next) + -> FlowMethod next + + Await + :: (HasCallStack, FromJSON a, ToJSON a) + => Maybe T.Microseconds + -> T.Awaitable (Either Text a) + -> (Either T.AwaitingError a -> next) + -> FlowMethod next + + ThrowException + :: forall a e next + . (HasCallStack, Exception e) + => e + -> (a -> next) + -> FlowMethod next + + RunSafeFlow + :: (HasCallStack, FromJSON a, ToJSON a) + => T.SafeFlowGUID + -> Flow a + -> (Either Text a -> next) + -> FlowMethod next + + InitSqlDBConnection + :: HasCallStack + => T.DBConfig beM + -> (T.DBResult (T.SqlConn beM) -> next) + -> FlowMethod next + + DeInitSqlDBConnection + :: HasCallStack + => T.SqlConn beM + -> (() -> next) + -> FlowMethod next + + GetSqlDBConnection + :: HasCallStack + => T.DBConfig beM + -> (T.DBResult (T.SqlConn beM) -> next) + -> FlowMethod next + + InitKVDBConnection + :: HasCallStack + => T.KVDBConfig + -> (T.KVDBAnswer T.KVDBConn -> next) + -> FlowMethod next + + DeInitKVDBConnection + :: HasCallStack + => T.KVDBConn + -> (() -> next) + -> FlowMethod next + + GetKVDBConnection + :: HasCallStack + => T.KVDBConfig + -> (T.KVDBAnswer T.KVDBConn -> next) + -> FlowMethod next + + RunDB + :: (HasCallStack, T.JSONEx a) + => T.SqlConn beM + -> L.SqlDB beM a + -> Bool + -> (T.DBResult a -> next) + -> FlowMethod next + + RunKVDB + :: HasCallStack + => Text + -> KVDB a + -> (T.KVDBAnswer a -> next) + -> FlowMethod next + + RunPubSub + :: HasCallStack + => PubSub a + -> (a -> next) + -> FlowMethod next + +instance Functor FlowMethod where + {-# INLINEABLE fmap #-} + fmap f = \case + CallServantAPI mSel url client cont -> + CallServantAPI mSel url client (f . cont) + CallHTTP req cert cont -> CallHTTP req cert (f . cont) + EvalLogger logger cont -> EvalLogger logger (f . cont) + RunIO t act cont -> RunIO t act (f . cont) + RunUntracedIO t act cont -> RunUntracedIO t act (f . cont) + GetOption k cont -> GetOption k (f . cont) + SetOption k v cont -> SetOption k v (f . cont) + DelOption k cont -> DelOption k (f . cont) + GenerateGUID cont -> GenerateGUID (f . cont) + RunSysCmd cmd cont -> RunSysCmd cmd (f . cont) + Fork desc guid flow cont -> Fork desc guid flow (f . cont) + Await time awaitable cont -> Await time awaitable (f . cont) + ThrowException e cont -> ThrowException e (f . 
cont) + RunSafeFlow guid flow cont -> RunSafeFlow guid flow (f . cont) + InitSqlDBConnection conf cont -> InitSqlDBConnection conf (f . cont) + DeInitSqlDBConnection conn cont -> DeInitSqlDBConnection conn (f . cont) + GetSqlDBConnection conf cont -> GetSqlDBConnection conf (f . cont) + InitKVDBConnection conf cont -> InitKVDBConnection conf (f . cont) + DeInitKVDBConnection conn cont -> DeInitKVDBConnection conn (f . cont) + GetKVDBConnection conf cont -> GetKVDBConnection conf (f . cont) + RunDB conn db b cont -> RunDB conn db b (f . cont) + RunKVDB t db cont -> RunKVDB t db (f . cont) + RunPubSub pubSub cont -> RunPubSub pubSub (f . cont) + +newtype Flow (a :: Type) = Flow (F FlowMethod a) + deriving newtype (Functor, Applicative, Monad, MonadFree FlowMethod) + +instance MonadThrow Flow where + {-# INLINEABLE throwM #-} + throwM e = liftFC . ThrowException e $ id + +foldFlow :: (Monad m) => (forall b . FlowMethod b -> m b) -> Flow a -> m a +foldFlow f (Flow comp) = foldF f comp + +type ReaderFlow r = ReaderT r Flow + +newtype PubSub a = PubSub { + unpackLanguagePubSub :: HasCallStack => (forall b . Flow b -> IO b) -> PSL.PubSub a + } + +type MessageCallback + = ByteString -- ^ Message payload + -> Flow () + +type PMessageCallback + = ByteString -- ^ Channel name + -> ByteString -- ^ Message payload + -> Flow () + +-- | Fork a unit-returning flow. +-- +-- __Note__: to fork a flow which yields a value use 'forkFlow\'' instead. +-- +-- __Warning__: With forked flows, race coniditions and dead / live blocking become possible. +-- All the rules applied to forked threads in Haskell can be applied to forked flows. +-- +-- Generally, the method is thread safe. Doesn't do anything to bookkeep the threads. +-- There is no possibility to kill a thread at the moment. +-- +-- Thread safe, exception free. +-- +-- > myFlow1 = do +-- > logInfoT "myflow1" "logFromMyFlow1" +-- > someAction +-- > +-- > myFlow2 = do +-- > _ <- runIO someAction +-- > forkFlow "myFlow1 fork" myFlow1 +-- > pure () +-- +forkFlow :: HasCallStack => T.Description -> Flow () -> Flow () +forkFlow description flow = void $ forkFlow' description $ do + eitherResult <- runSafeFlow flow + case eitherResult of + Left msg -> logError ("forkFlow" :: Text) msg + Right _ -> pure () + +-- | Same as 'forkFlow', but takes @Flow a@ and returns an 'T.Awaitable' which can be used +-- to reap results from the flow being forked. +-- +-- > myFlow1 = do +-- > logInfoT "myflow1" "logFromMyFlow1" +-- > pure 10 +-- > +-- > myFlow2 = do +-- > awaitable <- forkFlow' "myFlow1 fork" myFlow1 +-- > await Nothing awaitable +-- +forkFlow' :: (HasCallStack, FromJSON a, ToJSON a) => + T.Description -> Flow a -> Flow (T.Awaitable (Either Text a)) +forkFlow' description flow = do + flowGUID <- generateGUID + logInfo ("ForkFlow" :: Text) $ case Text.uncons description of + Nothing -> + "Flow forked. Description: " +| description |+ " GUID: " +| flowGUID |+ "" + Just _ -> "Flow forked. GUID: " +| flowGUID |+ "" + liftFC $ Fork description flowGUID flow id + +-- | Method for calling external HTTP APIs using the facilities of servant-client. +-- Allows to specify what manager should be used. If no manager found, +-- `HttpManagerNotFound` will be returned (as part of `ClientError.ConnectionError`). +-- +-- Thread safe, exception free. +-- +-- Alias for callServantAPI. +-- +-- | Takes remote url, servant client for this endpoint +-- and returns either client error or result. 
+-- +-- > data User = User { firstName :: String, lastName :: String , userGUID :: String} +-- > deriving (Generic, Show, Eq, ToJSON, FromJSON ) +-- > +-- > data Book = Book { author :: String, name :: String } +-- > deriving (Generic, Show, Eq, ToJSON, FromJSON ) +-- > +-- > type API = "user" :> Get '[JSON] User +-- > :<|> "book" :> Get '[JSON] Book +-- > +-- > api :: HasCallStack => Proxy API +-- > api = Proxy +-- > +-- > getUser :: HasCallStack => EulerClient User +-- > getBook :: HasCallStack => EulerClient Book +-- > (getUser :<|> getBook) = client api +-- > +-- > url = BaseUrl Http "127.0.0.1" port "" +-- > +-- > +-- > myFlow = do +-- > book <- callAPI url getBook +-- > user <- callAPI url getUser +callAPI' :: (HasCallStack, T.JSONEx a, MonadFlow m) => + Maybe T.ManagerSelector -> BaseUrl -> T.EulerClient a -> m (Either ClientError a) +callAPI' = callServantAPI + +-- | The same as `callAPI'` but with default manager to be used. +callAPI :: (HasCallStack, T.JSONEx a, MonadFlow m) => + BaseUrl -> T.EulerClient a -> m (Either ClientError a) +callAPI = callServantAPI Nothing + +-- | Log message with Info level. +-- +-- Thread safe. +logInfo :: forall (tag :: Type) (m :: Type -> Type) . + (HasCallStack, MonadFlow m, Show tag) => tag -> T.Message -> m () +logInfo tag msg = evalLogger' $ logMessage' T.Info tag msg + +-- | Log message with Error level. +-- +-- Thread safe. +logError :: forall (tag :: Type) (m :: Type -> Type) . + (HasCallStack, MonadFlow m, Show tag) => tag -> T.Message -> m () +logError tag msg = do + logCallStack + evalLogger' $ logMessage' T.Error tag msg + +-- | Log message with Debug level. +-- +-- Thread safe. +logDebug :: forall (tag :: Type) (m :: Type -> Type) . + (HasCallStack, MonadFlow m, Show tag) => tag -> T.Message -> m () +logDebug tag msg = evalLogger' $ logMessage' T.Debug tag msg + +-- | Log message with Warning level. +-- +-- Thread safe. +logWarning :: forall (tag :: Type) (m :: Type -> Type) . + (HasCallStack, MonadFlow m, Show tag) => tag -> T.Message -> m () +logWarning tag msg = evalLogger' $ logMessage' T.Warning tag msg + +-- | Run some IO operation, result should have 'ToJSONEx' instance (extended 'ToJSON'), +-- because we have to collect it in recordings for ART system. +-- +-- Warning. This method is dangerous and should be used wisely. +-- +-- > myFlow = do +-- > content <- runIO $ readFromFile file +-- > logDebugT "content id" $ extractContentId content +-- > pure content +runIO :: (HasCallStack, MonadFlow m, T.JSONEx a) => IO a -> m a +runIO = runIO' "" + +-- | The same as runIO, but do not record IO outputs in the ART recordings. +-- For example, this can be useful to implement things like STM or use mutable +-- state. +-- +-- Warning. This method is dangerous and should be used wisely. +-- +-- > myFlow = do +-- > content <- runUntracedIO $ readFromFile file +-- > logDebugT "content id" $ extractContentId content +-- > pure content +runUntracedIO :: (HasCallStack, MonadFlow m) => IO a -> m a +runUntracedIO = runUntracedIO' "" + +-- | The same as callHTTPWithCert but does not need certificate data. +-- +-- Thread safe, exception free. +-- +-- Takes remote url and returns either client error or result. +-- +-- > myFlow = do +-- > book <- callHTTP url +callHTTP :: (HasCallStack, MonadFlow m) => + T.HTTPRequest -> m (Either Text.Text T.HTTPResponse) +callHTTP url = callHTTPWithCert url Nothing + +-- | MonadFlow implementation for the `Flow` Monad. This allows implementation of MonadFlow for +-- `ReaderT` and other monad transformers. 
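+--
+-- This keeps business logic polymorphic over the concrete monad stack. A short
+-- sketch (the names @AppEnv@ and @logStep@ are illustrative, not part of the API):
+--
+-- > logStep :: (HasCallStack, MonadFlow m) => Text -> m ()
+-- > logStep name = do
+-- >   guid <- generateGUID
+-- >   logInfo ("step" :: Text) $ name <> " " <> guid
+-- >
+-- > handler :: ReaderFlow AppEnv ()
+-- > handler = logStep "handling request"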
+-- +-- Omit `forkFlow` as this will break some monads like StateT (you can lift this manually if you +-- know what you're doing) +class (MonadThrow m) => MonadFlow m where + + -- | Method for calling external HTTP APIs using the facilities of servant-client. + -- Allows to specify what manager should be used. If no manager found, + -- `HttpManagerNotFound` will be returne (as part of `ClientError.ConnectionError`). + -- + -- Thread safe, exception free. + -- + -- Takes remote url, servant client for this endpoint + -- and returns either client error or result. + -- + -- > data User = User { firstName :: String, lastName :: String , userGUID :: String} + -- > deriving (Generic, Show, Eq, ToJSON, FromJSON ) + -- > + -- > data Book = Book { author :: String, name :: String } + -- > deriving (Generic, Show, Eq, ToJSON, FromJSON ) + -- > + -- > type API = "user" :> Get '[JSON] User + -- > :<|> "book" :> Get '[JSON] Book + -- > + -- > api :: HasCallStack => Proxy API + -- > api = Proxy + -- > + -- > getUser :: HasCallStack => EulerClient User + -- > getBook :: HasCallStack => EulerClient Book + -- > (getUser :<|> getBook) = client api + -- > + -- > url = BaseUrl Http "127.0.0.1" port "" + -- > + -- > + -- > myFlow = do + -- > book <- callServantAPI url getBook + -- > user <- callServantAPI url getUser + callServantAPI + :: (HasCallStack, T.JSONEx a) + => Maybe T.ManagerSelector -- ^ name of the connection manager to be used + -> BaseUrl -- ^ remote url 'BaseUrl' + -> T.EulerClient a -- ^ servant client 'EulerClient' + -> m (Either ClientError a) -- ^ result + + -- | Method for calling external HTTP APIs without bothering with types. + -- + -- Thread safe, exception free. + -- + -- Takes remote url, optional certificate data and returns either client error or result. + -- + -- > myFlow = do + -- > book <- callHTTPWithCert url cert + callHTTPWithCert + :: HasCallStack + => T.HTTPRequest -- ^ remote url 'Text' + -> Maybe T.HTTPCert -- ^ TLS certificate data + -> m (Either Text.Text T.HTTPResponse) -- ^ result + + -- | Evaluates a logging action. + evalLogger' :: (HasCallStack, ToJSON a, FromJSON a) => Logger a -> m a + + -- | The same as runIO, but accepts a description which will be written into the ART recordings + -- for better clarity. + -- + -- Warning. This method is dangerous and should be used wisely. + -- + -- > myFlow = do + -- > content <- runIO' "reading from file" $ readFromFile file + -- > logDebugT "content id" $ extractContentId content + -- > pure content + runIO' :: (HasCallStack, T.JSONEx a) => Text -> IO a -> m a + + -- | The same as runUntracedIO, but accepts a description which will be written into + -- the ART recordings for better clarity. + -- + -- Warning. This method is dangerous and should be used wisely. + -- + -- > myFlow = do + -- > content <- runUntracedIO' "reading secret data" $ readFromFile secret_file + -- > logDebugT "content id" $ extractContentId content + -- > pure content + runUntracedIO' :: HasCallStack => Text -> IO a -> m a + + -- | Gets stored a typed option by a typed key. + -- + -- Thread safe, exception free. + getOption :: forall k v. (HasCallStack, T.OptionEntity k v) => k -> m (Maybe v) + + -- Sets a typed option using a typed key (a mutable destructive operation) + -- + -- Be aware that it's possible to overflow the runtime with options + -- created uncontrollably. + -- + -- Also please keep in mind the options are runtime-bound and if you have + -- several API methods working with the same option key, you'll get a race. 
+ -- + -- Thread safe, exception free. + -- + -- > data MerchantIdKey = MerchantIdKey + -- > + -- > instance OptionEntity MerchantIdKey Text + -- > + -- > myFlow = do + -- > _ <- setOption MerchantIdKey "abc1234567" + -- > mKey <- getOption MerchantIdKey + -- > runIO $ putTextLn mKey + -- > delOption MerchantIdKey + setOption :: forall k v. (HasCallStack, T.OptionEntity k v) => k -> v -> m () + + -- | Deletes a typed option using a typed key. + delOption :: forall k v. (HasCallStack, T.OptionEntity k v) => k -> m () + + -- | Generate a version 4 UUIDs as specified in RFC 4122 + -- e.g. 25A8FC2A-98F2-4B86-98F6-84324AF28611. + -- + -- Thread safe, exception free. + generateGUID :: HasCallStack => m Text + + -- | Runs system command and returns its output. + -- + -- Warning. This method is dangerous and should be used wisely. + -- + -- > myFlow = do + -- > currentDir <- runSysCmd "pwd" + -- > logInfoT "currentDir" $ toText currentDir + -- > ... + runSysCmd :: HasCallStack => String -> m String + + -- | Inits an SQL connection using a config. + -- + -- Returns an error (Left $ T.DBError T.ConnectionAlreadyExists msg) + -- if the connection already exists for this config. + -- + -- Thread safe, exception free. + initSqlDBConnection :: HasCallStack => T.DBConfig beM -> m (T.DBResult (T.SqlConn beM)) + + -- | Deinits an SQL connection. + -- Does nothing if the connection is not found (might have been closed earlier). + -- + -- Thread safe, exception free. + deinitSqlDBConnection :: HasCallStack => T.SqlConn beM -> m () + + -- | Gets the existing connection. + -- + -- Returns an error (Left $ T.DBError T.ConnectionDoesNotExist) + -- if the connection does not exist. + -- + -- Thread safe, exception free. + getSqlDBConnection :: HasCallStack => T.DBConfig beM -> m (T.DBResult (T.SqlConn beM)) + + -- | Inits a KV DB connection using a config. + -- + -- Returns an error (Left $ KVDBError KVDBConnectionAlreadyExists msg) + -- if the connection already exists. + -- + -- Thread safe, exception free. + initKVDBConnection :: HasCallStack => T.KVDBConfig -> m (T.KVDBAnswer T.KVDBConn) + + -- | Deinits the given KV DB connection. + -- Does nothing if the connection is not found (might have been closed earlier). + -- + -- Thread safe, exception free. + deinitKVDBConnection :: HasCallStack => T.KVDBConn -> m () + + -- | Get the existing connection. + + -- Returns an error (Left $ KVDBError KVDBConnectionDoesNotExist) + -- if the connection does not exits for this config. + -- + -- Thread safe, exception free. + getKVDBConnection :: HasCallStack => T.KVDBConfig -> m (T.KVDBAnswer T.KVDBConn) + + -- | Evaluates SQL DB operations without creating a transaction. + -- It's possible to have a chain of SQL DB calls (within the SqlDB language). + -- + -- Thread safe, exception free. + -- + -- The underlying library is beam which allows to access 3 different SQL backends. + -- See TUTORIAL.md, README.md and QueryExamplesSpec.hs for more info. + -- + -- > myFlow :: HasCallStack => L.Flow (T.DBResult (Maybe User)) + -- > myFlow = do + -- > connection <- L.initSqlDBConnection postgresCfg + -- > + -- > res <- L.runDB connection $ do + -- > let predicate1 User {..} = _userFirstName ==. B.val_ "John" + -- > + -- > L.updateRows $ B.update (_users eulerDb) + -- > (\User {..} -> mconcat + -- > [ _userFirstName <-. B.val_ "Leo" + -- > , _userLastName <-. B.val_ "San" + -- > ] + -- > ) + -- > predicate1 + -- > + -- > let predicate2 User {..} = _userFirstName ==. 
B.val_ "Leo" + -- > L.findRow + -- > $ B.select + -- > $ B.limit_ 1 + -- > $ B.filter_ predicate2 + -- > $ B.all_ (_users eulerDb) + -- > + -- > L.deinitSqlDBConnection connection + -- > pure res + runDB + :: + ( HasCallStack + , T.JSONEx a + , T.BeamRunner beM + , T.BeamRuntime be beM + ) + => T.SqlConn beM + -> L.SqlDB beM a + -> m (T.DBResult a) + + -- | Like @runDB@ but the SqlDB script will be considered a transactional scope. + -- All the queries made within a single @runTransaction@ scope will be placed + -- into a single transaction. + runTransaction + :: + ( HasCallStack + , T.JSONEx a + , T.BeamRunner beM + , T.BeamRuntime be beM + ) + => T.SqlConn beM + -> L.SqlDB beM a + -> m (T.DBResult a) + + -- | Await for some a result from the flow. + -- If the timeout is Nothing than the operation is blocking. + -- If the timeout is set then the internal mechanism tries to do several (10) checks for the result. + -- Can return earlier if the result became available. + -- Returns either an Awaiting error or a result. + -- + -- Warning. There are no guarantees of a proper thread delaying here. + -- + -- Thread safe, exception free. + -- + -- | mbMcs == Nothing: infinite awaiting. + -- | mbMcs == Just (Microseconds n): await for approximately n seconds. + -- Awaiting may succeed ealier. + -- + -- > myFlow1 = do + -- > logInfoT "myflow1" "logFromMyFlow1" + -- > pure 10 + -- > + -- > myFlow2 = do + -- > awaitable <- forkFlow' "myFlow1 fork" myFlow1 + -- > await Nothing awaitable + await + :: (HasCallStack, FromJSON a, ToJSON a) + => Maybe T.Microseconds + -> T.Awaitable (Either Text a) + -> m (Either T.AwaitingError a) + + -- | Throw a given exception. + -- + -- It's possible to catch this exception using runSafeFlow method. + -- + -- Thread safe. Exception throwing. + -- + -- > myFlow = do + -- > res <- authAction + -- > case res of + -- > Failure reason -> throwException err403 {errBody = reason} + -- > Success -> ... + throwException :: forall a e. (HasCallStack, Exception e) => e -> m a + throwException ex = do + -- Doubt: Should we just print the exception details without the + -- contextual details that logError prints. As finding the message inside logError is a bit + -- cumbersome. Just printing the exception details will be much cleaner if we don't need the + -- contextual details. + logExceptionCallStack ex + throwExceptionWithoutCallStack ex + + throwExceptionWithoutCallStack :: forall a e. (HasCallStack, Exception e) => e -> m a + throwExceptionWithoutCallStack = throwM + + -- | Run a flow safely with catching all the exceptions from it. + -- Returns either a result or the exception turned into a text message. + -- + -- This includes ususal instances of the Exception type class, + -- `error` exception and custom user exceptions thrown by the `throwException` method. + -- + -- Thread safe, exception free. + -- + -- > myFlow = runSafeFlow $ throwException err403 {errBody = reason} + -- + -- > myFlow = do + -- > eitherContent <- runSafeFlow $ runIO $ readFromFile file + -- > case eitherContent of + -- > Left err -> ... + -- > Right content -> ... + runSafeFlow :: (HasCallStack, FromJSON a, ToJSON a) => Flow a -> m (Either Text a) + + -- | Execute kvdb actions. + -- + -- Thread safe, exception free. 
+ -- + -- > myFlow = do + -- > kvres <- L.runKVDB $ do + -- > set "aaa" "bbb" + -- > res <- get "aaa" + -- > del ["aaa"] + -- > pure res + runKVDB + :: HasCallStack + => Text + -> KVDB a -- ^ KVDB action + -> m (T.KVDBAnswer a) + + ---- Experimental Pub Sub implementation using Redis Pub Sub. + + runPubSub + :: HasCallStack + => PubSub a + -> m a + + -- | Publish payload to channel. + publish + :: HasCallStack + => PSL.Channel -- ^ Channel in which payload will be send + -> PSL.Payload -- ^ Payload + -> m (Either T.KVDBReply Integer) -- ^ Number of subscribers received payload + + -- | Subscribe to all channels from list. + -- Note: Subscription won't be unsubscribed automatically on thread end. + -- Use canceller explicitly to cancel subscription + subscribe + :: HasCallStack + => [PSL.Channel] -- ^ List of channels to subscribe + -> MessageCallback -- ^ Callback function. + -> m (Flow ()) -- ^ Inner flow is a canceller of current subscription + + -- | Subscribe to all channels from list. Respects redis pattern syntax. + -- Note: Subscription won't be unsubscribed automatically on thread end. + -- Use canceller explicitly to cancel subscription + psubscribe + :: HasCallStack + => [PSL.ChannelPattern] -- ^ List of channels to subscribe (wit respect to patterns supported by redis) + -> PMessageCallback -- ^ Callback function + -> m (Flow ()) -- ^ Inner flow is a canceller of current subscription + +instance MonadFlow Flow where + {-# INLINEABLE callServantAPI #-} + callServantAPI mbMgrSel url cl = liftFC $ CallServantAPI mbMgrSel url cl id + {-# INLINEABLE callHTTPWithCert #-} + callHTTPWithCert url cert = liftFC $ CallHTTP url cert id + {-# INLINEABLE evalLogger' #-} + evalLogger' logAct = liftFC $ EvalLogger logAct id + {-# INLINEABLE runIO' #-} + runIO' descr ioAct = liftFC $ RunIO descr ioAct id + {-# INLINEABLE runUntracedIO' #-} + runUntracedIO' descr ioAct = liftFC $ RunUntracedIO descr ioAct id + {-# INLINEABLE getOption #-} + getOption :: forall k v. (HasCallStack, T.OptionEntity k v) => k -> Flow (Maybe v) + getOption k = liftFC $ GetOption (T.mkOptionKey @k @v k) id + {-# INLINEABLE setOption #-} + setOption :: forall k v. (HasCallStack, T.OptionEntity k v) => k -> v -> Flow () + setOption k v = liftFC $ SetOption (T.mkOptionKey @k @v k) v id + {-# INLINEABLE delOption #-} + delOption :: forall k v. 
(HasCallStack, T.OptionEntity k v) => k -> Flow () + delOption k = liftFC $ DelOption (T.mkOptionKey @k @v k) id + {-# INLINEABLE generateGUID #-} + generateGUID = liftFC $ GenerateGUID id + {-# INLINEABLE runSysCmd #-} + runSysCmd cmd = liftFC $ RunSysCmd cmd id + {-# INLINEABLE initSqlDBConnection #-} + initSqlDBConnection cfg = liftFC $ InitSqlDBConnection cfg id + {-# INLINEABLE deinitSqlDBConnection #-} + deinitSqlDBConnection conn = liftFC $ DeInitSqlDBConnection conn id + {-# INLINEABLE getSqlDBConnection #-} + getSqlDBConnection cfg = liftFC $ GetSqlDBConnection cfg id + {-# INLINEABLE initKVDBConnection #-} + initKVDBConnection cfg = liftFC $ InitKVDBConnection cfg id + {-# INLINEABLE deinitKVDBConnection #-} + deinitKVDBConnection conn = liftFC $ DeInitKVDBConnection conn id + {-# INLINEABLE getKVDBConnection #-} + getKVDBConnection cfg = liftFC $ GetKVDBConnection cfg id + {-# INLINEABLE runDB #-} + runDB conn dbAct = liftFC $ RunDB conn dbAct False id + {-# INLINEABLE runTransaction #-} + runTransaction conn dbAct = liftFC $ RunDB conn dbAct True id + {-# INLINEABLE await #-} + await mbMcs awaitable = liftFC $ Await mbMcs awaitable id + {-# INLINEABLE runSafeFlow #-} + runSafeFlow flow = do + safeFlowGUID <- generateGUID + liftFC $ RunSafeFlow safeFlowGUID flow id + {-# INLINEABLE runKVDB #-} + runKVDB cName act = liftFC $ RunKVDB cName act id + {-# INLINEABLE runPubSub #-} + runPubSub act = liftFC $ RunPubSub act id + {-# INLINEABLE publish #-} + publish channel payload = runPubSub $ PubSub $ const $ PSL.publish channel payload + {-# INLINEABLE subscribe #-} + subscribe channels cb = fmap (runIO' "subscribe") $ + runPubSub $ PubSub $ \runFlow -> PSL.subscribe channels (runFlow . cb) + {-# INLINEABLE psubscribe #-} + psubscribe channels cb = fmap (runIO' "psubscribe") $ + runPubSub $ PubSub $ \runFlow -> PSL.psubscribe channels (\ch -> runFlow . cb ch) + +instance MonadFlow m => MonadFlow (ReaderT r m) where + {-# INLINEABLE callServantAPI #-} + callServantAPI mbMgrSel url = lift . callServantAPI mbMgrSel url + {-# INLINEABLE callHTTPWithCert #-} + callHTTPWithCert url = lift . callHTTPWithCert url + {-# INLINEABLE evalLogger' #-} + evalLogger' = lift . evalLogger' + {-# INLINEABLE runIO' #-} + runIO' descr = lift . runIO' descr + {-# INLINEABLE runUntracedIO' #-} + runUntracedIO' descr = lift . runUntracedIO' descr + {-# INLINEABLE getOption #-} + getOption = lift . getOption + {-# INLINEABLE setOption #-} + setOption k = lift . setOption k + {-# INLINEABLE delOption #-} + delOption = lift . delOption + {-# INLINEABLE generateGUID #-} + generateGUID = lift generateGUID + {-# INLINEABLE runSysCmd #-} + runSysCmd = lift . runSysCmd + {-# INLINEABLE initSqlDBConnection #-} + initSqlDBConnection = lift . initSqlDBConnection + {-# INLINEABLE deinitSqlDBConnection #-} + deinitSqlDBConnection = lift . deinitSqlDBConnection + {-# INLINEABLE getSqlDBConnection #-} + getSqlDBConnection = lift . getSqlDBConnection + {-# INLINEABLE initKVDBConnection #-} + initKVDBConnection = lift . initKVDBConnection + {-# INLINEABLE deinitKVDBConnection #-} + deinitKVDBConnection = lift . deinitKVDBConnection + {-# INLINEABLE getKVDBConnection #-} + getKVDBConnection = lift . getKVDBConnection + {-# INLINEABLE runDB #-} + runDB conn = lift . runDB conn + {-# INLINEABLE runTransaction #-} + runTransaction conn = lift . runTransaction conn + {-# INLINEABLE await #-} + await mbMcs = lift . await mbMcs + {-# INLINEABLE runSafeFlow #-} + runSafeFlow = lift . 
runSafeFlow + {-# INLINEABLE runKVDB #-} + runKVDB cName = lift . runKVDB cName + {-# INLINEABLE runPubSub #-} + runPubSub = lift . runPubSub + {-# INLINEABLE publish #-} + publish channel = lift . publish channel + {-# INLINEABLE subscribe #-} + subscribe channels = lift . subscribe channels + {-# INLINEABLE psubscribe #-} + psubscribe channels = lift . psubscribe channels + +instance MonadFlow m => MonadFlow (StateT s m) where + {-# INLINEABLE callServantAPI #-} + callServantAPI mbMgrSel url = lift . callServantAPI mbMgrSel url + {-# INLINEABLE callHTTPWithCert #-} + callHTTPWithCert url = lift . callHTTPWithCert url + {-# INLINEABLE evalLogger' #-} + evalLogger' = lift . evalLogger' + {-# INLINEABLE runIO' #-} + runIO' descr = lift . runIO' descr + {-# INLINEABLE runUntracedIO' #-} + runUntracedIO' descr = lift . runUntracedIO' descr + {-# INLINEABLE getOption #-} + getOption = lift . getOption + {-# INLINEABLE setOption #-} + setOption k = lift . setOption k + {-# INLINEABLE delOption #-} + delOption = lift . delOption + {-# INLINEABLE generateGUID #-} + generateGUID = lift generateGUID + {-# INLINEABLE runSysCmd #-} + runSysCmd = lift . runSysCmd + {-# INLINEABLE initSqlDBConnection #-} + initSqlDBConnection = lift . initSqlDBConnection + {-# INLINEABLE deinitSqlDBConnection #-} + deinitSqlDBConnection = lift . deinitSqlDBConnection + {-# INLINEABLE getSqlDBConnection #-} + getSqlDBConnection = lift . getSqlDBConnection + {-# INLINEABLE initKVDBConnection #-} + initKVDBConnection = lift . initKVDBConnection + {-# INLINEABLE deinitKVDBConnection #-} + deinitKVDBConnection = lift . deinitKVDBConnection + {-# INLINEABLE getKVDBConnection #-} + getKVDBConnection = lift . getKVDBConnection + {-# INLINEABLE runDB #-} + runDB conn = lift . runDB conn + {-# INLINEABLE runTransaction #-} + runTransaction conn = lift . runTransaction conn + {-# INLINEABLE await #-} + await mbMcs = lift . await mbMcs + {-# INLINEABLE runSafeFlow #-} + runSafeFlow = lift . runSafeFlow + {-# INLINEABLE runKVDB #-} + runKVDB cName = lift . runKVDB cName + {-# INLINEABLE runPubSub #-} + runPubSub = lift . runPubSub + {-# INLINEABLE publish #-} + publish channel = lift . publish channel + {-# INLINEABLE subscribe #-} + subscribe channels = lift . subscribe channels + {-# INLINEABLE psubscribe #-} + psubscribe channels = lift . psubscribe channels + +instance (MonadFlow m, Monoid w) => MonadFlow (WriterT w m) where + {-# INLINEABLE callServantAPI #-} + callServantAPI mbMgrSel url = lift . callServantAPI mbMgrSel url + {-# INLINEABLE callHTTPWithCert #-} + callHTTPWithCert url = lift . callHTTPWithCert url + {-# INLINEABLE evalLogger' #-} + evalLogger' = lift . evalLogger' + {-# INLINEABLE runIO' #-} + runIO' descr = lift . runIO' descr + {-# INLINEABLE runUntracedIO' #-} + runUntracedIO' descr = lift . runUntracedIO' descr + {-# INLINEABLE getOption #-} + getOption = lift . getOption + {-# INLINEABLE setOption #-} + setOption k = lift . setOption k + {-# INLINEABLE delOption #-} + delOption = lift . delOption + {-# INLINEABLE generateGUID #-} + generateGUID = lift generateGUID + {-# INLINEABLE runSysCmd #-} + runSysCmd = lift . runSysCmd + {-# INLINEABLE initSqlDBConnection #-} + initSqlDBConnection = lift . initSqlDBConnection + {-# INLINEABLE deinitSqlDBConnection #-} + deinitSqlDBConnection = lift . deinitSqlDBConnection + {-# INLINEABLE getSqlDBConnection #-} + getSqlDBConnection = lift . getSqlDBConnection + {-# INLINEABLE initKVDBConnection #-} + initKVDBConnection = lift . 
initKVDBConnection + {-# INLINEABLE deinitKVDBConnection #-} + deinitKVDBConnection = lift . deinitKVDBConnection + {-# INLINEABLE getKVDBConnection #-} + getKVDBConnection = lift . getKVDBConnection + {-# INLINEABLE runDB #-} + runDB conn = lift . runDB conn + {-# INLINEABLE runTransaction #-} + runTransaction conn = lift . runTransaction conn + {-# INLINEABLE await #-} + await mbMcs = lift . await mbMcs + {-# INLINEABLE runSafeFlow #-} + runSafeFlow = lift . runSafeFlow + {-# INLINEABLE runKVDB #-} + runKVDB cName = lift . runKVDB cName + {-# INLINEABLE runPubSub #-} + runPubSub = lift . runPubSub + {-# INLINEABLE publish #-} + publish channel = lift . publish channel + {-# INLINEABLE subscribe #-} + subscribe channels = lift . subscribe channels + {-# INLINEABLE psubscribe #-} + psubscribe channels = lift . psubscribe channels + +instance MonadFlow m => MonadFlow (ExceptT e m) where + {-# INLINEABLE callServantAPI #-} + callServantAPI mbMgrSel url = lift . callServantAPI mbMgrSel url + {-# INLINEABLE callHTTPWithCert #-} + callHTTPWithCert url = lift . callHTTPWithCert url + {-# INLINEABLE evalLogger' #-} + evalLogger' = lift . evalLogger' + {-# INLINEABLE runIO' #-} + runIO' descr = lift . runIO' descr + {-# INLINEABLE runUntracedIO' #-} + runUntracedIO' descr = lift . runUntracedIO' descr + {-# INLINEABLE getOption #-} + getOption = lift . getOption + {-# INLINEABLE setOption #-} + setOption k = lift . setOption k + {-# INLINEABLE delOption #-} + delOption = lift . delOption + {-# INLINEABLE generateGUID #-} + generateGUID = lift generateGUID + {-# INLINEABLE runSysCmd #-} + runSysCmd = lift . runSysCmd + {-# INLINEABLE initSqlDBConnection #-} + initSqlDBConnection = lift . initSqlDBConnection + {-# INLINEABLE deinitSqlDBConnection #-} + deinitSqlDBConnection = lift . deinitSqlDBConnection + {-# INLINEABLE getSqlDBConnection #-} + getSqlDBConnection = lift . getSqlDBConnection + {-# INLINEABLE initKVDBConnection #-} + initKVDBConnection = lift . initKVDBConnection + {-# INLINEABLE deinitKVDBConnection #-} + deinitKVDBConnection = lift . deinitKVDBConnection + {-# INLINEABLE getKVDBConnection #-} + getKVDBConnection = lift . getKVDBConnection + {-# INLINEABLE runDB #-} + runDB conn = lift . runDB conn + {-# INLINEABLE runTransaction #-} + runTransaction conn = lift . runTransaction conn + {-# INLINEABLE await #-} + await mbMcs = lift . await mbMcs + {-# INLINEABLE runSafeFlow #-} + runSafeFlow = lift . runSafeFlow + {-# INLINEABLE runKVDB #-} + runKVDB cName = lift . runKVDB cName + {-# INLINEABLE runPubSub #-} + runPubSub = lift . runPubSub + {-# INLINEABLE publish #-} + publish channel = lift . publish channel + {-# INLINEABLE subscribe #-} + subscribe channels = lift . subscribe channels + {-# INLINEABLE psubscribe #-} + psubscribe channels = lift . psubscribe channels + +instance (MonadFlow m, Monoid w) => MonadFlow (RWST r w s m) where + {-# INLINEABLE callServantAPI #-} + callServantAPI mbMgrSel url = lift . callServantAPI mbMgrSel url + {-# INLINEABLE callHTTPWithCert #-} + callHTTPWithCert url = lift . callHTTPWithCert url + {-# INLINEABLE evalLogger' #-} + evalLogger' = lift . evalLogger' + {-# INLINEABLE runIO' #-} + runIO' descr = lift . runIO' descr + {-# INLINEABLE runUntracedIO' #-} + runUntracedIO' descr = lift . runUntracedIO' descr + {-# INLINEABLE getOption #-} + getOption = lift . getOption + {-# INLINEABLE setOption #-} + setOption k = lift . setOption k + {-# INLINEABLE delOption #-} + delOption = lift . 
delOption + {-# INLINEABLE generateGUID #-} + generateGUID = lift generateGUID + {-# INLINEABLE runSysCmd #-} + runSysCmd = lift . runSysCmd + {-# INLINEABLE initSqlDBConnection #-} + initSqlDBConnection = lift . initSqlDBConnection + {-# INLINEABLE deinitSqlDBConnection #-} + deinitSqlDBConnection = lift . deinitSqlDBConnection + {-# INLINEABLE getSqlDBConnection #-} + getSqlDBConnection = lift . getSqlDBConnection + {-# INLINEABLE initKVDBConnection #-} + initKVDBConnection = lift . initKVDBConnection + {-# INLINEABLE deinitKVDBConnection #-} + deinitKVDBConnection = lift . deinitKVDBConnection + {-# INLINEABLE getKVDBConnection #-} + getKVDBConnection = lift . getKVDBConnection + {-# INLINEABLE runDB #-} + runDB conn = lift . runDB conn + {-# INLINEABLE runTransaction #-} + runTransaction conn = lift . runTransaction conn + {-# INLINEABLE await #-} + await mbMcs = lift . await mbMcs + {-# INLINEABLE runSafeFlow #-} + runSafeFlow = lift . runSafeFlow + {-# INLINEABLE runKVDB #-} + runKVDB cName = lift . runKVDB cName + {-# INLINEABLE runPubSub #-} + runPubSub = lift . runPubSub + {-# INLINEABLE publish #-} + publish channel = lift . publish channel + {-# INLINEABLE subscribe #-} + subscribe channels = lift . subscribe channels + {-# INLINEABLE psubscribe #-} + psubscribe channels = lift . psubscribe channels + +-- TODO: save a builder in some state for using `hPutBuilder`? +-- +-- Doubts: +-- Is it the right place to put it? +-- Should the type be more generic than IO ()? +logCallStack :: (HasCallStack, MonadFlow m) => m () +logCallStack = logDebug ("CALLSTACK" :: Text) $ Text.pack $ prettyCallStack callStack + +-- customPrettyCallStack :: Int -> CallStack -> String +-- customPrettyCallStack numLines stack = +-- let stackLines = prettyCallStackLines stack +-- lastNumLines = takeEnd numLines stackLines +-- in "CallStack: " ++ intercalate "; " lastNumLines + +logExceptionCallStack :: (HasCallStack, Exception e, MonadFlow m) => e -> m () +logExceptionCallStack ex = logError ("EXCEPTION" :: Text) $ Text.pack $ displayException ex diff --git a/src/EulerHS/Framework/Interpreters.hs b/src/EulerHS/Framework/Interpreters.hs new file mode 100644 index 00000000..004fdfac --- /dev/null +++ b/src/EulerHS/Framework/Interpreters.hs @@ -0,0 +1,19 @@ +{- | +Module : EulerHS.Framework.Interpreters +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module reexports interpreters of the framework. + +This is an internal module. Import EulerHS.Interpreters instead. +-} + +module EulerHS.Framework.Interpreters + ( module X + ) where + +import EulerHS.Core.Logger.Interpreter as X +import EulerHS.Framework.Flow.Interpreter as X diff --git a/src/EulerHS/Framework/Language.hs b/src/EulerHS/Framework/Language.hs new file mode 100644 index 00000000..32953ade --- /dev/null +++ b/src/EulerHS/Framework/Language.hs @@ -0,0 +1,36 @@ +{- | +Module : EulerHS.Core.Types +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module reexports the language of the framework. + +This is an internal module. Import EulerHS.Language instead. 
+-} + +module EulerHS.Framework.Language + ( X.Flow, + X.FlowMethod (..), + X.MonadFlow (..), + X.ReaderFlow, + X.logCallStack, + X.logExceptionCallStack, + X.logInfo, + X.logError, + X.logDebug, + X.logWarning, + X.callAPI, + X.callAPI', + X.callHTTP, + X.runIO, + X.runUntracedIO, + X.forkFlow, + X.forkFlow', + X.unpackLanguagePubSub, + X.foldFlow + ) where + +import qualified EulerHS.Framework.Flow.Language as X diff --git a/src/EulerHS/Framework/Runtime.hs b/src/EulerHS/Framework/Runtime.hs new file mode 100644 index 00000000..0dced3c8 --- /dev/null +++ b/src/EulerHS/Framework/Runtime.hs @@ -0,0 +1,216 @@ +{- | +Module : EulerHS.Framework.Runtime +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module contains functions and types to work with `FlowRuntime`. + +This is an internal module. Import EulerHS.Runtime instead. +-} + +module EulerHS.Framework.Runtime + ( + -- * Framework Runtime + FlowRuntime(..) + , createFlowRuntime + , createFlowRuntime' + , withFlowRuntime + , kvDisconnect + , runPubSubWorker + , shouldFlowLogRawSql + ) where + +import EulerHS.Prelude + +import Network.HTTP.Client (Manager, newManager) +import Network.HTTP.Client.TLS (tlsManagerSettings) + +import qualified Data.Map as Map (empty) +import qualified Data.Pool as DP (destroyAllResources) +import qualified Database.Redis as RD +import qualified System.Mem as SYSM (performGC) + +import System.IO.Unsafe (unsafePerformIO) + +import qualified EulerHS.Core.Runtime as R +import qualified EulerHS.Core.Types as T + + +{- | FlowRuntime state and options. + +`FlowRuntime` is a structure that stores operational data of the framework, +such as native connections, internal state, threads, and other things +needed to run the framework. + +@ +import qualified EulerHS.Types as T +import qualified EulerHS.Language as L +import qualified EulerHS.Runtime as R +import qualified EulerHS.Interpreters as R + +myFlow :: L.Flow () +myFlow = L.runIO $ putStrLn @String "Hello there!" + +runApp :: IO () +runApp = do + let mkLoggerRt = R.createLoggerRuntime T.defaultFlowFormatter T.defaultLoggerConfig + R.withFlowRuntime (Just mkLoggerRt) + $ \flowRt -> R.runFlow flowRt myFlow +@ + +Typically, you need only one instance of `FlowRuntime` in your project. +You can run your flows with this instance in parallel, it should be thread-safe. + +It's okay to pass `FlowRuntime` here and there, but avoid changing its data. +Only the framework has a right to update those fields. + +Mutating any of its data from the outside will lead to an undefined behavior. +-} +data FlowRuntime = FlowRuntime + { _coreRuntime :: R.CoreRuntime + -- ^ Contains logger settings + , _defaultHttpClientManager :: Manager + -- ^ Http default manager, used for external api calls + , _httpClientManagers :: Map String Manager + -- ^ Http managers, used for external api calls + , _options :: MVar (Map Text Any) + -- ^ Typed key-value storage + , _kvdbConnections :: MVar (Map Text T.NativeKVDBConn) + -- ^ Connections for key-value databases + , _runMode :: T.RunMode + -- ^ ART mode in which current flow runs + , _sqldbConnections :: MVar (Map T.ConnTag T.NativeSqlPool) + -- ^ Connections for SQL databases + , _pubSubController :: RD.PubSubController + -- ^ Subscribe controller + , _pubSubConnection :: Maybe RD.Connection + -- ^ Connection being used for Publish + } + +-- | Create default FlowRuntime. 
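+--
+-- Normally you would not call this directly: 'withFlowRuntime' below brackets
+-- creation and disposal for you. A hedged sketch of manual assembly, built from
+-- the same helpers this module uses:
+--
+-- > main :: IO ()
+-- > main = do
+-- >   loggerRt <- R.createVoidLoggerRuntime
+-- >   coreRt   <- R.createCoreRuntime loggerRt
+-- >   flowRt   <- createFlowRuntime coreRt
+-- >   -- runFlow comes from EulerHS.Interpreters
+-- >   runFlow flowRt myFlow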
+createFlowRuntime :: R.CoreRuntime -> IO FlowRuntime +createFlowRuntime coreRt = do + defaultManagerVar <- newManager tlsManagerSettings + optionsVar <- newMVar mempty + kvdbConnections <- newMVar Map.empty + sqldbConnections <- newMVar Map.empty + pubSubController <- RD.newPubSubController [] [] + + pure $ FlowRuntime + { _coreRuntime = coreRt + , _defaultHttpClientManager = defaultManagerVar + , _httpClientManagers = Map.empty + , _options = optionsVar + , _kvdbConnections = kvdbConnections + , _runMode = T.RegularMode + , _sqldbConnections = sqldbConnections + , _pubSubController = pubSubController + , _pubSubConnection = Nothing + } + +-- | Create a flow runtime. This function takes a creation function for `LoggerRuntime`. +-- +-- Normally, you should not create `LoggerRuntime` manually, but rather delegate its creation +-- to this function and like. +createFlowRuntime' :: Maybe (IO R.LoggerRuntime) -> IO FlowRuntime +createFlowRuntime' Nothing = R.createVoidLoggerRuntime >>= R.createCoreRuntime >>= createFlowRuntime +createFlowRuntime' (Just loggerRtCreator) = loggerRtCreator >>= R.createCoreRuntime >>= createFlowRuntime + +-- | Clear resources in given 'FlowRuntime' +clearFlowRuntime :: FlowRuntime -> IO () +clearFlowRuntime FlowRuntime{..} = do + _ <- takeMVar _options + putMVar _options mempty + kvConns <- takeMVar _kvdbConnections + putMVar _kvdbConnections mempty + traverse_ kvDisconnect kvConns + sqlConns <- takeMVar _sqldbConnections + putMVar _sqldbConnections mempty + traverse_ sqlDisconnect sqlConns + -- The Manager will be shut down automatically via garbage collection. + SYSM.performGC + +-- | Returns True if the logger option "log raw SQL queries as debug messages" set. +shouldFlowLogRawSql :: FlowRuntime -> Bool +shouldFlowLogRawSql = R.shouldLogRawSql . R._loggerRuntime . _coreRuntime + +-- | Run flow with given logger runtime creation function. +withFlowRuntime :: Maybe (IO R.LoggerRuntime) -> (FlowRuntime -> IO a) -> IO a +withFlowRuntime Nothing actionF = + bracket R.createVoidLoggerRuntime R.clearLoggerRuntime $ \loggerRt -> + bracket (R.createCoreRuntime loggerRt) R.clearCoreRuntime $ \coreRt -> + bracket (createFlowRuntime coreRt) clearFlowRuntime actionF +withFlowRuntime (Just loggerRuntimeCreator) actionF = + bracket loggerRuntimeCreator R.clearLoggerRuntime $ \loggerRt -> + bracket (R.createCoreRuntime loggerRt) R.clearCoreRuntime $ \coreRt -> + bracket (createFlowRuntime coreRt) clearFlowRuntime actionF + +-- * Experimental PubSub mechanism bits. + +-- Use {-# NOINLINE foo #-} as a pragma on any function foo that calls unsafePerformIO. +-- If the call is inlined, the I/O may be performed more than once. 
+{-# NOINLINE pubSubWorkerLock #-} +pubSubWorkerLock :: MVar () +pubSubWorkerLock = unsafePerformIO $ newMVar () + +runPubSubWorker :: FlowRuntime -> (Text -> IO ()) -> IO (IO ()) +runPubSubWorker rt log = do + let tsecond = 10 ^ (6 :: Int) + + lock <- tryTakeMVar pubSubWorkerLock + case lock of + Nothing -> error "Unable to run Publish/Subscribe worker: Only one worker allowed" + Just _ -> pure () + + let mconnection = _pubSubConnection rt + + delayRef <- newIORef tsecond + + threadId <- case mconnection of + Nothing -> do + putMVar pubSubWorkerLock () + error "Unable to run Publish/Subscribe worker: No connection to Redis provided" + + Just conn -> forkIO $ forever $ do + res <- try @_ @SomeException $ RD.pubSubForever conn (_pubSubController rt) $ do + writeIORef delayRef tsecond + log "Publish/Subscribe worker: Run successfuly" + + case res of + Left e -> do + delay <- readIORef delayRef + + log $ "Publish/Subscribe worker: Got error: " <> show e + log $ "Publish/Subscribe worker: Restart in " <> show (delay `div` tsecond) <> " sec" + + modifyIORef' delayRef (\d -> d + d `div` 2) -- (* 1.5) + threadDelay delay + Right _ -> pure () + + pure $ do + killThread threadId + putMVar pubSubWorkerLock () + log $ "Publish/Subscribe worker: Killed" + +-- * Internal functions + +-- | Disconnect from a SQL DB. +-- +-- Internal function, should not be used in the business logic. +sqlDisconnect :: T.NativeSqlPool -> IO () +sqlDisconnect = \case + T.NativePGPool connPool -> DP.destroyAllResources connPool + T.NativeMySQLPool connPool -> DP.destroyAllResources connPool + T.NativeSQLitePool connPool -> DP.destroyAllResources connPool + T.NativeMockedPool -> pure () + +-- | Disconnect from an KV DB. +-- +-- Internal function, should not be used in the business logic. +kvDisconnect :: T.NativeKVDBConn -> IO () +kvDisconnect = \case + T.NativeKVDBMockedConn -> pure () + T.NativeKVDB conn -> RD.disconnect conn diff --git a/src/EulerHS/Interpreters.hs b/src/EulerHS/Interpreters.hs new file mode 100644 index 00000000..df8945ad --- /dev/null +++ b/src/EulerHS/Interpreters.hs @@ -0,0 +1,34 @@ +{- | +Module : EulerHS.Interpreters +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module contains interpreters and methods for running `Flow` scenarios. + +This module is better imported as qualified. + +@ +import qualified EulerHS.Types as T +import qualified EulerHS.Language as L +import qualified EulerHS.Runtime as R +import qualified EulerHS.Interpreters as R + +myFlow :: L.Flow () +myFlow = L.runIO $ putStrLn @String "Hello there!" + +runApp :: IO () +runApp = do + let mkLoggerRt = R.createLoggerRuntime T.defaultFlowFormatter T.defaultLoggerConfig + R.withFlowRuntime (Just mkLoggerRt) $ \flowRt -> R.runFlow flowRt myFlow +@ +-} + +module EulerHS.Interpreters + ( module X + ) where + +import EulerHS.Core.Interpreters as X +import EulerHS.Framework.Interpreters as X diff --git a/src/EulerHS/Language.hs b/src/EulerHS/Language.hs new file mode 100644 index 00000000..0e026b98 --- /dev/null +++ b/src/EulerHS/Language.hs @@ -0,0 +1,65 @@ +{- | +Module : EulerHS.Language +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This module provides you a public interface to the free monadic eDSLs of the framework. 
+ +The `Flow` type or its derivations for different monad stacks can be used +to describe business logic of a typical web application. + +This language provides you with several features out-of-the-box: + +- Logging +- SQL DB subsystem. Supported SQL DB backends: + * MySQL + * Postgres + * SQLite +- KV DB subsystem (Redis) +- Fork/await of flows (async evaluation) +- Typed options +- Servant-like HTTP client runner +- Low-level HTTP client runner +- Arbitrary IO effects +- Exception throwing and handling +- Redis-based PubSub connector (experimental) + +The `Flow` is a monad, so you can write sequential scenarios in a monadic form: + +@ +import EulerHS.Prelude +import qualified EulerHS.Types as T +import qualified EulerHS.Language as L +import qualified Servant.Client as S + +myFlow :: L.Flow (Either T.ClientError User) +myFlow = do + L.runIO $ putStrLn @String "Hello there!" + L.logInfo "myFlow" "This is a message from myFlow." + + let url = S.BaseUrl Http "127.0.0.1" 8081 "" + L.callAPI Nothing url getUser + +-- HTTP API powered by Servant +type API = "user" :> Get '[JSON] User + +getUser :: T.EulerClient User +getUser = client api +@ + +To run this logic, you need to create an instance of `FlowRuntime`, +and pass @myFlow@ to the `runFlow` method. + +This module is better imported as qualified. +-} + +module EulerHS.Language + ( module X + ) where + +import EulerHS.Core.Language as X +import EulerHS.Extra.Language as X +import EulerHS.Framework.Language as X hiding (unpackLanguagePubSub) diff --git a/src/EulerHS/Prelude.hs b/src/EulerHS/Prelude.hs new file mode 100644 index 00000000..10ea2eec --- /dev/null +++ b/src/EulerHS/Prelude.hs @@ -0,0 +1,81 @@ +{-# OPTIONS -fno-warn-orphans #-} +{-# OPTIONS -fno-warn-unused-imports #-} + +{- | +Module : EulerHS.Prelude +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +Custom prelude based on @universum@ by Serokell. +In contrast with the latter, it exports unsafe versions of such functions as +@head@, @last@ etc. It also has some other tiny changes here and there. +You may want to get familiar with the @universum@ documentation first. 
+-} + + +module EulerHS.Prelude + ( module X + , liftFC + , catchAny + -- JSON + , stripLensPrefixOptions + , stripAllLensPrefixOptions + , jsonSetField + , encodeJSON + , decodeJSON + ) where + +import Control.Concurrent as X (ThreadId, forkIO, killThread, + threadDelay) +import Control.Concurrent.STM as X (retry) +import Control.Concurrent.STM.TChan as X (TChan, newTChan, newTChanIO, + readTChan, tryReadTChan, + writeTChan) +import Control.Concurrent.STM.TMVar as X (TMVar, newEmptyTMVar, + newEmptyTMVarIO, newTMVar, + newTMVarIO, putTMVar, + readTMVar, takeTMVar, + tryReadTMVar) +import Control.Concurrent.STM.TVar as X (modifyTVar) +import Control.Exception as X (SomeException (..)) +import Control.Lens as X (at, (.=)) +import Control.Lens.TH as X (makeFieldsNoPrefix, makeLenses) +import Control.Monad as X (liftM, unless, void, when) +import Control.Monad.Free as X (Free (..), foldFree, liftF) +import Control.Monad.Free.Church as X (F (..), foldF, fromF, iter, + iterM, retract) +import Control.Newtype.Generics as X (Newtype, O, pack, unpack) +import Data.Aeson as X (FromJSON, FromJSONKey, ToJSON, ToJSONKey, + genericParseJSON, genericToJSON, parseJSON, + toJSON) +import Data.Function as X ((&)) +import Data.Kind as X (Type) +import Data.Maybe as X (fromJust, fromMaybe) +import Data.Serialize as X (Serialize) +import Fmt as X ((+|), (+||), (|+), (||+)) +import GHC.Base as X (until) +import GHC.Generics as X (Generic) +import Text.Read as X (read, readsPrec) + +-- includes Data.IORef +import Universum as X hiding (All, Option, Set, Type, head, init, + last, set, tail, trace, catchAny) +import Universum (catchAny) +import Universum.Functor.Fmap as X ((<<$>>)) +import Universum.Unsafe as X (head, init, last, tail, (!!)) + +import EulerHS.Extra.Aeson ( + stripLensPrefixOptions, stripAllLensPrefixOptions, jsonSetField, encodeJSON, decodeJSON) + +import qualified Control.Monad.Free.Church as CF +import qualified Control.Monad.Free.Class as MF + + + + +-- Lift for Church encoded Free +liftFC :: (Functor f, MF.MonadFree f m) => f a -> m a +liftFC = CF.liftF diff --git a/src/EulerHS/Runtime.hs b/src/EulerHS/Runtime.hs new file mode 100644 index 00000000..18463995 --- /dev/null +++ b/src/EulerHS/Runtime.hs @@ -0,0 +1,41 @@ +{- | +Module : EulerHS.Runtime +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This is a top module that reexports all the runtime-specific types and functions. + +This layer of the framework contains methods for creating and disposing runtimes +of different subsystems: logger, SQL, state and others. + +You typically create a single `FlowRuntime` instance and then use it to run your +`Flow` scenarios. + +This module is better imported as qualified. + +@ +import qualified EulerHS.Types as T +import qualified EulerHS.Language as L +import qualified EulerHS.Runtime as R +import qualified EulerHS.Interpreters as R + +myFlow :: L.Flow () +myFlow = L.runIO $ putStrLn @String "Hello there!" 
+ +runApp :: IO () +runApp = do + let mkLoggerRt = R.createLoggerRuntime T.defaultFlowFormatter T.defaultLoggerConfig + R.withFlowRuntime (Just mkLoggerRt) $ \flowRt -> R.runFlow flowRt myFlow +@ + +-} + +module EulerHS.Runtime + ( module X + ) where + +import EulerHS.Core.Runtime as X +import EulerHS.Framework.Runtime as X diff --git a/src/EulerHS/Types.hs b/src/EulerHS/Types.hs new file mode 100644 index 00000000..4dbebc8b --- /dev/null +++ b/src/EulerHS/Types.hs @@ -0,0 +1,39 @@ +{- | +Module : EulerHS.Types +Copyright : (C) Juspay Technologies Pvt Ltd 2019-2021 +License : Apache 2.0 (see the file LICENSE) +Maintainer : opensource@juspay.in +Stability : experimental +Portability : non-portable + +This is a top module that reexports all the public types of the framework +along with some helper functions. + +This module is better imported as qualified. + +@ +import qualified EulerHS.Types as T + +-- Beam imports +import Database.Beam.MySQL (MySQLM) + +mySQLDevConfig :: T.'DBConfig' MySQLM +mySQLDevConfig = T.'mkMySQLPoolConfig' "MySQL dev DB" cfg poolCfg + where + cfg :: T.'MySQLConfig' + cfg = T.'defaultMySQLConfig' + { T.connectPassword = "my pass" + , T.connectDatabase = "my db" + } + poolCfg = T.'defaultPoolConfig' + { T.keepAlive = 1000000 + } +@ +-} + +module EulerHS.Types + ( module X + ) where + + +import EulerHS.Core.Types as X diff --git a/stack.yaml b/stack.yaml new file mode 100644 index 00000000..0944abbd --- /dev/null +++ b/stack.yaml @@ -0,0 +1,51 @@ +resolver: lts-15.15 + +packages: + - . + +extra-deps: + - git: https://github.com/juspay/hedis + commit: 46ea0ea78e6d8d1a2b1a66e6f08078a37864ad80 #4ea54f16c0057acc99a9f0e9b63ea51ea4bf420e + - git: https://github.com/juspay/beam-mysql + commit: eab93370b30f90e26e39e4d99f51db052aebc992 + - git: https://github.com/juspay/mysql-haskell + commit: 788022d65538db422b02ecc0be138b862d2e5cee + - git: https://github.com/juspay/bytestring-lexing + commit: 0a46db1139011736687cb50bbd3877d223bcb737 + - git: https://github.com/juspay/beam + commit: 185ff060e63ab0b8a72775ee2742621dd6fefeb1 + subdirs: + - beam-core + - beam-migrate + - beam-sqlite + - beam-postgres + + # Needed for beam + - dependent-map-0.4.0.0@sha256:ca2b131046f4340a1c35d138c5a003fe4a5be96b14efc26291ed35fd08c62221,1657 + - dependent-sum-0.7.1.0@sha256:5599aa89637db434431b1dd3fa7c34bc3d565ee44f0519bfbc877be1927c2531,2068 + - haskell-src-exts-1.21.1@sha256:11d18ec3f463185f81b7819376b532e3087f8192cffc629aac5c9eec88897b35,4541 + - sqlite-simple-0.4.18.0@sha256:3ceea56375c0a3590c814e411a4eb86943f8d31b93b110ca159c90689b6b39e5,3002 + - constraints-extras-0.3.0.2@sha256:bf6884be65958e9188ae3c9e5547abfd6d201df021bff8a4704c2c4fe1e1ae5b,1784 + - direct-sqlite-2.3.26@sha256:04e835402f1508abca383182023e4e2b9b86297b8533afbd4e57d1a5652e0c23,3718 + # Needed for beam-mysql + - tcp-streams-1.0.1.1@sha256:35e9ecfa515797052f8c3c01834d2daebd5e93f3152c7fc98b32652bf6f0c052,2329 + - wire-streams-0.1.1.0@sha256:08816c7fa53b20f52e5c465252c106d9de8e6d9580ec0b6d9f000a34c7bcefc8,2130 + - mason-0.2.3@sha256:186ff6306c7d44dbf7b108b87f73a30d45c70cd5c87d6f2a88d300def5542fef,1226 + - record-dot-preprocessor-0.2.7@sha256:bf7e83b2a01675577f81536fc3246e3b54e9d2dd28bb645599813dc5c486fbee,2440 +# MySQL +# MacOS: +# Problem: MacOS build failure +# > Configuring mysql-0.1.7... +# > setup: Missing dependencies on foreign libraries: +# > * Missing (or bad) C libraries: ssl, crypto +# Possible solution: +# https://github.com/depressed-pho/HsOpenSSL/issues/41 +# TODO: how to make it portable? 
+extra-include-dirs: + - /usr/local/opt/openssl/include +extra-lib-dirs: + - /usr/local/opt/openssl/lib + +# Linux: +# sudo apt install mysql-client +# sudo apt-get install libmysqlclient-dev diff --git a/stack.yaml.lock b/stack.yaml.lock new file mode 100644 index 00000000..be7ae3dd --- /dev/null +++ b/stack.yaml.lock @@ -0,0 +1,178 @@ +# This file was autogenerated by Stack. +# You should not edit this file by hand. +# For more information, please see the documentation at: +# https://docs.haskellstack.org/en/stable/lock_files + +packages: +- completed: + name: hedis + version: 0.12.8.1 + git: https://github.com/juspay/hedis + pantry-tree: + size: 2570 + sha256: efe83ea607d47d7d8200c61a0dddabc5e7f292a2f451a98dec1326eb0903ef71 + commit: 46ea0ea78e6d8d1a2b1a66e6f08078a37864ad80 + original: + git: https://github.com/juspay/hedis + commit: 46ea0ea78e6d8d1a2b1a66e6f08078a37864ad80 +- completed: + name: beam-mysql + version: 1.2.1.0 + git: https://github.com/juspay/beam-mysql + pantry-tree: + size: 3064 + sha256: 895257a8580c28e74b8f91e00280ab22a694a4c24f4318ab6161dace37375d8d + commit: eab93370b30f90e26e39e4d99f51db052aebc992 + original: + git: https://github.com/juspay/beam-mysql + commit: eab93370b30f90e26e39e4d99f51db052aebc992 +- completed: + name: mysql-haskell + version: 0.8.4.2 + git: https://github.com/juspay/mysql-haskell + pantry-tree: + size: 3863 + sha256: 2306f63a061d6cf45264a4d08f7da14ab3c719d20c528480b41c54db31ceace8 + commit: 788022d65538db422b02ecc0be138b862d2e5cee + original: + git: https://github.com/juspay/mysql-haskell + commit: 788022d65538db422b02ecc0be138b862d2e5cee +- completed: + name: bytestring-lexing + version: 0.5.0.6 + git: https://github.com/juspay/bytestring-lexing + pantry-tree: + size: 2693 + sha256: d2ffa359a9def33fd7fdef6c55341753e5390fa92173e29f9cb047647710d7ef + commit: 0a46db1139011736687cb50bbd3877d223bcb737 + original: + git: https://github.com/juspay/bytestring-lexing + commit: 0a46db1139011736687cb50bbd3877d223bcb737 +- completed: + subdir: beam-core + name: beam-core + version: 0.9.0.0 + git: https://github.com/juspay/beam + pantry-tree: + size: 2623 + sha256: 365f420adf34642df7ffac56f80a578da48b3bebfac140748f3e163f83b4e37c + commit: 185ff060e63ab0b8a72775ee2742621dd6fefeb1 + original: + subdir: beam-core + git: https://github.com/juspay/beam + commit: 185ff060e63ab0b8a72775ee2742621dd6fefeb1 +- completed: + subdir: beam-migrate + name: beam-migrate + version: 0.5.0.0 + git: https://github.com/juspay/beam + pantry-tree: + size: 1825 + sha256: 3ef904145e6e05e2f1cf6558e40f109f2751f08d2f8ce446f4e43ab70675936b + commit: 185ff060e63ab0b8a72775ee2742621dd6fefeb1 + original: + subdir: beam-migrate + git: https://github.com/juspay/beam + commit: 185ff060e63ab0b8a72775ee2742621dd6fefeb1 +- completed: + subdir: beam-sqlite + name: beam-sqlite + version: 0.5.0.0 + git: https://github.com/juspay/beam + pantry-tree: + size: 859 + sha256: d28565e631ed326b35719b4f0b1825dbdda0daed39618bff3506b70f45b9d2e8 + commit: 185ff060e63ab0b8a72775ee2742621dd6fefeb1 + original: + subdir: beam-sqlite + git: https://github.com/juspay/beam + commit: 185ff060e63ab0b8a72775ee2742621dd6fefeb1 +- completed: + subdir: beam-postgres + name: beam-postgres + version: 0.5.0.0 + git: https://github.com/juspay/beam + pantry-tree: + size: 2594 + sha256: 4d11e99a86be3ce5067479308bec2a541fa60a88ffbb1f95c316017646ed4c57 + commit: 185ff060e63ab0b8a72775ee2742621dd6fefeb1 + original: + subdir: beam-postgres + git: https://github.com/juspay/beam + commit: 185ff060e63ab0b8a72775ee2742621dd6fefeb1 
+- completed: + hackage: dependent-map-0.4.0.0@sha256:ca2b131046f4340a1c35d138c5a003fe4a5be96b14efc26291ed35fd08c62221,1657 + pantry-tree: + size: 551 + sha256: 5defa30010904d2ad05a036f3eaf83793506717c93cbeb599f40db1a3632cfc5 + original: + hackage: dependent-map-0.4.0.0@sha256:ca2b131046f4340a1c35d138c5a003fe4a5be96b14efc26291ed35fd08c62221,1657 +- completed: + hackage: dependent-sum-0.7.1.0@sha256:5599aa89637db434431b1dd3fa7c34bc3d565ee44f0519bfbc877be1927c2531,2068 + pantry-tree: + size: 290 + sha256: 9cbfb32b5a8a782b7a1c941803fd517633cb699159b851c1d82267a9e9391b50 + original: + hackage: dependent-sum-0.7.1.0@sha256:5599aa89637db434431b1dd3fa7c34bc3d565ee44f0519bfbc877be1927c2531,2068 +- completed: + hackage: haskell-src-exts-1.21.1@sha256:11d18ec3f463185f81b7819376b532e3087f8192cffc629aac5c9eec88897b35,4541 + pantry-tree: + size: 95170 + sha256: 487db8defe20a7c8bdf5b4a9a68ec616a4349bdb643f2dc7c9d71e1a6495c8c7 + original: + hackage: haskell-src-exts-1.21.1@sha256:11d18ec3f463185f81b7819376b532e3087f8192cffc629aac5c9eec88897b35,4541 +- completed: + hackage: sqlite-simple-0.4.18.0@sha256:3ceea56375c0a3590c814e411a4eb86943f8d31b93b110ca159c90689b6b39e5,3002 + pantry-tree: + size: 1930 + sha256: e58b9955e483d51ee0966f8ba4384305d871480e2a38b32ee0fcd4573d74cf95 + original: + hackage: sqlite-simple-0.4.18.0@sha256:3ceea56375c0a3590c814e411a4eb86943f8d31b93b110ca159c90689b6b39e5,3002 +- completed: + hackage: constraints-extras-0.3.0.2@sha256:bf6884be65958e9188ae3c9e5547abfd6d201df021bff8a4704c2c4fe1e1ae5b,1784 + pantry-tree: + size: 594 + sha256: b0bcc96d375ee11b1972a2e9e8e42039b3f420b0e1c46e9c70652470445a6505 + original: + hackage: constraints-extras-0.3.0.2@sha256:bf6884be65958e9188ae3c9e5547abfd6d201df021bff8a4704c2c4fe1e1ae5b,1784 +- completed: + hackage: direct-sqlite-2.3.26@sha256:04e835402f1508abca383182023e4e2b9b86297b8533afbd4e57d1a5652e0c23,3718 + pantry-tree: + size: 770 + sha256: 11874ab21e10c5b54cd1e02a037b677dc1e2ee9986f38c599612c56654dc01c3 + original: + hackage: direct-sqlite-2.3.26@sha256:04e835402f1508abca383182023e4e2b9b86297b8533afbd4e57d1a5652e0c23,3718 +- completed: + hackage: tcp-streams-1.0.1.1@sha256:35e9ecfa515797052f8c3c01834d2daebd5e93f3152c7fc98b32652bf6f0c052,2329 + pantry-tree: + size: 1004 + sha256: 572071fca40a0b6c4cc950d10277a6f12e83cf4846882b6ef83fcccaa2c18c45 + original: + hackage: tcp-streams-1.0.1.1@sha256:35e9ecfa515797052f8c3c01834d2daebd5e93f3152c7fc98b32652bf6f0c052,2329 +- completed: + hackage: wire-streams-0.1.1.0@sha256:08816c7fa53b20f52e5c465252c106d9de8e6d9580ec0b6d9f000a34c7bcefc8,2130 + pantry-tree: + size: 506 + sha256: c99a12bfcbeacc5da8f166fbed1eb105a45f08be1a3a071fe9f903b386b14e1d + original: + hackage: wire-streams-0.1.1.0@sha256:08816c7fa53b20f52e5c465252c106d9de8e6d9580ec0b6d9f000a34c7bcefc8,2130 +- completed: + hackage: mason-0.2.3@sha256:186ff6306c7d44dbf7b108b87f73a30d45c70cd5c87d6f2a88d300def5542fef,1226 + pantry-tree: + size: 574 + sha256: 4147c0fd201f849d018f9408e82b75b0313294bb9fff290673dd30729eedf948 + original: + hackage: mason-0.2.3@sha256:186ff6306c7d44dbf7b108b87f73a30d45c70cd5c87d6f2a88d300def5542fef,1226 +- completed: + hackage: record-dot-preprocessor-0.2.7@sha256:bf7e83b2a01675577f81536fc3246e3b54e9d2dd28bb645599813dc5c486fbee,2440 + pantry-tree: + size: 1003 + sha256: 8e86a73101e570ce238522f45ffb906811116c752d1f08418a2abd078697488e + original: + hackage: record-dot-preprocessor-0.2.7@sha256:bf7e83b2a01675577f81536fc3246e3b54e9d2dd28bb645599813dc5c486fbee,2440 +snapshots: +- completed: + size: 496112 + url: 
https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/15/15.yaml + sha256: 86169722ad0056ffc9eacc157ef80ee21d7024f92c0d2961c89ccf432db230a3 + original: lts-15.15 diff --git a/test/EulerHS/TestData/API/Client.hs b/test/EulerHS/TestData/API/Client.hs new file mode 100644 index 00000000..f3917b75 --- /dev/null +++ b/test/EulerHS/TestData/API/Client.hs @@ -0,0 +1,31 @@ +module EulerHS.TestData.API.Client where + +import EulerHS.Prelude + +import EulerHS.Types +import Servant.API +import Servant.Mock (mock) +import Servant.Server (Server) + +import EulerHS.TestData.Types + + +type API = "user" :> Get '[JSON] User + :<|> "book" :> Get '[JSON] Book + + +port :: Int +port = 8081 + +api :: Proxy API +api = Proxy + +context :: Proxy '[] +context = Proxy + +getUser :: EulerClient User +getBook :: EulerClient Book +(getUser :<|> getBook) = client api + +server :: Server API +server = mock api context diff --git a/test/EulerHS/TestData/Scenarios/Scenario1.hs b/test/EulerHS/TestData/Scenarios/Scenario1.hs new file mode 100644 index 00000000..ba9da5e7 --- /dev/null +++ b/test/EulerHS/TestData/Scenarios/Scenario1.hs @@ -0,0 +1,24 @@ +{-# OPTIONS -fno-warn-deprecations #-} +module EulerHS.TestData.Scenarios.Scenario1 where + +import qualified EulerHS.Language as L +import EulerHS.Prelude hiding (getOption) +import Servant.Client (BaseUrl (..), Scheme (..)) + +import EulerHS.TestData.API.Client +import EulerHS.TestData.Types + +testScenario1 :: L.Flow User +testScenario1 = do + localUserName <- L.runSysCmd "whoami" + localGUID <- L.runIO (undefined :: IO String) + guid <- L.generateGUID + url <- maybe (mkUrl "127.0.0.1") mkUrl <$> L.getOption UrlKey + res <- L.callServantAPI Nothing url getUser + pure $ case res of + Right u | localGUID /= userGUID u -> u + Right u | otherwise -> User localUserName "" $ toString guid + _ -> User localUserName "Smith" $ toString guid + where + mkUrl :: String -> BaseUrl + mkUrl host = BaseUrl Http host port "" diff --git a/test/EulerHS/TestData/Types.hs b/test/EulerHS/TestData/Types.hs new file mode 100644 index 00000000..16d59fa6 --- /dev/null +++ b/test/EulerHS/TestData/Types.hs @@ -0,0 +1,149 @@ +{-# LANGUAGE DeriveAnyClass #-} +module EulerHS.TestData.Types where + +import qualified Data.Aeson as A +import EulerHS.Prelude +import EulerHS.Types + +import Test.QuickCheck.Arbitrary + + + +data UrlKey = UrlKey + deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) + + +data TestStringKey = TestStringKey + deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) + +data TestStringKey2 = TestStringKey2 + deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) + + +data TestIntKey = TestIntKey + deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) + +data TestIntKey2 = TestIntKey2 + deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) + + +data TestStringKeyAnotherEnc = TestStringKeyAnotherEnc + deriving (Generic, Typeable, Show, Eq, FromJSON) + +data TestStringKey2AnotherEnc = TestStringKey2AnotherEnc + deriving (Generic, Typeable, Show, Eq, FromJSON) + + +data TestKeyWithStringPayload = TestKeyWithStringPayload String + deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) + +data TestKeyWithIntPayload = TestKeyWithIntPayload Int + deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) + + +data TestKeyWithStringPayloadAnotherEnc = TestKeyWithStringPayloadAnotherEnc String + deriving (Generic, Typeable, Show, Eq, FromJSON) + +data TestKeyWithIntPayloadAnotherEnc = TestKeyWithIntPayloadAnotherEnc Int + deriving (Generic, Typeable, 
Show, Eq, FromJSON) + + + +newtype NTTestKeyWithStringPayload = NTTestKeyWithStringPayload String + deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) + +newtype NTTestKeyWithIntPayload = NTTestKeyWithIntPayload Int + deriving (Generic, Typeable, Show, Eq, ToJSON, FromJSON) + + +newtype NTTestKeyWithStringPayloadAnotherEnc = NTTestKeyWithStringPayloadAnotherEnc String + deriving (Generic, Typeable, Show, Eq, FromJSON) + +newtype NTTestKeyWithIntPayloadAnotherEnc = NTTestKeyWithIntPayloadAnotherEnc Int + deriving (Generic, Typeable, Show, Eq, FromJSON) + + + +instance A.ToJSON TestStringKeyAnotherEnc + where toJSON = A.genericToJSON $ A.defaultOptions { A.tagSingleConstructors = True } + +instance A.ToJSON TestStringKey2AnotherEnc + where toJSON = A.genericToJSON $ A.defaultOptions { A.tagSingleConstructors = True } + + +instance A.ToJSON TestKeyWithStringPayloadAnotherEnc + where toJSON = A.genericToJSON $ A.defaultOptions { A.tagSingleConstructors = True } + +instance A.ToJSON TestKeyWithIntPayloadAnotherEnc + where toJSON = A.genericToJSON $ A.defaultOptions { A.tagSingleConstructors = True } + + +instance A.ToJSON NTTestKeyWithStringPayloadAnotherEnc + where toJSON = A.genericToJSON $ A.defaultOptions { A.tagSingleConstructors = True } + +instance A.ToJSON NTTestKeyWithIntPayloadAnotherEnc + where toJSON = A.genericToJSON $ A.defaultOptions { A.tagSingleConstructors = True } + +instance OptionEntity UrlKey String +instance OptionEntity TestStringKey String +instance OptionEntity TestStringKey2 String +instance OptionEntity TestIntKey Int +instance OptionEntity TestIntKey2 Int + +instance OptionEntity TestStringKeyAnotherEnc String +instance OptionEntity TestStringKey2AnotherEnc String + +instance OptionEntity TestKeyWithStringPayload String +instance OptionEntity TestKeyWithIntPayload String + +instance OptionEntity TestKeyWithStringPayloadAnotherEnc String +instance OptionEntity TestKeyWithIntPayloadAnotherEnc String + +instance OptionEntity NTTestKeyWithStringPayload String +instance OptionEntity NTTestKeyWithIntPayload Int + +instance OptionEntity NTTestKeyWithStringPayloadAnotherEnc String +instance OptionEntity NTTestKeyWithIntPayloadAnotherEnc Int + + + +data TestKVals = TestKVals + { mbTestStringKey :: Maybe String + , mbTestStringKey2 :: Maybe String + , mbTestIntKey :: Maybe Int + , mbTestIntKey2 :: Maybe Int + , mbTestStringKeyAnotherEnc :: Maybe String + , mbTestStringKey2AnotherEnc :: Maybe String + , mbTestKeyWithStringPayloadS1 :: Maybe String + , mbTestKeyWithStringPayloadS2 :: Maybe String + , mbTestKeyWithIntPayloadS1 :: Maybe String + , mbTestKeyWithIntPayloadS2 :: Maybe String + , mbTestKeyWithStringPayloadAnotherEncS1 :: Maybe String + , mbTestKeyWithStringPayloadAnotherEncS2 :: Maybe String + , mbTestKeyWithIntPayloadAnotherEncS1 :: Maybe String + , mbTestKeyWithIntPayloadAnotherEncS2 :: Maybe String + , mbNTTestKeyWithStringPayloadS1 :: Maybe String + , mbNTTestKeyWithStringPayloadS2 :: Maybe String + , mbNTTestKeyWithIntPayloadS1 :: Maybe Int + , mbNTTestKeyWithIntPayloadS2 :: Maybe Int + , mbNTTestKeyWithStringPayloadAnotherEncS1 :: Maybe String + , mbNTTestKeyWithStringPayloadAnotherEncS2 :: Maybe String + , mbNTTestKeyWithIntPayloadAnotherEncS1 :: Maybe Int + , mbNTTestKeyWithIntPayloadAnotherEncS2 :: Maybe Int + } + deriving (Show, Eq) + +---------------------------------- + + +data User = User { firstName :: String, lastName :: String , userGUID :: String} + deriving (Generic, Show, Eq, ToJSON, FromJSON ) + +instance Arbitrary User where + arbitrary = 
User <$> arbitrary <*> arbitrary <*> arbitrary + +data Book = Book { author :: String, name :: String } + deriving (Generic, Show, Eq, ToJSON, FromJSON ) + +instance Arbitrary Book where + arbitrary = Book <$> arbitrary <*> arbitrary diff --git a/test/EulerHS/TestData/test.db.template b/test/EulerHS/TestData/test.db.template new file mode 100644 index 00000000..17603a5a Binary files /dev/null and b/test/EulerHS/TestData/test.db.template differ diff --git a/test/EulerHS/Testing/CommonLog.hs b/test/EulerHS/Testing/CommonLog.hs new file mode 100644 index 00000000..928baa70 --- /dev/null +++ b/test/EulerHS/Testing/CommonLog.hs @@ -0,0 +1,90 @@ +module EulerHS.Testing.CommonLog where + +import EulerHS.Prelude +import EulerHS.Testing.HSLog (HSLog (..)) +import qualified EulerHS.Testing.HSLog as HSLog +import EulerHS.Testing.PSLog (PSLog (..)) +import qualified EulerHS.Testing.PSLog as PSLog + +data CommonLog + = CMLog + | CMIO + | CMRunDB + | CMGetDBConn + | CMGetKVDBConn + | CMFork + | CMSysCmd + | CMKVDB + | CMException + | CMSetOpt + | CMGetOpt + | CMGenGuid + | CMCallApi + deriving (Eq, Ord, Show) + +isLog :: CommonLog -> Bool +isLog = \case + CMLog -> True + _ -> False + +fromHSLog :: HSLog.HSLog -> Maybe CommonLog +fromHSLog = \case + SetEntry -> Just CMKVDB + SetExEntry -> Just CMKVDB + GetEntry -> Just CMKVDB + ExistsEntry -> Just CMKVDB + DelEntry -> Just CMKVDB + ExpireEntry -> Just CMKVDB + IncrEntry -> Just CMKVDB + HSetEntry -> Just CMKVDB + HGetEntry -> Just CMKVDB + MultiExecEntry -> Nothing + HSLog.ThrowExceptionEntry -> Just CMException + CallServantApiEntry -> Just CMCallApi + HSLog.SetOptionEntry -> Just CMSetOpt + HSLog.GetOptionEntry -> Just CMGetOpt + HSLog.RunSysCmdEntry -> Just CMSysCmd + ForkEntry -> Just CMFork + GeneratedGUIDEntry -> Just CMGenGuid + RunIOEntry -> Just CMIO + InitSqlDBConnectionEntry -> Nothing + DeInitSqlDBConnectionEntry -> Nothing + GetSqlDBConnectionEntry -> Just CMGetDBConn + HSLog.RunDBEntry -> Just CMRunDB + GetKVDBConnectionEntry -> Just CMGetKVDBConn + AwaitEntry -> Nothing + RunSafeFlowEntry -> Nothing + LogMessageEntry -> Just CMLog + +fromPSLog :: PSLog.PSLog -> Maybe CommonLog +fromPSLog = \case + LogEntry -> Just CMLog + PSLog.RunDBEntry -> Just CMRunDB + RunKVDBEitherEntry -> Just CMKVDB + DoAffEntry -> Just CMIO + PSLog.SetOptionEntry -> Just CMSetOpt + PSLog.GetOptionEntry -> Just CMGetOpt + GenerateGUIDEntry -> Just CMGenGuid + CallAPIEntry -> Just CMCallApi + ForkFlowEntry -> Just CMFork + PSLog.ThrowExceptionEntry -> Just CMException + PSLog.RunSysCmdEntry -> Just CMSysCmd + GetDBConnEntry -> Just CMGetDBConn + GetKVDBConnEntry -> Just CMGetKVDBConn + RunKVDBSimpleEntry -> Just CMKVDB + UnexpectedRecordingEnd -> Nothing + UnknownRRItem -> Nothing + ItemMismatch -> Nothing + ForkedFlowRecordingsMissed -> Nothing + MockDecodingFailed -> Nothing + UnknownPlaybackError -> Nothing + Other -> Nothing + +class HasCommonLog log where + toCommonLog :: log -> Maybe CommonLog + +instance HasCommonLog HSLog.HSLog where + toCommonLog = fromHSLog + +instance HasCommonLog PSLog.PSLog where + toCommonLog = fromPSLog diff --git a/test/EulerHS/Testing/Flow/Interpreter.hs b/test/EulerHS/Testing/Flow/Interpreter.hs new file mode 100644 index 00000000..07c3fcbf --- /dev/null +++ b/test/EulerHS/Testing/Flow/Interpreter.hs @@ -0,0 +1,56 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE NoMonomorphismRestriction #-} + +module EulerHS.Testing.Flow.Interpreter where + +import Data.Aeson (decode) +import Data.Generics.Product.Fields +import qualified 
EulerHS.Framework.Flow.Language as L +import EulerHS.Prelude +import qualified EulerHS.Runtime as R +import EulerHS.Testing.Types +import GHC.TypeLits +import Type.Reflection (typeRep) +import Unsafe.Coerce + +runFlowWithTestInterpreter :: FlowMockedValues -> R.FlowRuntime -> L.Flow a -> IO a +runFlowWithTestInterpreter mv flowRt (L.Flow comp) = foldF (interpretFlowMethod mv flowRt) comp + +interpretFlowMethod :: FlowMockedValues -> R.FlowRuntime -> L.FlowMethod a -> IO a + +interpretFlowMethod mmv _ (L.RunIO _ _ next) = do + v <- takeMockedVal @"mockedRunIO" mmv + next <$> (pure $ unsafeCoerce v) + +interpretFlowMethod mmv _ (L.CallServantAPI _ _ _ next) = do + v <- takeMockedVal @"mockedCallServantAPI" mmv + next <$> (pure $ unsafeCoerce v) + +interpretFlowMethod mmv R.FlowRuntime {..} (L.GetOption _ next) = do + v <- takeMockedVal @"mockedGetOption" mmv + next <$> (pure $ decode v) + +interpretFlowMethod _ R.FlowRuntime {..} (L.SetOption _ _ next) = + next <$> pure () + +interpretFlowMethod mmv _ (L.GenerateGUID next) = do + v <- takeMockedVal @"mockedGenerateGUID" mmv + next <$> (pure v) + +interpretFlowMethod mmv _ (L.RunSysCmd _ next) = do + v <- takeMockedVal @"mockedRunSysCmd" mmv + next <$> (pure v) + +interpretFlowMethod _ _ _ = error "not yet supported." + + +takeMockedVal ::forall (f :: Symbol) a r + . (KnownSymbol f, Typeable r, HasField' f r [a]) + => MVar r -> IO a +takeMockedVal mmv = do + mv <- takeMVar mmv + (v,t) <- case (getField @f mv) of + [] -> error $ "empty " <> (show $ typeRep @f) <> " in " <> (show $ typeRep @r) + (x:xs) -> pure (x,xs) + putMVar mmv $ setField @f t mv + pure v diff --git a/test/EulerHS/Testing/Flow/Runtime.hs b/test/EulerHS/Testing/Flow/Runtime.hs new file mode 100644 index 00000000..0b124830 --- /dev/null +++ b/test/EulerHS/Testing/Flow/Runtime.hs @@ -0,0 +1,18 @@ +module EulerHS.Testing.Flow.Runtime where + +-- import EulerHS.Prelude +-- import EulerHS.Runtime +-- import Network.HTTP.Client (defaultManagerSettings, newManager) +-- import Database.Redis (checkedConnect, defaultConnectInfo, Redis(..)) +-- import Data.Map (singleton) + +-- type FlowRtInitializer = IO FlowRuntime + +--initDefaultFlowRt :: FlowRtInitializer +--initDefaultFlowRt = do +-- manager <- newMVar =<< newManager defaultManagerSettings +-- options <- newMVar mempty +-- coreRuntime <- createCoreRuntime =<< createVoidLoggerRuntime +-- conn <- checkedConnect defaultConnectInfo +-- connPool <- newMVar (singleton "redis" $ T.Redis conn) +-- pure $ FlowRuntime coreRuntime manager options connPool diff --git a/test/EulerHS/Testing/HSLog.hs b/test/EulerHS/Testing/HSLog.hs new file mode 100644 index 00000000..2ce97404 --- /dev/null +++ b/test/EulerHS/Testing/HSLog.hs @@ -0,0 +1,77 @@ +module EulerHS.Testing.HSLog where + +import Data.Aeson (FromJSON, Value (..), (.:)) +import qualified Data.Aeson as Aeson +import Data.Aeson.Types (prependFailure, typeMismatch) +import EulerHS.Prelude + +data HSLog + = SetEntry + | SetExEntry + | GetEntry + | ExistsEntry + | DelEntry + | ExpireEntry + | IncrEntry + | HSetEntry + | HGetEntry + | MultiExecEntry + | ThrowExceptionEntry + | CallServantApiEntry + | SetOptionEntry + | GetOptionEntry + | RunSysCmdEntry + | ForkEntry + | GeneratedGUIDEntry + | RunIOEntry + | InitSqlDBConnectionEntry + | DeInitSqlDBConnectionEntry + | GetSqlDBConnectionEntry + | RunDBEntry + | GetKVDBConnectionEntry + | AwaitEntry + | RunSafeFlowEntry + | LogMessageEntry + +hsLogFromText :: Text -> Maybe HSLog +hsLogFromText = \case + "SetEntry" -> Just SetEntry + "SetExEntry" 
-> Just SetExEntry + "GetEntry" -> Just GetEntry + "ExistsEntry" -> Just ExistsEntry + "DelEntry" -> Just DelEntry + "ExpireEntry" -> Just ExpireEntry + "IncrEntry" -> Just IncrEntry + "HSetEntry" -> Just HSetEntry + "HGetEntry" -> Just HGetEntry + "MultiExecEntry" -> Just MultiExecEntry + "ThrowExceptionEntry" -> Just ThrowExceptionEntry + "CallServantApiEntry" -> Just CallServantApiEntry + "SetOptionEntry" -> Just SetOptionEntry + "GetOptionEntry" -> Just GetOptionEntry + "RunSysCmdEntry" -> Just RunSysCmdEntry + "ForkEntry" -> Just ForkEntry + "GeneratedGUIDEntry" -> Just GeneratedGUIDEntry + "RunIOEntry" -> Just RunIOEntry + "InitSqlDBConnectionEntry" -> Just InitSqlDBConnectionEntry + "DeInitSqlDBConnectionEntry" -> Just DeInitSqlDBConnectionEntry + "GetSqlDBConnectionEntry" -> Just GetSqlDBConnectionEntry + "RunDBEntry" -> Just RunDBEntry + "GetKVDBConnectionEntry" -> Just GetKVDBConnectionEntry + "AwaitEntry" -> Just AwaitEntry + "RunSafeFlowEntry" -> Just RunSafeFlowEntry + "LogMessageEntry" -> Just LogMessageEntry + _ -> Nothing + +instance FromJSON HSLog where + parseJSON j = Aeson.withObject "HSLog" + (\o -> do + logType <- o .: "_entryName" + case hsLogFromText logType of + Nothing -> prependFailure "parsing HSLog failed, " + (typeMismatch "HSLog" j) + Just x -> pure x + ) + j + + diff --git a/test/EulerHS/Testing/PSLog.hs b/test/EulerHS/Testing/PSLog.hs new file mode 100644 index 00000000..267b36f4 --- /dev/null +++ b/test/EulerHS/Testing/PSLog.hs @@ -0,0 +1,67 @@ +module EulerHS.Testing.PSLog where + +import Data.Aeson (FromJSON, Value (..), (.:)) +import qualified Data.Aeson as Aeson +import Data.Aeson.Types (Parser, prependFailure, typeMismatch) +import Data.Vector ((!?)) +import EulerHS.Prelude + +data PSLog + = LogEntry + | RunDBEntry + | RunKVDBEitherEntry + | DoAffEntry + | SetOptionEntry + | GetOptionEntry + | GenerateGUIDEntry + | CallAPIEntry + | ForkFlowEntry + | ThrowExceptionEntry + | RunSysCmdEntry + | GetDBConnEntry + | GetKVDBConnEntry + | RunKVDBSimpleEntry + | UnexpectedRecordingEnd + | UnknownRRItem + | ItemMismatch + | ForkedFlowRecordingsMissed + | MockDecodingFailed + | UnknownPlaybackError + | Other + +psLogFromText :: Text -> Maybe PSLog +psLogFromText = \case + "LogEntry" -> Just LogEntry + "RunDBEntry" -> Just RunDBEntry + "RunKVDBEitherEntry" -> Just RunKVDBEitherEntry + "DoAffEntry" -> Just DoAffEntry + "SetOptionEntry" -> Just SetOptionEntry + "GetOptionEntry" -> Just GetOptionEntry + "GenerateGUIDEntry" -> Just GenerateGUIDEntry + "CallAPIEntry" -> Just CallAPIEntry + "ForkFlowEntry" -> Just ForkFlowEntry + "ThrowExceptionEntry" -> Just ThrowExceptionEntry + "RunSysCmdEntry" -> Just RunSysCmdEntry + "GetDBConnEntry" -> Just GetDBConnEntry + "GetKVDBConnEntry" -> Just GetKVDBConnEntry + "RunKVDBSimpleEntry" -> Just RunKVDBSimpleEntry + "UnexpectedRecordingEnd" -> Just UnexpectedRecordingEnd + "UnknownRRItem" -> Just UnknownRRItem + "ItemMismatch" -> Just ItemMismatch + "ForkedFlowRecordingsMissed" -> Just ForkedFlowRecordingsMissed + "MockDecodingFailed" -> Just MockDecodingFailed + "UnknownPlaybackError" -> Just UnknownPlaybackError + "Other" -> Just Other + _ -> Nothing + +instance FromJSON PSLog where + parseJSON j = Aeson.withArray "PSLog" + (\a -> do + logType <- (traverse Aeson.parseJSON $ a !? 
2 :: Parser (Maybe Text)) + case psLogFromText =<< logType of + Nothing -> prependFailure "parsing PSLog failed, " + (typeMismatch "PSLog" j) + Just x -> pure x + ) + j + diff --git a/test/EulerHS/Testing/Types.hs b/test/EulerHS/Testing/Types.hs new file mode 100644 index 00000000..cba21cab --- /dev/null +++ b/test/EulerHS/Testing/Types.hs @@ -0,0 +1,18 @@ +{-# LANGUAGE DeriveDataTypeable #-} +module EulerHS.Testing.Types where + +import qualified Data.ByteString.Lazy as BSL +import Data.Data +import EulerHS.Prelude + +data FlowMockedValues' = FlowMockedValues' + { mockedCallServantAPI :: [Any] + , mockedRunIO :: [Any] + , mockedGetOption :: [BSL.ByteString] + , mockedGenerateGUID :: [Text] + , mockedRunSysCmd :: [String] + } deriving (Generic, Typeable) + + + +type FlowMockedValues = MVar FlowMockedValues' diff --git a/test/EulerHS/Testing/Util.hs b/test/EulerHS/Testing/Util.hs new file mode 100644 index 00000000..6b3e222a --- /dev/null +++ b/test/EulerHS/Testing/Util.hs @@ -0,0 +1,118 @@ +module EulerHS.Testing.Util where + +import Control.Monad.Except (MonadError, runExceptT, throwError) +import Data.Aeson (Result (..), Value (..)) +import qualified Data.Aeson as Aeson +import Data.HashMap.Strict (HashMap) +import qualified Data.HashMap.Strict as HashMap +import Data.Vector (Vector (..), (!?)) +import qualified Data.Vector as Vector +import EulerHS.Prelude + +import EulerHS.Testing.CommonLog (CommonLog (..), HasCommonLog) +import qualified EulerHS.Testing.CommonLog as CommonLog +import qualified EulerHS.Testing.HSLog as HSLog +import qualified EulerHS.Testing.PSLog as PSLog + + +readJSONFile :: FilePath -> IO (Maybe Aeson.Value) +readJSONFile fp = Aeson.decodeFileStrict fp + +purescriptEntries :: Value -> Maybe (Vector Value) +purescriptEntries v = do + methodRec <- caseJSONObject (HashMap.lookup "methodRecording") v + entries <- caseJSONObject (HashMap.lookup "entries") methodRec + caseJSONArray Just entries + +haskellEntries :: Value -> Maybe (Vector Value) +haskellEntries v = do + methodRec <- caseJSONObject (HashMap.lookup "methodRecording") v + entries <- caseJSONObject (HashMap.lookup "mrEntries") methodRec + recording <- caseJSONObject (HashMap.lookup "recording") entries + caseJSONArray Just recording + +caseJSONObject :: (HashMap Text Value -> Maybe a) -> Value -> Maybe a +caseJSONObject f v = case v of + Object o -> f o + _ -> Nothing + +caseJSONArray :: (Vector Value -> Maybe a) -> Value -> Maybe a +caseJSONArray f v = case v of + Array a -> f a + _ -> Nothing + +data CheckJSONError + = MissingEventError CommonLog + | LoadJsonError + | ImpossibleError + deriving (Eq, Ord, Show) + +checkJSONFiles :: (MonadIO m) => FilePath -> FilePath -> m (Either CheckJSONError ()) +checkJSONFiles psfile haskellfile = runExceptT $ do + psVal <- liftIO $ readJSONFile psfile + hsVal <- liftIO $ readJSONFile haskellfile + psLogs <- processPSFile psVal + hsLogs <- processHSFile hsVal + compareLogs psLogs hsLogs + pure $ () + +compareLogs :: (MonadError CheckJSONError m) => Vector CommonLog -> Vector CommonLog -> m () +compareLogs psLogs hsLogs = case psLogs !? 0 of + Nothing -> pure () + Just log -> case log `elem` hsLogs of + False -> throwError $ MissingEventError log + True -> compareLogs (Vector.take 1 psLogs) (drop1of log hsLogs) + +drop1of :: forall a. 
(Eq a) => a -> Vector a -> Vector a +drop1of x xs = fst $ foldr go (Vector.empty, Vector.empty) xs + where + go :: a -> (Vector a, Vector a) -> (Vector a, Vector a) + go new acc@(l, r) = if Vector.null r + then if (new == x) then (l, Vector.cons new r) else (Vector.cons new l, r) + else acc + + +processPSFile :: (MonadError CheckJSONError m) => Maybe Value -> m (Vector CommonLog) +processPSFile maybeValue = do + case maybeValue of + Nothing -> throwError LoadJsonError + Just val -> do + case ( fmap CommonLog.fromPSLog . catMaybeVec . fmap (aesonMaybe . Aeson.fromJSON)) <$> purescriptEntries val of + Nothing -> throwError ImpossibleError + Just vec -> pure . Vector.filter (not . CommonLog.isLog) $ catMaybeVec vec + +processHSFile :: (MonadError CheckJSONError m) => Maybe Value -> m (Vector CommonLog) +processHSFile maybeValue = do + case maybeValue of + Nothing -> throwError LoadJsonError + Just val -> do + case (fmap CommonLog.fromHSLog . catMaybeVec . fmap (aesonMaybe . Aeson.fromJSON)) <$> haskellEntries val of + Nothing -> throwError ImpossibleError + Just vec -> pure $ catMaybeVec vec + +aesonMaybe :: Result a -> Maybe a +aesonMaybe = \case + Error _ -> Nothing + Success a -> Just a + +catMaybeVec :: Vector (Maybe a) -> Vector a +catMaybeVec = foldr go Vector.empty + where + go :: Maybe a -> Vector a -> Vector a + go new acc = case new of + Nothing -> acc + Just n -> acc <> Vector.singleton n + +-- psVal :: Maybe Value + + +-- the plan +-- 1) read in json from file for haskell and PS ( Aeson value ) - done +-- 2) parse the value up into a data type (Haskell) +-- 3) parse the PS value up into a data type +-- 4) parse both values into some comparison type, which unifies discrepancies +-- 5) check that the haskell log has effects in the same order as the purescript log + + + + diff --git a/test/EulerHS/Tests/Framework/ArtSpec.hs b/test/EulerHS/Tests/Framework/ArtSpec.hs new file mode 100644 index 00000000..0d17ce66 --- /dev/null +++ b/test/EulerHS/Tests/Framework/ArtSpec.hs @@ -0,0 +1,321 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DuplicateRecordFields #-} +{-# LANGUAGE StandaloneDeriving #-} + +module EulerHS.Tests.Framework.ArtSpec where + +import Control.Monad (void) +import Data.Aeson as A +import Data.Aeson.Encode.Pretty +import qualified Data.ByteString.Lazy as Lazy +import qualified Data.Map as Map +import qualified Data.String.Conversions as Conversions +import qualified Data.Text as Text +import qualified Data.Text.Encoding as Encoding +import qualified Data.Text.Encoding.Error as Encoding +import qualified Data.UUID as UUID (toText) +import qualified Data.UUID.V4 as UUID (nextRandom) +import qualified Data.Vector as V +import Network.Wai.Handler.Warp +import Servant.Client +import Servant.Server +import qualified System.IO.Error as Error +import Test.Hspec + +import EulerHS.Interpreters +import EulerHS.Language as L +import EulerHS.Prelude +import EulerHS.Runtime +import EulerHS.TestData.API.Client +import EulerHS.TestData.Types +import EulerHS.Tests.Framework.Common +import EulerHS.Types as T + + +spec :: Spec +spec = do + describe "ART Test" $ do + it "Regular mode" $ do + rt <- initRegularRT + res <- runFlow rt $ mainScript + res `shouldBe` "hello\n" + + it "Recorder mode" $ do + flowRuntime <- initRecorderRT + result <- runFlow flowRuntime mainScript + case _runMode flowRuntime of + T.RecordingMode T.RecorderRuntime{recording} -> do + T.ResultRecording{..} <- awaitRecording recording + V.length recording `shouldBe` 10 + Map.size forkedRecordings `shouldBe` 2 + result 
`shouldBe` "hello\n" + _ -> fail "wrong mode" + + it "Player mode: replaying incorrect flow returns error (main flow)" $ do + flowRuntime <- initRecorderRT + _ <- runFlow flowRuntime mainScript + case _runMode flowRuntime of + T.RecordingMode T.RecorderRuntime{recording} -> do + entries <- awaitRecording recording + playerRuntime <- initPlayerRT entries + -- TODO runFlow shoul catch all exceptions internally + _ <- try @_ @SomeException $ runFlow playerRuntime mainScriptWrong + case _runMode playerRuntime of + T.ReplayingMode T.PlayerRuntime{rerror} -> do + errors <- awaitErrors rerror + flattenErrors errors `shouldNotBe` [] + _ -> fail "wrong mode" + _ -> fail "wrong mode" + + it "Player mode: replaying incorrect flow returns error (fork flow)" $ do + flowRuntime <- initRecorderRT + _ <- runFlow flowRuntime mainScript + case _runMode flowRuntime of + T.RecordingMode T.RecorderRuntime{recording} -> do + entries <- awaitRecording recording + playerRuntime <- initPlayerRT entries + -- TODO runFlow shoul catch all exceptions internally + _ <- try @_ @SomeException $ runFlow playerRuntime mainScriptWrongFork + case _runMode playerRuntime of + T.ReplayingMode T.PlayerRuntime{rerror} -> do + errors <- awaitErrors rerror + flattenErrors errors `shouldNotBe` [] + _ -> fail "wrong mode" + _ -> fail "wrong mode" + + it "Player mode: missing fork recording returns error (fork flow)" $ do + flowRuntime <- initRecorderRT + _ <- runFlow flowRuntime mainScript + case _runMode flowRuntime of + T.RecordingMode T.RecorderRuntime{recording} -> do + entries <- awaitRecording recording + playerRuntime <- initPlayerRT $ entries {forkedRecordings = Map.empty} + -- TODO runFlow shoul catch all exceptions internally + _ <- try @_ @SomeException $ runFlow playerRuntime mainScript + case _runMode playerRuntime of + T.ReplayingMode T.PlayerRuntime{rerror} -> do + errors <- awaitErrors rerror + flattenErrors errors `shouldNotBe` [] + _ -> fail "wrong mode" + _ -> fail "wrong mode" + +---------------------------------------------------------------------- + + it "Set/Get Option" $ do + let testOptionValue = "testOptionValue" :: String + mopt <- runFlowWithArt $ do + L.setOption TestStringKey testOptionValue + L.getOption TestStringKey + mopt `shouldBe` Just testOptionValue + + it "Generate distinct GUID" $ do + (guid1, guid2) <- runFlowWithArt $ do + guid1 <- L.generateGUID + guid2 <- L.generateGUID + pure (guid1, guid2) + guid1 `shouldNotBe` guid2 + + it "RunIO" $ do + res <- runFlowWithArt $ do + L.runIO $ pure () + res `shouldBe` () + + it "RunIO" $ do + res <- runFlowWithArt $ do + L.runIO $ pure () + res `shouldBe` () + + it "RunIO also works with Serializable types" $ do + let bs :: ByteString = "Hello" + res <- runFlowWithArt $ do + L.runIO $ pure bs + res `shouldBe` bs + + it "RunUntracedIO" $ do + res <- runFlowWithArt $ do + L.runUntracedIO $ pure () + res `shouldBe` () + + -- run an example with non-deterministic outputs + it "RunUntracedIO with UUID" $ do + runFlowWithArt $ do + L.runUntracedIO (UUID.toText <$> UUID.nextRandom) + pure () + + it "RunSysCmd" $ do + let value = "hello" + res <- runFlowWithArt $ do + L.runSysCmd $ "echo " <> value + res `shouldBe` "hello\n" + + it "Logging" $ runFlowWithArt $ do + L.logInfo "Info" "L.logInfo" + L.logError "Error" "L.logError" + L.logDebug "Debug" "L.logDebug" + L.logWarning "Warning" "L.logWarning" + + it "SafeFlow, throwException" $ do + res <- runFlowWithArt $ do + runSafeFlow $ (throwException err403 {errBody = "403"} :: Flow Text) + res `shouldBe` (Left $ 
show err403{errBody = "403"}) + + it "SafeFlow, RunSysCmd" $ do + res <- runFlowWithArt $ do + runSafeFlow $ L.runSysCmd $ "echo " <> "safe hello" + runSafeFlow $ L.runSysCmd $ "echo " <> "safe hello2" + res `shouldBe` (Right "safe hello2\n") + + it "Fork" $ runFlowWithArt $ do + L.forkFlow "Fork" $ + L.logInfo "Fork" "Hello" + + it "SafeFlow and Fork" $ runFlowWithArt $ do + runSafeFlow $ L.runSysCmd $ "echo " <> "safe hello" + L.forkFlow "Fork" $ + L.logInfo "Fork" "Hello" + + it "SafeFlow exception and Fork" $ runFlowWithArt $ do + runSafeFlow $ (throwException err403 {errBody = "403"} :: Flow Text) + L.forkFlow "Fork" $ + L.logInfo "Fork" "Hello" + + it "Fork by fork" $ runFlowWithArt $ do + L.forkFlow "Fork" $ + L.logInfo "Fork" "Hello" + L.forkFlow "Fork 2" $ + L.logInfo "Fork 2" "Bye" + + it "SafeFlow and Fork" $ runFlowWithArt $ do + runSafeFlow $ L.runSysCmd $ "echo " <> "safe hello" + L.forkFlow "Fork" $ + L.logInfo "Fork" "Hello" + + it "Fork and flow from SafeFlow" $ do + res <- runFlowWithArt $ do + runSafeFlow $ do + L.runSysCmd $ "echo " <> "safe hello" + L.forkFlow "Fork" $ + L.logInfo "Fork" "Hello" + res `shouldBe` (Right ()) + + it "Flow and fork from SafeFlow" $ do + res <- runFlowWithArt $ do + runSafeFlow $ do + L.forkFlow "Fork" $ + L.logInfo "Fork" "Hello" + L.runSysCmd $ "echo " <> "safe hello" + res `shouldBe` (Right "safe hello\n") + + it "Fork from Fork" $ runFlowWithArt $ do + L.forkFlow "ForkOne" $ do + L.logInfo "ForkOne" "Hello" + L.forkFlow "ForkTwo" $ + L.forkFlow "ForkThree" $ do + L.forkFlow "ForkFour" $ + L.logInfo "ForkFour" "Hello" + + it "Fork and safeFlow from Fork" $ runFlowWithArt $ do + L.forkFlow "ForkOne" $ do + L.logInfo "ForkOne" "Hello" + runSafeFlow $ L.runSysCmd $ "echo " <> "safe hello" + L.forkFlow "ForkTwo" $ + L.forkFlow "ForkThree" $ do + L.forkFlow "ForkFour" $ + L.logInfo "ForkFour" "Hello" + + around_ withServer $ do + describe "CallServantAPI tests" $ do + it "Simple request (book)" $ do + let url = BaseUrl Http "127.0.0.1" port "" + bookEither <- runFlowWithArt $ callServantAPI Nothing url getBook + bookEither `shouldSatisfy` isRight + + it "Simple request (user)" $ do + let url = BaseUrl Http "127.0.0.1" port "" + userEither <- runFlowWithArt $ callServantAPI Nothing url getUser + userEither `shouldSatisfy` isRight + + xit "Untyped HTTP API Calls" $ do + let url = "https://google.com" + (statusCode, status, body, headers) <- runFlowWithArt $ do + eResponse <- L.callHTTP $ T.httpGet "https://google.com" :: Flow (Either Text T.HTTPResponse) + response <- case eResponse of + Left err -> throwException err403 {errBody = "Expected a response"} + Right response -> pure response + return + ( getResponseCode response + , getResponseStatus response + , getResponseBody response + , getResponseHeaders response + ) + -- check status code + statusCode `shouldBe` 200 + status `shouldBe` "OK" + -- check body + -- Lazy.putStr (getLBinaryString body) + -- seem to be non-breaking latin-1 encoded spaces in what is supposed to + -- be a UTF-8 output xD; show some leniency + let + body' = + Encoding.decodeUtf8With + Encoding.lenientDecode + (Conversions.convertString body) + Text.isInfixOf "google" body' `shouldBe` True + Text.isInfixOf " + throwM $ Error.userError "Expected a Content-Type header" + Just headerValue -> do + Text.isInfixOf "text/html" headerValue `shouldBe` True + + xit "Untyped HTTP API Calls" $ do + let url = "https://127.0.0.1:666/fourohhhfour" + result <- runFlowWithArt $ do + L.callHTTP $ T.httpGet url :: Flow (Either Text 
T.HTTPResponse) + + err <- extractLeft result + -- putStrLn $ "ERROR" <> err + pure () + + +extractLeft :: Either a b -> IO a +extractLeft eitherVal = + case eitherVal of + Left val -> + pure val + Right res -> + throwM $ Error.userError "Expected Left from erroneous call!" + +mainScript :: Flow String +mainScript = do + guid1 <- generateGUID + guid2 <- generateGUID + -- This should re-execute each time and not break replay + runUntracedIO (UUID.toText <$> UUID.nextRandom) + forkFlow guid1 (void forkScript) + forkFlow guid2 (void forkScript) + runSysCmd "echo hello" + +mainScriptWrong :: Flow String +mainScriptWrong = do + guid1 <- generateGUID + forkFlow guid1 (void forkScript) + runSysCmd "echo hello" + +mainScriptWrongFork :: Flow String +mainScriptWrongFork = do + guid1 <- generateGUID + guid2 <- generateGUID + forkFlow guid1 (void forkScript) + forkFlow guid2 (void forkScriptWrong) + runSysCmd "echo hello" + +forkScript :: Flow String +forkScript = do + _ <- generateGUID + runSysCmd "echo hello" + +forkScriptWrong :: Flow String +forkScriptWrong = do + runSysCmd "echo hello" diff --git a/test/EulerHS/Tests/Framework/Common.hs b/test/EulerHS/Tests/Framework/Common.hs new file mode 100644 index 00000000..00071773 --- /dev/null +++ b/test/EulerHS/Tests/Framework/Common.hs @@ -0,0 +1,176 @@ +module EulerHS.Tests.Framework.Common where + +import Data.Aeson +import qualified Data.Map as Map +import qualified Data.Vector as V +import Network.HTTP.Client (newManager) +import Network.HTTP.Client.TLS (tlsManagerSettings) +import Network.Wai.Handler.Warp +import Servant.Server +import Test.Hspec + +import Control.Concurrent.MVar (modifyMVar_) +import Database.Redis (ConnectInfo, checkedConnect) +import EulerHS.Interpreters +import EulerHS.Language as L +import EulerHS.Prelude +import EulerHS.Runtime +import EulerHS.TestData.API.Client +import EulerHS.Types as T + + +runFlowWithArt :: (Show b, Eq b) => Flow b -> IO b +runFlowWithArt flow = do + (recording, recResult) <- runFlowRecording ($) flow + (errors , repResult) <- runFlowReplaying recording flow + -- print $ encode $ recording + -- putStrLn $ encodePretty $ recording + flattenErrors errors `shouldBe` [] + recResult `shouldBe` repResult + pure recResult + +runFlowRecording :: (forall b . 
(FlowRuntime -> IO b) -> FlowRuntime -> IO b) -> Flow a -> IO (ResultRecording, a) +runFlowRecording mode flow = do + let next flowRuntime = do + result <- runFlow flowRuntime flow + case _runMode flowRuntime of + T.RecordingMode T.RecorderRuntime{recording} -> do + entries <- awaitRecording recording + pure (entries, result) + _ -> fail "wrong mode" + + initRecorderRT >>= mode next + +runFlowReplaying :: ResultRecording -> Flow a -> IO (ResultReplayError, a) +runFlowReplaying recording flow = do + playerRuntime <- initPlayerRT recording + result <- runFlow playerRuntime flow + case _runMode playerRuntime of + T.ReplayingMode T.PlayerRuntime{rerror} -> do + errors <- awaitErrors rerror + pure (errors, result) + _ -> fail "wrong mode" + +withServer :: IO () -> IO () +withServer action = do + serverStartupLock <- newEmptyMVar + + let + settings = setBeforeMainLoop (putMVar serverStartupLock ()) $ + setPort port defaultSettings + + threadId <- forkIO $ runSettings settings $ serve api server + readMVar serverStartupLock + action + killThread threadId + +initRTWithManagers :: IO FlowRuntime +initRTWithManagers = do + flowRt <- withFlowRuntime Nothing pure + m1 <- newManager tlsManagerSettings + m2 <- newManager tlsManagerSettings + let managersMap = Map.fromList + [ ("manager1", m1) + , ("manager2", m2) + ] + pure $ flowRt { _httpClientManagers = managersMap } + +initRegularRT :: IO FlowRuntime +initRegularRT = do + flowRt <- withFlowRuntime Nothing pure + pure $ flowRt { _runMode = T.RegularMode } + +initRecorderRT :: IO FlowRuntime +initRecorderRT = do + recMVar <- newMVar V.empty + safeRecMVar <- newMVar Map.empty + forkedRecMVar <- newMVar Map.empty + let + recorderRuntime = T.RecorderRuntime + { flowGUID = "testFlow" + , recording = T.Recording recMVar safeRecMVar forkedRecMVar + , disableEntries = [] + } + flowRuntime <- withFlowRuntime Nothing pure + pure $ flowRuntime { _runMode = T.RecordingMode recorderRuntime } + + +initPlayerRT :: ResultRecording -> IO FlowRuntime +initPlayerRT recEntries = do + step <- newMVar 0 + freshReplayErrors <- T.ReplayErrors <$> newMVar Nothing <*> newMVar Map.empty <*> newMVar Map.empty + + let + playerRuntime = T.PlayerRuntime + { resRecording = recEntries + , stepMVar = step + , rerror = freshReplayErrors + , disableVerify = [] + , disableMocking = [] + , skipEntries = [] + , entriesFiltered = False + , flowGUID = "testFlow" + } + + flowRuntime <- withFlowRuntime Nothing pure + pure $ flowRuntime { _runMode = T.ReplayingMode playerRuntime } + +replayRecording :: ResultRecording -> Flow a -> IO a +replayRecording rec flow = do + (errors, result) <- runFlowReplaying rec flow + flattenErrors errors `shouldBe` [] + pure result + +-- TODO: This should not take a dummy argument! +-- prints replay in JSON format to console +runWithRedisConn :: ConnectInfo -> a -> Flow b -> IO b +runWithRedisConn connectInfo _ flow = do + (recording, recResult) <- runFlowRecording withInitRedis flow + print $ encode $ recording + -- putStrLn $ encodePretty $ recording + pure recResult + where + withInitRedis :: (FlowRuntime -> IO c) -> FlowRuntime -> IO c + withInitRedis next _rt = do + realRedisConnection <- checkedConnect connectInfo + let rt = _rt { _pubSubConnection = Just $ realRedisConnection } + + cancelWorker <- runPubSubWorker rt (const $ pure ()) + + modifyMVar_ (_kvdbConnections rt) $ + pure . 
Map.insert "redis" (NativeKVDB realRedisConnection) + + res <- next rt + cancelWorker + pure res + + +emptyMVarWithWatchDog :: Int -> IO (MVar a, IO (Maybe a), IO ()) +emptyMVarWithWatchDog t = do + guard $ t >= 0 + targetMVar <- newEmptyMVar + finalMVar <- newEmptyMVar + let watch = forkIO $ do + let loop n = do + mresult <- tryTakeMVar targetMVar + + case mresult of + Just a -> do + putMVar targetMVar a + putMVar finalMVar $ Just a + + Nothing -> do + if n > 0 + then do + threadDelay $ 10 ^ (5 :: Integer) + loop $ n - 1 + + else putMVar finalMVar Nothing + + + loop $ t * 10 + + let reset = void $ tryTakeMVar targetMVar + + + pure (targetMVar, watch >> takeMVar finalMVar, reset) diff --git a/test/EulerHS/Tests/Framework/DBSetup.hs b/test/EulerHS/Tests/Framework/DBSetup.hs new file mode 100644 index 00000000..33525276 --- /dev/null +++ b/test/EulerHS/Tests/Framework/DBSetup.hs @@ -0,0 +1,118 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE StandaloneDeriving #-} + +module EulerHS.Tests.Framework.DBSetup where + +import Data.Aeson as A +import Data.Aeson.Encode.Pretty +import qualified Database.Beam as B +import qualified Database.Beam.Backend.SQL as B +import qualified Database.Beam.Query as B +import Database.Beam.Sqlite.Connection (Sqlite, SqliteM) + +import EulerHS.Interpreters as I +import EulerHS.Language as L +import EulerHS.Prelude +import EulerHS.Runtime +import EulerHS.Tests.Framework.Common +import EulerHS.Types as T + + +-- TODO: Refactor the helper db functionskA +-- Prepare custom types for tests + +data UserT f = User + { _userGUID :: B.C f Int + , _firstName :: B.C f Text + , _lastName :: B.C f Text + } deriving (Generic, B.Beamable) + +instance B.Table UserT where + data PrimaryKey UserT f = + UserId (B.C f Int) deriving (Generic, B.Beamable) + primaryKey = UserId . _userGUID + +type User = UserT Identity + +type UserId = B.PrimaryKey UserT Identity + +deriving instance Show UserId +deriving instance Eq UserId +deriving instance ToJSON UserId +deriving instance FromJSON UserId + +deriving instance Show User +deriving instance Eq User +deriving instance ToJSON User +deriving instance FromJSON User + +userTMod = + B.tableModification + { _userGUID = B.fieldNamed "id" + , _firstName = B.fieldNamed "first_name" + , _lastName = B.fieldNamed "last_name" + } + +userEMod :: B.EntityModification (B.DatabaseEntity be db) be (B.TableEntity UserT) +userEMod = B.modifyTableFields userTMod + +data UserDB f = UserDB + { users :: f (B.TableEntity UserT) + } deriving (Generic, B.Database be) + +userDB :: B.DatabaseSettings be UserDB +userDB = B.defaultDbSettings `B.withDbModification` + B.dbModification + { users = userEMod + } + +-- Prepare connection to database file + +testDBName :: String +testDBName = "test/EulerHS/TestData/test.db" + +testDBTemplateName :: String +testDBTemplateName = "test/EulerHS/TestData/test.db.template" + +poolConfig = T.PoolConfig + { stripes = 1 + , keepAlive = 10 + , resourcesPerStripe = 50 + } + +sqliteCfg :: DBConfig SqliteM +sqliteCfg = T.mkSQLitePoolConfig "SQliteDB" testDBName poolConfig + +rmTestDB :: L.Flow () +rmTestDB = void $ L.runSysCmd $ "rm -f " <> testDBName + +prepareTestDB :: L.Flow () +prepareTestDB = do + rmTestDB + -- L.runSysCmd "pwd" >>= L.runIO . 
print + void $ L.runSysCmd $ "cp " <> testDBTemplateName <> " " <> testDBName + pure () + +withEmptyDB :: (FlowRuntime -> IO ()) -> IO () +withEmptyDB act = withFlowRuntime Nothing (\rt -> do + try (runFlow rt prepareTestDB) >>= \case + Left (e :: SomeException) -> + runFlow rt rmTestDB + `finally` error ("Preparing test values failed: " <> show e) + Right _ -> act rt `finally` runFlow rt rmTestDB + ) + +-- Prepare record log and test returns +connectOrFail :: T.DBConfig beM -> Flow (T.SqlConn beM) +connectOrFail cfg = L.getOrInitSqlConn cfg >>= \case + Left e -> error $ show e + Right conn -> pure conn + +runWithSQLConn :: (Show b, Eq b) => Flow b -> IO b +runWithSQLConn flow = do + (recording, recResult) <- runFlowRecording ($) flow + -- putStrLn $ encodePretty $ recording + print $ encode recording + -- writeFile "recorded" $ show $ encode $ recording + -- print recResult + pure recResult diff --git a/test/EulerHS/Tests/Framework/FlowSpec.hs b/test/EulerHS/Tests/Framework/FlowSpec.hs new file mode 100644 index 00000000..7024ab23 --- /dev/null +++ b/test/EulerHS/Tests/Framework/FlowSpec.hs @@ -0,0 +1,416 @@ +{-# LANGUAGE DeriveAnyClass #-} + +module EulerHS.Tests.Framework.FlowSpec where + +import qualified Control.Exception as E +import Control.Monad (void) +import Data.Aeson (encode) +import qualified Data.ByteString.Lazy as BSL +import qualified Data.UUID as UUID (fromText) +import EulerHS.Interpreters +import EulerHS.Language as L +import EulerHS.Prelude hiding (get, getOption) +import EulerHS.Runtime (withFlowRuntime, createLoggerRuntime) +import EulerHS.TestData.API.Client +import EulerHS.TestData.Scenarios.Scenario1 (testScenario1) +import EulerHS.TestData.Types +import EulerHS.Testing.Flow.Interpreter (runFlowWithTestInterpreter) +import EulerHS.Testing.Types (FlowMockedValues' (..)) +import EulerHS.Tests.Framework.Common (initRTWithManagers) +import EulerHS.Types (HttpManagerNotFound (..)) +import qualified EulerHS.Types as T +import Network.Wai.Handler.Warp +import Servant.Client (BaseUrl (..), ClientError (..), Scheme (..)) +import Servant.Server +import Test.Hspec hiding (runIO) +import Unsafe.Coerce + + +user :: Any +user = unsafeCoerce $ Right $ User "John" "Snow" "00000000-0000-0000-0000-000000000000" + +localGUID :: Any +localGUID = unsafeCoerce ("FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF" :: String) + +lhost :: BSL.ByteString +lhost = encode ("127.0.0.1" :: String) + + +scenario1MockedValues :: FlowMockedValues' +scenario1MockedValues = FlowMockedValues' + { mockedCallServantAPI = [user] + , mockedRunIO = [localGUID] + , mockedGetOption = [lhost] + , mockedGenerateGUID = ["00000000-0000-0000-0000-000000000000"] + , mockedRunSysCmd = ["Neo"] + } + +ioActWithException :: IO Text +ioActWithException = do + E.throw (E.AssertionFailed "Exception from IO") + pure "Text from IO" + +withServer :: IO () -> IO () +withServer action = do + serverStartupLock <- newEmptyMVar + let settings = setBeforeMainLoop (putMVar serverStartupLock ()) $ + setPort port defaultSettings + threadId <- forkIO $ runSettings settings $ serve api server + readMVar serverStartupLock + finally action (killThread threadId) + +spec :: Maybe T.LoggerConfig -> Spec +spec mbLoggerCfg = do + around (withFlowRuntime (mbLoggerCfg >>= Just . 
createLoggerRuntime (const $ pure show))) $ do + + describe "EulerHS flow language tests" $ do + + describe "TestInterpreters" $ do + + it "testScenario1" $ \rt -> do + mv <- newMVar scenario1MockedValues + res <- runFlowWithTestInterpreter mv rt testScenario1 + res `shouldBe` (User "John" "Snow" "00000000-0000-0000-0000-000000000000") + + around_ withServer $ do + describe "CallServantAPI tests with server" $ do + + it "Simple request (book) with default manager" $ \rt -> do + let url = BaseUrl Http "127.0.0.1" port "" + bookEither <- runFlow rt $ callServantAPI Nothing url getBook + bookEither `shouldSatisfy` isRight + + it "Simple request (user) with default manager" $ \rt -> do + let url = BaseUrl Http "127.0.0.1" port "" + userEither <- runFlow rt $ callServantAPI Nothing url getUser + userEither `shouldSatisfy` isRight + + it "Simple request (book) with manager1" $ \_ -> do + rt <- initRTWithManagers + let url = BaseUrl Http "127.0.0.1" port "" + bookEither <- runFlow rt $ callServantAPI (Just "manager1") url getBook + bookEither `shouldSatisfy` isRight + + it "Simple request (user) with manager2" $ \_ -> do + rt <- initRTWithManagers + let url = BaseUrl Http "127.0.0.1" port "" + userEither <- runFlow rt $ callServantAPI (Just "manager2") url getUser + userEither `shouldSatisfy` isRight + + it "Simple request with not existing manager" $ \_ -> do + rt <- initRTWithManagers + let url = BaseUrl Http "127.0.0.1" port "" + let error = displayException (ConnectionError (toException $ HttpManagerNotFound "notexist")) + userEither <- runFlow rt $ callServantAPI (Just "notexist") url getUser + case userEither of + Left e -> displayException e `shouldBe` error + Right x -> fail "Success result not expected" + + xit "Untyped HTTP API Calls" $ \rt -> do + -- rt <- initRTWithManagers + let url = "https://google.com" + (statusCode, status, body, headers) <- runFlow rt $ do + eResponse <- L.callHTTP $ T.httpGet "https://google.com" :: Flow (Either Text T.HTTPResponse) + response <- case eResponse of + Left err -> throwException err403 {errBody = "Expected a response"} + Right response -> pure response + return + ( T.getResponseCode response + , T.getResponseStatus response + , T.getResponseBody response + , T.getResponseHeaders response + ) + -- check status code + statusCode `shouldBe` 200 + status `shouldBe` "OK" + + xit "Untyped HTTP API Calls" $ \rt -> do + -- rt <- initRTWithManagers + let url = "https://127.0.0.1:666/fourohhhfour" + result <- runFlow rt $ do + L.callHTTP $ T.httpGet url :: Flow (Either Text T.HTTPResponse) + + pure () + + + describe "CallServantAPI tests without server" $ do + + it "Simple request (book)" $ \rt -> do + let url = BaseUrl Http "127.0.0.1" port "" + bookEither <- runFlow rt $ callServantAPI Nothing url getBook + bookEither `shouldSatisfy` isLeft + + it "Simple request (user)" $ \rt -> do + let url = BaseUrl Http "127.0.0.1" port "" + userEither <- runFlow rt $ callServantAPI Nothing url getUser + userEither `shouldSatisfy` isLeft + + describe "runIO tests" $ do + it "RunIO" $ \rt -> do + result <- runFlow rt $ runIO (pure ("hi" :: String)) + result `shouldBe` "hi" + + it "RunIO with exception" $ \rt -> do + result <- E.catch + (runFlow rt $ do + runIO ioActWithException + pure ("Never returned" :: Text)) + (\e -> do let err = show (e :: E.AssertionFailed) + pure err) + result `shouldBe` ("Exception from IO" :: Text) + + it "RunIO with catched exception" $ \rt -> do + result <-runFlow rt $ do + runIO $ + E.catch + ioActWithException + (\e -> do let err = show 
(e :: E.AssertionFailed) + pure err) + result `shouldBe` ("Exception from IO" :: Text) + + it "RunUntracedIO" $ \rt -> do + result <- runFlow rt $ runUntracedIO (pure ("hi" :: String)) + result `shouldBe` "hi" + + describe "STM tests" $ do + it "STM Test" $ \rt -> do + result <- runFlow rt $ do + countVar <- runUntracedIO $ newTVarIO (0 :: Int) + + let + updateCount = do + count <- readTVar countVar + when (count < 100) (writeTVar countVar (count + 1)) + readTVar countVar + + let + countTo100 = do + count <- atomically $ updateCount + if count < 100 + then countTo100 + else return count + + forkFlow "counter1" $ runUntracedIO $ void countTo100 + forkFlow "counter2" $ runUntracedIO $ void countTo100 + count <- runUntracedIO $ atomically $ readTVar countVar + return count + + result `shouldBe` 100 + + describe "Options" $ do + + it "One key" $ \rt -> do + result <- runFlow rt $ do + _ <- setOption TestStringKey "lore ipsum" + getOption TestStringKey + result `shouldBe` (Just "lore ipsum") + + it "Not found" $ \rt -> do + result <- runFlow rt $ do + _ <- setOption TestStringKey "lore ipsum" + getOption TestStringKey2 + result `shouldBe` Nothing + + it "Two keys" $ \rt -> do + result <- runFlow rt $ do + _ <- setOption TestStringKey "lore ipsum" + _ <- setOption TestStringKey2 "lore ipsum2" + s1 <- getOption TestStringKey + s2 <- getOption TestStringKey2 + pure (s1,s2) + result `shouldBe` (Just "lore ipsum", Just "lore ipsum2") + + + it "Delete Key" $ \rt -> do + result <- runFlow rt $ do + _ <- setOption TestStringKey "lorem ipsum" + s1 <- getOption TestStringKey + _ <- delOption TestStringKey + s2 <- getOption TestStringKey + pure (s1, s2) + result `shouldBe` (Just "lorem ipsum", Nothing) + + it "Different encoding, types & payload" $ \rt -> do + testKVals <- runFlow rt $ do + _ <- setOption (TestStringKey ) "mbTestStringKey" + _ <- setOption (TestStringKey2 ) "mbTestStringKey2" + _ <- setOption (TestIntKey ) 1001 + _ <- setOption (TestIntKey2 ) 2002 + _ <- setOption (TestStringKeyAnotherEnc ) "mbTestStringKeyAnotherEnc" + _ <- setOption (TestStringKey2AnotherEnc ) "mbTestStringKey2AnotherEnc" + _ <- setOption (TestKeyWithStringPayload "SP1" ) "mbTestKeyWithStringPayloadS1" + _ <- setOption (TestKeyWithStringPayload "SP2" ) "mbTestKeyWithStringPayloadS2" + _ <- setOption (TestKeyWithIntPayload 1001 ) "mbTestKeyWithIntPayloadS1" + _ <- setOption (TestKeyWithIntPayload 2002 ) "mbTestKeyWithIntPayloadS2" + _ <- setOption (TestKeyWithStringPayloadAnotherEnc "SP1" ) "mbTestKeyWithStringPayloadAnotherEncS1" + _ <- setOption (TestKeyWithStringPayloadAnotherEnc "SP2" ) "mbTestKeyWithStringPayloadAnotherEncS2" + _ <- setOption (TestKeyWithIntPayloadAnotherEnc 1001 ) "mbTestKeyWithIntPayloadAnotherEncS1" + _ <- setOption (TestKeyWithIntPayloadAnotherEnc 2002 ) "mbTestKeyWithIntPayloadAnotherEncS2" + _ <- setOption (NTTestKeyWithStringPayload "SP1" ) "mbNTTestKeyWithStringPayloadS1" + _ <- setOption (NTTestKeyWithStringPayload "SP2" ) "mbNTTestKeyWithStringPayloadS2" + _ <- setOption (NTTestKeyWithIntPayload 1001 ) 2333 + _ <- setOption (NTTestKeyWithIntPayload 2002 ) 3322 + _ <- setOption (NTTestKeyWithStringPayloadAnotherEnc "SP1" ) "mbNTTestKeyWithStringPayloadAnotherEncS1" + _ <- setOption (NTTestKeyWithStringPayloadAnotherEnc "SP2" ) "mbNTTestKeyWithStringPayloadAnotherEncS2" + _ <- setOption (NTTestKeyWithIntPayloadAnotherEnc 1001 ) 9009 + _ <- setOption (NTTestKeyWithIntPayloadAnotherEnc 2002 ) 1001 + + TestKVals + <$> getOption (TestStringKey ) + <*> getOption (TestStringKey2 ) + <*> getOption 
(TestIntKey ) + <*> getOption (TestIntKey2 ) + <*> getOption (TestStringKeyAnotherEnc ) + <*> getOption (TestStringKey2AnotherEnc ) + <*> getOption (TestKeyWithStringPayload "SP1" ) + <*> getOption (TestKeyWithStringPayload "SP2" ) + <*> getOption (TestKeyWithIntPayload 1001 ) + <*> getOption (TestKeyWithIntPayload 2002 ) + <*> getOption (TestKeyWithStringPayloadAnotherEnc "SP1" ) + <*> getOption (TestKeyWithStringPayloadAnotherEnc "SP2" ) + <*> getOption (TestKeyWithIntPayloadAnotherEnc 1001 ) + <*> getOption (TestKeyWithIntPayloadAnotherEnc 2002 ) + <*> getOption (NTTestKeyWithStringPayload "SP1" ) + <*> getOption (NTTestKeyWithStringPayload "SP2" ) + <*> getOption (NTTestKeyWithIntPayload 1001 ) + <*> getOption (NTTestKeyWithIntPayload 2002 ) + <*> getOption (NTTestKeyWithStringPayloadAnotherEnc "SP1" ) + <*> getOption (NTTestKeyWithStringPayloadAnotherEnc "SP2" ) + <*> getOption (NTTestKeyWithIntPayloadAnotherEnc 1001 ) + <*> getOption (NTTestKeyWithIntPayloadAnotherEnc 2002 ) + + testKVals `shouldBe` TestKVals + { mbTestStringKey = Just "mbTestStringKey" + , mbTestStringKey2 = Just "mbTestStringKey2" + , mbTestIntKey = Just 1001 + , mbTestIntKey2 = Just 2002 + , mbTestStringKeyAnotherEnc = Just "mbTestStringKeyAnotherEnc" + , mbTestStringKey2AnotherEnc = Just "mbTestStringKey2AnotherEnc" + , mbTestKeyWithStringPayloadS1 = Just "mbTestKeyWithStringPayloadS1" + , mbTestKeyWithStringPayloadS2 = Just "mbTestKeyWithStringPayloadS2" + , mbTestKeyWithIntPayloadS1 = Just "mbTestKeyWithIntPayloadS1" + , mbTestKeyWithIntPayloadS2 = Just "mbTestKeyWithIntPayloadS2" + , mbTestKeyWithStringPayloadAnotherEncS1 = Just "mbTestKeyWithStringPayloadAnotherEncS1" + , mbTestKeyWithStringPayloadAnotherEncS2 = Just "mbTestKeyWithStringPayloadAnotherEncS2" + , mbTestKeyWithIntPayloadAnotherEncS1 = Just "mbTestKeyWithIntPayloadAnotherEncS1" + , mbTestKeyWithIntPayloadAnotherEncS2 = Just "mbTestKeyWithIntPayloadAnotherEncS2" + , mbNTTestKeyWithStringPayloadS1 = Just "mbNTTestKeyWithStringPayloadS1" + , mbNTTestKeyWithStringPayloadS2 = Just "mbNTTestKeyWithStringPayloadS2" + , mbNTTestKeyWithIntPayloadS1 = Just 2333 + , mbNTTestKeyWithIntPayloadS2 = Just 3322 + , mbNTTestKeyWithStringPayloadAnotherEncS1 = Just "mbNTTestKeyWithStringPayloadAnotherEncS1" + , mbNTTestKeyWithStringPayloadAnotherEncS2 = Just "mbNTTestKeyWithStringPayloadAnotherEncS2" + , mbNTTestKeyWithIntPayloadAnotherEncS1 = Just 9009 + , mbNTTestKeyWithIntPayloadAnotherEncS2 = Just 1001 + } + + it "RunSysCmd" $ \rt -> do + result <- runFlow rt $ runSysCmd "echo test" + result `shouldBe` "test\n" + + it "RunSysCmd with bad command" $ \rt -> do + result <- E.catch + (runFlow rt $ runSysCmd "badEcho test") + (\e -> do let err = show (e :: E.SomeException) + pure err) + result `shouldBe` ("readCreateProcess: badEcho test (exit 127): failed" :: String) + + + it "GenerateGUID" $ \rt -> do + guid <- runFlow rt generateGUID + let maybeGUID = UUID.fromText guid + maybeGUID `shouldSatisfy` isJust + + it "ThrowException" $ \rt -> do + result <- E.catch + (runFlow rt $ do + throwException (E.AssertionFailed "Exception message") + pure "Never returned") + (\e -> do let err = show (e :: E.AssertionFailed) + pure err) + result `shouldBe` "Exception message" + + describe "ForkFlow" $ do + let i :: Int = 101 + it "Fork and successful await infinitely" $ \rt -> do + let flow = do + awaitable <- forkFlow' "101" (pure i) + await Nothing awaitable + result <- runFlow rt flow + result `shouldBe` (Right 101) + + it "SafeFlow, fork and successful await infinitely" $ \rt 
-> do + let flow = do + awaitable <- forkFlow' "101" $ runSafeFlow (pure i :: Flow Int) + await Nothing awaitable + result <- runFlow rt flow + result `shouldBe` (Right $ Right 101) + + it "SafeFlow with exception, fork and successful await infinitely" $ \rt -> do + let flow = do + awaitable <- forkFlow' "101" (throwException err403 {errBody = "403"} :: Flow Text) + await Nothing awaitable + result <- runFlow rt flow + result `shouldBe` (Left $ T.ForkedFlowError $ show err403 {errBody = "403"}) + + it "Safe flow with exception returns control to the caller" $ \rt -> do + let flow = do + void $ runSafeFlow (throwException err403 {errBody = "403"} :: Flow Text) + runIO (pure ("hi" :: String)) + result <- runFlow rt flow + result `shouldBe` "hi" + + it "Safe flow, RunSysCmd" $ \rt -> do + let flow = do + runSafeFlow $ L.runSysCmd $ "echo " <> "safe hello" + result <- runFlow rt flow + result `shouldBe` (Right "safe hello\n") + + -- This might or might not happen (race condition) + -- it "Fork and successful await 0" $ \rt -> do + -- let flow = do + -- awaitable <- forkFlow' "101" (pure i) + -- await (Just $ T.Microseconds 0) awaitable + -- result <- runFlow rt flow + -- result `shouldBe` (Just 101) + + it "Fork and successful await with a sufficient timeout 1" $ \rt -> do + let flow = do + awaitable <- forkFlow' "101" (pure i) + await (Just $ T.Microseconds 1000000) awaitable + result <- runFlow rt flow + result `shouldBe` (Right 101) + + it "Fork and successful await with a sufficient timeout 2" $ \rt -> do + let flow = do + awaitable <- forkFlow' "101" (runIO (threadDelay 1000) >> pure i) + await (Just $ T.Microseconds 1000000) awaitable + result <- runFlow rt flow + result `shouldBe` (Right 101) + + it "Fork and unsuccessful await with an insufficient timeout" $ \rt -> do + let flow = do + awaitable <- forkFlow' "101" (runIO (threadDelay 1000000) >> pure i) + await (Just $ T.Microseconds 1000) awaitable + result <- runFlow rt flow + result `shouldBe` (Left T.AwaitingTimeout) + + it "Fork and successful await for 2 flows" $ \rt -> do + let flow = do + awaitable1 <- forkFlow' "101" (runIO (threadDelay 10000) >> pure i) + awaitable2 <- forkFlow' "102" (runIO (threadDelay 100000) >> pure (i+1)) + mbRes1 <- await Nothing awaitable1 + mbRes2 <- await Nothing awaitable2 + pure (mbRes1, mbRes2) + result <- runFlow rt flow + result `shouldBe` (Right 101, Right 102) + + it "Fork and successful await 1 of 2 flows" $ \rt -> do + let flow = do + awaitable1 <- forkFlow' "101" (runIO (threadDelay 10000) >> pure i) + awaitable2 <- forkFlow' "102" (runIO (threadDelay 1000000) >> pure (i+1)) + mbRes1 <- await Nothing awaitable1 + mbRes2 <- await (Just $ T.Microseconds 1000) awaitable2 + pure (mbRes1, mbRes2) + result <- runFlow rt flow + result `shouldBe` (Right 101, Left T.AwaitingTimeout) diff --git a/test/EulerHS/Tests/Framework/KVDBArtSpec.hs b/test/EulerHS/Tests/Framework/KVDBArtSpec.hs new file mode 100644 index 00000000..2572a9bf --- /dev/null +++ b/test/EulerHS/Tests/Framework/KVDBArtSpec.hs @@ -0,0 +1,171 @@ +module EulerHS.Tests.Framework.KVDBArtSpec + ( spec + ) where + +import EulerHS.Prelude + +import Data.Aeson as A +import Test.Hspec + +import qualified Database.Redis as R +import EulerHS.Language as L +import EulerHS.Runtime +import EulerHS.Tests.Framework.Common +import EulerHS.Types as T + +connectInfo :: R.ConnectInfo +connectInfo = R.defaultConnectInfo {R.connectHost = "redis"} + +runWithRedisConn_ :: ResultRecording -> Flow b -> IO b +runWithRedisConn_ = replayRecording + +spec :: Spec +spec = do + 
describe "ART KVDB tests" $ do + it "get a correct key" $ do + result <- runWithRedisConn_ getKey $ L.runKVDB "redis" $ do + L.set "aaa" "bbb" + res <- L.get "aaa" + L.del ["aaa"] + pure res + result `shouldBe` Right (Just "bbb") + + it "get a wrong key" $ do + result <- runWithRedisConn_ getWrongKey $ L.runKVDB "redis" $ do + L.set "aaa" "bbb" + res <- L.get "aaac" + L.del ["aaa"] + pure res + result `shouldBe` Right Nothing + + it "delete existing keys" $ do + result <- runWithRedisConn_ deleteExisting $ L.runKVDB "redis" $ do + L.set "aaa" "bbb" + L.set "ccc" "ddd" + L.del ["aaa", "ccc"] + result `shouldBe` Right 2 + + it "delete keys (w/ no keys)" $ do + result <- runWithRedisConn_ deleteKeysNoKeys $ L.runKVDB "redis" $ do + L.del [] + result `shouldBe` Right 0 + + it "delete missing keys" $ do + result <- runWithRedisConn_ deleteMissing $ L.runKVDB "redis" $ do + L.del ["zzz", "yyy"] + result `shouldBe` Right 0 + + it "get a correct key from transaction" $ do + result <- runWithRedisConn_ getCorrectFromTx $ L.runKVDB "redis" $ L.multiExec $ do + L.setTx "aaa" "bbb" + res <- L.getTx "aaa" + L.delTx ["aaa"] + pure res + result `shouldBe` Right (T.TxSuccess (Just "bbb")) + + it "get incorrect key from transaction" $ do + result <- runWithRedisConn_ getIncorrectFromTx $ L.runKVDB "redis" $ L.multiExec $ do + res <- L.getTx "aaababababa" + pure res + result `shouldBe` Right (T.TxSuccess Nothing) + + it "setex sets value" $ do + let hour = 60 * 60 + result <- runWithRedisConn_ setExGetKey $ L.runKVDB "redis" $ do + L.setex "aaaex" hour "bbbex" + res <- L.get "aaaex" + L.del ["aaaex"] + pure res + result `shouldBe` Right (Just "bbbex") + + it "setex ttl works" $ do + result <- runWithRedisConn_ setExTtl $ do + L.runKVDB "redis" $ L.setex "aaaex" 1 "bbbex" + L.runIO $ threadDelay (2 * 10 ^ 6) + L.runKVDB "redis" $ do + res <- L.get "aaaex" + L.del ["aaaex"] + pure res + result `shouldBe` Right Nothing + + it "set only if not exist" $ do + result <- runWithRedisConn_ setIfNotExist $ L.runKVDB "redis" $ do + res1 <- L.setOpts "aaa" "bbb" L.NoTTL L.SetIfNotExist + res2 <- L.get "aaa" + res3 <- L.setOpts "aaa" "ccc" L.NoTTL L.SetIfNotExist + res4 <- L.get "aaa" + L.del ["aaa"] + pure (res1, res2, res3, res4) + result `shouldBe` Right (True, Just "bbb", False, Just "bbb") + + it "set only if exist" $ do + result <- runWithRedisConn_ setIfExist $ L.runKVDB "redis" $ do + res1 <- L.setOpts "aaa" "bbb" L.NoTTL L.SetIfExist + res2 <- L.get "aaa" + L.set "aaa" "bbb" + res3 <- L.setOpts "aaa" "ccc" L.NoTTL L.SetIfExist + res4 <- L.get "aaa" + L.del ["aaa"] + pure (res1, res2, res3, res4) + result `shouldBe` Right (False, Nothing, True, Just "ccc") + + it "set px ttl works" $ do + result <- runWithRedisConn_ setPxTtl $ do + L.runKVDB "redis" $ L.setOpts "aaapx" "bbbpx" (L.Milliseconds 500) L.SetAlways + res1 <- L.runKVDB "redis" $ L.get "aaapx" + L.runIO $ threadDelay (10 ^ 6) + res2 <- L.runKVDB "redis" $ L.get "aaapx" + L.runKVDB "redis" $ L.del ["aaapx"] + pure (res1, res2) + result `shouldBe` (Right (Just "bbbpx"), Right Nothing) + + it "xadd create and update stream" $ do + result <- runWithRedisConn_ xaddXlen $ L.runKVDB "redis" $ do + L.xadd "aaas" L.AutoID [("a", "1"), ("b", "2")] + res1 <- L.xlen "aaas" + L.xadd "aaas" L.AutoID [("c", "3")] + res2 <- L.xlen "aaas" + L.del ["aaas"] + res3 <- L.xlen "aaas" + pure (res1, res2, res3) + result `shouldBe` Right (1, 2, 0) + + +getKey :: ResultRecording +getKey = fromJust $ decode 
"{\"recording\":[{\"_entryName\":\"SetEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonValue\":{\"utf8\":\"bbb\",\"b64\":\"YmJi\"},\"jsonResult\":{\"Right\":{\"tag\":\"Ok\"}},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"GetEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":{\"Right\":{\"utf8\":\"bbb\",\"b64\":\"YmJi\"}},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"DelEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonResult\":{\"Right\":1},\"jsonKeys\":[{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}]},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +getWrongKey :: ResultRecording +getWrongKey = fromJust $ decode "{\"recording\":[{\"_entryName\":\"SetEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonValue\":{\"utf8\":\"bbb\",\"b64\":\"YmJi\"},\"jsonResult\":{\"Right\":{\"tag\":\"Ok\"}},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"GetEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":{\"Right\":null},\"jsonKey\":{\"utf8\":\"aaac\",\"b64\":\"YWFhYw==\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"DelEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonResult\":{\"Right\":1},\"jsonKeys\":[{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}]},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +deleteExisting :: ResultRecording +deleteExisting = fromJust $ decode "{\"recording\":[{\"_entryName\":\"SetEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonValue\":{\"utf8\":\"bbb\",\"b64\":\"YmJi\"},\"jsonResult\":{\"Right\":{\"tag\":\"Ok\"}},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"SetEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonValue\":{\"utf8\":\"ddd\",\"b64\":\"ZGRk\"},\"jsonResult\":{\"Right\":{\"tag\":\"Ok\"}},\"jsonKey\":{\"utf8\":\"ccc\",\"b64\":\"Y2Nj\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"DelEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonResult\":{\"Right\":2},\"jsonKeys\":[{\"utf8\":\"aaa\",\"b64\":\"YWFh\"},{\"utf8\":\"ccc\",\"b64\":\"Y2Nj\"}]},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +deleteKeysNoKeys :: ResultRecording +deleteKeysNoKeys = fromJust $ decode "{\"recording\":[{\"_entryName\":\"DelEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonResult\":{\"Right\":0},\"jsonKeys\":[]},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +deleteMissing :: ResultRecording +deleteMissing = fromJust $ decode "{\"recording\":[{\"_entryName\":\"DelEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonResult\":{\"Right\":0},\"jsonKeys\":[{\"utf8\":\"zzz\",\"b64\":\"enp6\"},{\"utf8\":\"yyy\",\"b64\":\"eXl5\"}]},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +getCorrectFromTx :: ResultRecording +getCorrectFromTx = fromJust $ decode "{\"recording\":[{\"_entryName\":\"MultiExecEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonResult\":{\"Right\":{\"tag\":\"TxSuccess\",\"contents\":{\"utf8\":\"bbb\",\"b64\":\"YmJi\"}}}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +getIncorrectFromTx :: ResultRecording +getIncorrectFromTx = fromJust $ decode 
"{\"recording\":[{\"_entryName\":\"MultiExecEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonResult\":{\"Right\":{\"tag\":\"TxSuccess\",\"contents\":null}}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +setExGetKey :: ResultRecording +setExGetKey = fromJust $ decode "{\"recording\":[{\"_entryName\":\"SetExEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonTtl\":3600,\"jsonValue\":{\"utf8\":\"bbbex\",\"b64\":\"YmJiZXg=\"},\"jsonResult\":{\"Right\":{\"tag\":\"Ok\"}},\"jsonKey\":{\"utf8\":\"aaaex\",\"b64\":\"YWFhZXg=\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"GetEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":{\"Right\":{\"utf8\":\"bbbex\",\"b64\":\"YmJiZXg=\"}},\"jsonKey\":{\"utf8\":\"aaaex\",\"b64\":\"YWFhZXg=\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"DelEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonResult\":{\"Right\":1},\"jsonKeys\":[{\"utf8\":\"aaaex\",\"b64\":\"YWFhZXg=\"}]},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +setExTtl :: ResultRecording +setExTtl = fromJust $ decode "{\"recording\":[{\"_entryName\":\"SetExEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonTtl\":1,\"jsonValue\":{\"utf8\":\"bbbex\",\"b64\":\"YmJiZXg=\"},\"jsonResult\":{\"Right\":{\"tag\":\"Ok\"}},\"jsonKey\":{\"utf8\":\"aaaex\",\"b64\":\"YWFhZXg=\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":[],\"description\":\"\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"GetEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonResult\":{\"Right\":null},\"jsonKey\":{\"utf8\":\"aaaex\",\"b64\":\"YWFhZXg=\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"DelEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonResult\":{\"Right\":0},\"jsonKeys\":[{\"utf8\":\"aaaex\",\"b64\":\"YWFhZXg=\"}]},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +setIfNotExist :: ResultRecording +setIfNotExist = fromJust $ decode "{\"recording\":[{\"_entryName\":\"SetOptsEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonTTL\":{\"tag\":\"NoTTL\"},\"jsonValue\":{\"utf8\":\"bbb\",\"b64\":\"YmJi\"},\"jsonCond\":\"SetIfNotExist\",\"jsonResult\":{\"Right\":true},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"GetEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":{\"Right\":{\"utf8\":\"bbb\",\"b64\":\"YmJi\"}},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"SetOptsEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonTTL\":{\"tag\":\"NoTTL\"},\"jsonValue\":{\"utf8\":\"ccc\",\"b64\":\"Y2Nj\"},\"jsonCond\":\"SetIfNotExist\",\"jsonResult\":{\"Right\":false},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"GetEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonResult\":{\"Right\":{\"utf8\":\"bbb\",\"b64\":\"YmJi\"}},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"DelEntry\",\"_entryIndex\":4,\"_entryPayload\":{\"jsonResult\":{\"Right\":1},\"jsonKeys\":[{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}]},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +setIfExist :: ResultRecording +setIfExist = fromJust $ decode 
"{\"recording\":[{\"_entryName\":\"SetOptsEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonTTL\":{\"tag\":\"NoTTL\"},\"jsonValue\":{\"utf8\":\"bbb\",\"b64\":\"YmJi\"},\"jsonCond\":\"SetIfExist\",\"jsonResult\":{\"Right\":false},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"GetEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":{\"Right\":null},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"SetEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonValue\":{\"utf8\":\"bbb\",\"b64\":\"YmJi\"},\"jsonResult\":{\"Right\":{\"tag\":\"Ok\"}},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"SetOptsEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonTTL\":{\"tag\":\"NoTTL\"},\"jsonValue\":{\"utf8\":\"ccc\",\"b64\":\"Y2Nj\"},\"jsonCond\":\"SetIfExist\",\"jsonResult\":{\"Right\":true},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"GetEntry\",\"_entryIndex\":4,\"_entryPayload\":{\"jsonResult\":{\"Right\":{\"utf8\":\"ccc\",\"b64\":\"Y2Nj\"}},\"jsonKey\":{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"DelEntry\",\"_entryIndex\":5,\"_entryPayload\":{\"jsonResult\":{\"Right\":1},\"jsonKeys\":[{\"utf8\":\"aaa\",\"b64\":\"YWFh\"}]},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +setPxTtl :: ResultRecording +setPxTtl = fromJust $ decode "{\"recording\":[{\"_entryName\":\"SetOptsEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonTTL\":{\"tag\":\"Milliseconds\",\"contents\":500},\"jsonValue\":{\"utf8\":\"bbbpx\",\"b64\":\"YmJicHg=\"},\"jsonCond\":\"SetAlways\",\"jsonResult\":{\"Right\":true},\"jsonKey\":{\"utf8\":\"aaapx\",\"b64\":\"YWFhcHg=\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"GetEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":{\"Right\":{\"utf8\":\"bbbpx\",\"b64\":\"YmJicHg=\"}},\"jsonKey\":{\"utf8\":\"aaapx\",\"b64\":\"YWFhcHg=\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonResult\":[],\"description\":\"\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"GetEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonResult\":{\"Right\":null},\"jsonKey\":{\"utf8\":\"aaapx\",\"b64\":\"YWFhcHg=\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"DelEntry\",\"_entryIndex\":4,\"_entryPayload\":{\"jsonResult\":{\"Right\":0},\"jsonKeys\":[{\"utf8\":\"aaapx\",\"b64\":\"YWFhcHg=\"}]},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +xaddXlen :: ResultRecording +xaddXlen = fromJust $ decode 
"{\"recording\":[{\"_entryName\":\"XAddEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonStream\":{\"utf8\":\"aaas\",\"b64\":\"YWFhcw==\"},\"jsonItems\":[[{\"utf8\":\"a\",\"b64\":\"YQ==\"},{\"utf8\":\"1\",\"b64\":\"MQ==\"}],[{\"utf8\":\"b\",\"b64\":\"Yg==\"},{\"utf8\":\"2\",\"b64\":\"Mg==\"}]],\"jsonResult\":{\"Right\":[1596654345484,0]},\"jsonEntryId\":{\"tag\":\"AutoID\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"XLenEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonStream\":{\"utf8\":\"aaas\",\"b64\":\"YWFhcw==\"},\"jsonResult\":{\"Right\":1}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"XAddEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonStream\":{\"utf8\":\"aaas\",\"b64\":\"YWFhcw==\"},\"jsonItems\":[[{\"utf8\":\"c\",\"b64\":\"Yw==\"},{\"utf8\":\"3\",\"b64\":\"Mw==\"}]],\"jsonResult\":{\"Right\":[1596654345485,0]},\"jsonEntryId\":{\"tag\":\"AutoID\"}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"XLenEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonStream\":{\"utf8\":\"aaas\",\"b64\":\"YWFhcw==\"},\"jsonResult\":{\"Right\":2}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"DelEntry\",\"_entryIndex\":4,\"_entryPayload\":{\"jsonResult\":{\"Right\":1},\"jsonKeys\":[{\"utf8\":\"aaas\",\"b64\":\"YWFhcw==\"}]},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"XLenEntry\",\"_entryIndex\":5,\"_entryPayload\":{\"jsonStream\":{\"utf8\":\"aaas\",\"b64\":\"YWFhcw==\"},\"jsonResult\":{\"Right\":0}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" diff --git a/test/EulerHS/Tests/Framework/PubSubSpec.hs b/test/EulerHS/Tests/Framework/PubSubSpec.hs new file mode 100644 index 00000000..eb09ea57 --- /dev/null +++ b/test/EulerHS/Tests/Framework/PubSubSpec.hs @@ -0,0 +1,196 @@ +module EulerHS.Tests.Framework.PubSubSpec + ( spec + ) where + +import EulerHS.Prelude + +import Test.Hspec + +import Data.Aeson +import qualified Database.Redis as R +import EulerHS.Language as L +import EulerHS.Tests.Framework.Common +import EulerHS.Types as T + +connectInfo :: R.ConnectInfo +connectInfo = R.defaultConnectInfo {R.connectHost = "redis"} + + +runWithRedisConn_ :: ResultRecording -> Flow b -> IO b +runWithRedisConn_ = replayRecording +-- runWithRedisConn_ = runWithRedisConn connectInfo + +spec :: Spec +spec = do + describe "Publish/Subscribe subsystem tests" $ do + it "Callback receives messages from channel it subscribed to" $ do + let testMsg = "Hello, Tests" + let testCh = "test" + (targetMVar, watch, _) <- emptyMVarWithWatchDog 1 + + result <- runWithRedisConn_ rr1 $ do + subscribe [Channel testCh] $ \msg -> L.runIO $ + putMVar targetMVar msg + + publish (Channel testCh) $ Payload testMsg + + L.runIO watch + result `shouldBe` Just testMsg + + -- TODO: This test is brittle if replayed with pre-recorded ART-traces, + -- as this ties it deeply to the implementation; instead we should + -- test the externally-observable behaviour only + -- + -- TODO: rework this test + -- it "Pub/Sub works the same way if run in fork" $ do + -- let testMsg = "Hello, Tests" + -- let testCh = "test" + -- (targetMVar, watch, _) <- emptyMVarWithWatchDog 1 + + -- waitSubscribe <- newEmptyMVar + -- result <- runWithRedisConn_ rr2 $ do + -- -- result <- runWithRedisConn connectInfo rr2 $ do + -- L.forkFlow "Fork" $ do + -- void $ subscribe [Channel testCh] $ \msg -> L.runIO $ + -- putMVar targetMVar msg + -- void $ L.runIO $ putMVar waitSubscribe () + + -- void $ L.runIO $ takeMVar waitSubscribe + + -- publish (Channel testCh) $ Payload testMsg + + -- L.runIO watch + + -- 
result `shouldBe` Just testMsg + + it "Callback does not receive messages from channel after unsubscribe (subscribe method)" $ do + let testMsg = "Hello, Tests" + let testCh = "test" + (targetMVar, watch, _) <- emptyMVarWithWatchDog 1 + + result <- runWithRedisConn_ rr3 $ do + unsubscribe <- subscribe [Channel testCh] $ \msg -> L.runIO $ + putMVar targetMVar msg + + unsubscribe + + publish (Channel testCh) $ Payload testMsg + + L.runIO watch + + result `shouldBe` Nothing + + it "Callback receives messages from channel it subscribed to, if pattern matches" $ do + let testMsg = "Hello, Tests" + let testCh0 = "0test" + let testCh1 = "1test" + let testPatt = "?test" + (targetMVar, watch, reset) <- emptyMVarWithWatchDog 1 + + result <- runWithRedisConn_ rr4 $ do + void $ psubscribe [ChannelPattern testPatt] $ \ch msg -> L.runIO $ + putMVar targetMVar (ch, msg) + + L.publish (Channel testCh0) $ Payload testMsg + result0 <- L.runIO $ watch <* reset + + L.publish (Channel testCh1) $ Payload testMsg + result1 <- L.runIO $ watch <* reset + + pure (result0, result1) + + result `shouldBe` + ( Just (testCh0, testMsg) + , Just (testCh1, testMsg) + ) + + it "Callback does not receive messages from channel after unsubscribe (psubscribe method)" $ do + let testMsg = "Hello, Tests" + let testCh = "ptest" + let testPatt = "?test" + (targetMVar, watch, _) <- emptyMVarWithWatchDog 1 + + result <- runWithRedisConn_ rr5 $ do + unsubscribe <- psubscribe [ChannelPattern testPatt] $ \ch msg -> L.runIO $ + putMVar targetMVar (ch, msg) + + unsubscribe + + publish (Channel testCh) $ Payload testMsg + + L.runIO watch + + result `shouldBe` Nothing + + it "Callback receives messages from all subscribed channels" $ do + let testMsg0 = "Hello, Tests_0" + let testMsg1 = "Hello, Tests_1" + let testCh0 = "test_0" + let testCh1 = "test_1" + (targetMVar, watch, reset) <- emptyMVarWithWatchDog 1 + + result <- runWithRedisConn_ rr6 $ do + void $ L.subscribe [Channel testCh0, Channel testCh1] $ \msg -> L.runIO $ + putMVar targetMVar msg + + L.publish (Channel testCh0) $ Payload testMsg0 + result0 <- L.runIO $ watch <* reset + + L.publish (Channel testCh1) $ Payload testMsg1 + result1 <- L.runIO $ watch <* reset + + pure (result0, result1) + + result `shouldBe` (Just testMsg0, Just testMsg1) + + it "Unsubscribe unsubscribes from all subscribed channels" $ do + let testMsg0 = "Hello, Tests_0" + let testMsg1 = "Hello, Tests_1" + let testCh0 = "test_0" + let testCh1 = "test_1" + (targetMVar, watch, reset) <- emptyMVarWithWatchDog 1 + + result <- runWithRedisConn_ rr7 $ do + unsubscribe <- L.subscribe [Channel testCh0, Channel testCh1] $ \msg -> L.runIO $ + putMVar targetMVar msg + + unsubscribe + + L.publish (Channel testCh0) $ Payload testMsg0 + result0 <- L.runIO $ watch <* reset + + L.publish (Channel testCh1) $ Payload testMsg1 + result1 <- L.runIO $ watch <* reset + + pure (result0, result1) + + result `shouldBe` (Nothing, Nothing) + + +-- Callback receives messages from channel it subscribed to +rr1 :: ResultRecording +rr1 = fromJust $ decode $ "{\"recording\":[{\"_entryName\":\"SubscribeEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonChannels\":[{\"utf8\":\"test\",\"b64\":\"dGVzdA==\"}]},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"PublishEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonChannel\":{\"utf8\":\"test\",\"b64\":\"dGVzdA==\"},\"jsonPayload\":{\"utf8\":\"Hello, 
Tests\",\"b64\":\"SGVsbG8sIFRlc3Rz\"},\"jsonResult\":{\"Right\":1}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonResult\":{\"utf8\":\"Hello, Tests\",\"b64\":\"SGVsbG8sIFRlc3Rz\"},\"description\":\"\"},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +-- Pub/Sub works the same way if run in fork +-- rr2 :: ResultRecording +-- rr2 = fromJust $ decode $ "{\"recording\":[{\"_entryName\":\"GenerateGUIDEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"guid\":\"c9239b0c-083f-4711-94af-46972f31864d\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"LogMessageEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"tag\":\"\\\"ForkFlow\\\"\",\"msg\":\"Flow forked. Description: Fork GUID: c9239b0c-083f-4711-94af-46972f31864d\",\"level\":\"Info\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"ForkEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"guid\":\"c9239b0c-083f-4711-94af-46972f31864d\",\"description\":\"Fork\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonResult\":[],\"description\":\"\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"PublishEntry\",\"_entryIndex\":4,\"_entryPayload\":{\"jsonChannel\":{\"utf8\":\"test\",\"b64\":\"dGVzdA==\"},\"jsonPayload\":{\"utf8\":\"Hello, Tests\",\"b64\":\"SGVsbG8sIFRlc3Rz\"},\"jsonResult\":{\"Right\":1}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":5,\"_entryPayload\":{\"jsonResult\":{\"utf8\":\"Hello, Tests\",\"b64\":\"SGVsbG8sIFRlc3Rz\"},\"description\":\"\"},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{\"c9239b0c-083f-4711-94af-46972f31864d\":{\"recording\":[{\"_entryName\":\"GenerateGUIDEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"guid\":\"eed089ff-3a93-4768-8b56-c9cb6d7ce26e\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunSafeFlowEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":{\"Right\":[]},\"guid\":\"eed089ff-3a93-4768-8b56-c9cb6d7ce26e\"},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{\"eed089ff-3a93-4768-8b56-c9cb6d7ce26e\":[{\"_entryName\":\"SubscribeEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonChannels\":[{\"utf8\":\"test\",\"b64\":\"dGVzdA==\"}]},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":[],\"description\":\"\"},\"_entryReplayMode\":\"Normal\"}]}}},\"safeRecordings\":{}}" + +-- Callback does not receive messages from channel after unsubscribe (subscribe method) +rr3 :: ResultRecording +rr3 = fromJust $ decode $ "{\"recording\":[{\"_entryName\":\"SubscribeEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonChannels\":[{\"utf8\":\"test\",\"b64\":\"dGVzdA==\"}]},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":[],\"description\":\"subscribe\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"PublishEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonChannel\":{\"utf8\":\"test\",\"b64\":\"dGVzdA==\"},\"jsonPayload\":{\"utf8\":\"Hello, Tests\",\"b64\":\"SGVsbG8sIFRlc3Rz\"},\"jsonResult\":{\"Right\":1}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonResult\":null,\"description\":\"\"},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +-- Callback receives messages from channel it subscribed to, if pattern matches +rr4 :: ResultRecording +rr4 = fromJust $ decode $ 
"{\"recording\":[{\"_entryName\":\"PSubscribeEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonPatterns\":[{\"utf8\":\"?test\",\"b64\":\"P3Rlc3Q=\"}]},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"PublishEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonChannel\":{\"utf8\":\"0test\",\"b64\":\"MHRlc3Q=\"},\"jsonPayload\":{\"utf8\":\"Hello, Tests\",\"b64\":\"SGVsbG8sIFRlc3Rz\"},\"jsonResult\":{\"Right\":1}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonResult\":[{\"utf8\":\"0test\",\"b64\":\"MHRlc3Q=\"},{\"utf8\":\"Hello, Tests\",\"b64\":\"SGVsbG8sIFRlc3Rz\"}],\"description\":\"\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"PublishEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonChannel\":{\"utf8\":\"1test\",\"b64\":\"MXRlc3Q=\"},\"jsonPayload\":{\"utf8\":\"Hello, Tests\",\"b64\":\"SGVsbG8sIFRlc3Rz\"},\"jsonResult\":{\"Right\":1}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":4,\"_entryPayload\":{\"jsonResult\":[{\"utf8\":\"1test\",\"b64\":\"MXRlc3Q=\"},{\"utf8\":\"Hello, Tests\",\"b64\":\"SGVsbG8sIFRlc3Rz\"}],\"description\":\"\"},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +-- Callback does not receive messages from channel after unsubscribe (psubscribe method) +rr5 :: ResultRecording +rr5 = fromJust $ decode $ "{\"recording\":[{\"_entryName\":\"PSubscribeEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonPatterns\":[{\"utf8\":\"?test\",\"b64\":\"P3Rlc3Q=\"}]},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":[],\"description\":\"psubscribe\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"PublishEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonChannel\":{\"utf8\":\"ptest\",\"b64\":\"cHRlc3Q=\"},\"jsonPayload\":{\"utf8\":\"Hello, Tests\",\"b64\":\"SGVsbG8sIFRlc3Rz\"},\"jsonResult\":{\"Right\":1}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonResult\":null,\"description\":\"\"},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +-- Callback receive messages from all subscribed channels +rr6 :: ResultRecording +rr6 = fromJust $ decode $ "{\"recording\":[{\"_entryName\":\"SubscribeEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonChannels\":[{\"utf8\":\"test_0\",\"b64\":\"dGVzdF8w\"},{\"utf8\":\"test_1\",\"b64\":\"dGVzdF8x\"}]},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"PublishEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonChannel\":{\"utf8\":\"test_0\",\"b64\":\"dGVzdF8w\"},\"jsonPayload\":{\"utf8\":\"Hello, Tests_0\",\"b64\":\"SGVsbG8sIFRlc3RzXzA=\"},\"jsonResult\":{\"Right\":1}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonResult\":{\"utf8\":\"Hello, Tests_0\",\"b64\":\"SGVsbG8sIFRlc3RzXzA=\"},\"description\":\"\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"PublishEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonChannel\":{\"utf8\":\"test_1\",\"b64\":\"dGVzdF8x\"},\"jsonPayload\":{\"utf8\":\"Hello, Tests_1\",\"b64\":\"SGVsbG8sIFRlc3RzXzE=\"},\"jsonResult\":{\"Right\":1}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":4,\"_entryPayload\":{\"jsonResult\":{\"utf8\":\"Hello, Tests_1\",\"b64\":\"SGVsbG8sIFRlc3RzXzE=\"},\"description\":\"\"},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +-- Unsubscribe unsubscribes from all 
subscribed channels +rr7 :: ResultRecording +rr7 = fromJust $ decode $ "{\"recording\":[{\"_entryName\":\"SubscribeEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"jsonChannels\":[{\"utf8\":\"test_0\",\"b64\":\"dGVzdF8w\"},{\"utf8\":\"test_1\",\"b64\":\"dGVzdF8x\"}]},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"jsonResult\":[],\"description\":\"subscribe\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"PublishEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"jsonChannel\":{\"utf8\":\"test_0\",\"b64\":\"dGVzdF8w\"},\"jsonPayload\":{\"utf8\":\"Hello, Tests_0\",\"b64\":\"SGVsbG8sIFRlc3RzXzA=\"},\"jsonResult\":{\"Right\":1}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonResult\":null,\"description\":\"\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"PublishEntry\",\"_entryIndex\":4,\"_entryPayload\":{\"jsonChannel\":{\"utf8\":\"test_1\",\"b64\":\"dGVzdF8x\"},\"jsonPayload\":{\"utf8\":\"Hello, Tests_1\",\"b64\":\"SGVsbG8sIFRlc3RzXzE=\"},\"jsonResult\":{\"Right\":0}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":5,\"_entryPayload\":{\"jsonResult\":null,\"description\":\"\"},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" diff --git a/test/EulerHS/Tests/Framework/SQLArtSpec.hs b/test/EulerHS/Tests/Framework/SQLArtSpec.hs new file mode 100644 index 00000000..d61f2b94 --- /dev/null +++ b/test/EulerHS/Tests/Framework/SQLArtSpec.hs @@ -0,0 +1,170 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE StandaloneDeriving #-} + +module EulerHS.Tests.Framework.SQLArtSpec + ( spec + ) where + +import EulerHS.Prelude + +import Data.Aeson as A +import Data.Aeson.Encode.Pretty +import qualified Data.Map as Map +import qualified Database.Beam as B +import qualified Database.Beam.Backend.SQL as B +import qualified Database.Beam.Query as B +import Database.Beam.Sqlite.Connection (Sqlite, SqliteM) +import Test.Hspec + +import EulerHS.Interpreters as I +import EulerHS.Language as L +import EulerHS.Runtime +import EulerHS.Tests.Framework.Common +import EulerHS.Tests.Framework.DBSetup +import EulerHS.Types as T + + +run = replayRecording + +-- Tests + +spec :: Spec +spec = + around (withEmptyDB) $ do + describe "ART SQL tests" $ do + + it "success to get one correct row" $ \rt -> do + result <- run getRowRecord $ do + conn <- connectOrFail sqliteCfg + L.runDB conn $ do + L.insertRows $ B.insert (users userDB) $ B.insertValues [User 1 "Bill" "Gates"] + res <- L.findRow $ B.select $ B.filter_ (\u -> _userGUID u B.==. 1) $ B.all_ (users userDB) + L.deleteRows $ B.delete (users userDB) (\u -> _userGUID u B.==. 1) + pure res + result `shouldBe` Right (Just (User {_userGUID = 1, _firstName = "Bill", _lastName = "Gates"})) + + it "fail to get one wrong row" $ \rt -> do + result <- run getWrongRowRecord $ do + conn <- connectOrFail sqliteCfg + L.runDB conn $ do + L.insertRows $ B.insert (users userDB) $ B.insertValues [User 1 "Bill" "Gates"] + res <- L.findRow $ B.select $ B.filter_ (\u -> _userGUID u B.==. 2) $ B.all_ (users userDB) + L.deleteRows $ B.delete (users userDB) (\u -> _userGUID u B.==. 
1) + pure res + result `shouldBe` Right Nothing + + it "success to get correct rows" $ \rt -> do + result <- run getRowsRecord $ do + conn <- connectOrFail sqliteCfg + L.runDB conn $ do + L.insertRows $ B.insert (users userDB) $ B.insertValues [User 1 "Bill" "Gates", User 2 "Stive" "Jobs"] + res <- L.findRows $ B.select $ B.filter_ (\u -> _userGUID u `B.in_` [1,2]) $ B.all_ (users userDB) + L.deleteRows $ B.delete (users userDB) (\u -> (_userGUID u) `B.in_` [1,2]) + pure res + result `shouldBe` Right + [ User {_userGUID = 1, _firstName = "Bill", _lastName = "Gates"} + , User {_userGUID = 2, _firstName = "Stive", _lastName = "Jobs"} + ] + + it "fail to get incorrect rows" $ \rt -> do + result <- run getWrongRowsRecord $ do + conn <- connectOrFail sqliteCfg + L.runDB conn $ do + L.insertRows $ B.insert (users userDB) $ B.insertValues [User 1 "Bill" "Gates", User 2 "Stive" "Jobs"] + res <- L.findRows $ B.select $ B.filter_ (\u -> _userGUID u `B.in_` [3,4]) $ B.all_ (users userDB) + L.deleteRows $ B.delete (users userDB) (\u -> (_userGUID u) `B.in_` [1,2]) + pure res + result `shouldBe` Right [] + + it "success to delete existing rows" $ \rt -> do + result <- run deleteRowsRecord $ do + conn <- connectOrFail sqliteCfg + L.runDB conn $ do + L.insertRows $ B.insert (users userDB) $ B.insertValues [User 1 "Bill" "Gates", User 2 "Stive" "Jobs"] + L.deleteRows $ B.delete (users userDB) (\u -> (_userGUID u) `B.in_` [1,2]) + res <- L.findRows $ B.select $ B.filter_ (\u -> _userGUID u `B.in_` [1,2]) $ B.all_ (users userDB) + pure res + result `shouldBe` Right [] + + it "fail to delete wrong rows" $ \rt -> do + result <- run deleteWrongRowsRecord $ do + conn <- connectOrFail sqliteCfg + L.runDB conn $ do + L.insertRows $ B.insert (users userDB) $ B.insertValues [User 1 "Bill" "Gates", User 2 "Stive" "Jobs"] + L.deleteRows $ B.delete (users userDB) (\u -> (_userGUID u) `B.in_` [3,4]) + res <- L.findRows $ B.select $ B.filter_ (\u -> _userGUID u `B.in_` [1,2]) $ B.all_ (users userDB) + L.deleteRows $ B.delete (users userDB) (\u -> (_userGUID u) `B.in_` [1,2]) + pure res + result `shouldBe` Right + [ User {_userGUID = 1, _firstName = "Bill", _lastName = "Gates"} + , User {_userGUID = 2, _firstName = "Stive", _lastName = "Jobs"} + ] + + it "success to update rows" $ \rt -> do + result <- run updateRowRecord $ do + conn <- connectOrFail sqliteCfg + L.runDB conn $ do + L.insertRows $ B.insert (users userDB) $ B.insertValues [User 1 "Bill" "Gates", User 2 "Stive" "Jobs"] + L.updateRows $ B.update (users userDB) + (\user -> _firstName user B.<-. B.val_ "Robert") + (\user -> _userGUID user B.==. 1) + res <- L.findRows $ B.select $ B.filter_ (\u -> _userGUID u `B.in_` [1,2]) $ B.all_ (users userDB) + L.deleteRows $ B.delete (users userDB) (\u -> (_userGUID u) `B.in_` [1,2]) + pure res + result `shouldBe` Right + [ User {_userGUID = 1, _firstName = "Robert", _lastName = "Gates"} + , User {_userGUID = 2, _firstName = "Stive", _lastName = "Jobs"} + ] + + it "success to update rows with IO action in between" $ \rt -> do + result <- run updateRowWithDelayRecord $ do + conn <- connectOrFail sqliteCfg + L.runDB conn $ do + L.insertRows $ B.insert (users userDB) $ B.insertValues [User 1 "Bill" "Gates", User 2 "Stive" "Jobs"] + L.updateRows $ B.update (users userDB) + (\user -> _firstName user B.<-. B.val_ "Robert") + (\user -> _userGUID user B.==. 
1) + L.runIO $ threadDelay (2 * 10 ^ 6) + L.runDB conn $ do + res <- L.findRows $ B.select $ B.filter_ (\u -> _userGUID u `B.in_` [1,2]) $ B.all_ (users userDB) + L.deleteRows $ B.delete (users userDB) (\u -> (_userGUID u) `B.in_` [1,2]) + pure res + result `shouldBe` Right + [ User {_userGUID = 1, _firstName = "Robert", _lastName = "Gates"} + , User {_userGUID = 2, _firstName = "Stive", _lastName = "Jobs"} + ] + + +-- Use testRecord to generate record log to 'recorder' file +getRowRecord :: ResultRecording +getRowRecord = fromJust $ decode + "{\"recording\":[{\"_entryName\":\"GetSqlDBConnectionEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"getConnResult\":{\"Left\":[{\"tag\":\"ConnectionDoesNotExist\"},\"Connection for SQliteDB does not exists.\"]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"InitSqlDBConnectionEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"initConnResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunDBEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"rawSql\":[\"INSERT INTO \\\"users\\\"(\\\"id\\\", \\\"first_name\\\", \\\"last_name\\\") VALUES (?, ?, ?);\\n-- With values: [SQLInteger 1,SQLText \\\"Bill\\\",SQLText \\\"Gates\\\"]\",\"SELECT \\\"t0\\\".\\\"id\\\" AS \\\"res0\\\", \\\"t0\\\".\\\"first_name\\\" AS \\\"res1\\\", \\\"t0\\\".\\\"last_name\\\" AS \\\"res2\\\" FROM \\\"users\\\" AS \\\"t0\\\" WHERE (\\\"t0\\\".\\\"id\\\")=(?);\\n-- With values: [SQLInteger 1]\",\"DELETE FROM \\\"users\\\" WHERE (\\\"id\\\")=(?);\\n-- With values: [SQLInteger 1]\"],\"jsonResult\":{\"Right\":{\"_userGUID\":1,\"_lastName\":\"Gates\",\"_firstName\":\"Bill\"}}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +getWrongRowRecord :: ResultRecording +getWrongRowRecord = fromJust $ decode + "{\"recording\":[{\"_entryName\":\"GetSqlDBConnectionEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"getConnResult\":{\"Left\":[{\"tag\":\"ConnectionDoesNotExist\"},\"Connection for SQliteDB does not exists.\"]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"InitSqlDBConnectionEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"initConnResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunDBEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"rawSql\":[\"INSERT INTO \\\"users\\\"(\\\"id\\\", \\\"first_name\\\", \\\"last_name\\\") VALUES (?, ?, ?);\\n-- With values: [SQLInteger 1,SQLText \\\"Bill\\\",SQLText \\\"Gates\\\"]\",\"SELECT \\\"t0\\\".\\\"id\\\" AS \\\"res0\\\", \\\"t0\\\".\\\"first_name\\\" AS \\\"res1\\\", \\\"t0\\\".\\\"last_name\\\" AS \\\"res2\\\" FROM \\\"users\\\" AS \\\"t0\\\" WHERE (\\\"t0\\\".\\\"id\\\")=(?);\\n-- With values: [SQLInteger 2]\",\"DELETE FROM \\\"users\\\" WHERE (\\\"id\\\")=(?);\\n-- With values: [SQLInteger 1]\"],\"jsonResult\":{\"Right\":null}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +getRowsRecord :: 
ResultRecording +getRowsRecord = fromJust $ decode + "{\"recording\":[{\"_entryName\":\"GetSqlDBConnectionEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"getConnResult\":{\"Left\":[{\"tag\":\"ConnectionDoesNotExist\"},\"Connection for SQliteDB does not exists.\"]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"InitSqlDBConnectionEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"initConnResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunDBEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"rawSql\":[\"INSERT INTO \\\"users\\\"(\\\"id\\\", \\\"first_name\\\", \\\"last_name\\\") VALUES (?, ?, ?), (?, ?, ?);\\n-- With values: [SQLInteger 1,SQLText \\\"Bill\\\",SQLText \\\"Gates\\\",SQLInteger 2,SQLText \\\"Stive\\\",SQLText \\\"Jobs\\\"]\",\"SELECT \\\"t0\\\".\\\"id\\\" AS \\\"res0\\\", \\\"t0\\\".\\\"first_name\\\" AS \\\"res1\\\", \\\"t0\\\".\\\"last_name\\\" AS \\\"res2\\\" FROM \\\"users\\\" AS \\\"t0\\\" WHERE (\\\"t0\\\".\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\",\"DELETE FROM \\\"users\\\" WHERE (\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\"],\"jsonResult\":{\"Right\":[{\"_userGUID\":1,\"_lastName\":\"Gates\",\"_firstName\":\"Bill\"},{\"_userGUID\":2,\"_lastName\":\"Jobs\",\"_firstName\":\"Stive\"}]}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +getWrongRowsRecord :: ResultRecording +getWrongRowsRecord = fromJust $ decode + "{\"recording\":[{\"_entryName\":\"GetSqlDBConnectionEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"getConnResult\":{\"Left\":[{\"tag\":\"ConnectionDoesNotExist\"},\"Connection for SQliteDB does not exists.\"]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"InitSqlDBConnectionEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"initConnResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunDBEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"rawSql\":[\"INSERT INTO \\\"users\\\"(\\\"id\\\", \\\"first_name\\\", \\\"last_name\\\") VALUES (?, ?, ?), (?, ?, ?);\\n-- With values: [SQLInteger 1,SQLText \\\"Bill\\\",SQLText \\\"Gates\\\",SQLInteger 2,SQLText \\\"Stive\\\",SQLText \\\"Jobs\\\"]\",\"SELECT \\\"t0\\\".\\\"id\\\" AS \\\"res0\\\", \\\"t0\\\".\\\"first_name\\\" AS \\\"res1\\\", \\\"t0\\\".\\\"last_name\\\" AS \\\"res2\\\" FROM \\\"users\\\" AS \\\"t0\\\" WHERE (\\\"t0\\\".\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 3,SQLInteger 4]\",\"DELETE FROM \\\"users\\\" WHERE (\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\"],\"jsonResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + + +deleteRowsRecord :: ResultRecording +deleteRowsRecord = fromJust $ decode + 
"{\"recording\":[{\"_entryName\":\"GetSqlDBConnectionEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"getConnResult\":{\"Left\":[{\"tag\":\"ConnectionDoesNotExist\"},\"Connection for SQliteDB does not exists.\"]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"InitSqlDBConnectionEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"initConnResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunDBEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"rawSql\":[\"INSERT INTO \\\"users\\\"(\\\"id\\\", \\\"first_name\\\", \\\"last_name\\\") VALUES (?, ?, ?), (?, ?, ?);\\n-- With values: [SQLInteger 1,SQLText \\\"Bill\\\",SQLText \\\"Gates\\\",SQLInteger 2,SQLText \\\"Stive\\\",SQLText \\\"Jobs\\\"]\",\"DELETE FROM \\\"users\\\" WHERE (\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\",\"SELECT \\\"t0\\\".\\\"id\\\" AS \\\"res0\\\", \\\"t0\\\".\\\"first_name\\\" AS \\\"res1\\\", \\\"t0\\\".\\\"last_name\\\" AS \\\"res2\\\" FROM \\\"users\\\" AS \\\"t0\\\" WHERE (\\\"t0\\\".\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\"],\"jsonResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +deleteWrongRowsRecord :: ResultRecording +deleteWrongRowsRecord = fromJust $ decode + "{\"recording\":[{\"_entryName\":\"GetSqlDBConnectionEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"getConnResult\":{\"Left\":[{\"tag\":\"ConnectionDoesNotExist\"},\"Connection for SQliteDB does not exists.\"]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"InitSqlDBConnectionEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"initConnResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunDBEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"rawSql\":[\"INSERT INTO \\\"users\\\"(\\\"id\\\", \\\"first_name\\\", \\\"last_name\\\") VALUES (?, ?, ?), (?, ?, ?);\\n-- With values: [SQLInteger 1,SQLText \\\"Bill\\\",SQLText \\\"Gates\\\",SQLInteger 2,SQLText \\\"Stive\\\",SQLText \\\"Jobs\\\"]\",\"DELETE FROM \\\"users\\\" WHERE (\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 3,SQLInteger 4]\",\"SELECT \\\"t0\\\".\\\"id\\\" AS \\\"res0\\\", \\\"t0\\\".\\\"first_name\\\" AS \\\"res1\\\", \\\"t0\\\".\\\"last_name\\\" AS \\\"res2\\\" FROM \\\"users\\\" AS \\\"t0\\\" WHERE (\\\"t0\\\".\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\",\"DELETE FROM \\\"users\\\" WHERE (\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\"],\"jsonResult\":{\"Right\":[{\"_userGUID\":1,\"_lastName\":\"Gates\",\"_firstName\":\"Bill\"},{\"_userGUID\":2,\"_lastName\":\"Jobs\",\"_firstName\":\"Stive\"}]}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +updateRowRecord :: ResultRecording +updateRowRecord = fromJust $ decode + 
"{\"recording\":[{\"_entryName\":\"GetSqlDBConnectionEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"getConnResult\":{\"Left\":[{\"tag\":\"ConnectionDoesNotExist\"},\"Connection for SQliteDB does not exists.\"]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"InitSqlDBConnectionEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"initConnResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunDBEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"rawSql\":[\"INSERT INTO \\\"users\\\"(\\\"id\\\", \\\"first_name\\\", \\\"last_name\\\") VALUES (?, ?, ?), (?, ?, ?);\\n-- With values: [SQLInteger 1,SQLText \\\"Bill\\\",SQLText \\\"Gates\\\",SQLInteger 2,SQLText \\\"Stive\\\",SQLText \\\"Jobs\\\"]\",\"UPDATE \\\"users\\\" SET \\\"first_name\\\"=? WHERE (\\\"id\\\")=(?);\\n-- With values: [SQLText \\\"Robert\\\",SQLInteger 1]\",\"SELECT \\\"t0\\\".\\\"id\\\" AS \\\"res0\\\", \\\"t0\\\".\\\"first_name\\\" AS \\\"res1\\\", \\\"t0\\\".\\\"last_name\\\" AS \\\"res2\\\" FROM \\\"users\\\" AS \\\"t0\\\" WHERE (\\\"t0\\\".\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\",\"DELETE FROM \\\"users\\\" WHERE (\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\"],\"jsonResult\":{\"Right\":[{\"_userGUID\":1,\"_lastName\":\"Gates\",\"_firstName\":\"Robert\"},{\"_userGUID\":2,\"_lastName\":\"Jobs\",\"_firstName\":\"Stive\"}]}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" + +updateRowWithDelayRecord :: ResultRecording +updateRowWithDelayRecord = fromJust $ decode + "{\"recording\":[{\"_entryName\":\"GetSqlDBConnectionEntry\",\"_entryIndex\":0,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"getConnResult\":{\"Left\":[{\"tag\":\"ConnectionDoesNotExist\"},\"Connection for SQliteDB does not exists.\"]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"InitSqlDBConnectionEntry\",\"_entryIndex\":1,\"_entryPayload\":{\"dBConfig\":{\"tag\":\"SQLitePoolConf\",\"contents\":[\"SQliteDB\",\"test/EulerHS/TestData/test.db\",{\"resourcesPerStripe\":50,\"stripes\":1,\"keepAlive\":10}]},\"initConnResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunDBEntry\",\"_entryIndex\":2,\"_entryPayload\":{\"rawSql\":[\"INSERT INTO \\\"users\\\"(\\\"id\\\", \\\"first_name\\\", \\\"last_name\\\") VALUES (?, ?, ?), (?, ?, ?);\\n-- With values: [SQLInteger 1,SQLText \\\"Bill\\\",SQLText \\\"Gates\\\",SQLInteger 2,SQLText \\\"Stive\\\",SQLText \\\"Jobs\\\"]\",\"UPDATE \\\"users\\\" SET \\\"first_name\\\"=? 
WHERE (\\\"id\\\")=(?);\\n-- With values: [SQLText \\\"Robert\\\",SQLInteger 1]\"],\"jsonResult\":{\"Right\":[]}},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunIOEntry\",\"_entryIndex\":3,\"_entryPayload\":{\"jsonResult\":[],\"description\":\"\"},\"_entryReplayMode\":\"Normal\"},{\"_entryName\":\"RunDBEntry\",\"_entryIndex\":4,\"_entryPayload\":{\"rawSql\":[\"SELECT \\\"t0\\\".\\\"id\\\" AS \\\"res0\\\", \\\"t0\\\".\\\"first_name\\\" AS \\\"res1\\\", \\\"t0\\\".\\\"last_name\\\" AS \\\"res2\\\" FROM \\\"users\\\" AS \\\"t0\\\" WHERE (\\\"t0\\\".\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\",\"DELETE FROM \\\"users\\\" WHERE (\\\"id\\\") IN (?, ?);\\n-- With values: [SQLInteger 1,SQLInteger 2]\"],\"jsonResult\":{\"Right\":[{\"_userGUID\":1,\"_lastName\":\"Gates\",\"_firstName\":\"Robert\"},{\"_userGUID\":2,\"_lastName\":\"Jobs\",\"_firstName\":\"Stive\"}]}},\"_entryReplayMode\":\"Normal\"}],\"forkedRecordings\":{},\"safeRecordings\":{}}" diff --git a/test/Main.hs b/test/Main.hs new file mode 100644 index 00000000..40a6d655 --- /dev/null +++ b/test/Main.hs @@ -0,0 +1,42 @@ +module Main where + +import EulerHS.Prelude +import Test.Hspec + +import qualified EulerHS.Tests.Framework.ArtSpec as Art +import qualified EulerHS.Tests.Framework.FlowSpec as Framework +import qualified EulerHS.Tests.Framework.KVDBArtSpec as KVDB +import qualified EulerHS.Tests.Framework.PubSubSpec as PubSub +import qualified EulerHS.Tests.Framework.SQLArtSpec as SQL +import qualified EulerHS.Types as T + +import System.Process + +withRedis :: IO () -> IO () +withRedis action = do + cmdHandle <- spawnCommand "redis-server" + action + terminateProcess cmdHandle + +logsEnabled :: Maybe T.LoggerConfig +logsEnabled = Just $ T.LoggerConfig + { T._logToFile = False, + T._logFilePath = "", + T._isAsync = False, + T._logLevel = T.Debug, + T._logToConsole = True, + T._maxQueueSize = 1000, + T._logRawSql = False + } + +logsDisabled :: Maybe T.LoggerConfig +logsDisabled = Nothing + +main :: IO () +main = do + withRedis $ hspec $ do + Framework.spec logsDisabled + Art.spec + KVDB.spec + SQL.spec + PubSub.spec diff --git a/testDB/KVDB/KVDBSpec.hs b/testDB/KVDB/KVDBSpec.hs new file mode 100644 index 00000000..24e197a8 --- /dev/null +++ b/testDB/KVDB/KVDBSpec.hs @@ -0,0 +1,76 @@ +module KVDB.KVDBSpec where + +import Test.Hspec hiding (runIO) + +import EulerHS.Interpreters +import qualified EulerHS.Language as L +import EulerHS.Prelude +import EulerHS.Runtime +import qualified EulerHS.Types as T + +redisCfg :: T.KVDBConfig +redisCfg = T.mkKVDBConfig "eulerKVDB" T.defaultKVDBConnConfig + +spec :: Spec +spec = + around (withFlowRuntime Nothing) $ + + describe "EulerHS KVDB tests" $ do + + it "Double connection initialization should fail" $ \rt -> do + eRes :: Either String () <- runFlow rt $ do + eConn1 <- L.initKVDBConnection redisCfg + eConn2 <- L.initKVDBConnection redisCfg + pure $ case (eConn1, eConn2) of + (Left err, _) -> Left $ "Failed to connect 1st time: " <> show err + (_, Left (T.KVDBError T.KVDBConnectionAlreadyExists _)) -> Right () + (_, Left err) -> Left $ "Unexpected error type on 2nd connect: " <> show err + _ -> Left $ "Unexpected result." 
+ eRes `shouldBe` Right () + + it "Get uninitialized connection should fail" $ \rt -> do + eRes :: Either String () <- runFlow rt $ do + eConn <- L.getKVDBConnection redisCfg + case eConn of + Left (T.KVDBError T.KVDBConnectionDoesNotExist _) -> pure $ Right () + Left err -> pure $ Left $ "Unexpected error: " <> show err + Right _ -> pure $ Left "Unexpected connection success" + eRes `shouldBe` Right () + + it "Init and get connection should succeed" $ \rt -> do + eRes :: Either String () <- runFlow rt $ do + eConn1 <- L.initKVDBConnection redisCfg + eConn2 <- L.getKVDBConnection redisCfg + case (eConn1, eConn2) of + (Left err, _) -> pure $ Left $ "Failed to connect: " <> show err + (_, Left err) -> pure $ Left $ "Unexpected error on get connection: " <> show err + _ -> pure $ Right () + eRes `shouldBe` Right () + + it "Init and double get connection should succeed" $ \rt -> do + eRes :: Either String () <- runFlow rt $ do + eConn1 <- L.initKVDBConnection redisCfg + eConn2 <- L.getKVDBConnection redisCfg + eConn3 <- L.getKVDBConnection redisCfg + case (eConn1, eConn2, eConn3) of + (Left err, _, _) -> pure $ Left $ "Failed to connect: " <> show err + (_, Left err, _) -> pure $ Left $ "Unexpected error on 1st get connection: " <> show err + (_, _, Left err) -> pure $ Left $ "Unexpected error on 2nd get connection: " <> show err + _ -> pure $ Right () + eRes `shouldBe` Right () + + it "getOrInitKVDBConn should succeed" $ \rt -> do + eRes :: Either String () <- runFlow rt $ do + eConn <- L.getOrInitKVDBConn redisCfg + case eConn of + Left err -> pure $ Left $ "Failed to connect: " <> show err + _ -> pure $ Right () + eRes `shouldBe` Right () + + it "Prepared connection should be available" $ \rt -> do + void $ runFlow rt $ do + eConn <- L.initKVDBConnection redisCfg + when (isLeft eConn) $ error "Failed to prepare connection." + void $ runFlow rt $ do + eConn <- L.getKVDBConnection redisCfg + when (isLeft eConn) $ error "Failed to get prepared connection." diff --git a/testDB/Main.hs b/testDB/Main.hs new file mode 100644 index 00000000..6a4e3f0c --- /dev/null +++ b/testDB/Main.hs @@ -0,0 +1,25 @@ +module Main where + +import EulerHS.Prelude +import Test.Hspec + + +import qualified KVDB.KVDBSpec as KVDB +import qualified SQLDB.Tests.SQLiteDBSpec as SQLiteDB +import qualified SQLDB.Tests.QueryExamplesSpec as Ex + +-- Prepare your DB environment and uncomment these lines +-- if you need integration testing of the DB backends.
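+--
+-- A rough sketch of what `main` could look like with the integration specs
+-- enabled (assuming the commented-out modules below compile in your setup).
+-- Those specs expect MySQL and Postgres to be reachable under the host names
+-- "mysql" and "postgres", with the credentials hard-coded in
+-- SQLDB.Tests.MySQLDBSpec and SQLDB.Tests.PostgresDBSpec:
+--
+-- > main :: IO ()
+-- > main = hspec $ do
+-- >   KVDB.spec
+-- >   SQLiteDB.spec
+-- >   Ex.spec
+-- >   PGDB.spec
+-- >   PGDBP.spec
+-- >   MySQL.spec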
+-- import qualified SQLDB.Tests.PostgresDBSpec as PGDB +-- import qualified SQLDB.Tests.PostgresDBPoolSpec as PGDBP +-- import qualified SQLDB.Tests.MySQLDBSpec as MySQL + +main :: IO () +main = hspec $ do + KVDB.spec + SQLiteDB.spec + Ex.spec + -- PGDB.spec + -- PGDBP.spec + -- MySQL.spec + -- MySQLP.spec diff --git a/testDB/SQLDB/TestData/Connections.hs b/testDB/SQLDB/TestData/Connections.hs new file mode 100644 index 00000000..61425f09 --- /dev/null +++ b/testDB/SQLDB/TestData/Connections.hs @@ -0,0 +1,39 @@ +module SQLDB.TestData.Connections where + +import EulerHS.Prelude + +import EulerHS.Interpreters +import qualified EulerHS.Language as L +import EulerHS.Runtime (withFlowRuntime) +import qualified EulerHS.Runtime as R +import qualified EulerHS.Types as T + + +connectOrFail :: T.DBConfig beM -> L.Flow (T.SqlConn beM) +connectOrFail cfg = L.initSqlDBConnection cfg >>= \case + Left e -> error $ show e -- L.throwException $ toException $ show e + Right conn -> pure conn + +testDBName :: String +testDBName = "./test/EulerHS/TestData/test.db" + +testDBTemplateName :: String +testDBTemplateName = "./test/EulerHS/TestData/test.db.template" + +rmTestDB :: L.Flow () +rmTestDB = void $ L.runSysCmd $ "rm -f " <> testDBName + +prepareTestDB :: (T.DBConfig beM -> L.Flow ()) -> T.DBConfig beM -> L.Flow () +prepareTestDB insertValues cfg = do + rmTestDB + void $ L.runSysCmd $ "cp " <> testDBTemplateName <> " " <> testDBName + insertValues cfg + +withEmptyDB :: (T.DBConfig beM -> L.Flow ()) -> T.DBConfig beM -> (R.FlowRuntime -> IO ()) -> IO () +withEmptyDB insertValues cfg act = withFlowRuntime Nothing (\rt -> do + try (runFlow rt $ prepareTestDB insertValues cfg) >>= \case + Left (e :: SomeException) -> + runFlow rt rmTestDB + `finally` error ("Preparing test values failed: " <> show e) + Right _ -> act rt `finally` runFlow rt rmTestDB + ) diff --git a/testDB/SQLDB/TestData/MySQLDBSpec.sql b/testDB/SQLDB/TestData/MySQLDBSpec.sql new file mode 100644 index 00000000..77ba68ac --- /dev/null +++ b/testDB/SQLDB/TestData/MySQLDBSpec.sql @@ -0,0 +1,52 @@ +-- MySQL dump 10.13 Distrib 5.7.29, for Linux (x86_64) +-- +-- Host: mysql Database: users +-- ------------------------------------------------------ +-- Server version 5.7.29 + +/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; +/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; +/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; +/*!40101 SET NAMES utf8 */; +/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; +/*!40103 SET TIME_ZONE='+00:00' */; +/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; +/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; +/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; +/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; + +-- +-- Table structure for table `users` +-- + +DROP TABLE IF EXISTS `users`; +/*!40101 SET @saved_cs_client = @@character_set_client */; +/*!40101 SET character_set_client = utf8 */; +CREATE TABLE `users` ( + `id` int(11) NOT NULL AUTO_INCREMENT, + `first_name` varchar(255) NOT NULL, + `last_name` varchar(255) NOT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=latin1; +/*!40101 SET character_set_client = @saved_cs_client */; + +-- +-- Dumping data for table `users` +-- + +LOCK TABLES `users` WRITE; +/*!40000 ALTER TABLE `users` DISABLE KEYS */; +INSERT INTO `users` VALUES (1,'John','Doe'); +/*!40000 ALTER TABLE `users` ENABLE KEYS */; +UNLOCK TABLES; 
+/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; + +/*!40101 SET SQL_MODE=@OLD_SQL_MODE */; +/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; +/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; +/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; +/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; +/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; +/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; + +-- Dump completed on 2020-03-19 14:52:04 diff --git a/testDB/SQLDB/TestData/PostgresDBSpec.sql b/testDB/SQLDB/TestData/PostgresDBSpec.sql new file mode 100644 index 00000000..6cd9bf73 --- /dev/null +++ b/testDB/SQLDB/TestData/PostgresDBSpec.sql @@ -0,0 +1,83 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 12.2 (Debian 12.2-2.pgdg100+1) +-- Dumped by pg_dump version 12.1 + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +-- +-- Name: users; Type: TABLE; Schema: public; Owner: cloud +-- + +CREATE TABLE public.users ( + id integer NOT NULL PRIMARY KEY, + first_name character varying(255) NOT NULL, + last_name character varying(255) NOT NULL +); + + +ALTER TABLE public.users OWNER TO cloud; + +-- +-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: cloud +-- + +CREATE SEQUENCE public.users_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.users_id_seq OWNER TO cloud; + +-- +-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: cloud +-- + +ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id; + + +-- +-- Name: users id; Type: DEFAULT; Schema: public; Owner: cloud +-- + +ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass); + + +-- +-- Data for Name: users; Type: TABLE DATA; Schema: public; Owner: cloud +-- + +COPY public.users (id, first_name, last_name) FROM stdin; +\. 
+ + +-- +-- Name: users_id_seq; Type: SEQUENCE SET; Schema: public; Owner: cloud +-- + +SELECT pg_catalog.setval('public.users_id_seq', 1, false); + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/testDB/SQLDB/TestData/Scenarios/MySQL.hs b/testDB/SQLDB/TestData/Scenarios/MySQL.hs new file mode 100644 index 00000000..528a979d --- /dev/null +++ b/testDB/SQLDB/TestData/Scenarios/MySQL.hs @@ -0,0 +1,187 @@ +module SQLDB.TestData.Scenarios.MySQL where + +import EulerHS.Prelude + +import qualified EulerHS.Language as L +import qualified EulerHS.Types as T + +import SQLDB.TestData.Types + +import Database.Beam ((<-.), (==.)) +import qualified Database.Beam as B +import qualified Database.Beam.MySQL as BM + + +uniqueConstraintViolationDbScript :: T.DBConfig BM.MySQLM -> L.Flow (T.DBResult ()) +uniqueConstraintViolationDbScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> do + eRes1 <- L.runDB conn + $ L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 2 "Rosa" "Rosa"] + + eRes2 <- L.runDB conn + $ L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 2 "Rosa" "Rosa"] + + pure $ eRes1 >> eRes2 + + +uniqueConstraintViolationEveDbScript :: T.DBConfig BM.MySQLM -> L.Flow (T.DBResult ()) +uniqueConstraintViolationEveDbScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> + L.runDB conn $ do + L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 2 "Eve" "Beon"] + + L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 3 "Eve" "Beon"] + +uniqueConstraintViolationMickeyDbScript :: T.DBConfig BM.MySQLM -> L.Flow (T.DBResult ()) +uniqueConstraintViolationMickeyDbScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> do + eRes1 <- L.runDB conn $ + L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 4 "Mickey" "Mouse"] + + eRes2 <- L.runDB conn $ + L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 4 "Mickey" "Mouse"] + + pure $ eRes1 >> eRes2 + +throwExceptionFlowScript :: T.DBConfig BM.MySQLM -> L.Flow (T.DBResult ()) +throwExceptionFlowScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> + L.runDB conn $ do + L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 6 "Billy" "Evil"] + + void $ error "ThisException" + + L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 7 "Billy" "Bang"] + + +insertAndSelectWithinOneConnectionScript :: T.DBConfig BM.MySQLM -> L.Flow (T.DBResult (Maybe User)) +insertAndSelectWithinOneConnectionScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> + L.runDB conn $ do + L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 4 "Milky" "Way"] + + let predicate User {..} = _userId ==. 4 + + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate + $ B.all_ (_users eulerDb) + +selectUnknownDbScript :: T.DBConfig BM.MySQLM -> L.Flow (T.DBResult (Maybe User)) +selectUnknownDbScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> + L.runDB conn $ do + let predicate User {..} = _userFirstName ==. 
"Unknown" + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate + $ B.all_ (_users eulerDb) + +selectRowDbScript :: Int -> T.DBConfig BM.MySQLM -> L.Flow (T.DBResult (Maybe User)) +selectRowDbScript userId dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> + L.runDB conn $ do + let predicate User {..} = _userId ==. (B.val_ userId) + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate + $ B.all_ (_users eulerDb) + + +selectOneDbScript :: T.DBConfig BM.MySQLM -> L.Flow (T.DBResult (Maybe User)) +selectOneDbScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> do + eRes1 <- L.runDB conn + $ L.insertRows + $ B.insert (_users eulerDb) + $ B.insertExpressions (mkUser <$> susers) + + eRes2 <- L.runDB conn $ do + let predicate User {..} = _userFirstName ==. "John" + + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate + $ B.all_ (_users eulerDb) + + pure $ eRes1 >> eRes2 + + +insertReturningScript :: T.DBConfig BM.MySQLM -> L.Flow (T.DBResult [User]) +insertReturningScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> + L.runDB conn + $ L.insertRowsReturningList + $ B.insert (_users eulerDb) + $ B.insertExpressions + [ User B.default_ + ( B.val_ "John" ) + ( B.val_ "Doe" ) + , User B.default_ + ( B.val_ "Doe" ) + ( B.val_ "John" ) + ] + + +updateAndSelectDbScript :: T.DBConfig BM.MySQLM -> L.Flow (T.DBResult (Maybe User)) +updateAndSelectDbScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> + L.runDB conn $ do + let predicate1 User {..} = _userFirstName ==. "John" + + L.updateRows $ B.update (_users eulerDb) + (\User {..} -> mconcat + [ _userFirstName <-. "Leo" + , _userLastName <-. "San" + ] + ) + predicate1 + + let predicate2 User {..} = _userFirstName ==. "Leo" + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate2 + $ B.all_ (_users eulerDb) diff --git a/testDB/SQLDB/TestData/Scenarios/Postgres.hs b/testDB/SQLDB/TestData/Scenarios/Postgres.hs new file mode 100644 index 00000000..046dd78f --- /dev/null +++ b/testDB/SQLDB/TestData/Scenarios/Postgres.hs @@ -0,0 +1,110 @@ +module SQLDB.TestData.Scenarios.Postgres where + +import EulerHS.Prelude + +import qualified EulerHS.Language as L +import qualified EulerHS.Types as T + +import SQLDB.TestData.Types + +import Database.Beam ((<-.), (==.)) +import qualified Database.Beam as B +import qualified Database.Beam.Postgres as BP + + +-- Scenarios + +uniqueConstraintViolationDbScript :: T.DBConfig BP.Pg -> L.Flow (T.DBResult ()) +uniqueConstraintViolationDbScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> do + eRes1 <- L.runDB conn + $ L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 2 "Eve" "Beon"] + + eRes2 <- L.runDB conn + $ L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 2 "Eve" "Beon"] + + pure $ eRes1 >> eRes2 + + +selectUnknownDbScript :: T.DBConfig BP.Pg -> L.Flow (T.DBResult (Maybe User)) +selectUnknownDbScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> + L.runDB conn $ do + let predicate User {..} = _userFirstName ==. 
"Unknown" + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate + $ B.all_ (_users eulerDb) + + +selectOneDbScript :: T.DBConfig BP.Pg -> L.Flow (T.DBResult (Maybe User)) +selectOneDbScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> do + eRes1 <- L.runDB conn + $ L.insertRows + $ B.insert (_users eulerDb) + $ B.insertExpressions (mkUser <$> susers) + + eRes2 <- L.runDB conn $ do + let predicate User {..} = _userFirstName ==. "John" + + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate + $ B.all_ (_users eulerDb) + + pure $ eRes1 >> eRes2 + + +insertReturningScript :: T.DBConfig BP.Pg -> L.Flow (T.DBResult [User]) +insertReturningScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> + L.runDB conn + $ L.insertRowsReturningList + $ B.insert (_users eulerDb) + $ B.insertExpressions + [ User B.default_ + ( B.val_ "John" ) + ( B.val_ "Doe" ) + , User B.default_ + ( B.val_ "Doe" ) + ( B.val_ "John" ) + ] + + +updateAndSelectDbScript :: T.DBConfig BP.Pg -> L.Flow (T.DBResult (Maybe User)) +updateAndSelectDbScript dbcfg = do + econn <- L.getSqlDBConnection dbcfg + + flip (either $ error "Unable to get connection") econn $ \conn -> + L.runDB conn $ do + let predicate1 User {..} = _userFirstName ==. "John" + + L.updateRows $ B.update (_users eulerDb) + (\User {..} -> mconcat + [ _userFirstName <-. "Leo" + , _userLastName <-. "San" + ] + ) + predicate1 + + let predicate2 User {..} = _userFirstName ==. "Leo" + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate2 + $ B.all_ (_users eulerDb) diff --git a/testDB/SQLDB/TestData/Scenarios/SQLite.hs b/testDB/SQLDB/TestData/Scenarios/SQLite.hs new file mode 100644 index 00000000..4d446ccb --- /dev/null +++ b/testDB/SQLDB/TestData/Scenarios/SQLite.hs @@ -0,0 +1,123 @@ +module SQLDB.TestData.Scenarios.SQLite where + +import EulerHS.Prelude + +import qualified EulerHS.Language as L +import qualified EulerHS.Types as T + +import SQLDB.TestData.Connections +import SQLDB.TestData.Types + +import Database.Beam ((/=.), (<-.), (==.)) +import qualified Database.Beam as B +import qualified Database.Beam.Sqlite as BS + + +-- Scenarios + +deleteTestValues :: T.DBConfig BS.SqliteM -> L.Flow () +deleteTestValues cfg = do + conn <- connectOrFail cfg -- $ T.mkSQLiteConfig testDBName + void $ L.runDB conn + $ L.deleteRows + $ B.delete (_users eulerDb) (\u -> _userId u /=. B.val_ 0) + void $ L.runDB conn + $ L.updateRows + $ B.update (_sqlite_sequence sqliteSequenceDb) + (\(SqliteSequence {..}) -> mconcat [_seq <-. B.val_ 0]) + (\(SqliteSequence {..}) -> _name ==. 
B.val_ "users") + +insertTestValues :: T.DBConfig BS.SqliteM -> L.Flow () +insertTestValues cfg = do + conn <- connectOrFail cfg + void $ L.runDB conn + $ L.insertRows + $ B.insert (_users eulerDb) + $ B.insertExpressions + [ User B.default_ + ( B.val_ "John" ) + ( B.val_ "Doe" ) + , User B.default_ + ( B.val_ "Doe" ) + ( B.val_ "John" ) + ] + L.deinitSqlDBConnection conn + +uniqueConstraintViolationDbScript :: T.DBConfig BS.SqliteM -> L.Flow (T.DBResult ()) +uniqueConstraintViolationDbScript cfg = do + connection <- connectOrFail cfg + + eRes1 <- L.runDB connection + $ L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 1 "Eve" "Beon"] + + eRes2 <- L.runDB connection + $ L.insertRows + $ B.insert (_users eulerDb) + $ B.insertValues [User 1 "Eve" "Beon"] + + pure $ eRes1 >> eRes2 + +selectUnknownDbScript :: T.DBConfig BS.SqliteM -> L.Flow (T.DBResult (Maybe User)) +selectUnknownDbScript cfg = do + connection <- connectOrFail cfg + + L.runDB connection $ do + let predicate User {..} = _userFirstName ==. B.val_ "Unknown" + + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate + $ B.all_ (_users eulerDb) + +selectOneDbScript :: T.DBConfig BS.SqliteM -> L.Flow (T.DBResult (Maybe User)) +selectOneDbScript cfg = do + connection <- connectOrFail cfg + + L.runDB connection $ do + let predicate User {..} = _userFirstName ==. B.val_ "John" + + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate + $ B.all_ (_users eulerDb) + +updateAndSelectDbScript :: T.DBConfig BS.SqliteM -> L.Flow (T.DBResult (Maybe User)) +updateAndSelectDbScript cfg = do + connection <- connectOrFail cfg + + L.runDB connection $ do + let predicate1 User {..} = _userFirstName ==. B.val_ "John" + + L.updateRows $ B.update (_users eulerDb) + (\User {..} -> mconcat + [ _userFirstName <-. B.val_ "Leo" + , _userLastName <-. B.val_ "San" + ] + ) + predicate1 + + let predicate2 User {..} = _userFirstName ==. B.val_ "Leo" + L.findRow + $ B.select + $ B.limit_ 1 + $ B.filter_ predicate2 + $ B.all_ (_users eulerDb) + +insertReturningScript :: T.DBConfig BS.SqliteM -> L.Flow (T.DBResult [User]) +insertReturningScript cfg = do + connection <- connectOrFail cfg + L.runDB connection + $ L.insertRowsReturningList + $ B.insert (_users eulerDb) + $ B.insertExpressions + [ User B.default_ + ( B.val_ "John" ) + ( B.val_ "Doe" ) + , User B.default_ + ( B.val_ "Doe" ) + ( B.val_ "John" ) + ] diff --git a/testDB/SQLDB/TestData/Types.hs b/testDB/SQLDB/TestData/Types.hs new file mode 100644 index 00000000..e29ca1c4 --- /dev/null +++ b/testDB/SQLDB/TestData/Types.hs @@ -0,0 +1,85 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE StandaloneDeriving #-} + +module SQLDB.TestData.Types where + +import EulerHS.Prelude +import qualified EulerHS.Types as T + +import qualified Database.Beam as B +import qualified Database.Beam.Backend.SQL as B + +-- sqlite3 db +-- CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT, first_name VARCHAR NOT NULL, last_name VARCHAR NOT NULL); +data UserT f = User + { _userId :: B.C f Int + , _userFirstName :: B.C f Text + , _userLastName :: B.C f Text + } deriving (Generic, B.Beamable) + +instance B.Table UserT where + data PrimaryKey UserT f = + UserId (B.C f Int) deriving (Generic, B.Beamable) + primaryKey = UserId . 
_userId + +type User = UserT Identity + + +type UserId = B.PrimaryKey UserT Identity + +deriving instance Show User +deriving instance Eq User +deriving instance ToJSON User +deriving instance FromJSON User + +data EulerDb f = EulerDb + { _users :: f (B.TableEntity UserT) + } deriving (Generic, B.Database be) + +eulerDb :: B.DatabaseSettings be EulerDb +eulerDb = B.defaultDbSettings + + +data SqliteSequenceT f = SqliteSequence + { _name :: B.C f Text + , _seq :: B.C f Int + } deriving (Generic, B.Beamable) + +instance B.Table SqliteSequenceT where + data PrimaryKey SqliteSequenceT f = + SqliteSequenceId (B.C f Text) deriving (Generic, B.Beamable) + primaryKey = SqliteSequenceId . _name + +type SqliteSequence = SqliteSequenceT Identity +type SqliteSequenceId = B.PrimaryKey SqliteSequenceT Identity + +data SqliteSequenceDb f = SqliteSequenceDb + { _sqlite_sequence :: f (B.TableEntity SqliteSequenceT) + } deriving (Generic, B.Database be) + +sqliteSequenceDb :: B.DatabaseSettings be SqliteSequenceDb +sqliteSequenceDb = B.defaultDbSettings + + +data SimpleUser = SimpleUser {firstN :: Text, lastN :: Text} + +susers :: [SimpleUser] +susers = + [ SimpleUser "John" "Doe" + , SimpleUser "Doe" "John" + ] + +mkUser + :: ( B.BeamSqlBackend be + , B.SqlValable (B.Columnar f Text) + , B.Columnar f Int ~ B.QGenExpr ctxt be s a + , B.HaskellLiteralForQExpr (B.Columnar f Text) ~ Text + ) + => SimpleUser + -> UserT f +mkUser SimpleUser {..} = User B.default_ (B.val_ firstN) (B.val_ lastN) + + +someUser :: Text -> Text -> T.DBResult (Maybe User) -> Bool +someUser f l (Right (Just u)) = _userFirstName u == f && _userLastName u == l +someUser _ _ _ = False diff --git a/testDB/SQLDB/TestData/query_examples.db.template b/testDB/SQLDB/TestData/query_examples.db.template new file mode 100644 index 00000000..e22c2028 Binary files /dev/null and b/testDB/SQLDB/TestData/query_examples.db.template differ diff --git a/testDB/SQLDB/Tests/MySQLDBSpec.hs b/testDB/SQLDB/Tests/MySQLDBSpec.hs new file mode 100644 index 00000000..29dbba22 --- /dev/null +++ b/testDB/SQLDB/Tests/MySQLDBSpec.hs @@ -0,0 +1,140 @@ +module SQLDB.Tests.MySQLDBSpec where + +import EulerHS.Prelude + +import EulerHS.Interpreters +import EulerHS.Runtime (withFlowRuntime) +import EulerHS.Types + +import SQLDB.TestData.Scenarios.MySQL +import SQLDB.TestData.Types +import qualified Database.Beam.MySQL as BM + +import Test.Hspec hiding (runIO) + +import EulerHS.Language +import qualified EulerHS.Types as T + +import EulerHS.Extra.Test + + +testDBName :: String +testDBName = "mysql_db_spec_test_db" + +mySQLCfg :: T.MySQLConfig +mySQLCfg = T.MySQLConfig + { connectHost = "mysql" + , connectPort = 3306 + , connectUser = "cloud" + , connectPassword = "scape" + , connectDatabase = testDBName + , connectOptions = [T.CharsetName "utf8"] + , connectPath = "" + , connectSSL = Nothing + } + +mySQLRootCfg :: T.MySQLConfig +mySQLRootCfg = + T.MySQLConfig + { connectUser = "root" + , connectPassword = "root" + , connectDatabase = "" + , .. 
+ } + where + T.MySQLConfig {..} = mySQLCfg + +mkMysqlConfig :: T.MySQLConfig -> T.DBConfig BM.MySQLM +mkMysqlConfig = T.mkMySQLConfig "eulerMysqlDB" + +poolConfig :: T.PoolConfig +poolConfig = T.PoolConfig + { stripes = 1 + , keepAlive = 10 + , resourcesPerStripe = 50 + } + +mkMysqlPoolConfig :: T.MySQLConfig -> DBConfig BM.MySQLM +mkMysqlPoolConfig cfg = mkMySQLPoolConfig "eulerMysqlDB" cfg poolConfig + +spec :: Spec +spec = do + let test dbCfg = do + it "Unique Constraint Violation" $ \rt -> do + eRes <- runFlow rt $ uniqueConstraintViolationDbScript dbCfg + eRes `shouldBe` + ( Left $ DBError + ( SQLError $ MysqlError $ + MysqlSqlError + { errCode = 1062 + , errMsg = "Duplicate entry '2' for key 'PRIMARY'" + } + ) + "ConnectionError {errFunction = \"query\", errNumber = 1062, errMessage = \"Duplicate entry '2' for key 'PRIMARY'\"}" + ) + + it "Txn should be committed in both cases in one connection (Eve)" $ \rt -> do + _ <- runFlow rt $ uniqueConstraintViolationEveDbScript dbCfg + eRes2 <- runFlow rt $ selectRowDbScript 2 dbCfg + eRes3 <- runFlow rt $ selectRowDbScript 3 dbCfg + (eRes2, eRes3) `shouldBe` + ( Right (Just (User {_userId = 2, _userFirstName = "Eve", _userLastName = "Beon"})) + , Right (Just (User {_userId = 3, _userFirstName = "Eve", _userLastName = "Beon"})) + ) + + it "First insert succeeds, last insert resolved on the DB side (Mickey)" $ \rt -> do + _ <- runFlow rt $ uniqueConstraintViolationMickeyDbScript dbCfg + eRes <- runFlow rt $ selectRowDbScript 4 dbCfg + eRes `shouldSatisfy` (someUser "Mickey" "Mouse") + + it "Txn should be completely rolled back on exception (Billy)" $ \rt -> do + _ <- runFlow rt $ throwExceptionFlowScript dbCfg + eRes6 <- runFlow rt $ selectRowDbScript 6 dbCfg + eRes7 <- runFlow rt $ selectRowDbScript 7 dbCfg + (eRes6, eRes7) `shouldBe` (Right Nothing, Right Nothing) + + it "Insert and Select in one db connection (Milky way)" $ \rt -> do + eRes <- runFlow rt $ insertAndSelectWithinOneConnectionScript dbCfg + eRes `shouldSatisfy` (someUser "Milky" "Way") + + it "Select one, row not found" $ \rt -> do + eRes <- runFlow rt $ selectUnknownDbScript dbCfg + eRes `shouldBe` (Right Nothing) + + it "Select one, row found" $ \rt -> do + eRes <- runFlow rt $ selectOneDbScript dbCfg + eRes `shouldSatisfy` (someUser "John" "Doe") + + it "Update / Select, row found & changed" $ \rt -> do + eRes <- runFlow rt $ updateAndSelectDbScript dbCfg + eRes `shouldSatisfy` (someUser "Leo" "San") + + it "Insert returning should return list of rows" $ \rt -> do + eRes <- runFlow rt $ insertReturningScript dbCfg + + case eRes of + Left _ -> expectationFailure "Left DBResult" + Right us -> do + length us `shouldBe` 2 + let u1 = us !! 0 + let u2 = us !! 1 + + _userFirstName u1 `shouldBe` "John" + _userLastName u1 `shouldBe` "Doe" + + _userFirstName u2 `shouldBe` "Doe" + _userLastName u2 `shouldBe` "John" + + let prepare msCfgToDbCfg next = + withMysqlDb testDBName "testDB/SQLDB/TestData/MySQLDBSpec.sql" mySQLRootCfg $ + withFlowRuntime Nothing $ \rt -> do + runFlow rt $ do + ePool <- initSqlDBConnection $ msCfgToDbCfg mySQLCfg + either (error "Failed to connect to MySQL") (const $ pure ()) ePool + next rt + + around (prepare mkMysqlConfig) $ + describe "EulerHS MySQL DB tests" $ test $ mkMysqlConfig mySQLCfg + + around (prepare mkMysqlPoolConfig) $ + describe "EulerHS MySQL DB tests. 
Pool" $ test $ mkMysqlPoolConfig mySQLCfg diff --git a/testDB/SQLDB/Tests/PostgresDBSpec.hs b/testDB/SQLDB/Tests/PostgresDBSpec.hs new file mode 100644 index 00000000..fa34b7c3 --- /dev/null +++ b/testDB/SQLDB/Tests/PostgresDBSpec.hs @@ -0,0 +1,100 @@ +module SQLDB.Tests.PostgresDBSpec where + +import EulerHS.Prelude + +import EulerHS.Interpreters +import EulerHS.Runtime (withFlowRuntime) +import qualified EulerHS.Types as T +import SQLDB.TestData.Scenarios.Postgres (uniqueConstraintViolationDbScript, + selectUnknownDbScript, selectOneDbScript, updateAndSelectDbScript) + +import SQLDB.TestData.Types + +import qualified Database.Beam.Postgres as BP +import EulerHS.Extra.Test +import Test.Hspec hiding (runIO) + +-- Configurations + +pgCfg' :: T.PostgresConfig +pgCfg' = T.PostgresConfig + { connectHost = "postgres" --String + , connectPort = 5432 --Word16 + , connectUser = "cloud" -- String + , connectPassword = "scape" -- String + , connectDatabase = "euler_test_db" -- String + } + + +pgRootCfg :: T.PostgresConfig +pgRootCfg = + T.PostgresConfig + { connectUser = "cloud" + , connectPassword = "scape" + , connectDatabase = "hpcldb" + , .. + } + where + T.PostgresConfig {..} = pgCfg' + +mkPgCfg :: T.PostgresConfig -> T.DBConfig BP.Pg +mkPgCfg = T.mkPostgresConfig "eulerPGDB" + +poolConfig :: T.PoolConfig +poolConfig = T.PoolConfig + { stripes = 1 + , keepAlive = 10 + , resourcesPerStripe = 50 + } + +mkPgPoolCfg :: T.PostgresConfig -> T.DBConfig BP.Pg +mkPgPoolCfg cfg = T.mkPostgresPoolConfig "eulerPGDB" cfg poolConfig + +-- Tests + +spec :: Spec +spec = do + let + test pgCfg = do + it "Unique Constraint Violation" $ \rt -> do + eRes <- runFlow rt $ uniqueConstraintViolationDbScript pgCfg + + eRes `shouldBe` + ( Left $ T.DBError + ( T.SQLError $ T.PostgresError $ + T.PostgresSqlError + { sqlState = "23505" + , sqlExecStatus = T.PostgresFatalError + , sqlErrorMsg = "duplicate key value violates unique constraint \"users_pkey\"" + , sqlErrorDetail = "Key (id)=(2) already exists." + , sqlErrorHint = "" + } + ) + "SqlError {sqlState = \"23505\", sqlExecStatus = FatalError, sqlErrorMsg = \"duplicate key value violates unique constraint \\\"users_pkey\\\"\", sqlErrorDetail = \"Key (id)=(2) already exists.\", sqlErrorHint = \"\"}" + ) + + it "Select one, row not found" $ \rt -> do + eRes <- runFlow rt $ selectUnknownDbScript pgCfg + eRes `shouldBe` (Right Nothing) + + it "Select one, row found" $ \rt -> do + eRes <- runFlow rt $ selectOneDbScript pgCfg + eRes `shouldSatisfy` (someUser "John" "Doe") + + it "Update / Select, row found & changed" $ \rt -> do + eRes <- runFlow rt $ updateAndSelectDbScript pgCfg + eRes `shouldSatisfy` (someUser "Leo" "San") + + let prepare pgCfgToDbCfg = + preparePostgresDB + "testDB/SQLDB/TestData/PostgresDBSpec.sql" + pgRootCfg + pgCfg' + pgCfgToDbCfg + (withFlowRuntime Nothing) + + around (prepare mkPgCfg) $ + describe "EulerHS Postgres DB tests" $ test $ mkPgCfg pgCfg' + + around (prepare mkPgPoolCfg) $ + describe "EulerHS Postgres DB tests. 
Pool" $ test $ mkPgPoolCfg pgCfg' diff --git a/testDB/SQLDB/Tests/QueryExamplesSpec.hs b/testDB/SQLDB/Tests/QueryExamplesSpec.hs new file mode 100644 index 00000000..807f0ea9 --- /dev/null +++ b/testDB/SQLDB/Tests/QueryExamplesSpec.hs @@ -0,0 +1,399 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE StandaloneDeriving #-} + +module SQLDB.Tests.QueryExamplesSpec where + + +import Data.Aeson (encode) +import qualified Data.ByteString.Lazy as BSL +import Data.Time +import EulerHS.Prelude hiding (getOption) +import Test.Hspec hiding (runIO) +import Unsafe.Coerce + +import EulerHS.Interpreters +import EulerHS.Language +import EulerHS.Runtime (withFlowRuntime) +import EulerHS.Types hiding (error) + +import qualified EulerHS.Language as L +import qualified EulerHS.Runtime as R +import qualified EulerHS.Types as T + +import Database.Beam ((&&.), (/=.), (<-.), (<.), (==.), (>.), (>=.)) +import qualified Database.Beam as B +import qualified Database.Beam.Backend.SQL as B +import qualified Database.Beam.Sqlite as BS + + +date1 :: LocalTime +date1 = LocalTime + { localDay = toEnum 56195 + , localTimeOfDay = defaultTimeOfDay1 + } + +defaultTimeOfDay1 :: TimeOfDay +defaultTimeOfDay1 = TimeOfDay + { todHour = 1 + , todMin = 1 + , todSec = 1 + } + +-- 2012-09-26 18:08:45 +date2 :: LocalTime +date2 = LocalTime + { localDay = toEnum 56196 + , localTimeOfDay = td2 + } + +td2 :: TimeOfDay +td2 = TimeOfDay + { todHour = 18 + , todMin = 8 + , todSec = 45 + } + +date3 :: LocalTime +date3 = LocalTime + { localDay = toEnum 56190 + , localTimeOfDay = td3 + } + +date4 :: LocalTime +date4 = LocalTime + { localDay = toEnum 56191 + , localTimeOfDay = td3 + } + +td3 :: TimeOfDay +td3 = TimeOfDay + { todHour = 0 + , todMin = 0 + , todSec = 0 + } + +data MemberT f = Member + { memberId :: B.C f Int + , surName :: B.C f Text + , firstName :: B.C f Text + , address :: B.C f Text + , zipCode :: B.C f Int + , telephone :: B.C f Text + , recommendedBy :: B.C f (Maybe Int) + , joinDate :: B.C f LocalTime + } deriving (Generic, B.Beamable) + +instance B.Table MemberT where + data PrimaryKey MemberT f = + MemberId (B.C f Int) deriving (Generic, B.Beamable) + primaryKey = MemberId . memberId + +type Member = MemberT Identity + +type MemberId = B.PrimaryKey MemberT Identity + +deriving instance Show MemberId +deriving instance Eq MemberId +deriving instance ToJSON MemberId +deriving instance FromJSON MemberId + +deriving instance Show Member +deriving instance Eq Member +deriving instance ToJSON Member +deriving instance FromJSON Member + + +membersEMod :: B.EntityModification + (B.DatabaseEntity be db) be (B.TableEntity MemberT) +membersEMod = B.modifyTableFields + B.tableModification + { memberId = B.fieldNamed "memid" + , surName = B.fieldNamed "surname" + , firstName = B.fieldNamed "firstname" + , address = B.fieldNamed "address" + , zipCode = B.fieldNamed "zipcode" + , telephone = B.fieldNamed "telephone" + , recommendedBy = B.fieldNamed "recommendedby" + , joinDate = B.fieldNamed "joindate" + } + +data FacilityT f = Facility + { facilityId :: B.C f Int + , name :: B.C f Text + , memberCost :: B.C f Double + , guestCost :: B.C f Double + , initialOutlay :: B.C f Double + , monthlyMaintenance :: B.C f Double + } deriving (Generic, B.Beamable) + +instance B.Table FacilityT where + data PrimaryKey FacilityT f = + FacrId (B.C f Int) deriving (Generic, B.Beamable) + primaryKey = FacrId . 
facilityId + +type Facility = FacilityT Identity + +type FacrId = B.PrimaryKey FacilityT Identity + +deriving instance Show FacrId +deriving instance Eq FacrId +deriving instance ToJSON FacrId +deriving instance FromJSON FacrId + +deriving instance Show Facility +deriving instance Eq Facility +deriving instance ToJSON Facility +deriving instance FromJSON Facility + +facilitiesEMod :: B.EntityModification + (B.DatabaseEntity be db) be (B.TableEntity FacilityT) +facilitiesEMod = B.modifyTableFields + B.tableModification + { facilityId = B.fieldNamed "facid" + , name = B.fieldNamed "name" + , memberCost = B.fieldNamed "membercost" + , guestCost = B.fieldNamed "guestcost" + , initialOutlay = B.fieldNamed "initialoutlay" + , monthlyMaintenance = B.fieldNamed "monthlymaintenance" + } + + +data BookingT f = Booking + { bookId :: B.C f Int + , facId :: B.PrimaryKey FacilityT f + , bmemid :: B.PrimaryKey MemberT f + , starttime :: B.C f LocalTime + , slots :: B.C f Int + } deriving (Generic, B.Beamable) + +instance B.Table BookingT where + data PrimaryKey BookingT f = + BookId (B.C f Int) deriving (Generic, B.Beamable) + primaryKey = BookId . bookId + +type Booking = BookingT Identity + +type BookId = B.PrimaryKey BookingT Identity + +deriving instance Show Booking +deriving instance Eq Booking +deriving instance ToJSON Booking +deriving instance FromJSON Booking + +bookingsEMod :: B.EntityModification + (B.DatabaseEntity be db) be (B.TableEntity BookingT) +bookingsEMod = B.modifyTableFields + B.tableModification + { bookId = B.fieldNamed "bookid" + , facId = FacrId (B.fieldNamed "facid") + , bmemid = MemberId (B.fieldNamed "memid") + , starttime = B.fieldNamed "starttime" + , slots = B.fieldNamed "slots" + } + + +data ClubDB f = ClubDB + { members :: f (B.TableEntity MemberT) + , facilities :: f (B.TableEntity FacilityT) + , bookings :: f (B.TableEntity BookingT) + } deriving (Generic, B.Database be) + +clubDB :: B.DatabaseSettings be ClubDB +clubDB = B.defaultDbSettings `B.withDbModification` + B.dbModification + { facilities = facilitiesEMod + , members = membersEMod + , bookings = bookingsEMod + } + +testDBName :: String +testDBName = "./testDB/SQLDB/TestData/test.db" + +testDBTemplateName :: String +testDBTemplateName = "./testDB/SQLDB/TestData/query_examples.db.template" + +poolConfig = T.PoolConfig + { stripes = 1 + , keepAlive = 10 + , resourcesPerStripe = 50 + } + +sqliteCfg = T.mkSQLitePoolConfig "clubSQliteDB" testDBName poolConfig + +connectOrFail :: T.DBConfig beM -> Flow (T.SqlConn beM) +connectOrFail cfg = L.initSqlDBConnection cfg >>= \case + Left e -> error $ show e + Right conn -> pure conn + +rmTestDB :: L.Flow () +rmTestDB = void $ L.runSysCmd $ "rm -f " <> testDBName + +prepareTestDB :: L.Flow () +prepareTestDB = do + rmTestDB + void $ L.runSysCmd $ "cp " <> testDBTemplateName <> " " <> testDBName + pure () + + +--Basic string searches +textSearchLike :: L.Flow (T.DBResult [Facility]) +textSearchLike = do + conn <- connectOrFail sqliteCfg + L.runDB conn + $ L.findRows + $ B.select + $ B.filter_ (\f -> name f `B.like_` "%Tennis%") + $ B.all_ (facilities clubDB) + +-- Matching against multiple possible values +matchMultValues :: L.Flow (T.DBResult [Facility]) +matchMultValues = do + conn <- connectOrFail sqliteCfg + L.runDB conn + $ L.findRows + $ B.select + $ B.filter_ (\f -> facilityId f `B.in_` [1,5]) + $ B.all_ (facilities clubDB) + +searchByDate :: L.Flow (T.DBResult [Member]) +searchByDate = do + conn <- connectOrFail sqliteCfg + L.runDB conn + $ L.findRows + $ B.select + $ 
B.filter_ (\m -> joinDate m >=. B.val_ date1) + $ B.all_ (members clubDB) + + +-- Removing duplicates, and ordering results +groupDistinctLimit :: L.Flow (T.DBResult [Text]) +groupDistinctLimit = do + conn <- connectOrFail sqliteCfg + L.runDB conn + $ L.findRows + $ B.select + $ B.limit_ 10 + $ B.nub_ + $ fmap surName + $ B.orderBy_ (B.asc_ . surName) + $ B.all_ (members clubDB) + +-- Combining results from multiple queries with union +selectWithUnion:: L.Flow (T.DBResult [Text]) +selectWithUnion = do + conn <- connectOrFail sqliteCfg + L.runDB conn $ + let + sn = fmap surName + $ B.all_ (members clubDB) + n = fmap name + $ B.all_ (facilities clubDB) + in L.findRows $ B.select $ B.limit_ 3 $ B.union_ sn n + + +aggregate1 :: L.Flow (T.DBResult (Maybe LocalTime)) +aggregate1 = do + conn <- connectOrFail sqliteCfg + res <- L.runDB conn $ + L.findRow $ B.select + $ B.aggregate_ (\m -> B.max_ (joinDate m )) + $ B.all_ (members clubDB) + pure $ join <$> res + +aggregate2 :: L.Flow (T.DBResult (Maybe (Text, Text,(LocalTime)))) +aggregate2 = do + conn <- connectOrFail sqliteCfg + L.runDB conn $ + L.findRow $ B.select $ do + mdate <- B.aggregate_ (\ms -> ( B.max_ (joinDate ms))) + $ B.all_ (members clubDB) + lm <- B.filter_ (\m -> joinDate m ==. B.fromMaybe_ (B.val_ date2) mdate) $ B.all_ (members clubDB) + pure (firstName lm, surName lm, joinDate lm) + + +join1 :: L.Flow (T.DBResult [LocalTime]) +join1 = do + conn <- connectOrFail sqliteCfg + L.runDB conn $ + L.findRows $ B.select $ fmap starttime $ do + ms <- B.all_ (members clubDB) + bs <- B.join_ (bookings clubDB) (\book -> bmemid book ==. B.primaryKey ms) + B.guard_ (firstName ms ==. "David" &&. surName ms ==. "Farrell") + pure bs + +join2 :: L.Flow (T.DBResult [(LocalTime, Text)]) +join2 = do + conn <- connectOrFail sqliteCfg + L.runDB conn + $ L.findRows + $ B.select + $ B.orderBy_ (B.asc_ . fst) + $ fmap (\(fs,bs) -> (starttime bs,name fs)) + $ do + fs <- B.all_ (facilities clubDB) + bs <- B.join_ (bookings clubDB) (\book -> facId book ==. B.primaryKey fs) + B.guard_ ( starttime bs >=. (B.val_ date3) + &&. starttime bs <. (B.val_ date4) + &&. 
name fs `B.like_` "%Tennis Court%") + pure (fs, bs) + + +loggerCfg = defaultLoggerConfig + { _logToFile = True + , _logFilePath = "/tmp/euler-backend.log" + , _isAsync = True + } + +withEmptyDB :: (R.FlowRuntime -> IO ()) -> IO () +withEmptyDB act = withFlowRuntime Nothing (\rt -> do + try (runFlow rt prepareTestDB) >>= \case + Left (e :: SomeException) -> + runFlow rt rmTestDB + `finally` error ("Preparing test values failed: " <> show e) + Right _ -> act rt `finally` runFlow rt rmTestDB + ) + + +spec :: Spec +spec = + around withEmptyDB $ + + describe "Query examples" $ do + it "Text search with 'like' operator " $ \rt -> do + eRes <- runFlow rt textSearchLike + eRes `shouldBe` Right [Facility {facilityId = 0, name = "Tennis Court 1", memberCost = 5.0, guestCost = 25.0, initialOutlay = 10000.0, monthlyMaintenance = 200.0},Facility {facilityId = 1, name = "Tennis Court 2", memberCost = 5.0, guestCost = 25.0, initialOutlay = 8000.0, monthlyMaintenance = 200.0},Facility {facilityId = 3, name = "Table Tennis", memberCost = 0.0, guestCost = 5.0, initialOutlay = 320.0, monthlyMaintenance = 10.0}] + + it "Match against mult values" $ \rt -> do + eRes <- runFlow rt matchMultValues + eRes `shouldBe` Right [Facility {facilityId = 1, name = "Tennis Court 2", memberCost = 5.0, guestCost = 25.0, initialOutlay = 8000.0, monthlyMaintenance = 200.0},Facility {facilityId = 5, name = "Massage Room 2", memberCost = 35.0, guestCost = 80.0, initialOutlay = 4000.0, monthlyMaintenance = 3000.0}] + + + it "search by date" $ \rt -> do + eRes <- runFlow rt searchByDate + (length <$> eRes) `shouldBe` Right 1 + + it "orderDistinctLimit" $ \rt -> do + eRes <- runFlow rt groupDistinctLimit + eRes `shouldBe` Right ["Bader","Baker","Boothe","Butters","Coplin","Crumpet","Dare","Farrell","GUEST","Genting"] + + it "selectWithUnion" $ \rt -> do + eRes <- runFlow rt selectWithUnion + eRes `shouldBe` Right ["Bader","Badminton Court","Baker"] + + it "aggregate1" $ \rt -> do + eRes <- runFlow rt aggregate1 + eRes `shouldBe` Right (Just date2) + + it "aggregate2" $ \rt -> do + eRes <- runFlow rt aggregate2 + eRes `shouldBe` Right (Just ("Darren","Smith", date2)) + + it "join1" $ \rt -> do + eRes <- runFlow rt join1 + length <$> eRes `shouldBe` Right 34 + + it "join2" $ \rt -> do + eRes <- runFlow rt join2 + eRes `shouldBe` fmap (sortOn fst) eRes + length <$> eRes `shouldBe` Right 12 diff --git a/testDB/SQLDB/Tests/SQLiteDBSpec.hs b/testDB/SQLDB/Tests/SQLiteDBSpec.hs new file mode 100644 index 00000000..1ba37202 --- /dev/null +++ b/testDB/SQLDB/Tests/SQLiteDBSpec.hs @@ -0,0 +1,152 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE StandaloneDeriving #-} + +module SQLDB.Tests.SQLiteDBSpec where + +import EulerHS.Prelude + +import EulerHS.Interpreters +import qualified EulerHS.Language as L +import EulerHS.Types hiding (error) +import qualified EulerHS.Types as T + +import SQLDB.TestData.Connections +import SQLDB.TestData.Scenarios.SQLite +import SQLDB.TestData.Types + +import qualified Database.Beam.Sqlite as BS +import Test.Hspec hiding (runIO) + + +-- Configurations + +sqliteCfg' :: DBConfig BS.SqliteM +sqliteCfg' = T.mkSQLiteConfig "eulerSQliteDB" testDBName + +poolConfig :: T.PoolConfig +poolConfig = T.PoolConfig + { stripes = 1 + , keepAlive = 10 + , resourcesPerStripe = 50 + } + + +sqlitePoolCfg :: T.DBConfig BS.SqliteM +sqlitePoolCfg = T.mkSQLitePoolConfig "eulerSQliteDB" testDBName 
poolConfig + +-- Tests + +spec :: Spec +spec = do + let + test sqliteCfg = do + it "Double connection initialization should fail" $ \rt -> do + eRes :: Either String () <- runFlow rt $ do + eConn1 <- L.initSqlDBConnection sqliteCfg + eConn2 <- L.initSqlDBConnection sqliteCfg + case (eConn1, eConn2) of + (Left err, _) -> pure $ Left $ "Failed to connect 1st time: " <> show err + (_, Left (T.DBError T.ConnectionAlreadyExists msg)) + | msg == "Connection for eulerSQliteDB already created." -> pure $ Right () + (_, Left err) -> pure $ Left $ "Unexpected error type on 2nd connect: " <> show err + _ -> pure $ Left "Unexpected result." + eRes `shouldBe` Right () + + it "Get uninitialized connection should fail" $ \rt -> do + eRes :: Either String () <- runFlow rt $ do + eConn <- L.getSqlDBConnection sqliteCfg + case eConn of + Left (T.DBError T.ConnectionDoesNotExist msg) + | msg == "Connection for eulerSQliteDB does not exists." -> pure $ Right () + Left err -> pure $ Left $ "Unexpected error: " <> show err + Right _ -> pure $ Left "Unexpected connection success" + eRes `shouldBe` Right () + + it "Init and get connection should succeed" $ \rt -> do + eRes :: Either String () <- runFlow rt $ do + eConn1 <- L.initSqlDBConnection sqliteCfg + eConn2 <- L.getSqlDBConnection sqliteCfg + case (eConn1, eConn2) of + (Left err, _) -> pure $ Left $ "Failed to connect: " <> show err + (_, Left err) -> pure $ Left $ "Unexpected error on get connection: " <> show err + _ -> pure $ Right () + eRes `shouldBe` Right () + + it "Init and double get connection should succeed" $ \rt -> do + eRes :: Either String () <- runFlow rt $ do + eConn1 <- L.initSqlDBConnection sqliteCfg + eConn2 <- L.getSqlDBConnection sqliteCfg + eConn3 <- L.getSqlDBConnection sqliteCfg + case (eConn1, eConn2, eConn3) of + (Left err, _, _) -> pure $ Left $ "Failed to connect: " <> show err + (_, Left err, _) -> pure $ Left $ "Unexpected error on 1st get connection: " <> show err + (_, _, Left err) -> pure $ Left $ "Unexpected error on 2nd get connection: " <> show err + _ -> pure $ Right () + eRes `shouldBe` Right () + + it "getOrInitSqlConn should succeed" $ \rt -> do + eRes :: Either String () <- runFlow rt $ do + eConn <- L.getOrInitSqlConn sqliteCfg + case eConn of + Left err -> pure $ Left $ "Failed to connect: " <> show err + _ -> pure $ Right () + eRes `shouldBe` Right () + + it "Prepared connection should be available" $ \rt -> do + void $ runFlow rt $ do + eConn <- L.initSqlDBConnection sqliteCfg + when (isLeft eConn) $ error "Failed to prepare connection." + void $ runFlow rt $ do + eConn <- L.getSqlDBConnection sqliteCfg + when (isLeft eConn) $ error "Failed to get prepared connection." 
+ + it "Unique Constraint Violation" $ \rt -> do + eRes <- runFlow rt (uniqueConstraintViolationDbScript sqliteCfg) + eRes `shouldBe` + ( Left $ DBError + ( SQLError $ SqliteError $ + SqliteSqlError + { sqlError = SqliteErrorConstraint + , sqlErrorDetails = "UNIQUE constraint failed: users.id" + , sqlErrorContext = "step" + } + ) + "SQLite3 returned ErrorConstraint while attempting to perform step: UNIQUE constraint failed: users.id" + ) + + it "Select one, row not found" $ \rt -> do + eRes <- runFlow rt (selectUnknownDbScript sqliteCfg) + eRes `shouldBe` (Right Nothing) + + it "Select one, row found" $ \rt -> do + eRes <- runFlow rt (selectOneDbScript sqliteCfg) + eRes `shouldSatisfy` (someUser "John" "Doe") + + it "Update / Select, row found & changed" $ \rt -> do + eRes <- runFlow rt (updateAndSelectDbScript sqliteCfg) + eRes `shouldSatisfy` (someUser "Leo" "San") + + it "Insert returning should return list of rows" $ \rt -> do + eRes <- runFlow rt (insertReturningScript sqliteCfg) + + case eRes of + Left _ -> expectationFailure "Left DBResult" + Right us -> do + length us `shouldBe` 2 + let u1 = us !! 0 + let u2 = us !! 1 + + _userFirstName u1 `shouldBe` "John" + _userLastName u1 `shouldBe` "Doe" + + _userFirstName u2 `shouldBe` "Doe" + _userLastName u2 `shouldBe` "John" + + around (withEmptyDB insertTestValues sqliteCfg') $ + describe "EulerHS SQLite DB tests" $ test sqliteCfg' + + around (withEmptyDB insertTestValues sqlitePoolCfg) $ + describe "EulerHS SQLite DB tests. Pool cfg." $ test sqlitePoolCfg