diff --git a/.github/workflows/.golangci.yml b/.github/workflows/.golangci.yml new file mode 100644 index 0000000..b85fcbe --- /dev/null +++ b/.github/workflows/.golangci.yml @@ -0,0 +1,2708 @@ +# This file contains all available configuration options +# with their default values (in comments). +# +# This file is not a configuration example, +# it contains the exhaustive configuration with explanations of the options. + +# Options for analysis running. +run: + # The default concurrency value is the number of available CPU. + concurrency: 4 + + # Timeout for analysis, e.g. 30s, 5m. + # Default: 1m + timeout: 5m + + # Exit code when at least one issue was found. + # Default: 1 + issues-exit-code: 2 + + # Include test files or not. + # Default: true + tests: false + + # List of build tags, all linters use it. + # Default: []. + build-tags: + - mytag + + # Which dirs to skip: issues from them won't be reported. + # Can use regexp here: `generated.*`, regexp is applied on full path, + # including the path prefix if one is set. + # Default value is empty list, + # but default dirs are skipped independently of this option's value (see skip-dirs-use-default). + # "/" will be replaced by current OS file path separator to properly work on Windows. + skip-dirs: + - src/external_libs + - autogenerated_by_my_lib + + # Enables skipping of directories: + # - vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ + # Default: true + skip-dirs-use-default: false + + # Which files to skip: they will be analyzed, but issues from them won't be reported. + # Default value is empty list, + # but there is no need to include all autogenerated files, + # we confidently recognize autogenerated files. + # If it's not please let us know. + # "/" will be replaced by current OS file path separator to properly work on Windows. + skip-files: + - ".*\\.my\\.go$" + - lib/bad.go + + # If set we pass it to "go list -mod={option}". 
From "go help modules": + # If invoked with -mod=readonly, the go command is disallowed from the implicit + # automatic updating of go.mod described above. Instead, it fails when any changes + # to go.mod are needed. This setting is most useful to check that go.mod does + # not need updates, such as in a continuous integration and testing system. + # If invoked with -mod=vendor, the go command assumes that the vendor + # directory holds the correct copies of dependencies and ignores + # the dependency descriptions in go.mod. + # + # Allowed values: readonly|vendor|mod + # By default, it isn't set. + modules-download-mode: readonly + + # Allow multiple parallel golangci-lint instances running. + # If false (default) - golangci-lint acquires file lock on start. + allow-parallel-runners: false + + # Define the Go version limit. + # Mainly related to generics support since go1.18. + # Default: use Go version from the go.mod file, fallback on the env var `GOVERSION`, fallback on 1.18 + go: '1.19' + + +# output configuration options +output: + # Format: colored-line-number|line-number|json|colored-tab|tab|checkstyle|code-climate|junit-xml|github-actions|teamcity + # + # Multiple can be specified by separating them by comma, output can be provided + # for each of them by separating format name and path by colon symbol. + # Output path can be either `stdout`, `stderr` or path to the file to write to. + # Example: "checkstyle:report.xml,json:stdout,colored-line-number" + # + # Default: colored-line-number + format: json + + # Print lines of code with issue. + # Default: true + print-issued-lines: false + + # Print linter name in the end of issue text. + # Default: true + print-linter-name: false + + # Make issues output unique by line. + # Default: true + uniq-by-line: false + + # Add a prefix to the output file references. + # Default is no prefix. + path-prefix: "" + + # Sort results by: filepath, line and column. 
+ sort-results: false + + +# All available settings of specific linters. +linters-settings: + asasalint: + # To specify a set of function names to exclude. + # The values are merged with the builtin exclusions. + # The builtin exclusions can be disabled by setting `use-builtin-exclusions` to `false`. + # Default: ["^(fmt|log|logger|t|)\.(Print|Fprint|Sprint|Fatal|Panic|Error|Warn|Warning|Info|Debug|Log)(|f|ln)$"] + exclude: + - Append + - \.Wrapf + # To enable/disable the asasalint builtin exclusions of function names. + # See the default value of `exclude` to get the builtin exclusions. + # Default: true + use-builtin-exclusions: false + # Ignore *_test.go files. + # Default: false + ignore-test: true + + bidichk: + # The following configurations check for all mentioned invisible unicode runes. + # All runes are enabled by default. + left-to-right-embedding: false + right-to-left-embedding: false + pop-directional-formatting: false + left-to-right-override: false + right-to-left-override: false + left-to-right-isolate: false + right-to-left-isolate: false + first-strong-isolate: false + pop-directional-isolate: false + + cyclop: + # The maximal code complexity to report. + # Default: 10 + max-complexity: 10 + # The maximal average package complexity. + # If it's higher than 0.0 (float) the check is enabled + # Default: 0.0 + package-average: 0.5 + # Should ignore tests. + # Default: false + skip-tests: true + + decorder: + # Required order of `type`, `const`, `var` and `func` declarations inside a file. + # Default: types before constants before variables before functions. + dec-order: + - type + - const + - var + - func + + # If true, underscore vars (vars with "_" as the name) will be ignored at all checks + # Default: false (underscore vars are not ignored) + ignore-underscore-vars: false + + # If true, order of declarations is not checked at all. 
+ # Default: true (disabled) + disable-dec-order-check: false + + # If true, `init` func can be anywhere in file (does not have to be declared before all other functions). + # Default: true (disabled) + disable-init-func-first-check: false + + # If true, multiple global `type`, `const` and `var` declarations are allowed. + # Default: true (disabled) + disable-dec-num-check: false + + # If true, type declarations will be ignored for dec num check + # Default: false (type statements are not ignored) + disable-type-dec-num-check: false + + # If true, const declarations will be ignored for dec num check + # Default: false (const statements are not ignored) + disable-const-dec-num-check: false + + # If true, var declarations will be ignored for dec num check + # Default: false (var statements are not ignored) + disable-var-dec-num-check: false + + depguard: + # Rules to apply. + # + # Variables: + # - File Variables + # you can still use an exclamation mark ! in front of a variable to say not to use it. + # Example !$test will match any file that is not a go test file. + # + # `$all` - matches all go files + # `$test` - matches all go test files + # + # - Package Variables + # + # `$gostd` - matches all of go's standard library (Pulled from `GOROOT`) + # + # Default: Only allow $gostd in all files. + rules: + # Name of a rule. + main: + # Used to determine the package matching priority. + # There are three different modes: `original`, `strict`, and `lax`. + # Default: "original" + list-mode: lax + # List of file globs that will match this list of settings to compare against. + # Default: $all + files: + - "!**/*_a _file.go" + # List of allowed packages. + allow: + - $gostd + - github.com/OpenPeeDeeP + # Packages that are not allowed where the value is a suggestion. 
+ deny: + - pkg: "github.com/sirupsen/logrus" + desc: not allowed + - pkg: "github.com/pkg/errors" + desc: Should be replaced by standard lib errors package + + dogsled: + # Checks assignments with too many blank identifiers. + # Default: 2 + max-blank-identifiers: 3 + + dupl: + # Tokens count to trigger issue. + # Default: 150 + threshold: 100 + + dupword: + # Keywords for detecting duplicate words. + # If this list is not empty, only the words defined in this list will be detected. + # Default: [] + keywords: + - "the" + - "and" + - "a" + # Keywords used to ignore detection. + # Default: [] + ignore: + - "0C0C" + + errcheck: + # Report about not checking of errors in type assertions: `a := b.(MyStruct)`. + # Such cases aren't reported by default. + # Default: false + check-type-assertions: true + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`. + # Such cases aren't reported by default. + # Default: false + check-blank: true + + # DEPRECATED comma-separated list of pairs of the form pkg:regex + # + # the regex is used to ignore names within pkg. (default "fmt:.*"). + # see https://github.com/kisielk/errcheck#the-deprecated-method for details + ignore: fmt:.*,io/ioutil:^Read.* + + # To disable the errcheck built-in exclude list. + # See `-excludeonly` option in https://github.com/kisielk/errcheck#excluding-functions for details. + # Default: false + disable-default-exclusions: true + + # DEPRECATED use exclude-functions instead. + # + # Path to a file containing a list of functions to exclude from checking. + # See https://github.com/kisielk/errcheck#excluding-functions for details. + exclude: /path/to/file.txt + + # List of functions to exclude from checking, where each entry is a single function to exclude. + # See https://github.com/kisielk/errcheck#excluding-functions for details. 
+ exclude-functions: + - io/ioutil.ReadFile + - io.Copy(*bytes.Buffer) + - io.Copy(os.Stdout) + + errchkjson: + # With check-error-free-encoding set to true, errchkjson does warn about errors + # from json encoding functions that are safe to be ignored, + # because they are not possible to happen. + # + # if check-error-free-encoding is set to true and errcheck linter is enabled, + # it is recommended to add the following exceptions to prevent from false positives: + # + # linters-settings: + # errcheck: + # exclude-functions: + # - encoding/json.Marshal + # - encoding/json.MarshalIndent + # + # Default: false + check-error-free-encoding: true + + # Issue on struct encoding that doesn't have exported fields. + # Default: false + report-no-exported: false + + errorlint: + # Check whether fmt.Errorf uses the %w verb for formatting errors. + # See the https://github.com/polyfloyd/go-errorlint for caveats. + # Default: true + errorf: false + # Permit more than 1 %w verb, valid per Go 1.20 (Requires errorf:true) + # Default: true + errorf-multi: false + # Check for plain type assertions and type switches. + # Default: true + asserts: false + # Check for plain error comparisons. + # Default: true + comparison: false + + exhaustive: + # Program elements to check for exhaustiveness. + # Default: [ switch ] + check: + - switch + - map + # Check switch statements in generated files also. + # Default: false + check-generated: true + # Presence of "default" case in switch statements satisfies exhaustiveness, + # even if all enum members are not listed. + # Default: false + default-signifies-exhaustive: true + # Enum members matching the supplied regex do not have to be listed in + # switch statements to satisfy exhaustiveness. + # Default: "" + ignore-enum-members: "Example.+" + # Enum types matching the supplied regex do not have to be listed in + # switch statements to satisfy exhaustiveness. 
+ # Default: "" + ignore-enum-types: "Example.+" + # Consider enums only in package scopes, not in inner scopes. + # Default: false + package-scope-only: true + # Only run exhaustive check on switches with "//exhaustive:enforce" comment. + # Default: false + explicit-exhaustive-switch: true + # Only run exhaustive check on map literals with "//exhaustive:enforce" comment. + # Default: false + explicit-exhaustive-map: true + # Switch statement requires default case even if exhaustive. + # Default: false + default-case-required: true + + exhaustivestruct: + # Struct Patterns is list of expressions to match struct packages and names. + # The struct packages have the form `example.com/package.ExampleStruct`. + # The matching patterns can use matching syntax from https://pkg.go.dev/path#Match. + # If this list is empty, all structs are tested. + # Default: [] + struct-patterns: + - '*.Test' + - 'example.com/package.ExampleStruct' + + exhaustruct: + # List of regular expressions to match struct packages and their names. + # Regular expressions must match complete canonical struct package/name/structname. + # If this list is empty, all structs are tested. + # Default: [] + include: + - '.+\.Test' + - 'example\.com/package\.ExampleStruct[\d]{1,2}' + # List of regular expressions to exclude struct packages and their names from checks. + # Regular expressions must match complete canonical struct package/name/structname. + # Default: [] + exclude: + - '.+/cobra\.Command$' + + forbidigo: + # Forbid the following identifiers (list of regexp). + # Default: ["^(fmt\\.Print(|f|ln)|print|println)$"] + forbid: + # Builtin function: + - ^print.*$ + # Optional message that gets included in error reports. + - p: ^fmt\.Print.*$ + msg: Do not commit print statements. + # Alternatively, put messages at the end of the regex, surrounded by `(# )?` + # Escape any special characters. Those messages get included in error reports. + - 'fmt\.Print.*(# Do not commit print statements\.)?' 
+ # Forbid spew Dump, whether it is called as function or method. + # Depends on analyze-types below. + - ^spew\.(ConfigState\.)?Dump$ + # The package name might be ambiguous. + # The full import path can be used as additional criteria. + # Depends on analyze-types below. + - p: ^v1.Dump$ + pkg: ^example.com/pkg/api/v1$ + # Exclude godoc examples from forbidigo checks. + # Default: true + exclude-godoc-examples: false + # Instead of matching the literal source code, + # use type information to replace expressions with strings that contain the package name + # and (for methods and fields) the type name. + # This makes it possible to handle import renaming and forbid struct fields and methods. + # Default: false + analyze-types: true + + funlen: + # Checks the number of lines in a function. + # If lower than 0, disable the check. + # Default: 60 + lines: -1 + # Checks the number of statements in a function. + # If lower than 0, disable the check. + # Default: 40 + statements: -1 + # Ignore comments when counting lines. + # Default false + ignore-comments: true + + gci: + # DEPRECATED: use `sections` and `prefix(github.com/org/project)` instead. + local-prefixes: github.com/org/project + + # Section configuration to compare against. + # Section names are case-insensitive and may contain parameters in (). + # The default order of sections is `standard > default > custom > blank > dot`, + # If `custom-order` is `true`, it follows the order of `sections` option. + # Default: ["standard", "default"] + sections: + - standard # Standard section: captures all standard packages. + - default # Default section: contains all imports that could not be matched to another section type. + - prefix(github.com/org/project) # Custom section: groups all imports with the specified Prefix. + - blank # Blank section: contains all blank imports. This section is not present unless explicitly enabled. + - dot # Dot section: contains all dot imports. 
This section is not present unless explicitly enabled. + + # Skip generated files. + # Default: true + skip-generated: false + + # Enable custom order of sections. + # If `true`, make the section order the same as the order of `sections`. + # Default: false + custom-order: true + + ginkgolinter: + # Suppress the wrong length assertion warning. + # Default: false + suppress-len-assertion: true + + # Suppress the wrong nil assertion warning. + # Default: false + suppress-nil-assertion: true + + # Suppress the wrong error assertion warning. + # Default: false + suppress-err-assertion: true + + # Suppress the wrong comparison assertion warning. + # Default: false + suppress-compare-assertion: true + + # Suppress the function call in async assertion warning. + # Default: false + suppress-async-assertion: true + + # Suppress warning for comparing values from different types, like int32 and uint32 + # Default: false + suppress-type-compare-assertion: true + + # Trigger warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt + # Default: false + forbid-focus-container: true + + # Don't trigger warnings for HaveLen(0) + # Default: false + allow-havelen-zero: true + + gocognit: + # Minimal code complexity to report. + # Default: 30 (but we recommend 10-20) + min-complexity: 10 + + goconst: + # Minimal length of string constant. + # Default: 3 + min-len: 2 + # Minimum occurrences of constant string count to trigger issue. + # Default: 3 + min-occurrences: 2 + # Ignore test files. + # Default: false + ignore-tests: true + # Look for existing constants matching the values. + # Default: true + match-constant: false + # Search also for duplicated numbers. + # Default: false + numbers: true + # Minimum value, only works with goconst.numbers + # Default: 3 + min: 2 + # Maximum value, only works with goconst.numbers + # Default: 3 + max: 2 + # Ignore when constant is not used as function argument. 
+ # Default: true + ignore-calls: false + # Exclude strings matching the given regular expression. + # Default: "" + ignore-strings: 'foo.+' + + gocritic: + # Which checks should be enabled; can't be combined with 'disabled-checks'. + # See https://go-critic.github.io/overview#checks-overview. + # To check which checks are enabled run `GL_DEBUG=gocritic golangci-lint run`. + # By default, list of stable checks is used. + enabled-checks: + - nestingReduce + - unnamedResult + - ruleguard + - truncateCmp + + # Which checks should be disabled; can't be combined with 'enabled-checks'. + # Default: [] + disabled-checks: + - regexpMust + + # Enable multiple checks by tags, run `GL_DEBUG=gocritic golangci-lint run` to see all tags and checks. + # See https://github.com/go-critic/go-critic#usage -> section "Tags". + # Default: [] + enabled-tags: + - diagnostic + - style + - performance + - experimental + - opinionated + disabled-tags: + - diagnostic + - style + - performance + - experimental + - opinionated + + # Settings passed to gocritic. + # The settings key is the name of a supported gocritic checker. + # The list of supported checkers can be found in https://go-critic.github.io/overview. + settings: + # Must be valid enabled check name. + captLocal: + # Whether to restrict checker to params only. + # Default: true + paramsOnly: false + elseif: + # Whether to skip balanced if-else pairs. + # Default: true + skipBalanced: false + hugeParam: + # Size in bytes that makes the warning trigger. + # Default: 80 + sizeThreshold: 70 + nestingReduce: + # Min number of statements inside a branch to trigger a warning. + # Default: 5 + bodyWidth: 4 + rangeExprCopy: + # Size in bytes that makes the warning trigger. + # Default: 512 + sizeThreshold: 516 + # Whether to check test functions + # Default: true + skipTestFuncs: false + rangeValCopy: + # Size in bytes that makes the warning trigger. + # Default: 128 + sizeThreshold: 32 + # Whether to check test functions. 
+ # Default: true + skipTestFuncs: false + ruleguard: + # Enable debug to identify which 'Where' condition was rejected. + # The value of the parameter is the name of a function in a ruleguard file. + # + # When a rule is evaluated: + # If: + # The Match() clause is accepted; and + # One of the conditions in the Where() clause is rejected, + # Then: + # ruleguard prints the specific Where() condition that was rejected. + # + # The flag is passed to the ruleguard 'debug-group' argument. + # Default: "" + debug: 'emptyDecl' + # Deprecated, use 'failOn' param. + # If set to true, identical to failOn='all', otherwise failOn='' + failOnError: false + # Determines the behavior when an error occurs while parsing ruleguard files. + # If flag is not set, log error and skip rule files that contain an error. + # If flag is set, the value must be a comma-separated list of error conditions. + # - 'all': fail on all errors. + # - 'import': ruleguard rule imports a package that cannot be found. + # - 'dsl': gorule file does not comply with the ruleguard DSL. + # Default: "" + failOn: dsl + # Comma-separated list of file paths containing ruleguard rules. + # If a path is relative, it is relative to the directory where the golangci-lint command is executed. + # The special '${configDir}' variable is substituted with the absolute directory containing the golangci config file. + # Glob patterns such as 'rules-*.go' may be specified. + # Default: "" + rules: '${configDir}/ruleguard/rules-*.go,${configDir}/myrule1.go' + # Comma-separated list of enabled groups or skip empty to enable everything. + # Tags can be defined with # character prefix. + # Default: "" + enable: "myGroupName,#myTagName" + # Comma-separated list of disabled groups or skip empty to enable everything. + # Tags can be defined with # character prefix. + # Default: "" + disable: "myGroupName,#myTagName" + tooManyResultsChecker: + # Maximum number of results. 
+ # Default: 5 + maxResults: 10 + truncateCmp: + # Whether to skip int/uint/uintptr types. + # Default: true + skipArchDependent: false + underef: + # Whether to skip (*x).method() calls where x is a pointer receiver. + # Default: true + skipRecvDeref: false + unnamedResult: + # Whether to check exported functions. + # Default: false + checkExported: true + + gocyclo: + # Minimal code complexity to report. + # Default: 30 (but we recommend 10-20) + min-complexity: 10 + + godot: + # Comments to be checked: `declarations`, `toplevel`, or `all`. + # Default: declarations + scope: toplevel + # List of regexps for excluding particular comment lines from check. + # Default: [] + exclude: + # Exclude todo and fixme comments. + - "^fixme:" + - "^todo:" + # Check that each sentence ends with a period. + # Default: true + period: false + # Check that each sentence starts with a capital letter. + # Default: false + capital: true + + godox: + # Report any comments starting with keywords, this is useful for TODO or FIXME comments that + # might be left in the code accidentally and should be resolved before merging. + # Default: ["TODO", "BUG", "FIXME"] + keywords: + - NOTE + - OPTIMIZE # marks code that should be optimized before merging + - HACK # marks hack-around that should be removed before merging + + gofmt: + # Simplify code: gofmt with `-s` option. + # Default: true + simplify: false + # Apply the rewrite rules to the source before reformatting. + # https://pkg.go.dev/cmd/gofmt + # Default: [] + rewrite-rules: + - pattern: 'interface{}' + replacement: 'any' + - pattern: 'a[b:len(a)]' + replacement: 'a[b:]' + + gofumpt: + # Deprecated: use the global `run.go` instead. + lang-version: "1.17" + + # Module path which contains the source code being formatted. + # Default: "" + module-path: github.com/org/project + + # Choose whether to use the extra rules. + # Default: false + extra-rules: true + + goheader: + # Supports two types 'const` and `regexp`. 
+ # Values can be used recursively. + # Default: {} + values: + const: + # Define here const type values in format k:v. + # For example: + COMPANY: MY COMPANY + regexp: + # Define here regexp type values. + # for example: + AUTHOR: .*@mycompany\.com + # The template use for checking. + # Default: "" + template: |- + # Put here copyright header template for source code files + # For example: + # Note: {{ YEAR }} is a builtin value that returns the year relative to the current machine time. + # + # {{ AUTHOR }} {{ COMPANY }} {{ YEAR }} + # SPDX-License-Identifier: Apache-2.0 + + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at: + + # http://www.apache.org/licenses/LICENSE-2.0 + + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # As alternative of directive 'template', you may put the path to file with the template source. + # Useful if you need to load the template from a specific file. + # Default: "" + template-path: /path/to/my/template.tmpl + + goimports: + # A comma-separated list of prefixes, which, if set, checks import paths + # with the given prefixes are grouped after 3rd-party packages. + # Default: "" + local-prefixes: github.com/org/project + + golint: + # Minimal confidence for issues. + # Default: 0.8 + min-confidence: 0.7 + + gomnd: + # List of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description. + # Default: ["argument", "case", "condition", "operation", "return", "assign"] + checks: + - argument + - case + - condition + - operation + - return + - assign + # List of numbers to exclude from analysis. 
+ # The numbers should be written as string. + # Values always ignored: "1", "1.0", "0" and "0.0" + # Default: [] + ignored-numbers: + - '0666' + - '0755' + - '42' + # List of file patterns to exclude from analysis. + # Values always ignored: `.+_test.go` + # Default: [] + ignored-files: + - 'magic1_.+\.go$' + # List of function patterns to exclude from analysis. + # Following functions are always ignored: `time.Date`, + # `strconv.FormatInt`, `strconv.FormatUint`, `strconv.FormatFloat`, + # `strconv.ParseInt`, `strconv.ParseUint`, `strconv.ParseFloat`. + # Default: [] + ignored-functions: + - '^math\.' + - '^http\.StatusText$' + + gomoddirectives: + # Allow local `replace` directives. + # Default: false + replace-local: false + # List of allowed `replace` directives. + # Default: [] + replace-allow-list: + - launchpad.net/gocheck + # Allow to not explain why the version has been retracted in the `retract` directives. + # Default: false + retract-allow-no-explanation: false + # Forbid the use of the `exclude` directives. + # Default: false + exclude-forbidden: false + + gomodguard: + allowed: + # List of allowed modules. + # Default: [] + modules: + - gopkg.in/yaml.v2 + # List of allowed module domains. + # Default: [] + domains: + - golang.org + blocked: + # List of blocked modules. + # Default: [] + modules: + # Blocked module. + - github.com/uudashr/go-module: + # Recommended modules that should be used instead. (Optional) + recommendations: + - golang.org/x/mod + # Reason why the recommended module should be used. (Optional) + reason: "`mod` is the official go.mod parser library." + # List of blocked module version constraints. + # Default: [] + versions: + # Blocked module with version constraint. + - github.com/mitchellh/go-homedir: + # Version constraint, see https://github.com/Masterminds/semver#basic-comparisons. + version: "< 1.1.0" + # Reason why the version constraint exists. (Optional) + reason: "testing if blocked version constraint works." 
+ # Set to true to raise lint issues for packages that are loaded from a local path via replace directive. + # Default: false + local_replace_directives: false + + gosimple: + # Deprecated: use the global `run.go` instead. + go: "1.15" + # Sxxxx checks in https://staticcheck.io/docs/configuration/options/#checks + # Default: ["*"] + checks: [ "all" ] + + gosec: + # To select a subset of rules to run. + # Available rules: https://github.com/securego/gosec#available-rules + # Default: [] - means include all rules + includes: + - G101 # Look for hard coded credentials + - G102 # Bind to all interfaces + - G103 # Audit the use of unsafe block + - G104 # Audit errors not checked + - G106 # Audit the use of ssh.InsecureIgnoreHostKey + - G107 # Url provided to HTTP request as taint input + - G108 # Profiling endpoint automatically exposed on /debug/pprof + - G109 # Potential Integer overflow made by strconv.Atoi result conversion to int16/32 + - G110 # Potential DoS vulnerability via decompression bomb + - G111 # Potential directory traversal + - G112 # Potential slowloris attack + - G113 # Usage of Rat.SetString in math/big with an overflow (CVE-2022-23772) + - G114 # Use of net/http serve function that has no support for setting timeouts + - G201 # SQL query construction using format string + - G202 # SQL query construction using string concatenation + - G203 # Use of unescaped data in HTML templates + - G204 # Audit use of command execution + - G301 # Poor file permissions used when creating a directory + - G302 # Poor file permissions used with chmod + - G303 # Creating tempfile using a predictable path + - G304 # File path provided as taint input + - G305 # File traversal when extracting zip/tar archive + - G306 # Poor file permissions used when writing to a new file + - G307 # Poor file permissions used when creating a file with os.Create + - G401 # Detect the usage of DES, RC4, MD5 or SHA1 + - G402 # Look for bad TLS connection settings + - G403 # Ensure minimum 
RSA key length of 2048 bits + - G404 # Insecure random number source (rand) + - G501 # Import blocklist: crypto/md5 + - G502 # Import blocklist: crypto/des + - G503 # Import blocklist: crypto/rc4 + - G504 # Import blocklist: net/http/cgi + - G505 # Import blocklist: crypto/sha1 + - G601 # Implicit memory aliasing of items from a range statement + - G602 # Slice access out of bounds + + # To specify a set of rules to explicitly exclude. + # Available rules: https://github.com/securego/gosec#available-rules + # Default: [] + excludes: + - G101 # Look for hard coded credentials + - G102 # Bind to all interfaces + - G103 # Audit the use of unsafe block + - G104 # Audit errors not checked + - G106 # Audit the use of ssh.InsecureIgnoreHostKey + - G107 # Url provided to HTTP request as taint input + - G108 # Profiling endpoint automatically exposed on /debug/pprof + - G109 # Potential Integer overflow made by strconv.Atoi result conversion to int16/32 + - G110 # Potential DoS vulnerability via decompression bomb + - G111 # Potential directory traversal + - G112 # Potential slowloris attack + - G113 # Usage of Rat.SetString in math/big with an overflow (CVE-2022-23772) + - G114 # Use of net/http serve function that has no support for setting timeouts + - G201 # SQL query construction using format string + - G202 # SQL query construction using string concatenation + - G203 # Use of unescaped data in HTML templates + - G204 # Audit use of command execution + - G301 # Poor file permissions used when creating a directory + - G302 # Poor file permissions used with chmod + - G303 # Creating tempfile using a predictable path + - G304 # File path provided as taint input + - G305 # File traversal when extracting zip/tar archive + - G306 # Poor file permissions used when writing to a new file + - G307 # Poor file permissions used when creating a file with os.Create + - G401 # Detect the usage of DES, RC4, MD5 or SHA1 + - G402 # Look for bad TLS connection settings + - G403 # Ensure 
minimum RSA key length of 2048 bits + - G404 # Insecure random number source (rand) + - G501 # Import blocklist: crypto/md5 + - G502 # Import blocklist: crypto/des + - G503 # Import blocklist: crypto/rc4 + - G504 # Import blocklist: net/http/cgi + - G505 # Import blocklist: crypto/sha1 + - G601 # Implicit memory aliasing of items from a range statement + - G602 # Slice access out of bounds + + # Exclude generated files + # Default: false + exclude-generated: true + + # Filter out the issues with a lower severity than the given value. + # Valid options are: low, medium, high. + # Default: low + severity: medium + + # Filter out the issues with a lower confidence than the given value. + # Valid options are: low, medium, high. + # Default: low + confidence: medium + + # Concurrency value. + # Default: the number of logical CPUs usable by the current process. + concurrency: 12 + + # To specify the configuration of rules. + config: + # Globals are applicable to all rules. + global: + # If true, ignore #nosec in comments (and an alternative as well). + # Default: false + nosec: true + # Add an alternative comment prefix to #nosec (both will work at the same time). + # Default: "" + "#nosec": "#my-custom-nosec" + # Define whether nosec issues are counted as finding or not. + # Default: false + show-ignored: true + # Audit mode enables addition checks that for normal code analysis might be too nosy. + # Default: false + audit: true + G101: + # Regexp pattern for variables and constants to find. + # Default: "(?i)passwd|pass|password|pwd|secret|token|pw|apiKey|bearer|cred" + pattern: "(?i)example" + # If true, complain about all cases (even with low entropy). + # Default: false + ignore_entropy: false + # Maximum allowed entropy of the string. + # Default: "80.0" + entropy_threshold: "80.0" + # Maximum allowed value of entropy/string length. + # Is taken into account if entropy >= entropy_threshold/2. 
+ # Default: "3.0" + per_char_threshold: "3.0" + # Calculate entropy for first N chars of the string. + # Default: "16" + truncate: "32" + # Additional functions to ignore while checking unhandled errors. + # Following functions always ignored: + # bytes.Buffer: + # - Write + # - WriteByte + # - WriteRune + # - WriteString + # fmt: + # - Print + # - Printf + # - Println + # - Fprint + # - Fprintf + # - Fprintln + # strings.Builder: + # - Write + # - WriteByte + # - WriteRune + # - WriteString + # io.PipeWriter: + # - CloseWithError + # hash.Hash: + # - Write + # os: + # - Unsetenv + # Default: {} + G104: + fmt: + - Fscanf + G111: + # Regexp pattern to find potential directory traversal. + # Default: "http\\.Dir\\(\"\\/\"\\)|http\\.Dir\\('\\/'\\)" + pattern: "custom\\.Dir\\(\\)" + # Maximum allowed permissions mode for os.Mkdir and os.MkdirAll + # Default: "0750" + G301: "0750" + # Maximum allowed permissions mode for os.OpenFile and os.Chmod + # Default: "0600" + G302: "0600" + # Maximum allowed permissions mode for os.WriteFile and ioutil.WriteFile + # Default: "0600" + G306: "0600" + + gosmopolitan: + # Allow and ignore `time.Local` usages. + # + # Default: false + allow-time-local: true + # List of fully qualified names in the `full/pkg/path.name` form, to act as "i18n escape hatches". + # String literals inside call-like expressions to, or struct literals of those names, + # are exempt from the writing system check. + # + # Default: [] + escape-hatches: + - 'github.com/nicksnyder/go-i18n/v2/i18n.Message' + - 'example.com/your/project/i18n/markers.Raw' + - 'example.com/your/project/i18n/markers.OK' + - 'example.com/your/project/i18n/markers.TODO' + - 'command-line-arguments.Simple' + # Ignore test files. + # + # Default: true + ignore-tests: false + # List of Unicode scripts to watch for any usage in string literals. 
+ # https://pkg.go.dev/unicode#pkg-variables + # + # Default: ["Han"] + watch-for-scripts: + - Devanagari + - Han + - Hangul + - Hiragana + - Katakana + + govet: + # Report about shadowed variables. + # Default: false + check-shadowing: true + + # Settings per analyzer. + settings: + # Analyzer name, run `go tool vet help` to see all analyzers. + printf: + # Comma-separated list of print function names to check (in addition to default, see `go tool vet help printf`). + # Default: [] + funcs: + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf + shadow: + # Whether to be strict about shadowing; can be noisy. + # Default: false + strict: true + unusedresult: + # Comma-separated list of functions whose results must be used + # (in addition to default: + # context.WithCancel, context.WithDeadline, context.WithTimeout, context.WithValue, errors.New, fmt.Errorf, + # fmt.Sprint, fmt.Sprintf, sort.Reverse + # ). + # Default: [] + funcs: + - pkg.MyFunc + # Comma-separated list of names of methods of type func() string whose results must be used + # (in addition to default Error,String) + # Default: [] + stringmethods: + - MyMethod + + # Disable all analyzers. + # Default: false + disable-all: true + # Enable analyzers by name. + # (in addition to default: + # appends, asmdecl, assign, atomic, bools, buildtag, cgocall, composites, copylocks, defers, directive, errorsas, + # framepointer, httpresponse, ifaceassert, loopclosure, lostcancel, nilfunc, printf, shift, sigchanyzer, slog, + # stdmethods, stringintconv, structtag, testinggoroutine, tests, timeformat, unmarshal, unreachable, unsafeptr, + # unusedresult + # ). + # Run `go tool vet help` to see all analyzers. 
+ # Default: [] + enable: + - appends + - asmdecl + - assign + - atomic + - atomicalign + - bools + - buildtag + - cgocall + - composites + - copylocks + - deepequalerrors + - defers + - directive + - errorsas + - fieldalignment + - findcall + - framepointer + - httpresponse + - ifaceassert + - loopclosure + - lostcancel + - nilfunc + - nilness + - printf + - reflectvaluecompare + - shadow + - shift + - sigchanyzer + - slog + - sortslice + - stdmethods + - stringintconv + - structtag + - testinggoroutine + - tests + - unmarshal + - unreachable + - unsafeptr + - unusedresult + - unusedwrite + + # Enable all analyzers. + # Default: false + enable-all: true + # Disable analyzers by name. + # (in addition to default + # atomicalign, deepequalerrors, fieldalignment, findcall, nilness, reflectvaluecompare, shadow, sortslice, + # timeformat, unusedwrite + # ). + # Run `go tool vet help` to see all analyzers. + # Default: [] + disable: + - appends + - asmdecl + - assign + - atomic + - atomicalign + - bools + - buildtag + - cgocall + - composites + - copylocks + - deepequalerrors + - defers + - directive + - errorsas + - fieldalignment + - findcall + - framepointer + - httpresponse + - ifaceassert + - loopclosure + - lostcancel + - nilfunc + - nilness + - printf + - reflectvaluecompare + - shadow + - shift + - sigchanyzer + - slog + - sortslice + - stdmethods + - stringintconv + - structtag + - testinggoroutine + - tests + - unmarshal + - unreachable + - unsafeptr + - unusedresult + - unusedwrite + + grouper: + # Require the use of a single global 'const' declaration only. + # Default: false + const-require-single-const: true + # Require the use of grouped global 'const' declarations. + # Default: false + const-require-grouping: true + + # Require the use of a single 'import' declaration only. + # Default: false + import-require-single-import: true + # Require the use of grouped 'import' declarations. 
+ # Default: false + import-require-grouping: true + + # Require the use of a single global 'type' declaration only. + # Default: false + type-require-single-type: true + # Require the use of grouped global 'type' declarations. + # Default: false + type-require-grouping: true + + # Require the use of a single global 'var' declaration only. + # Default: false + var-require-single-var: true + # Require the use of grouped global 'var' declarations. + # Default: false + var-require-grouping: true + + ifshort: + # Maximum length of variable declaration measured in number of lines, after which linter won't suggest using short syntax. + # Has higher priority than max-decl-chars. + # Default: 1 + max-decl-lines: 2 + # Maximum length of variable declaration measured in number of characters, after which linter won't suggest using short syntax. + # Default: 30 + max-decl-chars: 40 + + importas: + # Do not allow unaliased imports of aliased packages. + # Default: false + no-unaliased: true + # Do not allow non-required aliases. + # Default: false + no-extra-aliases: true + # List of aliases + # Default: [] + alias: + # Using `servingv1` alias for `knative.dev/serving/pkg/apis/serving/v1` package. + - pkg: knative.dev/serving/pkg/apis/serving/v1 + alias: servingv1 + # Using `autoscalingv1alpha1` alias for `knative.dev/serving/pkg/apis/autoscaling/v1alpha1` package. + - pkg: knative.dev/serving/pkg/apis/autoscaling/v1alpha1 + alias: autoscalingv1alpha1 + # You can specify the package path by regular expression, + # and alias by regular expression expansion syntax like below. + # see https://github.com/julz/importas#use-regular-expression for details + - pkg: knative.dev/serving/pkg/apis/(\w+)/(v[\w\d]+) + alias: $1$2 + + interfacebloat: + # The maximum number of methods allowed for an interface. + # Default: 10 + max: 5 + + ireturn: + # ireturn does not allow using `allow` and `reject` settings at the same time. 
+ # Both settings are lists of the keywords and regular expressions matched to interface or package names. + # keywords: + # - `empty` for `interface{}` + # - `error` for errors + # - `stdlib` for standard library + # - `anon` for anonymous interfaces + # - `generic` for generic interfaces added in go 1.18 + + # By default, it allows using errors, empty interfaces, anonymous interfaces, + # and interfaces provided by the standard library. + allow: + - anon + - error + - empty + - stdlib + # You can specify idiomatic endings for interface + - (or|er)$ + + # reject-list of interfaces + reject: + - github.com\/user\/package\/v4\.Type + + lll: + # Max line length, lines longer will be reported. + # '\t' is counted as 1 character by default, and can be changed with the tab-width option. + # Default: 120. + line-length: 120 + # Tab width in spaces. + # Default: 1 + tab-width: 1 + + loggercheck: + # Allow check for the github.com/go-kit/log library. + # Default: true + kitlog: false + # Allow check for the k8s.io/klog/v2 library. + # Default: true + klog: false + # Allow check for the github.com/go-logr/logr library. + # Default: true + logr: false + # Allow check for the "sugar logger" from go.uber.org/zap library. + # Default: true + zap: false + # Require all logging keys to be inlined constant strings. + # Default: false + require-string-key: true + # Require printf-like format specifier (%s, %d for example) not present. + # Default: false + no-printf-like: true + # List of custom rules to check against, where each rule is a single logger pattern, useful for wrapped loggers. 
+ # For example: https://github.com/timonwong/loggercheck/blob/7395ab86595781e33f7afba27ad7b55e6956ebcd/testdata/custom-rules.txt + # Default: empty + rules: + - k8s.io/klog/v2.InfoS # package level exported functions + - (github.com/go-logr/logr.Logger).Error # "Methods" + - (*go.uber.org/zap.SugaredLogger).With # Also "Methods", but with a pointer receiver + + maintidx: + # Show functions with maintainability index lower than N. + # A high index indicates better maintainability (it's kind of the opposite of complexity). + # Default: 20 + under: 100 + + makezero: + # Allow only slices initialized with a length of zero. + # Default: false + always: true + + maligned: + # Print struct with more effective memory layout or not. + # Default: false + suggest-new: true + + misspell: + # Correct spellings using locale preferences for US or UK. + # Setting locale to US will correct the British spelling of 'colour' to 'color'. + # Default is to use a neutral variety of English. + locale: US + # Default: [] + ignore-words: + - someword + + musttag: + # A set of custom functions to check in addition to the builtin ones. + # Default: json, xml, gopkg.in/yaml.v3, BurntSushi/toml, mitchellh/mapstructure, jmoiron/sqlx + functions: + # The full name of the function, including the package. + - name: github.com/hashicorp/hcl/v2/hclsimple.DecodeFile + # The struct tag whose presence should be ensured. + tag: hcl + # The position of the argument to check. + arg-pos: 2 + + nakedret: + # Make an issue if func has more lines of code than this setting, and it has naked returns. + # Default: 30 + max-func-lines: 31 + + nestif: + # Minimal complexity of if statements to report. + # Default: 5 + min-complexity: 4 + + nilnil: + # Checks that there is no simultaneous return of `nil` error and an invalid value. 
+ # Default: ["ptr", "func", "iface", "map", "chan"] + checked-types: + - ptr + - func + - iface + - map + - chan + + nlreturn: + # Size of the block (including return statement that is still "OK") + # so no return split required. + # Default: 1 + block-size: 2 + + nolintlint: + # Disable to ensure that all nolint directives actually have an effect. + # Default: false + allow-unused: true + # Exclude following linters from requiring an explanation. + # Default: [] + allow-no-explanation: [ ] + # Enable to require an explanation of nonzero length after each nolint directive. + # Default: false + require-explanation: true + # Enable to require nolint directives to mention the specific linter being suppressed. + # Default: false + require-specific: true + + nonamedreturns: + # Report named error if it is assigned inside defer. + # Default: false + report-error-in-defer: true + + paralleltest: + # Ignore missing calls to `t.Parallel()` and only report incorrect uses of it. + # Default: false + ignore-missing: true + # Ignore missing calls to `t.Parallel()` in subtests. Top-level tests are + # still required to have `t.Parallel`, but subtests are allowed to skip it. + # Default: false + ignore-missing-subtests: true + + perfsprint: + # Optimizes even if it requires an int or uint type cast. + # Default: true + int-conversion: false + # Optimizes into `err.Error()` even if it is only equivalent for non-nil errors. + # Default: false + err-error: true + # Optimizes `fmt.Errorf`. + # Default: true + errorf: false + # Optimizes `fmt.Sprintf` with only one argument + # Default: true + sprintf1: false + + prealloc: + # IMPORTANT: we don't recommend using this linter before doing performance profiling. + # For most programs usage of prealloc will be a premature optimization. + + # Report pre-allocation suggestions only on simple loops that have no returns/breaks/continues/gotos in them. + # Default: true + simple: false + # Report pre-allocation suggestions on range loops. 
+ # Default: true + range-loops: false + # Report pre-allocation suggestions on for loops. + # Default: false + for-loops: true + + predeclared: + # Comma-separated list of predeclared identifiers to not report on. + # Default: "" + ignore: "new,int" + # Include method names and field names (i.e., qualified names) in checks. + # Default: false + q: true + + promlinter: + # Promlinter cannot infer all metrics name in static analysis. + # Enable strict mode will also include the errors caused by failing to parse the args. + # Default: false + strict: true + # Please refer to https://github.com/yeya24/promlinter#usage for detailed usage. + # Default: [] + disabled-linters: + - Help + - MetricUnits + - Counter + - HistogramSummaryReserved + - MetricTypeInName + - ReservedChars + - CamelCase + - UnitAbbreviations + + protogetter: + # Skip files generated by specified generators from the checking. + # Checks only the file's initial comment, which must follow the format: "// Code generated by ". + # Files generated by protoc-gen-go, protoc-gen-go-grpc, and protoc-gen-grpc-gateway are always excluded automatically. + # Default: [] + skip-generated-by: ["protoc-gen-go-my-own-generator"] + # Skip files matching the specified glob pattern from the checking. + # Default: [] + skip-files: + - "*.pb.go" + - "*/vendor/*" + - "/full/path/to/file.go" + # Skip any generated files from the checking. + # Default: false + skip-any-generated: true + + reassign: + # Patterns for global variable names that are checked for reassignment. + # See https://github.com/curioswitch/go-reassign#usage + # Default: ["EOF", "Err.*"] + patterns: + - ".*" + + revive: + # Maximum number of open files at the same time. + # See https://github.com/mgechev/revive#command-line-flags + # Defaults to unlimited. + max-open-files: 2048 + + # When set to false, ignores files with "GENERATED" header, similar to golint. + # See https://github.com/mgechev/revive#available-rules for details. 
+ # Default: false + ignore-generated-header: true + + # Sets the default severity. + # See https://github.com/mgechev/revive#configuration + # Default: warning + severity: error + + # Enable all available rules. + # Default: false + enable-all-rules: true + + # Sets the default failure confidence. + # This means that linting errors with less than 0.8 confidence will be ignored. + # Default: 0.8 + confidence: 0.1 + + rules: + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#add-constant + - name: add-constant + severity: warning + disabled: false + arguments: + - maxLitCount: "3" + allowStrs: '""' + allowInts: "0,1,2" + allowFloats: "0.0,0.,1.0,1.,2.0,2." + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#argument-limit + - name: argument-limit + severity: warning + disabled: false + arguments: [ 4 ] + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#atomic + - name: atomic + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#banned-characters + - name: banned-characters + severity: warning + disabled: false + arguments: [ "Ω","Σ","σ", "7" ] + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#bare-return + - name: bare-return + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#blank-imports + - name: blank-imports + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#bool-literal-in-expr + - name: bool-literal-in-expr + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#call-to-gc + - name: call-to-gc + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#cognitive-complexity + - name: cognitive-complexity + severity: warning + disabled: false + arguments: [ 7 ] + # 
https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#comment-spacings + - name: comment-spacings + severity: warning + disabled: false + arguments: + - mypragma + - otherpragma + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#confusing-naming + - name: confusing-naming + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#confusing-results + - name: confusing-results + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#constant-logical-expr + - name: constant-logical-expr + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#context-as-argument + - name: context-as-argument + severity: warning + disabled: false + arguments: + - allowTypesBefore: "*testing.T,*github.com/user/repo/testing.Harness" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#context-keys-type + - name: context-keys-type + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#cyclomatic + - name: cyclomatic + severity: warning + disabled: false + arguments: [ 3 ] + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#datarace + - name: datarace + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#deep-exit + - name: deep-exit + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#defer + - name: defer + severity: warning + disabled: false + arguments: + - [ "call-chain", "loop" ] + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#dot-imports + - name: dot-imports + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#duplicated-imports + - name: duplicated-imports + severity: warning + disabled: false + # 
https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#early-return + - name: early-return + severity: warning + disabled: false + arguments: + - "preserveScope" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#empty-block + - name: empty-block + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#empty-lines + - name: empty-lines + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#enforce-map-style + - name: enforce-map-style + severity: warning + disabled: false + arguments: + - "make" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-naming + - name: error-naming + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-return + - name: error-return + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-strings + - name: error-strings + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#errorf + - name: errorf + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#exported + - name: exported + severity: warning + disabled: false + arguments: + - "preserveScope" + - "checkPrivateReceivers" + - "sayRepetitiveInsteadOfStutters" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#file-header + - name: file-header + severity: warning + disabled: false + arguments: + - This is the text that must appear at the top of source files. 
+ # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#flag-parameter + - name: flag-parameter + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#function-result-limit + - name: function-result-limit + severity: warning + disabled: false + arguments: [ 2 ] + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#function-length + - name: function-length + severity: warning + disabled: false + arguments: [ 10, 0 ] + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#get-return + - name: get-return + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#identical-branches + - name: identical-branches + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#if-return + - name: if-return + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#increment-decrement + - name: increment-decrement + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#indent-error-flow + - name: indent-error-flow + severity: warning + disabled: false + arguments: + - "preserveScope" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-alias-naming + - name: import-alias-naming + severity: warning + disabled: false + arguments: + - "^[a-z][a-z0-9]{0,}$" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#imports-blacklist + - name: imports-blacklist + severity: warning + disabled: false + arguments: + - "crypto/md5" + - "crypto/sha1" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-shadowing + - name: import-shadowing + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#line-length-limit + - name: line-length-limit + severity: warning + 
disabled: false + arguments: [ 80 ] + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#max-public-structs + - name: max-public-structs + severity: warning + disabled: false + arguments: [ 3 ] + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#modifies-parameter + - name: modifies-parameter + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#modifies-value-receiver + - name: modifies-value-receiver + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#nested-structs + - name: nested-structs + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#optimize-operands-order + - name: optimize-operands-order + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#package-comments + - name: package-comments + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#range + - name: range + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#range-val-in-closure + - name: range-val-in-closure + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#range-val-address + - name: range-val-address + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#receiver-naming + - name: receiver-naming + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#redundant-import-alias + - name: redundant-import-alias + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#redefines-builtin-id + - name: redefines-builtin-id + severity: warning + disabled: false + # 
https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#string-of-int + - name: string-of-int + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#string-format + - name: string-format + severity: warning + disabled: false + arguments: + - - 'core.WriteError[1].Message' + - '/^([^A-Z]|$)/' + - must not start with a capital letter + - - 'fmt.Errorf[0]' + - '/(^|[^\.!?])$/' + - must not end in punctuation + - - panic + - '/^[^\n]*$/' + - must not contain line breaks + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#struct-tag + - name: struct-tag + arguments: + - "json,inline" + - "bson,outline,gnu" + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#superfluous-else + - name: superfluous-else + severity: warning + disabled: false + arguments: + - "preserveScope" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#time-equal + - name: time-equal + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#time-naming + - name: time-naming + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#var-naming + - name: var-naming + severity: warning + disabled: false + arguments: + - [ "ID" ] # AllowList + - [ "VM" ] # DenyList + - - upperCaseConst: true + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#var-declaration + - name: var-declaration + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unconditional-recursion + - name: unconditional-recursion + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unexported-naming + - name: unexported-naming + severity: warning + disabled: false + # 
https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unexported-return + - name: unexported-return + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unhandled-error + - name: unhandled-error + severity: warning + disabled: false + arguments: + - "fmt.Printf" + - "myFunction" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unnecessary-stmt + - name: unnecessary-stmt + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unreachable-code + - name: unreachable-code + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unused-parameter + - name: unused-parameter + severity: warning + disabled: false + arguments: + - allowRegex: "^_" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unused-receiver + - name: unused-receiver + severity: warning + disabled: false + arguments: + - allowRegex: "^_" + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#useless-break + - name: useless-break + severity: warning + disabled: false + # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#waitgroup-by-value + - name: waitgroup-by-value + severity: warning + disabled: false + + rowserrcheck: + # database/sql is always checked + # Default: [] + packages: + - github.com/jmoiron/sqlx + + sloglint: + # Enforce using key-value pairs only (incompatible with attr-only). + # Default: false + kv-only: true + # Enforce using attributes only (incompatible with kv-only). + # Default: false + attr-only: true + # Enforce using methods that accept a context. + # Default: false + context-only: true + # Enforce using static values for log messages. + # Default: false + static-msg: true + # Enforce using constants instead of raw keys. + # Default: false + no-raw-keys: true + # Enforce a single key naming convention. 
+ # Values: snake, kebab, camel, pascal + # Default: "" + key-naming-case: snake + # Enforce putting arguments on separate lines. + # Default: false + args-on-sep-lines: true + + staticcheck: + # Deprecated: use the global `run.go` instead. + go: "1.15" + # SAxxxx checks in https://staticcheck.io/docs/configuration/options/#checks + # Default: ["*"] + checks: [ "all" ] + + stylecheck: + # Deprecated: use the global `run.go` instead. + go: "1.15" + # STxxxx checks in https://staticcheck.io/docs/configuration/options/#checks + # Default: ["*"] + checks: [ "all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022" ] + # https://staticcheck.io/docs/configuration/options/#dot_import_whitelist + # Default: ["github.com/mmcloughlin/avo/build", "github.com/mmcloughlin/avo/operand", "github.com/mmcloughlin/avo/reg"] + dot-import-whitelist: + - fmt + # https://staticcheck.io/docs/configuration/options/#initialisms + # Default: ["ACL", "API", "ASCII", "CPU", "CSS", "DNS", "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", "IP", "JSON", "QPS", "RAM", "RPC", "SLA", "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", "UDP", "UI", "GID", "UID", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", "XSS", "SIP", "RTP", "AMQP", "DB", "TS"] + initialisms: [ "ACL", "API", "ASCII", "CPU", "CSS", "DNS", "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", "IP", "JSON", "QPS", "RAM", "RPC", "SLA", "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", "UDP", "UI", "GID", "UID", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", "XSS", "SIP", "RTP", "AMQP", "DB", "TS" ] + # https://staticcheck.io/docs/configuration/options/#http_status_code_whitelist + # Default: ["200", "400", "404", "500"] + http-status-code-whitelist: [ "200", "400", "404", "500" ] + + tagalign: + # Align and sort can be used together or separately. + # + # Whether enable align. If true, the struct tags will be aligned. 
+ # eg: + # type FooBar struct { + # Bar string `json:"bar" validate:"required"` + # FooFoo int8 `json:"foo_foo" validate:"required"` + # } + # will be formatted to: + # type FooBar struct { + # Bar string `json:"bar" validate:"required"` + # FooFoo int8 `json:"foo_foo" validate:"required"` + # } + # Default: true. + align: false + # Whether enable tags sort. + # If true, the tags will be sorted by name in ascending order. + # eg: `xml:"bar" json:"bar" validate:"required"` -> `json:"bar" validate:"required" xml:"bar"` + # Default: true + sort: false + # Specify the order of tags, the other tags will be sorted by name. + # This option will be ignored if `sort` is false. + # Default: [] + order: + - json + - yaml + - yml + - toml + - mapstructure + - binding + - validate + # Whether enable strict style. + # In this style, the tags will be sorted and aligned in the dictionary order, + # and the tags with the same name will be aligned together. + # Note: This option will be ignored if 'align' or 'sort' is false. + # Default: false + strict: true + + tagliatelle: + # Check the struct tag name case. + case: + # Use the struct field name to check the name of the struct tag. + # Default: false + use-field-name: true + # `camel` is used for `json` and `yaml`, and `header` is used for `header` (can be overridden) + # Default: {} + rules: + # Any struct tag type can be used. + # Support string case: `camel`, `pascal`, `kebab`, `snake`, `upperSnake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower`, `header` + json: camel + yaml: camel + xml: camel + bson: camel + avro: snake + mapstructure: kebab + env: upperSnake + envconfig: upperSnake + + tenv: + # The option `all` will run against whole test files (`_test.go`) regardless of method/function signatures. + # Otherwise, only methods that take `*testing.T`, `*testing.B`, and `testing.TB` as arguments are checked. 
+ # Default: false + all: false + + testifylint: + # Enable all checkers (https://github.com/Antonboom/testifylint#checkers). + # Default: false + enable-all: true + # Disable checkers by name + # (in addition to default + # suite-thelper + # ). + disable: + - bool-compare + - compares + - empty + - error-is-as + - error-nil + - expected-actual + - go-require + - float-compare + - len + - nil-compare + - require-error + - suite-dont-use-pkg + - suite-extra-assert-call + - suite-thelper + + # Disable all checkers (https://github.com/Antonboom/testifylint#checkers). + # Default: false + disable-all: true + # Enable checkers by name + # (in addition to default + # bool-compare, compares, empty, error-is-as, error-nil, expected-actual, go-require, float-compare, len, + # nil-compare, require-error, suite-dont-use-pkg, suite-extra-assert-call + # ). + enable: + - bool-compare + - compares + - empty + - error-is-as + - error-nil + - expected-actual + - go-require + - float-compare + - len + - nil-compare + - require-error + - suite-dont-use-pkg + - suite-extra-assert-call + - suite-thelper + + expected-actual: + # Regexp for expected variable name. + # Default: (^(exp(ected)?|want(ed)?)([A-Z]\w*)?$)|(^(\w*[a-z])?(Exp(ected)?|Want(ed)?)$) + pattern: ^expected + require-error: + # Regexp for assertions to analyze. If defined then only matched assertions will be reported. + # Default: "" + fn-pattern: ^(Errorf?|NoErrorf?)$ + suite-extra-assert-call: + # To require or remove extra Assert() call? + # Default: remove + mode: require + + testpackage: + # Regexp pattern to skip files. + # Default: "(export|internal)_test\\.go" + skip-regexp: (export|internal)_test\.go + # List of packages that don't end with _test that tests are allowed to be in. + # Default: "main" + allow-packages: + - example + - main + + thelper: + test: + # Check *testing.T is first param (or after context.Context) of helper function. + # Default: true + first: false + # Check *testing.T param has name t. 
+ # Default: true + name: false + # Check t.Helper() begins helper function. + # Default: true + begin: false + benchmark: + # Check *testing.B is first param (or after context.Context) of helper function. + # Default: true + first: false + # Check *testing.B param has name b. + # Default: true + name: false + # Check b.Helper() begins helper function. + # Default: true + begin: false + tb: + # Check *testing.TB is first param (or after context.Context) of helper function. + # Default: true + first: false + # Check *testing.TB param has name tb. + # Default: true + name: false + # Check tb.Helper() begins helper function. + # Default: true + begin: false + fuzz: + # Check *testing.F is first param (or after context.Context) of helper function. + # Default: true + first: false + # Check *testing.F param has name f. + # Default: true + name: false + # Check f.Helper() begins helper function. + # Default: true + begin: false + + usestdlibvars: + # Suggest the use of http.MethodXX. + # Default: true + http-method: false + # Suggest the use of http.StatusXX. + # Default: true + http-status-code: false + # Suggest the use of time.Weekday.String(). + # Default: true + time-weekday: true + # Suggest the use of time.Month.String(). + # Default: false + time-month: true + # Suggest the use of time.Layout. + # Default: false + time-layout: true + # Suggest the use of crypto.Hash.String(). + # Default: false + crypto-hash: true + # Suggest the use of rpc.DefaultXXPath. + # Default: false + default-rpc-path: true + # DEPRECATED Suggest the use of os.DevNull. + # Default: false + os-dev-null: true + # Suggest the use of sql.LevelXX.String(). + # Default: false + sql-isolation-level: true + # Suggest the use of tls.SignatureScheme.String(). + # Default: false + tls-signature-scheme: true + # Suggest the use of constant.Kind.String(). + # Default: false + constant-kind: true + # DEPRECATED Suggest the use of syslog.Priority. 
+ # Default: false + syslog-priority: true + + unparam: + # Inspect exported functions. + # + # Set to true if no external program/library imports your code. + # XXX: if you enable this setting, unparam will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find external interfaces. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + # + # Default: false + check-exported: true + + unused: + # Mark all struct fields that have been written to as used. + # Default: true + field-writes-are-uses: false + # Treat IncDec statement (e.g. `i++` or `i--`) as both read and write operation instead of just write. + # Default: false + post-statements-are-reads: true + # Mark all exported identifiers as used. + # Default: true + exported-is-used: false + # Mark all exported fields as used. + # default: true + exported-fields-are-used: false + # Mark all function parameters as used. + # default: true + parameters-are-used: false + # Mark all local variables as used. + # default: true + local-variables-are-used: false + # Mark all identifiers inside generated files as used. + # Default: true + generated-is-used: false + + varcheck: + # Check usage of exported fields and variables. + # Default: false + exported-fields: true + + varnamelen: + # The longest distance, in source lines, that is being considered a "small scope". + # Variables used in at most this many lines will be ignored. + # Default: 5 + max-distance: 6 + # The minimum length of a variable's name that is considered "long". + # Variable names that are at least this long will be ignored. + # Default: 3 + min-name-length: 2 + # Check method receivers. + # Default: false + check-receiver: true + # Check named return values. + # Default: false + check-return: true + # Check type parameters. + # Default: false + check-type-param: true + # Ignore "ok" variables that hold the bool return value of a type assertion. 
+ # Default: false + ignore-type-assert-ok: true + # Ignore "ok" variables that hold the bool return value of a map index. + # Default: false + ignore-map-index-ok: true + # Ignore "ok" variables that hold the bool return value of a channel receive. + # Default: false + ignore-chan-recv-ok: true + # Optional list of variable names that should be ignored completely. + # Default: [] + ignore-names: + - err + # Optional list of variable declarations that should be ignored completely. + # Entries must be in one of the following forms (see below for examples): + # - for variables, parameters, named return values, method receivers, or type parameters: + # ( can also be a pointer/slice/map/chan/...) + # - for constants: const + # + # Default: [] + ignore-decls: + - c echo.Context + - t testing.T + - f *foo.Bar + - e error + - i int + - const C + - T any + - m map[string]int + + whitespace: + # Enforces newlines (or comments) after every multi-line if statement. + # Default: false + multi-if: true + # Enforces newlines (or comments) after every multi-line function signature. + # Default: false + multi-func: true + + wrapcheck: + # An array of strings that specify substrings of signatures to ignore. + # If this set, it will override the default set of ignored signatures. + # See https://github.com/tomarrell/wrapcheck#configuration for more information. + # Default: [".Errorf(", "errors.New(", "errors.Unwrap(", ".Wrap(", ".Wrapf(", ".WithMessage(", ".WithMessagef(", ".WithStack("] + ignoreSigs: + - .Errorf( + - errors.New( + - errors.Unwrap( + - .Wrap( + - .Wrapf( + - .WithMessage( + - .WithMessagef( + - .WithStack( + # An array of strings that specify regular expressions of signatures to ignore. + # Default: [] + ignoreSigRegexps: + - \.New.*Error\( + # An array of strings that specify globs of packages to ignore. + # Default: [] + ignorePackageGlobs: + - encoding/* + - github.com/pkg/* + # An array of strings that specify regular expressions of interfaces to ignore. 
+ # Default: [] + ignoreInterfaceRegexps: + - ^(?i)c(?-i)ach(ing|e) + + wsl: + # See https://github.com/bombsimon/wsl/blob/master/doc/configuration.md for documentation of available settings. + # These are the defaults for `golangci-lint`. + + # Do strict checking when assigning from append (x = append(x, y)). If + # this is set to true - the append call must append either a variable + # assigned, called or used on the line above. + strict-append: true + + # Allows assignments to be cuddled with variables used in calls on + # line above and calls to be cuddled with assignments of variables + # used in call on line above. + allow-assign-and-call: true + + # Allows assignments to be cuddled with anything. + allow-assign-and-anything: false + + # Allows cuddling to assignments even if they span over multiple lines. + allow-multiline-assign: true + + # If the number of lines in a case block is equal to or lager than this + # number, the case *must* end white a newline. + force-case-trailing-whitespace: 0 + + # Allow blocks to end with comments. + allow-trailing-comment: false + + # Allow multiple comments in the beginning of a block separated with newline. + allow-separated-leading-comment: false + + # Allow multiple var/declaration statements to be cuddled. + allow-cuddle-declarations: false + + # A list of call idents that everything can be cuddled with. + # Defaults to calls looking like locks. + allow-cuddle-with-calls: [ "Lock", "RLock" ] + + # AllowCuddleWithRHS is a list of right hand side variables that is allowed + # to be cuddled with anything. Defaults to assignments or calls looking + # like unlocks. + allow-cuddle-with-rhs: [ "Unlock", "RUnlock" ] + + # Causes an error when an If statement that checks an error variable doesn't + # cuddle with the assignment of that variable. + force-err-cuddling: false + + # When force-err-cuddling is enabled this is a list of names + # used for error variables to check for in the conditional. 
+ error-variable-names: [ "err" ] + + # Causes an error if a short declaration (:=) cuddles with anything other than + # another short declaration. + # This logic overrides force-err-cuddling among others. + force-short-decl-cuddling: false + + # The custom section can be used to define linter plugins to be loaded at runtime. + # See README documentation for more info. + custom: + # Each custom linter should have a unique name. + example: + # The path to the plugin *.so. Can be absolute or local. + # Required for each custom linter. + path: /path/to/example.so + # The description of the linter. + # Optional. + description: This is an example usage of a plugin linter. + # Intended to point to the repo location of the linter. + # Optional. + original-url: github.com/golangci/example-linter + + +linters: + # Disable all linters. + # Default: false + disable-all: true + # Enable specific linter + # https://golangci-lint.run/usage/linters/#enabled-by-default + enable: + - asasalint + - asciicheck + - bidichk + - bodyclose + - containedctx + - contextcheck + - cyclop + - deadcode + - decorder + - depguard + - dogsled + - dupl + - dupword + - durationcheck + - errcheck + - errchkjson + - errname + - errorlint + - execinquery + - exhaustive + - exhaustivestruct + - exhaustruct + - exportloopref + - forbidigo + - forcetypeassert + - funlen + - gci + - ginkgolinter + - gocheckcompilerdirectives + - gochecknoglobals + - gochecknoinits + - gochecksumtype + - gocognit + - goconst + - gocritic + - gocyclo + - godot + - godox + - goerr113 + - gofmt + - gofumpt + - goheader + - goimports + - golint + - gomnd + - gomoddirectives + - gomodguard + - goprintffuncname + - gosec + - gosimple + - gosmopolitan + - govet + - grouper + - ifshort + - importas + - inamedparam + - ineffassign + - interfacebloat + - interfacer + - ireturn + - lll + - loggercheck + - maintidx + - makezero + - maligned + - mirror + - misspell + - musttag + - nakedret + - nestif + - nilerr + - nilnil + - nlreturn 
+ - noctx + - nolintlint + - nonamedreturns + - nosnakecase + - nosprintfhostport + - paralleltest + - perfsprint + - prealloc + - predeclared + - promlinter + - protogetter + - reassign + - revive + - rowserrcheck + - scopelint + - sloglint + - sqlclosecheck + - staticcheck + - structcheck + - stylecheck + - tagalign + - tagliatelle + - tenv + - testableexamples + - testifylint + - testpackage + - thelper + - tparallel + - typecheck + - unconvert + - unparam + - unused + - usestdlibvars + - varcheck + - varnamelen + - wastedassign + - whitespace + - wrapcheck + - wsl + - zerologlint + + # Enable all available linters. + # Default: false + enable-all: true + # Disable specific linter + # https://golangci-lint.run/usage/linters/#disabled-by-default + disable: + - asasalint + - asciicheck + - bidichk + - bodyclose + - containedctx + - contextcheck + - cyclop + - deadcode + - decorder + - depguard + - dogsled + - dupl + - dupword + - durationcheck + - errcheck + - errchkjson + - errname + - errorlint + - execinquery + - exhaustive + - exhaustivestruct + - exhaustruct + - exportloopref + - forbidigo + - forcetypeassert + - funlen + - gci + - ginkgolinter + - gocheckcompilerdirectives + - gochecknoglobals + - gochecknoinits + - gochecksumtype + - gocognit + - goconst + - gocritic + - gocyclo + - godot + - godox + - goerr113 + - gofmt + - gofumpt + - goheader + - goimports + - golint + - gomnd + - gomoddirectives + - gomodguard + - goprintffuncname + - gosec + - gosimple + - gosmopolitan + - govet + - grouper + - ifshort + - importas + - inamedparam + - ineffassign + - interfacebloat + - interfacer + - ireturn + - lll + - loggercheck + - maintidx + - makezero + - maligned + - mirror + - misspell + - musttag + - nakedret + - nestif + - nilerr + - nilnil + - nlreturn + - noctx + - nolintlint + - nonamedreturns + - nosnakecase + - nosprintfhostport + - paralleltest + - perfsprint + - prealloc + - predeclared + - promlinter + - protogetter + - reassign + - revive + - 
rowserrcheck + - scopelint + - sloglint + - sqlclosecheck + - staticcheck + - structcheck + - stylecheck + - tagalign + - tagliatelle + - tenv + - testableexamples + - testifylint + - testpackage + - thelper + - tparallel + - typecheck + - unconvert + - unparam + - unused + - usestdlibvars + - varcheck + - varnamelen + - wastedassign + - whitespace + - wrapcheck + - wsl + - zerologlint + + # Enable presets. + # https://golangci-lint.run/usage/linters + presets: + - bugs + - comment + - complexity + - error + - format + - import + - metalinter + - module + - performance + - sql + - style + - test + - unused + + # Run only fast linters from enabled linters set (first run won't be fast) + # Default: false + fast: true + + +issues: + # List of regexps of issue texts to exclude. + # + # But independently of this option we use default exclude patterns, + # it can be disabled by `exclude-use-default: false`. + # To list all excluded by default patterns execute `golangci-lint run --help` + # + # Default: https://golangci-lint.run/usage/false-positives/#default-exclusions + exclude: + - abcdef + + # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: + # Exclude some linters from running on tests files. + - path: _test\.go + linters: + - gocyclo + - errcheck + - dupl + - gosec + + # Run some linter only for test files by excluding its issues for everything else. + - path-except: _test\.go + linters: + - forbidigo + + # Exclude known linters from partially hard-vendored code, + # which is impossible to exclude via `nolint` comments. + # `/` will be replaced by current OS file path separator to properly work on Windows. + - path: internal/hmac/ + text: "weak cryptographic primitive" + linters: + - gosec + + # Exclude some `staticcheck` messages. + - linters: + - staticcheck + text: "SA9003:" + + # Exclude `lll` issues for long lines with `go:generate`. 
+ - linters: + - lll + source: "^//go:generate " + + # Independently of option `exclude` we use default exclude patterns, + # it can be disabled by this option. + # To list all excluded by default patterns execute `golangci-lint run --help`. + # Default: true. + exclude-use-default: false + + # If set to true exclude and exclude-rules regular expressions become case-sensitive. + # Default: false + exclude-case-sensitive: false + + # The list of ids of default excludes to include or disable. + # https://golangci-lint.run/usage/false-positives/#default-exclusions + # Default: [] + include: + - EXC0001 + - EXC0002 + - EXC0003 + - EXC0004 + - EXC0005 + - EXC0006 + - EXC0007 + - EXC0008 + - EXC0009 + - EXC0010 + - EXC0011 + - EXC0012 + - EXC0013 + - EXC0014 + - EXC0015 + + # Maximum issues count per one linter. + # Set to 0 to disable. + # Default: 50 + max-issues-per-linter: 0 + + # Maximum count of issues with the same text. + # Set to 0 to disable. + # Default: 3 + max-same-issues: 0 + + # Show only new issues: if there are unstaged changes or untracked files, + # only those changes are analyzed, else only changes in HEAD~ are analyzed. + # It's a super-useful option for integration of golangci-lint into existing large codebase. + # It's not practical to fix all existing issues at the moment of integration: + # much better don't allow issues in new code. + # + # Default: false. + new: true + + # Show only new issues created after git revision `REV`. + new-from-rev: HEAD + + # Show only new issues created in git patch with set file path. + new-from-patch: path/to/patch/file + + # Fix found issues (if it's supported by the linter). + fix: true + + +severity: + # Set the default severity for issues. + # + # If severity rules are defined and the issues do not match or no severity is provided to the rule + # this will be the default severity applied. + # Severities should match the supported severity names of the selected out format. 
+ # - Code climate: https://docs.codeclimate.com/docs/issues#issue-severity + # - Checkstyle: https://checkstyle.sourceforge.io/property_types.html#SeverityLevel + # - GitHub: https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message + # - TeamCity: https://www.jetbrains.com/help/teamcity/service-messages.html#Inspection+Instance + # + # Default value is an empty string. + default-severity: error + + # If set to true `severity-rules` regular expressions become case-sensitive. + # Default: false + case-sensitive: true + + # When a list of severity rules are provided, severity information will be added to lint issues. + # Severity rules have the same filtering capability as exclude rules + # except you are allowed to specify one matcher per severity rule. + # Only affects out formats that support setting severity information. + # + # Default: [] + rules: + - linters: + - dupl + severity: info diff --git a/.github/workflows/mertricstest.yml b/.github/workflows/mertricstest.yml index 2fa460e..a2d3879 100644 --- a/.github/workflows/mertricstest.yml +++ b/.github/workflows/mertricstest.yml @@ -83,7 +83,7 @@ jobs: metricstest -test.v -test.run=^TestIteration1$ \ -binary-path=cmd/server/server - - name: Build agent binary + - name: Build agent binary run: | cd cmd/agent go build -buildvcs=false -o agent diff --git a/cmd/agent/README.md b/cmd/agent/README.md index 5d500a4..3eee715 100644 --- a/cmd/agent/README.md +++ b/cmd/agent/README.md @@ -1,3 +1,3 @@ # cmd/agent -В данной директории будет содержаться код Агента, который скомпилируется в бинарное приложение +This directory contains the Agent code that is compiled into a binary application. 
diff --git a/cmd/agent/main.go b/cmd/agent/main.go index 38dd16d..a8bf6a0 100644 --- a/cmd/agent/main.go +++ b/cmd/agent/main.go @@ -1,3 +1,99 @@ package main -func main() {} +import ( + "context" + "flag" + "fmt" + + "github.com/MlDenis/prometheus_wannabe/internal/config" + "github.com/MlDenis/prometheus_wannabe/internal/hash" + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/model" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/provider/agregate" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/provider/custom" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/provider/gopsutil" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/provider/runtime" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/sendler/http" + "github.com/MlDenis/prometheus_wannabe/internal/worker" + + "github.com/caarlos0/env/v7" +) + +func main() { + + conf, err := createConfig() + if err != nil { + panic(logger.WrapError("initialize config", err)) + } + + logger.InitLogger(fmt.Sprint(conf.LogLevel)) + + signer := hash.NewSigner(conf) + converter := model.NewMetricsConverter(conf, signer) + metricPusher, err := http.NewMetricsPusher(conf, converter) + if err != nil { + panic(logger.WrapError("create new metrics pusher", err)) + } + + runtimeMetricsProvider := runtime.NewRuntimeMetricsProvider(conf) + customMetricsProvider := custom.NewCustomMetricsProvider() + gopsutilMetricsProvider := gopsutil.NewGopsutilMetricsProvider() + aggregateMetricsProvider := agregate.NewAggregateMetricsProvider( + runtimeMetricsProvider, + customMetricsProvider, + gopsutilMetricsProvider, + ) + getMetricsWorker := worker.NewHardWorker(aggregateMetricsProvider.Update) + pushMetricsWorker := worker.NewHardWorker(func(workerContext context.Context) error { + return metricPusher.Push(workerContext, aggregateMetricsProvider.GetMetrics()) + }) + + ctx, cancel := context.WithCancel(context.Background()) + defer 
cancel() + + go getMetricsWorker.StartWork(ctx, conf.UpdateMetricsInterval) + pushMetricsWorker.StartWork(ctx, conf.SendMetricsInterval) +} + +func createConfig() (*config.Config, error) { + conf := &config.Config{CollectMetricsList: []string{ + "Alloc", + "BuckHashSys", + "Frees", + "GCCPUFraction", + "GCSys", + "HeapAlloc", + "HeapIdle", + "HeapInuse", + "HeapObjects", + "HeapReleased", + "HeapSys", + "LastGC", + "Lookups", + "MCacheInuse", + "MCacheSys", + "MSpanInuse", + "MSpanSys", + "Mallocs", + "NextGC", + "NumForcedGC", + "NumGC", + "OtherSys", + "PauseTotalNs", + "StackInuse", + "StackSys", + "Sys", + "TotalAlloc", + }} + + flag.StringVar(&conf.Key, "k", "", "Signer secret key") + flag.StringVar(&conf.ServerURL, "a", "localhost:8080", "Metrics server URL") + flag.IntVar(&conf.PushRateLimit, "l", 20, "Push metrics parallel workers limit") + flag.IntVar(&conf.PushTimeout, "t", 10, "Push metrics timeout") + flag.IntVar(&conf.SendMetricsInterval, "r", 10, "Send metrics interval") + flag.IntVar(&conf.UpdateMetricsInterval, "p", 2, "Update metrics interval") + flag.Parse() + + err := env.Parse(conf) + return conf, err +} diff --git a/cmd/server/README.md b/cmd/server/README.md index cf02a69..77f0148 100644 --- a/cmd/server/README.md +++ b/cmd/server/README.md @@ -1,3 +1,3 @@ # cmd/agent -В данной директории будет содержаться код Сервера, который скомпилируется в бинарное приложение +This directory contains the Server code that is compiled into a binary application. 
\ No newline at end of file diff --git a/cmd/server/main.go b/cmd/server/main.go index 38dd16d..61d0a31 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -1,3 +1,565 @@ package main -func main() {} +import ( + "compress/gzip" + "context" + "encoding/json" + "errors" + "flag" + "fmt" + "io" + "net/http" + + "go.uber.org/zap" + + "github.com/MlDenis/prometheus_wannabe/internal/converter" + "github.com/MlDenis/prometheus_wannabe/internal/database" + "github.com/MlDenis/prometheus_wannabe/internal/database/postgre" + "github.com/MlDenis/prometheus_wannabe/internal/database/stub" + "github.com/MlDenis/prometheus_wannabe/internal/hash" + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/html" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/model" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/storage" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/storage/db" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/storage/file" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/storage/memory" + "github.com/MlDenis/prometheus_wannabe/internal/worker" + + "github.com/caarlos0/env/v7" + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" + + _ "net/http/pprof" +) + +// Constants +const ( + counterMetricName = "counter" + gaugeMetricName = "gauge" +) + +var compressContentTypes = []string{ + "application/javascript", + "application/json", + "text/css", + "text/html", + "text/plain", + "text/xml", +} + +// Configuration struct for holding server configuration. +type config struct { + Key string `env:"KEY"` + ServerURL string `env:"ADDRESS"` + StoreInterval int `env:"STORE_INTERVAL"` + StoreFile string `env:"STORE_FILE"` + Restore bool `env:"RESTORE"` + DB string `env:"DATABASE_DSN"` + LogLevel zap.AtomicLevel `env:"LOG_LEVEL"` +} + +// Struct for handling context keys related to metrics. 
+type metricInfoContextKey struct { + key string +} + +// Struct for handling metrics in the context of HTTP requests. +type metricsRequestContext struct { + requestMetrics []*model.Metrics + resultMetrics []*model.Metrics +} + +// main is the main entry point for the Prometheus Wannabe server. +// It initializes the server, parses configuration, sets up logging, database, storage, and starts the HTTP server. +func main() { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + conf, err := createConfig() + if err != nil { + panic(logger.WrapError("create config file", err)) + } + + logger.InitLogger(fmt.Sprint(conf.LogLevel)) + + logger.SugarLogger.Infof("Starting server with the following configuration:%v", conf) + + var base database.DataBase + var backupStorage storage.MetricsStorage + if conf.DB == "" { + base = &stub.StubDataBase{} + backupStorage = file.NewFileStorage(conf) + } else { + base, err = postgre.NewPostgresDataBase(ctx, conf) + if err != nil { + panic(logger.WrapError("create database", err)) + } + + backupStorage = db.NewDBStorage(base) + } + defer base.Close() + + inMemoryStorage := memory.NewInMemoryStorage() + storageStrategy := storage.NewStorageStrategy(conf, inMemoryStorage, backupStorage) + defer storageStrategy.Close() + + signer := hash.NewSigner(conf) + converter := model.NewMetricsConverter(conf, signer) + htmlPageBuilder := html.NewSimplePageBuilder() + router := initRouter(storageStrategy, converter, htmlPageBuilder, base) + + if conf.Restore { + logger.SugarLogger.Error("Restore metrics from backup") + err = storageStrategy.RestoreFromBackup(ctx) + if err != nil { + logger.SugarLogger.Errorf("failed to restore state from backup: %v", err) + } + } + + if !conf.SyncMode() { + logger.SugarLogger.Infof("Start periodic backup service") + backgroundStore := worker.NewHardWorker(func(ctx context.Context) error { return storageStrategy.CreateBackup(ctx) }) + go backgroundStore.StartWork(ctx, conf.StoreInterval) + } + 
+ logger.SugarLogger.Infof("Start listen " + conf.ServerURL) + err = http.ListenAndServe(conf.ServerURL, router) + if err != nil { + logger.SugarLogger.Error(err) + } + + logger.SugarLogger.Sync() +} + +// createConfig parses command line flags and environment variables to create a configuration object. +func createConfig() (*config, error) { + conf := &config{} + + flag.StringVar(&conf.Key, "k", "", "Signer secret key") + flag.BoolVar(&conf.Restore, "r", true, "Restore metric values from the server backup file") + flag.IntVar(&conf.StoreInterval, "i", 300, "Store backup interval") + flag.StringVar(&conf.ServerURL, "a", "localhost:8080", "Server listen URL") + flag.StringVar(&conf.StoreFile, "f", "/tmp/metrics-db.json", "Backup storage file path") + flag.StringVar(&conf.DB, "d", "", "Database connection string") + flag.Parse() + + err := env.Parse(conf) + return conf, err +} + +// initRouter initializes the HTTP router for the server, including middleware and route handlers. +func initRouter(metricsStorage storage.MetricsStorage, converter *model.MetricsConverter, htmlPageBuilder html.HTMLPageBuilder, dbStorage database.DataBase) *chi.Mux { + router := chi.NewRouter() + + router.Use(middleware.Logger) + router.Use(middleware.Compress(gzip.BestSpeed, compressContentTypes...)) + router.Mount("/debug", middleware.Profiler()) + router.Route("/update", func(r chi.Router) { + r.With(fillSingleJSONContext, updateMetrics(metricsStorage, converter)). + Post("/", successSingleJSONResponse()) + r.With(fillCommonURLContext, fillGaugeURLContext, updateMetrics(metricsStorage, converter)). + Post("/gauge/{metricName}/{metricValue}", successURLResponse()) + r.With(fillCommonURLContext, fillCounterURLContext, updateMetrics(metricsStorage, converter)). 
+ Post("/counter/{metricName}/{metricValue}", successURLResponse()) + r.Post("/{metricType}/{metricName}/{metricValue}", func(w http.ResponseWriter, r *http.Request) { + message := fmt.Sprintf("unknown metric type: %s", chi.URLParam(r, "metricType")) + logger.SugarLogger.Error("failed to update metric: " + message) + http.Error(w, message, http.StatusNotImplemented) + }) + }) + + router.Route("/updates", func(r chi.Router) { + r.With(fillMultiJSONContext, updateMetrics(metricsStorage, converter)). + Post("/", successMultiJSONResponse()) + }) + + router.Route("/value", func(r chi.Router) { + r.With(fillSingleJSONContext, fillMetricValues(metricsStorage, converter)). + Post("/", successSingleJSONResponse()) + + r.With(fillCommonURLContext, fillMetricValues(metricsStorage, converter)). + Get("/{metricType}/{metricName}", successURLValueResponse(converter)) + }) + + router.Route("/ping", func(r chi.Router) { + r.Get("/", handleDBPing(dbStorage)) + }) + + router.Route("/", func(r chi.Router) { + r.Get("/", handleMetricsPage(htmlPageBuilder, metricsStorage)) + r.Get("/metrics", handleMetricsPage(htmlPageBuilder, metricsStorage)) + }) + + return router +} + +// fillCommonURLContext is a middleware to fill common metric information in the context for HTTP requests. +func fillCommonURLContext(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx, metricsContext := ensureMetricsContext(r) + metricsContext.requestMetrics = append(metricsContext.requestMetrics, &model.Metrics{ + ID: chi.URLParam(r, "metricName"), + MType: chi.URLParam(r, "metricType"), + }) + + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} + +// fillGaugeURLContext is a middleware to fill gauge metric information in the context for HTTP requests. 
+func fillGaugeURLContext(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx, metricsContext := ensureMetricsContext(r) + if len(metricsContext.requestMetrics) != 1 { + logger.SugarLogger.Error("fillGaugeURLContext: wrong context") + http.Error(w, "fillGaugeURLContext: wrong context", http.StatusInternalServerError) + return + } + + strValue := chi.URLParam(r, "metricValue") + value, err := converter.ToFloat64(strValue) + if err != nil { + http.Error(w, logger.WrapError(fmt.Sprintf("parse value: %v", strValue), err).Error(), http.StatusBadRequest) + return + } + + metricsContext.requestMetrics[0].MType = gaugeMetricName + metricsContext.requestMetrics[0].Value = &value + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} + +// fillCounterURLContext is a middleware to fill counter metric information in the context for HTTP requests. +func fillCounterURLContext(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx, metricsContext := ensureMetricsContext(r) + if len(metricsContext.requestMetrics) != 1 { + logger.SugarLogger.Error("fillCounterURLContext: wrong context") + http.Error(w, "fillCounterURLContext: wrong context", http.StatusInternalServerError) + return + } + + strValue := chi.URLParam(r, "metricValue") + value, err := converter.ToInt64(strValue) + if err != nil { + http.Error(w, logger.WrapError(fmt.Sprintf("parse value: %v", strValue), err).Error(), http.StatusBadRequest) + return + } + + metricsContext.requestMetrics[0].MType = counterMetricName + metricsContext.requestMetrics[0].Delta = &value + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} + +// fillSingleJSONContext is a middleware to fill single JSON metric information in the context for HTTP requests. 
+func fillSingleJSONContext(next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		ctx, metricsContext := ensureMetricsContext(r)
+		// Transparently decompress gzip-encoded request bodies.
+		var reader io.Reader
+		if r.Header.Get(`Content-Encoding`) == `gzip` {
+			gz, err := gzip.NewReader(r.Body)
+			if err != nil {
+				http.Error(w, logger.WrapError("create gzip reader", err).Error(), http.StatusInternalServerError)
+				return
+			}
+			reader = gz
+			defer gz.Close()
+		} else {
+			reader = r.Body
+		}
+
+		// The decoded metric is appended to the context before validation so that
+		// downstream middleware always sees exactly one request metric.
+		metricContext := &model.Metrics{}
+		metricsContext.requestMetrics = append(metricsContext.requestMetrics, metricContext)
+
+		err := json.NewDecoder(reader).Decode(metricContext)
+		if err != nil {
+			// NOTE(review): the "unmarhsal" typo is load-bearing — tests assert this exact message.
+			http.Error(w, logger.WrapError("unmarhsal json context", err).Error(), http.StatusBadRequest)
+			return
+		}
+
+		// Both ID and MType are mandatory in the JSON payload.
+		if metricContext.ID == "" {
+			logger.SugarLogger.Error("Fail to collect json context: metric name is missed")
+			http.Error(w, "metric name is missed", http.StatusBadRequest)
+			return
+		}
+
+		if metricContext.MType == "" {
+			logger.SugarLogger.Error("Fail to collect json context: metric type is missed")
+			http.Error(w, "metric types is missed", http.StatusBadRequest)
+			return
+		}
+
+		next.ServeHTTP(w, r.WithContext(ctx))
+	})
+}
+
+// fillMultiJSONContext is a middleware to fill multiple JSON metric information in the context for HTTP requests.
+func fillMultiJSONContext(next http.Handler) http.Handler {
+	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+		ctx, metricsContext := ensureMetricsContext(r)
+		// Transparently decompress gzip-encoded request bodies.
+		var reader io.Reader
+		if r.Header.Get(`Content-Encoding`) == `gzip` {
+			gz, err := gzip.NewReader(r.Body)
+			if err != nil {
+				http.Error(w, logger.WrapError("create gzip reader", err).Error(), http.StatusInternalServerError)
+				return
+			}
+			reader = gz
+			defer gz.Close()
+		} else {
+			reader = r.Body
+		}
+
+		// Replace (not append to) any existing request metrics with the decoded batch.
+		metricsContext.requestMetrics = []*model.Metrics{}
+		err := json.NewDecoder(reader).Decode(&metricsContext.requestMetrics)
+		if err != nil {
+			http.Error(w, logger.WrapError("unmarshal request metrics", err).Error(), http.StatusBadRequest)
+			return
+		}
+
+		// Every metric in the batch must carry both ID and MType; the first invalid
+		// entry rejects the whole request.
+		for _, requestMetric := range metricsContext.requestMetrics {
+			if requestMetric.ID == "" {
+				logger.SugarLogger.Error("Fail to collect json context: metric name is missed")
+				http.Error(w, "metric name is missed", http.StatusBadRequest)
+				return
+			}
+
+			if requestMetric.MType == "" {
+				logger.SugarLogger.Error("Fail to collect json context: metric type is missed")
+				http.Error(w, "metric types is missed", http.StatusBadRequest)
+				return
+			}
+		}
+
+		next.ServeHTTP(w, r.WithContext(ctx))
+	})
+}
+
+// updateMetrics is a middleware to handle metric updates in the storage.
+func updateMetrics(storage storage.MetricsStorage, converter *model.MetricsConverter) func(next http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + ctx, metricsContext := ensureMetricsContext(r) + metricsList := make([]metrics.Metric, len(metricsContext.requestMetrics)) + for i, metricContext := range metricsContext.requestMetrics { + metric, err := converter.FromModelMetric(metricContext) + if err != nil { + logger.SugarLogger.Errorf("Fail to parse metric: %v", err) + + var errUnknownMetricType *model.UnknownMetricTypeError + if errors.As(err, &errUnknownMetricType) { + http.Error(w, fmt.Sprintf("unknown metric type: %s", errUnknownMetricType.UnknownType), http.StatusNotImplemented) + } else { + http.Error(w, err.Error(), http.StatusBadRequest) + } + return + } + + metricsList[i] = metric + } + + resultMetrics, err := storage.AddMetricValues(ctx, metricsList) + if err != nil { + http.Error(w, logger.WrapError("update metric", err).Error(), http.StatusInternalServerError) + return + } + + metricsContext.resultMetrics = make([]*model.Metrics, len(resultMetrics)) + for i, resultMetric := range resultMetrics { + newValue, err := converter.ToModelMetric(resultMetric) + if err != nil { + http.Error(w, logger.WrapError("convert metric", err).Error(), http.StatusInternalServerError) + return + } + + logger.SugarLogger.Errorf("Updated metric: %v. newValue: %v", resultMetric.GetName(), newValue) + metricsContext.resultMetrics[i] = newValue + } + + next.ServeHTTP(w, r) + }) + } +} + +// fillMetricValues is a middleware to fill metric values from the storage in the context for HTTP requests. 
+func fillMetricValues(storage storage.MetricsStorage, converter *model.MetricsConverter) func(next http.Handler) http.Handler {
+	return func(next http.Handler) http.Handler {
+		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			ctx, metricsContext := ensureMetricsContext(r)
+			metricsContext.resultMetrics = make([]*model.Metrics, len(metricsContext.requestMetrics))
+			for i, metricContext := range metricsContext.requestMetrics {
+				metric, err := storage.GetMetric(ctx, metricContext.MType, metricContext.ID)
+				if err != nil {
+					// Any storage failure is reported to the client as 404; the detail is only logged.
+					logger.SugarLogger.Errorf("Fail to get metric value: %v", err)
+					http.Error(w, "Metric not found", http.StatusNotFound)
+					return
+				}
+
+				resultValue, err := converter.ToModelMetric(metric)
+				if err != nil {
+					http.Error(w, logger.WrapError("get metric value", err).Error(), http.StatusInternalServerError)
+					return
+				}
+
+				metricsContext.resultMetrics[i] = resultValue
+			}
+
+			// NOTE(review): ctx is not propagated here (next sees the original request); this relies on
+			// an earlier middleware having already attached the metrics context — confirm against the router.
+			next.ServeHTTP(w, r)
+		})
+	}
+}
+
+// successURLValueResponse is a handler function to respond with the value of a single metric in plain text.
+func successURLValueResponse(converter *model.MetricsConverter) func(w http.ResponseWriter, r *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		_, metricsContext := ensureMetricsContext(r)
+
+		// Exactly one result metric must have been produced by fillMetricValues.
+		if len(metricsContext.resultMetrics) != 1 {
+			logger.SugarLogger.Error("successURLValueResponse: wrong context")
+			http.Error(w, "successURLValueResponse: wrong context", http.StatusInternalServerError)
+			return
+		}
+
+		metric, err := converter.FromModelMetric(metricsContext.resultMetrics[0])
+		if err != nil {
+			http.Error(w, logger.WrapError("convert result metric", err).Error(), http.StatusInternalServerError)
+			return
+		}
+
+		successResponse(w, "text/plain", metric.GetStringValue())
+	}
+}
+
+// handleMetricsPage is a handler function to respond with an HTML page containing metric values.
+func handleMetricsPage(builder html.HTMLPageBuilder, storage storage.MetricsStorage) func(w http.ResponseWriter, r *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		values, err := storage.GetMetricValues(r.Context())
+		if err != nil {
+			http.Error(w, logger.WrapError("get metric values", err).Error(), http.StatusInternalServerError)
+			return
+		}
+		successResponse(w, "text/html", builder.BuildMetricsPage(values))
+	}
+}
+
+// successURLResponse is a handler function to respond with a success message in plain text.
+func successURLResponse() func(w http.ResponseWriter, r *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		successResponse(w, "text/plain", "ok")
+	}
+}
+
+// successSingleJSONResponse is a handler function to respond with a single JSON metric in JSON format.
+// It serialises the single result metric prepared by fillMetricValues/updateMetrics.
+func successSingleJSONResponse() func(w http.ResponseWriter, r *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		_, metricsContext := ensureMetricsContext(r)
+
+		// Exactly one result metric must be present in the request context.
+		if len(metricsContext.resultMetrics) != 1 {
+			logger.SugarLogger.Error("successSingleJSONResponse: wrong context")
+			http.Error(w, "successSingleJSONResponse: wrong context", http.StatusInternalServerError)
+			return
+		}
+
+		result, err := json.Marshal(metricsContext.resultMetrics[0])
+		if err != nil {
+			http.Error(w, logger.WrapError("serialise result", err).Error(), http.StatusInternalServerError)
+			return
+		}
+
+		w.Header().Add("Content-Type", "application/json")
+		w.WriteHeader(http.StatusOK)
+		// Headers are already sent; a write failure can only be logged.
+		_, err = w.Write(result)
+		if err != nil {
+			logger.SugarLogger.Errorf("failed to write response: %v", err)
+		}
+	}
+}
+
+// successMultiJSONResponse is a handler function to respond with a stub JSON metric in JSON format.
+func successMultiJSONResponse() func(w http.ResponseWriter, r *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+
+		// The batch endpoint replies with an empty stub metric, not the updated values.
+		stubResult := &model.Metrics{}
+		result, err := json.Marshal(stubResult)
+		if err != nil {
+			http.Error(w, logger.WrapError("serialise result", err).Error(), http.StatusInternalServerError)
+			return
+		}
+
+		w.Header().Add("Content-Type", "application/json")
+		w.WriteHeader(http.StatusOK)
+		// Headers are already sent; a write failure can only be logged.
+		_, err = w.Write(result)
+		if err != nil {
+			logger.SugarLogger.Errorf("failed to write response: %v", err)
+		}
+	}
+}
+
+// successResponse is a generic function to respond with a success message in the specified content type.
+// It writes a 200 status with the given body; write failures are logged, not surfaced.
+func successResponse(w http.ResponseWriter, contentType string, message string) {
+	w.Header().Add("Content-Type", contentType)
+	w.WriteHeader(http.StatusOK)
+	_, err := w.Write([]byte(message))
+	if err != nil {
+		logger.SugarLogger.Errorf("failed to write response: %v", err)
+	}
+}
+
+// handleDBPing is a handler function to respond with a success message if the database is pingable.
+// A failed ping is reported as 500 with the wrapped error text.
+func handleDBPing(dbStorage database.DataBase) func(w http.ResponseWriter, r *http.Request) {
+	return func(w http.ResponseWriter, r *http.Request) {
+		err := dbStorage.Ping(r.Context())
+		if err == nil {
+			successResponse(w, "text/plain", "ok")
+		} else {
+			http.Error(w, logger.WrapError("ping database", err).Error(), http.StatusInternalServerError)
+		}
+	}
+}
+
+// ensureMetricsContext ensures that the metrics context is present in the HTTP request context.
+func ensureMetricsContext(r *http.Request) (context.Context, *metricsRequestContext) {
+	const metricsContextKey = "metricsContextKey"
+	ctx := r.Context()
+	// Reuse an existing metrics context if a prior middleware attached one;
+	// otherwise attach a fresh, empty one. The returned ctx must be propagated
+	// via r.WithContext for the new context to be visible downstream.
+	metricsContext, ok := ctx.Value(metricInfoContextKey{key: metricsContextKey}).(*metricsRequestContext)
+	if !ok {
+		metricsContext = &metricsRequestContext{}
+		ctx = context.WithValue(r.Context(), metricInfoContextKey{key: metricsContextKey}, metricsContext)
+	}
+
+	return ctx, metricsContext
+}
+
+// StoreFilePath returns the configured file path for storing backups.
+func (c *config) StoreFilePath() string {
+	return c.StoreFile
+}
+
+// SyncMode returns true if the server is running in sync mode, i.e., using a database or with zero store interval.
+func (c *config) SyncMode() bool {
+	return c.DB != "" || c.StoreInterval == 0
+}
+
+// String returns a formatted string representation of the server configuration.
+func (c *config) String() string {
+	return fmt.Sprintf("\nServerURL:\t%v\nStoreInterval:\t%v\nStoreFile:\t%v\nRestore:\t%v\nDb:\t%v",
+		c.ServerURL, c.StoreInterval, c.StoreFile, c.Restore, c.DB)
+}
+
+// GetKey returns the secret key used for signing metrics.
+func (c *config) GetKey() []byte {
+	return []byte(c.Key)
+}
+
+// SignMetrics returns true if metrics should be signed.
+// Signing is enabled whenever a non-empty key is configured.
+func (c *config) SignMetrics() bool {
+	return c.Key != ""
+}
+
+// GetConnectionString returns the configured database connection string.
+func (c *config) GetConnectionString() string { + return c.DB +} diff --git a/cmd/server/main_test.go b/cmd/server/main_test.go new file mode 100644 index 0000000..a83ae8a --- /dev/null +++ b/cmd/server/main_test.go @@ -0,0 +1,729 @@ +package main + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "log" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/MlDenis/prometheus_wannabe/internal/converter" + "github.com/MlDenis/prometheus_wannabe/internal/database" + "github.com/MlDenis/prometheus_wannabe/internal/hash" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/html" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/model" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/storage/memory" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" + + "io" + "net/http" + "net/http/httptest" + "strconv" + "strings" + "testing" +) + +type callResult struct { + status int + response string + responseObj *model.Metrics +} + +type modelRequest struct { + ID string `json:"id"` + MType string `json:"type"` + Delta *int64 `json:"delta,omitempty"` + Value *float64 `json:"value,omitempty"` +} + +type jsonAPIRequest struct { + httpMethod string + path string + request *modelRequest + metrics []metrics.Metric +} + +type testDescription struct { + testName string + httpMethod string + metricType string + metricName string + metricValue string + expected callResult +} + +type testConf struct { + key []byte + singEnabled bool +} + +type testDBStorage struct{} + +func Test_UpdateUrlRequest(t *testing.T) { + tests := []testDescription{} + for _, method := range getMethods() { + for _, metricType := range getMetricType() { + for _, metricName := range getMetricName() { + for _, metricValue := range getMetricValue() { + var expected *callResult + + // json api + if metricType == "" && metricName == "" && metricValue == "" { + if method == 
http.MethodPost { + expected = expectedBadRequest("failed to unmarhsal json context: EOF\n") + } else { + expected = expectedNotAllowed() + } + } + + // Unexpected method types + if expected == nil && method != http.MethodPost { + if metricType == "" || metricName == "" || metricValue == "" { + expected = expectedNotFound() + } else { + expected = expectedNotAllowed() + } + } + + // Unexpected metric types + if expected == nil && metricType != gaugeMetricName && metricType != counterMetricName { + if metricType == "" || metricName == "" || metricValue == "" { + expected = expectedNotFound() + } else { + expected = expectedNotImplemented(metricType) + } + } + + // Empty metric name + if expected == nil && metricName == "" { + expected = expectedNotFound() + } + + // Incorrect metric value + if expected == nil { + if metricValue == "" { + expected = expectedNotFound() + } else { + switch metricType { + case gaugeMetricName: + _, err := strconv.ParseFloat(metricValue, 64) + if err != nil { + expected = expectedBadRequest(fmt.Sprintf("failed to parse value: %v: %v\n", metricValue, err)) + } + case counterMetricName: + _, err := strconv.ParseInt(metricValue, 10, 64) + if err != nil { + expected = expectedBadRequest(fmt.Sprintf("failed to parse value: %v: %v\n", metricValue, err)) + } + } + } + } + + // Success + if expected == nil { + expected = expectedOk() + } + + tests = append(tests, testDescription{ + testName: "url_" + method + "_" + metricType + "_" + metricName + "_" + metricValue, + httpMethod: method, + metricType: metricType, + metricName: metricName, + metricValue: metricValue, + expected: *expected, + }) + } + } + } + } + + for _, tt := range tests { + t.Run(tt.testName, func(t *testing.T) { + urlBuilder := &strings.Builder{} + urlBuilder.WriteString("http://localhost:8080/update") + appendIfNotEmpty(urlBuilder, tt.metricType) + appendIfNotEmpty(urlBuilder, tt.metricName) + appendIfNotEmpty(urlBuilder, tt.metricValue) + + metricsStorage := 
memory.NewInMemoryStorage() + htmlPageBuilder := html.NewSimplePageBuilder() + request := httptest.NewRequest(tt.httpMethod, urlBuilder.String(), nil) + w := httptest.NewRecorder() + + conf := &testConf{key: nil, singEnabled: false} + signer := hash.NewSigner(conf) + converter := model.NewMetricsConverter(conf, signer) + router := initRouter(metricsStorage, converter, htmlPageBuilder, &testDBStorage{}) + router.ServeHTTP(w, request) + actual := w.Result() + + assert.Equal(t, tt.expected.status, actual.StatusCode) + + defer actual.Body.Close() + resBody, err := io.ReadAll(actual.Body) + if err != nil { + t.Fatal(err) + } + + assert.Equal(t, tt.expected.response, string(resBody)) + }) + } +} + +func Test_UpdateJsonRequest_MethodNotAllowed(t *testing.T) { + expected := expectedNotAllowed() + for _, method := range getMethods() { + if method == http.MethodPost || method == http.MethodGet { + continue + } + + t.Run("json_"+method+"_methodNotAllowed", func(t *testing.T) { + actual := runJSONTest(t, jsonAPIRequest{httpMethod: method, path: "update"}) + assert.Equal(t, expected, actual) + }) + } +} + +func Test_UpdateJsonRequest_MetricName(t *testing.T) { + for _, metricType := range []string{counterMetricName, gaugeMetricName} { + for _, metricName := range getMetricName() { + requestObj := modelRequest{ + ID: metricName, + MType: metricType, + } + + var expected *callResult + if metricName == "" { + expected = expectedBadRequest("metric name is missed\n") + } else { + if metricType == counterMetricName { + delta := int64(100) + requestObj.Delta = &delta + expected = getExpectedObj(requestObj.MType, requestObj.ID, &delta, nil) + } else if metricType == gaugeMetricName { + value := float64(100) + requestObj.Value = &value + expected = getExpectedObj(requestObj.MType, requestObj.ID, nil, &value) + } + } + + t.Run("json_"+metricName+"_"+metricType+"_metricName", func(t *testing.T) { + actual := runJSONTest(t, jsonAPIRequest{httpMethod: http.MethodPost, path: "update", 
request: &requestObj}) + assert.Equal(t, expected, actual) + }) + } + } +} + +func Test_UpdateJsonRequest_MetricType(t *testing.T) { + for _, metricType := range getMetricType() { + requestObj := modelRequest{ + ID: "testMetricName", + MType: metricType, + } + + var expected *callResult + switch metricType { + case "": + expected = expectedBadRequest("metric types is missed\n") + case counterMetricName: + delta := int64(100) + requestObj.Delta = &delta + expected = getExpectedObj(requestObj.MType, requestObj.ID, &delta, nil) + case gaugeMetricName: + value := float64(100) + requestObj.Value = &value + expected = getExpectedObj(requestObj.MType, requestObj.ID, nil, &value) + default: + expected = expectedNotImplemented(metricType) + } + + t.Run("json_"+metricType+"_metricType", func(t *testing.T) { + actual := runJSONTest(t, jsonAPIRequest{httpMethod: http.MethodPost, path: "update", request: &requestObj}) + assert.Equal(t, expected, actual) + }) + } +} + +func Test_UpdateJsonRequest_CounterMetricValue(t *testing.T) { + delta := int64(100) + for _, metricValue := range []*int64{nil, &delta} { + requestObj := modelRequest{ + ID: "testMetricName", + MType: counterMetricName, + Delta: metricValue, + } + + var valueString string + var expected *callResult + if metricValue == nil { + valueString = "nil" + expected = expectedBadRequest("failed to convert metric: metric value is missed\n") + } else { + valueString = converter.IntToString(*metricValue) + expected = getExpectedObj(requestObj.MType, requestObj.ID, metricValue, nil) + } + + t.Run("json_"+valueString+"_counterMetricValue", func(t *testing.T) { + actual := runJSONTest(t, jsonAPIRequest{httpMethod: http.MethodPost, path: "update", request: &requestObj}) + assert.Equal(t, expected, actual) + }) + } +} + +func Test_UpdateJsonRequest_GaugeMetricValue(t *testing.T) { + value := float64(100) + for _, metricValue := range []*float64{nil, &value} { + requestObj := modelRequest{ + ID: "testMetricName", + MType: 
gaugeMetricName, + Value: metricValue, + } + + var valueString string + var expected *callResult + if metricValue == nil { + valueString = "nil" + expected = expectedBadRequest("failed to convert metric: metric value is missed\n") + } else { + valueString = converter.FloatToString(*metricValue) + expected = getExpectedObj(requestObj.MType, requestObj.ID, nil, metricValue) + } + + t.Run("json_"+valueString+"_gaugeMetricValue", func(t *testing.T) { + actual := runJSONTest(t, jsonAPIRequest{httpMethod: http.MethodPost, path: "update", request: &requestObj}) + assert.Equal(t, expected, actual) + }) + } +} + +func Test_GetMetricUrlRequest(t *testing.T) { + tests := []struct { + name string + metricType string + metricName string + expectSuccess bool + }{ + { + name: "type_not_found", + metricType: "not_existed_type", + metricName: "metricName", + expectSuccess: false, + }, + { + name: "metric_name_not_found", + metricType: counterMetricName, + metricName: "not_existed_metric_name", + expectSuccess: false, + }, + { + name: "success_get_value", + metricType: counterMetricName, + metricName: "metricName", + expectSuccess: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + url := fmt.Sprintf("http://localhost:8080/value/%v/%v", tt.metricType, tt.metricName) + + htmlPageBuilder := html.NewSimplePageBuilder() + metricsStorage := memory.NewInMemoryStorage() + metricsList := []metrics.Metric{createCounterMetric("metricName", 100)} + + _, err := metricsStorage.AddMetricValues(context.Background(), metricsList) + assert.NoError(t, err) + + request := httptest.NewRequest(http.MethodGet, url, nil) + w := httptest.NewRecorder() + + conf := &testConf{key: nil, singEnabled: false} + signer := hash.NewSigner(conf) + converter := model.NewMetricsConverter(conf, signer) + router := initRouter(metricsStorage, converter, htmlPageBuilder, &testDBStorage{}) + router.ServeHTTP(w, request) + actual := w.Result() + + if tt.expectSuccess { + assert.Equal(t, 
http.StatusOK, actual.StatusCode) + defer actual.Body.Close() + body, err := io.ReadAll(actual.Body) + if err != nil { + t.Fatal(err) + } + + assert.Equal(t, "100", string(body)) + } else { + assert.Equal(t, http.StatusNotFound, actual.StatusCode) + defer actual.Body.Close() + body, err := io.ReadAll(actual.Body) + if err != nil { + t.Fatal(err) + } + + assert.Equal(t, "Metric not found\n", string(body)) + } + }) + } +} + +func Test_GetMetricJsonRequest_MethodNotAllowed(t *testing.T) { + expected := expectedNotAllowed() + for _, method := range getMethods() { + if method == http.MethodPost { + continue + } + + t.Run("json_"+method+"_methodNotAllowed", func(t *testing.T) { + actual := runJSONTest(t, jsonAPIRequest{httpMethod: method, path: "value"}) + assert.Equal(t, expected, actual) + }) + } +} + +func Test_GetMetricJsonRequest_MetricName(t *testing.T) { + for _, metricType := range []string{counterMetricName, gaugeMetricName} { + for _, metricName := range getMetricName() { + requestObj := modelRequest{ + ID: metricName, + MType: metricType, + } + + var expected *callResult + metricList := []metrics.Metric{} + + if metricName == "" { + expected = expectedBadRequest("metric name is missed\n") + } else { + if metricType == counterMetricName { + delta := int64(100) + metricList = append(metricList, createCounterMetric(requestObj.ID, float64(delta))) + expected = getExpectedObj(requestObj.MType, requestObj.ID, &delta, nil) + } else if metricType == gaugeMetricName { + value := float64(100) + metricList = append(metricList, createGaugeMetric(requestObj.ID, value)) + expected = getExpectedObj(requestObj.MType, requestObj.ID, nil, &value) + } + } + + t.Run("json_"+metricName+"_"+metricType+"_metricName", func(t *testing.T) { + actual := runJSONTest(t, jsonAPIRequest{ + httpMethod: http.MethodPost, + path: "value", + request: &requestObj, + metrics: metricList, + }) + assert.Equal(t, expected, actual) + }) + } + } +} + +func Test_GetMetricJsonRequest_MetricType(t 
*testing.T) { + for _, metricType := range getMetricType() { + requestObj := modelRequest{ + ID: "testMetricName", + MType: metricType, + } + + var expected *callResult + metricList := []metrics.Metric{} + + switch metricType { + case "": + expected = expectedBadRequest("metric types is missed\n") + case counterMetricName: + delta := int64(100) + metricList = append(metricList, createCounterMetric(requestObj.ID, float64(delta))) + expected = getExpectedObj(requestObj.MType, requestObj.ID, &delta, nil) + case gaugeMetricName: + value := float64(100) + metricList = append(metricList, createGaugeMetric(requestObj.ID, value)) + expected = getExpectedObj(requestObj.MType, requestObj.ID, nil, &value) + default: + expected = expectedNotFoundMessage("Metric not found\n") + } + + t.Run("json_"+metricType+"_metricType", func(t *testing.T) { + actual := runJSONTest(t, jsonAPIRequest{ + httpMethod: http.MethodPost, + path: "value", + request: &requestObj, + metrics: metricList, + }) + assert.Equal(t, expected, actual) + }) + } +} + +func runJSONTest(t *testing.T, apiRequest jsonAPIRequest) *callResult { + t.Helper() + + var buffer bytes.Buffer + metricsStorage := memory.NewInMemoryStorage() + if apiRequest.metrics != nil { + _, err := metricsStorage.AddMetricValues(context.Background(), apiRequest.metrics) + assert.NoError(t, err) + } + htmlPageBuilder := html.NewSimplePageBuilder() + + if apiRequest.request != nil { + encoder := json.NewEncoder(&buffer) + err := encoder.Encode(apiRequest.request) + require.NoError(t, err) + } + + request := httptest.NewRequest(apiRequest.httpMethod, "http://localhost:8080/"+apiRequest.path, &buffer) + w := httptest.NewRecorder() + + conf := &testConf{} + signer := hash.NewSigner(conf) + converter := model.NewMetricsConverter(conf, signer) + router := initRouter(metricsStorage, converter, htmlPageBuilder, &testDBStorage{}) + router.ServeHTTP(w, request) + actual := w.Result() + result := &callResult{status: actual.StatusCode} + + defer 
actual.Body.Close() + resBody, _ := io.ReadAll(actual.Body) + resultObj := &model.Metrics{} + err := json.Unmarshal(resBody, resultObj) + if err != nil { + result.response = string(resBody) + } else { + result.responseObj = resultObj + } + + return result +} + +func appendIfNotEmpty(builder *strings.Builder, str string) { + if str != "" { + builder.WriteString("/") + builder.WriteString(str) + } +} + +func expectedNotFound() *callResult { + return expectedNotFoundMessage("404 page not found\n") +} + +func expectedNotFoundMessage(message string) *callResult { + return getExpected(http.StatusNotFound, message) +} + +func expectedNotAllowed() *callResult { + return getExpected(http.StatusMethodNotAllowed, "") +} + +func expectedBadRequest(message string) *callResult { + return getExpected(http.StatusBadRequest, message) +} + +func expectedNotImplemented(metricType string) *callResult { + return getExpected(http.StatusNotImplemented, fmt.Sprintf("unknown metric type: %s\n", metricType)) +} + +func expectedOk() *callResult { + return getExpected(http.StatusOK, "ok") +} + +func getExpected(status int, response string) *callResult { + return &callResult{ + status: status, + response: response, + } +} + +func getExpectedObj(metricType string, metricName string, delta *int64, value *float64) *callResult { + return &callResult{ + status: 200, + responseObj: &model.Metrics{ + ID: metricName, + MType: metricType, + Delta: delta, + Value: value, + }, + } +} + +func getMethods() []string { + return []string{ + http.MethodPost, + http.MethodGet, + http.MethodHead, + http.MethodPut, + http.MethodPatch, + http.MethodDelete, + http.MethodOptions, + http.MethodTrace, + } +} + +func getMetricType() []string { + return []string{ + gaugeMetricName, + counterMetricName, + "test", + "", + } +} + +func getMetricName() []string { + return []string{ + "test", + "", + } +} + +func getMetricValue() []string { + return []string{ + "100", + "100.001", + "test", + "", + } +} + +func 
createCounterMetric(name string, value float64) metrics.Metric { + return createMetric(types.NewCounterMetric, name, value) +} + +func createGaugeMetric(name string, value float64) metrics.Metric { + return createMetric(types.NewGaugeMetric, name, value) +} + +func createMetric(metricFactory func(string) metrics.Metric, name string, value float64) metrics.Metric { + metric := metricFactory(name) + metric.SetValue(value) + return metric +} + +func (t *testConf) SignMetrics() bool { + return t.singEnabled +} + +func (t *testConf) GetKey() []byte { + return t.key +} + +func (t testDBStorage) Ping(context.Context) error { + return nil +} + +func (t testDBStorage) Close() error { + return nil +} + +func (t *testDBStorage) UpdateItems(ctx context.Context, records []*database.DBItem) error { + // TODO: implement + panic("not implement") +} + +func (t *testDBStorage) ReadItem(ctx context.Context, metricType string, metricName string) (*database.DBItem, error) { + // TODO: implement + panic("not implement") +} + +func (t *testDBStorage) ReadAllItems(ctx context.Context) ([]*database.DBItem, error) { + // TODO: implement + panic("not implement") +} + +func Example() { + // httpServer configuration. + ctx := context.Background() + serverURL := "http://localhost:8080" + + // Create metric. + metricName := "metricName" + metricType := "counter" + metricValue := int64(100) + metricValueString := strconv.FormatInt(metricValue, 10) + metricModel := model.Metrics{ + ID: metricName, + MType: metricType, + Delta: &metricValue, + } + + // Send request and handle response function. 
+ sendMetricRequest := func(request *http.Request) { + client := http.Client{} + response, err := client.Do(request) + if err != nil { + log.Fatal(err) + } + defer response.Body.Close() + + content, err := io.ReadAll(response.Body) + if err != nil { + log.Fatal(err) + } + + stringContent := string(content) + if response.StatusCode != http.StatusOK { + log.Fatal(err) + } + + log.Print(stringContent) + } + + // Use JSON model to update single metric value... + var buffer bytes.Buffer + err := json.NewEncoder(&buffer).Encode(metricModel) + if err != nil { + log.Fatal(err) + } + request, err := http.NewRequestWithContext(ctx, http.MethodPost, serverURL+"/update", &buffer) + if err != nil { + log.Fatal(err) + } + request.Header.Add("Content-Type", "application/json") + sendMetricRequest(request) + + // ... and get single metric value. + request, err = http.NewRequestWithContext(ctx, http.MethodPost, serverURL+"/value", &buffer) + if err != nil { + log.Fatal(err) + } + request.Header.Add("Content-Type", "application/json") + sendMetricRequest(request) + + // Use URL path params to update single metric value... + request, err = http.NewRequestWithContext(ctx, http.MethodPost, serverURL+"/update/"+metricType+"/"+metricName+"/"+metricValueString, nil) + if err != nil { + log.Fatal(err) + } + sendMetricRequest(request) + + // ... and get single metric value. + request, err = http.NewRequestWithContext(ctx, http.MethodGet, serverURL+"/value/"+metricType+"/"+metricName, nil) + if err != nil { + log.Fatal(err) + } + sendMetricRequest(request) + + // Use JSON model to update batch metrics values. + buffer.Reset() + err = json.NewEncoder(&buffer).Encode([]model.Metrics{metricModel}) + if err != nil { + log.Fatal(err) + } + request, err = http.NewRequestWithContext(ctx, http.MethodPost, serverURL+"/updates", &buffer) + if err != nil { + log.Fatal(err) + } + request.Header.Add("Content-Type", "application/json") + sendMetricRequest(request) + + // Get metrics report. 
+ request, err = http.NewRequestWithContext(ctx, http.MethodGet, serverURL+"/", nil) + if err != nil { + log.Fatal(err) + } +} diff --git a/go.mod b/go.mod index 04bacf1..5adb3d5 100644 --- a/go.mod +++ b/go.mod @@ -1,3 +1,36 @@ module github.com/MlDenis/prometheus_wannabe -go 1.21rc2 +go 1.21 + +require ( + github.com/caarlos0/env/v7 v7.1.0 + github.com/go-chi/chi/v5 v5.0.10 + github.com/jackc/pgx/v5 v5.4.2 + github.com/shirou/gopsutil/v3 v3.23.6 + github.com/sirupsen/logrus v1.9.3 + github.com/stretchr/testify v1.8.4 + golang.org/x/sync v0.1.0 +) + +require go.uber.org/multierr v1.10.0 // indirect + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/go-ole/go-ole v1.2.6 // indirect + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect + github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect + github.com/rogpeppe/go-internal v1.11.0 // indirect + github.com/shoenig/go-m1cpu v0.1.6 // indirect + github.com/stretchr/objx v0.5.0 // indirect + github.com/tklauser/go-sysconf v0.3.11 // indirect + github.com/tklauser/numcpus v0.6.0 // indirect + github.com/yusufpapurcu/wmi v1.2.3 // indirect + go.uber.org/zap v1.26.0 + golang.org/x/crypto v0.9.0 // indirect + golang.org/x/sys v0.9.0 // indirect + golang.org/x/text v0.9.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..2ef5d75 --- /dev/null +++ b/go.sum @@ -0,0 +1,79 @@ +github.com/caarlos0/env/v7 v7.1.0 h1:9lzTF5amyQeWHZzuZeKlCb5FWSUxpG1js43mhbY8ozg= +github.com/caarlos0/env/v7 v7.1.0/go.mod h1:LPPWniDUq4JaO6Q41vtlyikhMknqymCLBw0eX4dcH1E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 
h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-chi/chi/v5 v5.0.10 h1:rLz5avzKpjqxrYwXNfmjkrYYXOyLJd37pz53UFHC6vk= +github.com/go-chi/chi/v5 v5.0.10/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8= +github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY= +github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk= +github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx/v5 v5.4.2 h1:u1gmGDwbdRUZiwisBm/Ky2M14uQyUP65bG8+20nnyrg= +github.com/jackc/pgx/v5 v5.4.2/go.mod h1:q6iHT8uDNXWiFNOlRqJzBTaSH3+2xCXkokxHZC5qWFY= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4= +github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw= +github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE= +github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= +github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= +github.com/shirou/gopsutil/v3 v3.23.6 h1:5y46WPI9QBKBbK7EEccUPNXpJpNrvPuTD0O2zHEHT08= +github.com/shirou/gopsutil/v3 v3.23.6/go.mod h1:j7QX50DrXYggrpN30W0Mo+I4/8U2UUIQrnrhqUeWrAU= +github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM= +github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ= +github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU= +github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k= +github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= +github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify 
v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/tklauser/go-sysconf v0.3.11 h1:89WgdJhk5SNwJfu+GKyYveZ4IaJ7xAkecBo+KdJV0CM= +github.com/tklauser/go-sysconf v0.3.11/go.mod h1:GqXfhXY3kiPa0nAXPDIQIWzJbMCB7AmcWpGR8lSZfqI= +github.com/tklauser/numcpus v0.6.0 h1:kebhY2Qt+3U6RNK7UqpYNA+tJ23IBEGKkB7JQBfDYms= +github.com/tklauser/numcpus v0.6.0/go.mod h1:FEZLMke0lhOUG6w2JadTzp0a+Nl8PF/GFkQ5UVIcaL4= +github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw= +github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0= +go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk= +go.uber.org/goleak v1.2.0/go.mod h1:XJYK+MuIchqpmGmUSAzotztawfKvYLUIgg7guXrwVUo= +go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ= +go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= +go.uber.org/zap v1.26.0 h1:sI7k6L95XOKS281NhVKOFCUNIvv9e0w4BF8N3u+tCRo= +go.uber.org/zap v1.26.0/go.mod h1:dtElttAiwGvoJ/vj4IwHBS/gXsEu/pZ50mUIRWuG0so= +golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= +golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= +golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s= +golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/text v0.9.0 
h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/internal/README.md b/internal/README.md index 081537e..187e080 100644 --- a/internal/README.md +++ b/internal/README.md @@ -1,3 +1,3 @@ # internal -В данной директории и её поддиректориях будет содержаться имплементация вашего сервиса \ No newline at end of file +This directory and its subdirectories contain the implementation of the service \ No newline at end of file diff --git a/internal/config/config.go b/internal/config/config.go new file mode 100644 index 0000000..01e36d3 --- /dev/null +++ b/internal/config/config.go @@ -0,0 +1,42 @@ +package config + +import ( + "time" + + "github.com/sirupsen/logrus" +) + +type Config struct { + Key string `env:"KEY"` + ServerURL string `env:"ADDRESS"` + PushRateLimit int `env:"RATE_LIMIT"` + PushTimeout int `env:"PUSH_TIMEOUT"` + SendMetricsInterval int `env:"REPORT_INTERVAL"` + UpdateMetricsInterval int `env:"POLL_INTERVAL"` + LogLevel logrus.Level `env:"LOG_LEVEL"` + CollectMetricsList []string +} + +func (c *Config) MetricsList() []string { + return c.CollectMetricsList +} + +func (c *Config) MetricsServerURL() string { + return c.ServerURL +} + +func (c *Config) PushMetricsTimeout() time.Duration { + 
return time.Duration(c.PushTimeout) * time.Second +} + +func (c *Config) ParallelLimit() int { + return c.PushRateLimit +} + +func (c *Config) GetKey() []byte { + return []byte(c.Key) +} + +func (c *Config) SignMetrics() bool { + return c.Key != "" +} diff --git a/internal/converter/converter.go b/internal/converter/converter.go new file mode 100644 index 0000000..13a71f7 --- /dev/null +++ b/internal/converter/converter.go @@ -0,0 +1,22 @@ +package converter + +import ( + "fmt" + "strconv" +) + +func ToFloat64(str string) (float64, error) { + return strconv.ParseFloat(str, 64) +} + +func ToInt64(str string) (int64, error) { + return strconv.ParseInt(str, 10, 64) +} + +func FloatToString(num float64) string { + return fmt.Sprintf("%g", num) +} + +func IntToString(num int64) string { + return strconv.FormatInt(num, 10) +} diff --git a/internal/converter/converter_test.go b/internal/converter/converter_test.go new file mode 100644 index 0000000..a9b9f7f --- /dev/null +++ b/internal/converter/converter_test.go @@ -0,0 +1,217 @@ +package converter + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestToFloat64(t *testing.T) { + tests := []struct { + name string + value string + expectError bool + expected float64 + }{ + { + name: "zero_success", + value: "0", + expectError: false, + expected: 0, + }, + { + name: "positive_success", + value: "100", + expectError: false, + expected: 100, + }, + { + name: "positive_float_success", + value: "100.001", + expectError: false, + expected: 100.001, + }, + { + name: "negative_success", + value: "-100", + expectError: false, + expected: -100, + }, + { + name: "negative_float_success", + value: "-100.001", + expectError: false, + expected: -100.001, + }, + { + name: "emmpty_fail", + value: "", + expectError: true, + }, + { + name: "str_fail", + value: "str", + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual, err := ToFloat64(tt.value) + if 
tt.expectError { + assert.Error(t, err) + } else { + assert.Equal(t, tt.expected, actual) + } + }) + } +} + +func TestToInt64(t *testing.T) { + tests := []struct { + name string + value string + expectError bool + expected int64 + }{ + { + name: "zero_success", + value: "0", + expectError: false, + expected: 0, + }, + { + name: "positive_success", + value: "100", + expectError: false, + expected: 100, + }, + { + name: "positive_float_fail", + value: "100.001", + expectError: true, + }, + { + name: "negative_success", + value: "-100", + expectError: false, + expected: -100, + }, + { + name: "negative_float_fail", + value: "-100.001", + expectError: true, + }, + { + name: "emmpty_fail", + value: "", + expectError: true, + }, + { + name: "str_fail", + value: "str", + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual, err := ToInt64(tt.value) + if tt.expectError { + assert.Error(t, err) + } else { + assert.Equal(t, tt.expected, actual) + } + }) + } +} + +func TestFloatToString(t *testing.T) { + tests := []struct { + name string + value float64 + expected string + }{ + { + name: "zero", + value: 0, + expected: "0", + }, + { + name: "positive", + value: 100, + expected: "100", + }, + { + name: "positive_float", + value: 100.001, + expected: "100.001", + }, + { + name: "negative", + value: -100, + expected: "-100", + }, + { + name: "negative_float", + value: -100.001, + expected: "-100.001", + }, + { + name: "positive_double", + value: 100.5555555555555555555555, + expected: "100.55555555555556", + }, + { + name: "negative_double", + value: -100.5555555555555555555555, + expected: "-100.55555555555556", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.expected, FloatToString(tt.value)) + }) + } +} + +func TestIntToString(t *testing.T) { + tests := []struct { + name string + value int64 + expected string + }{ + { + name: "zero", + value: 0, + expected: "0", + }, + { + 
name: "positive", + value: 100, + expected: "100", + }, + { + name: "negative", + value: -100, + expected: "-100", + }, + { + name: "positive_long", + value: 1000000000000000, + expected: "1000000000000000", + }, + { + name: "negative_long", + value: -1000000000000000, + expected: "-1000000000000000", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + assert.Equal(t, tt.expected, IntToString(tt.value)) + }) + } +} diff --git a/internal/database/postgre/00001_init.sql b/internal/database/postgre/00001_init.sql new file mode 100644 index 0000000..ad133f7 --- /dev/null +++ b/internal/database/postgre/00001_init.sql @@ -0,0 +1,19 @@ +-- +goose Up +CREATE TABLE IF NOT EXISTS metricType ( + id SMALLSERIAL PRIMARY KEY, + name TEXT +); + +CREATE TABLE IF NOT EXISTS metric ( + id SERIAL PRIMARY KEY, + name TEXT, + typeId SMALLSERIAL, + value DOUBLE PRECISION +); + +CREATE UNIQUE INDEX IF NOT EXISTS metric_name_type_idx ON metric (name, typeId); + +-- +goose Down +DROP INDEX IF EXISTS metric_name_type_idx; +DROP TABLE IF EXISTS metric; +DROP TABLE IF EXISTS metricType; diff --git a/internal/database/postgre/initializer.go b/internal/database/postgre/initializer.go new file mode 100644 index 0000000..194dfed --- /dev/null +++ b/internal/database/postgre/initializer.go @@ -0,0 +1,68 @@ +package postgre + +import ( + "context" + "database/sql" + + "github.com/MlDenis/prometheus_wannabe/internal/logger" + + _ "github.com/jackc/pgx/v5/stdlib" + "github.com/sirupsen/logrus" +) + +func initDB(ctx context.Context, connectionString string) (*sql.DB, error) { + logrus.Info("Initialize database schema") + + conn, err := sql.Open("pgx", connectionString) + if err != nil { + return nil, logger.WrapError("open db connection", err) + } + + err = conn.PingContext(ctx) + if err != nil { + return nil, logger.WrapError("ping db connection", err) + } + + return conn, nil +} + +func getOrCreateMetricTypeID(ctx context.Context, conn *sql.DB, typeName string) (int, 
error) { + var typeID int + err := conn.QueryRowContext(ctx, "SELECT id FROM metricType WHERE name = $1", typeName).Scan(&typeID) + if err != nil { + if err == sql.ErrNoRows { + err = conn.QueryRowContext(ctx, "INSERT INTO metricType(name) VALUES ($1) RETURNING id", typeName).Scan(&typeID) + } + if err != nil { + return 0, err + } + } + return typeID, nil +} + +func getOrCreateMetricID(ctx context.Context, conn *sql.DB, metricTypeName string, metricName string) (int, error) { + var metricID int + metricTypeID, err := getOrCreateMetricTypeID(ctx, conn, metricTypeName) + if err != nil { + return 0, err + } + err = conn.QueryRowContext(ctx, "SELECT id FROM metric WHERE name = $1 AND typeId = $2", metricName, metricTypeID).Scan(&metricID) + if err != nil { + if err == sql.ErrNoRows { + err = conn.QueryRowContext(ctx, "INSERT INTO metric(name, typeId) VALUES ($1, $2) RETURNING id", metricName, metricTypeID).Scan(&metricID) + } + if err != nil { + return 0, err + } + } + return metricID, nil +} + +func updateOrCreateMetric(ctx context.Context, conn *sql.DB, metricTypeName string, metricName string, metricValue float64) error { + metricID, err := getOrCreateMetricID(ctx, conn, metricTypeName, metricName) + if err != nil { + return err + } + _, err = conn.ExecContext(ctx, "UPDATE metric SET value = $1 WHERE id = $2", metricValue, metricID) + return err +} diff --git a/internal/database/postgre/postgreDataBase.go b/internal/database/postgre/postgreDataBase.go new file mode 100644 index 0000000..247c6d3 --- /dev/null +++ b/internal/database/postgre/postgreDataBase.go @@ -0,0 +1,154 @@ +package postgre + +import ( + "context" + "database/sql" + + "github.com/jackc/pgx/v5" + "github.com/sirupsen/logrus" + + "github.com/MlDenis/prometheus_wannabe/internal/database" +) + +type PostgresDataaBaseConfig interface { + GetConnectionString() string +} + +type postgresDataBase struct { + conn *sql.DB +} + +func NewPostgresDataBase(ctx context.Context, conf PostgresDataaBaseConfig) 
(database.DataBase, error) { + conn, err := initDB(ctx, conf.GetConnectionString()) + if err != nil { + return nil, err + } + + return &postgresDataBase{conn: conn}, nil +} + +func (p *postgresDataBase) UpdateItems(ctx context.Context, records []*database.DBItem) error { + return p.callInTransaction(ctx, func(ctx context.Context, tx *sql.Tx) error { + for _, record := range records { + // statements for stored procedure are stored in a db + _, err := tx.ExecContext(ctx, "CALL UpdateOrCreateMetric"+"(@metricType, @metricName, @metricValue)", pgx.NamedArgs{ + "metricType": record.MetricType.String, + "metricName": record.Name.String, + "metricValue": record.Value.Float64}) + + if err != nil { + return err + } + } + + return nil + }) +} + +func (p *postgresDataBase) ReadItem(ctx context.Context, metricType string, metricName string) (*database.DBItem, error) { + result, err := p.callInTransactionResult(ctx, func(ctx context.Context, tx *sql.Tx) ([]*database.DBItem, error) { + const command = "SELECT mt.name, m.name, m.value " + + "FROM metric m " + + "JOIN metricType mt ON m.typeId = mt.id " + + "WHERE " + + " m.name = @metricName " + + " and mt.name = @metricType" + + return p.readRecords(ctx, tx, command, pgx.NamedArgs{ + "metricType": metricType, + "metricName": metricName, + }) + }) + + if err != nil { + return nil, err + } + + count := len(result) + if count == 0 { + return nil, nil + } + + if count > 1 { + logrus.Errorf("More than one metric in logical primary key: %v, %v", metricType, metricName) + } + + return result[0], nil +} + +func (p *postgresDataBase) ReadAllItems(ctx context.Context) ([]*database.DBItem, error) { + return p.callInTransactionResult(ctx, func(ctx context.Context, tx *sql.Tx) ([]*database.DBItem, error) { + const command = "SELECT mt.name, m.name, m.value " + + "FROM metric m " + + "JOIN metricType mt on m.typeId = mt.id" + + return p.readRecords(ctx, tx, command) + }) +} + +func (p *postgresDataBase) Ping(ctx context.Context) error { + 
return p.conn.PingContext(ctx) +} + +func (p *postgresDataBase) Close() error { + return p.conn.Close() +} + +func (p *postgresDataBase) callInTransaction(ctx context.Context, action func(context.Context, *sql.Tx) error) error { + _, err := p.callInTransactionResult(ctx, func(ctx context.Context, tx *sql.Tx) ([]*database.DBItem, error) { + return nil, action(ctx, tx) + }) + + return err +} + +func (p *postgresDataBase) callInTransactionResult(ctx context.Context, action func(context.Context, *sql.Tx) ([]*database.DBItem, error)) ([]*database.DBItem, error) { + tx, err := p.conn.BeginTx(ctx, &sql.TxOptions{ReadOnly: false}) + if err != nil { + return nil, err + } + + result, err := action(ctx, tx) + if err != nil { + rollbackError := tx.Rollback() + if rollbackError != nil { + logrus.Errorf("Fail to rollback transaction: %v", rollbackError) + } + + return nil, err + } + + err = tx.Commit() + if err != nil { + logrus.Errorf("Fail to commit transaction: %v", err) + return nil, err + } + + return result, nil +} + +func (p *postgresDataBase) readRecords(ctx context.Context, tx *sql.Tx, command string, args ...any) ([]*database.DBItem, error) { + rows, err := tx.QueryContext(ctx, command, args...) 
+ + if err != nil { + return nil, err + } + + result := []*database.DBItem{} + for rows.Next() { + var record database.DBItem + err = rows.Scan(&record.MetricType, &record.Name, &record.Value) + if err != nil { + return nil, err + } + + result = append(result, &record) + } + + err = rows.Err() + if err != nil { + return nil, err + } + + return result, nil +} diff --git a/internal/database/stub/stubDatabase.go b/internal/database/stub/stubDatabase.go new file mode 100644 index 0000000..ad2bf47 --- /dev/null +++ b/internal/database/stub/stubDatabase.go @@ -0,0 +1,29 @@ +package stub + +import ( + "context" + + "github.com/MlDenis/prometheus_wannabe/internal/database" +) + +type StubDataBase struct{} + +func (s *StubDataBase) UpdateItems(context.Context, []*database.DBItem) error { + return nil +} + +func (s *StubDataBase) ReadItem(context.Context, string, string) (*database.DBItem, error) { + return nil, nil +} + +func (s *StubDataBase) ReadAllItems(context.Context) ([]*database.DBItem, error) { + return nil, nil +} + +func (s *StubDataBase) Ping(context.Context) error { + return nil +} + +func (s *StubDataBase) Close() error { + return nil +} diff --git a/internal/database/types.go b/internal/database/types.go new file mode 100644 index 0000000..8d3049c --- /dev/null +++ b/internal/database/types.go @@ -0,0 +1,24 @@ +package database + +import ( + "context" + "io" + + "database/sql" + "database/sql/driver" +) + +type DataBase interface { + driver.Pinger + io.Closer + + UpdateItems(ctx context.Context, records []*DBItem) error + ReadItem(ctx context.Context, metricType string, metricName string) (*DBItem, error) + ReadAllItems(ctx context.Context) ([]*DBItem, error) +} + +type DBItem struct { + MetricType sql.NullString + Name sql.NullString + Value sql.NullFloat64 +} diff --git a/internal/hash/errors.go b/internal/hash/errors.go new file mode 100644 index 0000000..c2c9744 --- /dev/null +++ b/internal/hash/errors.go @@ -0,0 +1,5 @@ +package hash + +import "errors" + 
+var ErrMissedSecretKey = errors.New("secret key was not initialized")
diff --git a/internal/hash/hashHolder.go b/internal/hash/hashHolder.go
new file mode 100644
index 0000000..3205283
--- /dev/null
+++ b/internal/hash/hashHolder.go
@@ -0,0 +1,7 @@
+package hash
+
+import "hash"
+
+type HashHolder interface {
+	GetHash(hash hash.Hash) ([]byte, error)
+}
diff --git a/internal/hash/signer.go b/internal/hash/signer.go
new file mode 100644
index 0000000..3120959
--- /dev/null
+++ b/internal/hash/signer.go
@@ -0,0 +1,67 @@
+package hash
+
+import (
+	"hash"
+	"sync"
+
+	"github.com/MlDenis/prometheus_wannabe/internal/logger"
+
+	"crypto/hmac"
+	"crypto/sha256"
+	"encoding/hex"
+)
+
+type SignerConfig interface {
+	GetKey() []byte
+}
+
+type Signer struct {
+	hash hash.Hash // nil when no secret key is configured; guarded in GetSign
+	lock sync.Mutex
+}
+
+func NewSigner(config SignerConfig) *Signer {
+	var h hash.Hash
+	key := config.GetKey()
+	if len(key) != 0 { // was `key != nil`: []byte("") is non-nil, which would silently HMAC with an empty secret
+		h = hmac.New(sha256.New, key)
+	}
+
+	return &Signer{
+		hash: h,
+	}
+}
+
+func (s *Signer) GetSignString(holder HashHolder) (string, error) {
+	sign, err := s.GetSign(holder)
+	if err != nil {
+		return "", logger.WrapError("get sign", err)
+	}
+
+	return hex.EncodeToString(sign), nil
+}
+
+func (s *Signer) GetSign(holder HashHolder) ([]byte, error) {
+	if s.hash == nil {
+		return nil, logger.WrapError("get signature", ErrMissedSecretKey)
+	}
+	s.lock.Lock()
+	defer s.lock.Unlock()
+	defer s.hash.Reset() // LIFO: Reset runs before Unlock, so the shared hash is clean for the next caller
+
+	return holder.GetHash(s.hash)
+}
+
+func (s *Signer) CheckSign(holder HashHolder, signature string) (bool, error) {
+	sign, err := hex.DecodeString(signature)
+	if err != nil {
+		return false, logger.WrapError("decode signature", err)
+	}
+
+	holderSign, err := s.GetSign(holder)
+	if err != nil {
+		return false, logger.WrapError("get holder hash", err)
+	}
+
+	return hmac.Equal(holderSign, sign), nil // constant-time compare, per crypto/hmac docs
+}
diff --git a/internal/hash/signer_test.go b/internal/hash/signer_test.go
new file mode 100644
index 0000000..7adc22f
--- /dev/null
+++ b/internal/hash/signer_test.go
@@ -0,0 +1 @@ +package hash diff --git a/internal/logger/logger.go b/internal/logger/logger.go new file mode 100644 index 0000000..c314306 --- /dev/null +++ b/internal/logger/logger.go @@ -0,0 +1,63 @@ +package logger + +import ( + "fmt" + + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +const ( + LogDebug = "debug" + LogInfo = "info" +) + +// Global logger +var log *zap.Logger +var SugarLogger *zap.SugaredLogger + +// Initializing the logger with a given debug level +func InitLogger(debugLevel string) { + var level zapcore.Level + switch debugLevel { + case LogInfo: + level = zapcore.InfoLevel + case LogDebug: + level = zapcore.DebugLevel + default: + level = zapcore.InfoLevel + } + + config := zap.NewProductionConfig() + config.Level.SetLevel(level) + var err error + log, err = config.Build() + if err != nil { + panic(err) + } + SugarLogger = log.Sugar() +} + +// Error logging +func Error(message string) { + if log != nil { + log.Error(message) + } +} + +func ErrorObj(err error) { + if err != nil { + Error(err.Error()) + } +} + +func ErrorFormat(format string, v ...interface{}) { + Error(fmt.Sprintf(format, v...)) +} + +func WrapError(message string, err error) error { + wrap := fmt.Errorf("failed to "+message+": %w", err) + ErrorObj(wrap) + + return wrap +} diff --git a/internal/logger/logger_test.go b/internal/logger/logger_test.go new file mode 100644 index 0000000..90c66f6 --- /dev/null +++ b/internal/logger/logger_test.go @@ -0,0 +1 @@ +package logger diff --git a/internal/metrics/errors.go b/internal/metrics/errors.go new file mode 100644 index 0000000..bef7a72 --- /dev/null +++ b/internal/metrics/errors.go @@ -0,0 +1,16 @@ +package metrics + +import "errors" + +var ( + ErrEmptyURL = errors.New("empty url string") + ErrFieldNameNotFound = errors.New("field name was not found") + ErrInvalidRecordMetricType = errors.New("invalid record metric type") + ErrInvalidRecordMetricName = errors.New("invalid record metric name") + ErrInvalidRecordMetricValue = 
errors.New("invalid record metric value") + ErrInvalidSignature = errors.New("invalid signature") + ErrMetricNotFound = errors.New("metric not found") + ErrMetricValueMissed = errors.New("metric value is missed") + ErrUnexpectedStatusCode = errors.New("unexpected status code") + ErrUnknownMetricType = errors.New("unknown metric type") +) diff --git a/internal/metrics/html/htmlPageBuilder.go b/internal/metrics/html/htmlPageBuilder.go new file mode 100644 index 0000000..2af19b6 --- /dev/null +++ b/internal/metrics/html/htmlPageBuilder.go @@ -0,0 +1,5 @@ +package html + +type HTMLPageBuilder interface { + BuildMetricsPage(metricsByType map[string]map[string]string) string +} diff --git a/internal/metrics/html/simplePageBuilder.go b/internal/metrics/html/simplePageBuilder.go new file mode 100644 index 0000000..6308bd7 --- /dev/null +++ b/internal/metrics/html/simplePageBuilder.go @@ -0,0 +1,46 @@ +package html + +import ( + "fmt" + "sort" + "strings" +) + +type simplePageBuilder struct { +} + +func NewSimplePageBuilder() HTMLPageBuilder { + return &simplePageBuilder{} +} + +func (s simplePageBuilder) BuildMetricsPage(metricsByType map[string]map[string]string) string { + sb := strings.Builder{} + sb.WriteString("") + + metricTypes := make([]string, len(metricsByType)) + i := 0 + for metricType := range metricsByType { + metricTypes[i] = metricType + i++ + } + sort.Strings(metricTypes) + + for _, metricType := range metricTypes { + metricsList := metricsByType[metricType] + metricNames := make([]string, len(metricsList)) + j := 0 + for metricName := range metricsList { + metricNames[j] = metricName + j++ + } + sort.Strings(metricNames) + + for _, metricName := range metricNames { + sb.WriteString(fmt.Sprintf("%v: %v", metricName, metricsList[metricName])) + sb.WriteString("
") + } + } + + sb.WriteString("") + return sb.String() +} diff --git a/internal/metrics/html/simplePageBuilder_test.go b/internal/metrics/html/simplePageBuilder_test.go new file mode 100644 index 0000000..e5c7c11 --- /dev/null +++ b/internal/metrics/html/simplePageBuilder_test.go @@ -0,0 +1,54 @@ +package html + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestSimplePageBuilder_BuildMetricsPage(t *testing.T) { + tests := []struct { + name string + counterMetrics map[string]string + gaugeMetrics map[string]string + expected string + }{ + { + name: "no_metric", + counterMetrics: map[string]string{}, + gaugeMetrics: map[string]string{}, + expected: "", + }, { + name: "all_metric", + counterMetrics: map[string]string{ + "metricName2": "300", + "metricName3": "-400", + "metricName1": "200"}, + gaugeMetrics: map[string]string{ + "metricName5": "300.003", + "metricName4": "100.001", + "metricName6": "-400.004"}, + expected: "" + + "metricName1: 200
" + + "metricName2: 300
" + + "metricName3: -400
" + + "metricName4: 100.001
" + + "metricName5: 300.003
" + + "metricName6: -400.004
" + + "", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + builder := NewSimplePageBuilder() + metricsByType := map[string]map[string]string{ + "counter": tt.counterMetrics, + "gauge": tt.gaugeMetrics, + } + + actual := builder.BuildMetricsPage(metricsByType) + assert.Equal(t, tt.expected, actual) + }) + } +} diff --git a/internal/metrics/metric.go b/internal/metrics/metric.go new file mode 100644 index 0000000..9903a80 --- /dev/null +++ b/internal/metrics/metric.go @@ -0,0 +1,14 @@ +package metrics + +import "github.com/MlDenis/prometheus_wannabe/internal/hash" + +type Metric interface { + hash.HashHolder + + GetName() string + GetType() string + GetValue() float64 + GetStringValue() string + SetValue(value float64) float64 + ResetState() +} diff --git a/internal/metrics/metricsProvider.go b/internal/metrics/metricsProvider.go new file mode 100644 index 0000000..54345bd --- /dev/null +++ b/internal/metrics/metricsProvider.go @@ -0,0 +1,8 @@ +package metrics + +import "context" + +type MetricsProvider interface { + GetMetrics() <-chan (Metric) + Update(ctx context.Context) error +} diff --git a/internal/metrics/model/metricConverter.go b/internal/metrics/model/metricConverter.go new file mode 100644 index 0000000..24e4d74 --- /dev/null +++ b/internal/metrics/model/metricConverter.go @@ -0,0 +1,104 @@ +package model + +import ( + "github.com/MlDenis/prometheus_wannabe/internal/hash" + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" + + "github.com/sirupsen/logrus" +) + +type UnknownMetricTypeError struct { + UnknownType string +} + +func (e *UnknownMetricTypeError) Error() string { + return "unknown metric type: " + e.UnknownType +} + +type MetricsConverterConfig interface { + SignMetrics() bool +} + +type MetricsConverter struct { + signer *hash.Signer + signMetrics bool +} + +func 
NewMetricsConverter(conf MetricsConverterConfig, signer *hash.Signer) *MetricsConverter { + return &MetricsConverter{ + signMetrics: conf.SignMetrics(), + signer: signer, + } +} + +func (c *MetricsConverter) ToModelMetric(metric metrics.Metric) (*Metrics, error) { + modelMetric := &Metrics{ + ID: metric.GetName(), + MType: metric.GetType(), + } + + metricValue := metric.GetValue() + switch modelMetric.MType { + case "counter": + counterValue := int64(metricValue) + modelMetric.Delta = &counterValue + case "gauge": + modelMetric.Value = &metricValue + default: + logrus.Errorf("unknown metric type: %v", modelMetric.MType) + return nil, &UnknownMetricTypeError{UnknownType: modelMetric.MType} + } + + if c.signMetrics { + signature, err := c.signer.GetSignString(metric) + if err != nil { + return nil, logger.WrapError("get signature string", err) + } + + modelMetric.Hash = signature + } + + return modelMetric, nil +} + +func (c *MetricsConverter) FromModelMetric(modelMetric *Metrics) (metrics.Metric, error) { + var metric metrics.Metric + var value float64 + + switch modelMetric.MType { + case "counter": + if modelMetric.Delta == nil { + return nil, logger.WrapError("convert metric", metrics.ErrMetricValueMissed) + } + + metric = types.NewCounterMetric(modelMetric.ID) + value = float64(*modelMetric.Delta) + case "gauge": + if modelMetric.Value == nil { + return nil, logger.WrapError("convert metric", metrics.ErrMetricValueMissed) + } + + metric = types.NewGaugeMetric(modelMetric.ID) + value = *modelMetric.Value + default: + logrus.Errorf("unknown metric type: %v", modelMetric.MType) + return nil, &UnknownMetricTypeError{UnknownType: modelMetric.MType} + } + + metric.SetValue(value) + + if c.signMetrics && modelMetric.Hash != "" { + ok, err := c.signer.CheckSign(metric, modelMetric.Hash) + if err != nil { + return nil, logger.WrapError("check signature", err) + } + + if !ok { + return nil, logger.WrapError("check signature", metrics.ErrInvalidSignature) + } + } + + 
return metric, nil +} diff --git a/internal/metrics/model/model.go b/internal/metrics/model/model.go new file mode 100644 index 0000000..be239a4 --- /dev/null +++ b/internal/metrics/model/model.go @@ -0,0 +1,9 @@ +package model + +type Metrics struct { + ID string `json:"id"` // metric name + MType string `json:"type"` // a parameter that takes the value gauge or counter + Delta *int64 `json:"delta,omitempty"` // metric value in case of passing counter + Value *float64 `json:"value,omitempty"` // metric value in case of passing gauge + Hash string `json:"hash,omitempty"` // hash value +} diff --git a/internal/metrics/provider/agregate/aggregateMetricsProvider.go b/internal/metrics/provider/agregate/aggregateMetricsProvider.go new file mode 100644 index 0000000..cae55d3 --- /dev/null +++ b/internal/metrics/provider/agregate/aggregateMetricsProvider.go @@ -0,0 +1,61 @@ +package agregate + +import ( + "context" + "sync" + + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + + "golang.org/x/sync/errgroup" +) + +type aggregateMetricsProvider struct { + providers []metrics.MetricsProvider +} + +func NewAggregateMetricsProvider(providers ...metrics.MetricsProvider) metrics.MetricsProvider { + return &aggregateMetricsProvider{ + providers: providers, + } +} + +func (a *aggregateMetricsProvider) GetMetrics() <-chan metrics.Metric { + result := make(chan metrics.Metric) + + go func() { + wg := sync.WaitGroup{} + for _, provider := range a.providers { + wg.Add(1) + go func(p metrics.MetricsProvider) { + for metric := range p.GetMetrics() { + result <- metric + } + wg.Done() + }(provider) + } + + wg.Wait() + close(result) + }() + + return result +} + +func (a *aggregateMetricsProvider) Update(ctx context.Context) error { + eg, ctx := errgroup.WithContext(ctx) + + for _, provider := range a.providers { + provider := provider + eg.Go(func() error { + err := provider.Update(ctx) + if err != nil { + return 
logger.WrapError("update metrics", err) + } + + return nil + }) + } + + return eg.Wait() +} diff --git a/internal/metrics/provider/agregate/agregateMetricsProvider_test.go b/internal/metrics/provider/agregate/agregateMetricsProvider_test.go new file mode 100644 index 0000000..aef18b5 --- /dev/null +++ b/internal/metrics/provider/agregate/agregateMetricsProvider_test.go @@ -0,0 +1,112 @@ +package agregate + +import ( + "context" + "testing" + + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" + "github.com/MlDenis/prometheus_wannabe/internal/test" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +type aggregateMetricsProviderMock struct { + mock.Mock +} + +func TestAggregateMetricsProvider_GetMetrics(t *testing.T) { + counter := types.NewCounterMetric("counterMetric") + gauge := types.NewCounterMetric("gaugeMetric") + + tests := []struct { + name string + firstProviderMetrics []metrics.Metric + secondProviderMetrics []metrics.Metric + expectedMetrics []metrics.Metric + }{ + { + name: "empty_metrics", + firstProviderMetrics: []metrics.Metric{}, + secondProviderMetrics: []metrics.Metric{}, + expectedMetrics: []metrics.Metric{}, + }, + { + name: "success", + firstProviderMetrics: []metrics.Metric{counter}, + secondProviderMetrics: []metrics.Metric{gauge}, + expectedMetrics: []metrics.Metric{counter, gauge}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + firstProvider := new(aggregateMetricsProviderMock) + secondProvider := new(aggregateMetricsProviderMock) + + firstProvider.On("GetMetrics").Return(test.ArrayToChan(tt.firstProviderMetrics)) + secondProvider.On("GetMetrics").Return(test.ArrayToChan(tt.secondProviderMetrics)) + + provider := NewAggregateMetricsProvider(firstProvider, secondProvider) + actualMetrics := test.ChanToArray(provider.GetMetrics()) + + assert.ElementsMatch(t, tt.expectedMetrics, actualMetrics) + + 
firstProvider.AssertCalled(t, "GetMetrics") + secondProvider.AssertCalled(t, "GetMetrics") + }) + } +} + +func TestAggregateMetricsProvider_Update(t *testing.T) { + ctx := context.Background() + tests := []struct { + name string + firstProviderError error + secondProviderError error + expectedError error + }{ + { + name: "first_provider_error", + firstProviderError: test.ErrTest, + expectedError: test.ErrTest, + }, + { + name: "second_provider_error", + secondProviderError: test.ErrTest, + expectedError: test.ErrTest, + }, + { + name: "success", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + firstProvider := new(aggregateMetricsProviderMock) + secondProvider := new(aggregateMetricsProviderMock) + + firstProvider.On("Update", mock.Anything).Return(tt.firstProviderError) + secondProvider.On("Update", mock.Anything).Return(tt.secondProviderError) + + provider := NewAggregateMetricsProvider(firstProvider, secondProvider) + actualError := provider.Update(ctx) + + assert.ErrorIs(t, actualError, tt.expectedError) + + firstProvider.AssertCalled(t, "Update", mock.Anything) + secondProvider.AssertCalled(t, "Update", mock.Anything) + }) + } +} + +func (a *aggregateMetricsProviderMock) Update(ctx context.Context) error { + args := a.Called(ctx) + return args.Error(0) +} + +func (a *aggregateMetricsProviderMock) GetMetrics() <-chan metrics.Metric { + args := a.Called() + return args.Get(0).(<-chan metrics.Metric) +} diff --git a/internal/metrics/provider/custom/customMetricProvider_test.go b/internal/metrics/provider/custom/customMetricProvider_test.go new file mode 100644 index 0000000..85f80e1 --- /dev/null +++ b/internal/metrics/provider/custom/customMetricProvider_test.go @@ -0,0 +1,90 @@ +package custom + +import ( + "context" + "fmt" + "runtime" + "time" + + "github.com/sirupsen/logrus" + + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + 
"github.com/MlDenis/prometheus_wannabe/internal/metrics/types" + + "github.com/shirou/gopsutil/v3/cpu" + "github.com/shirou/gopsutil/v3/mem" + "golang.org/x/sync/errgroup" +) + +type GopsutilMetricsProvider struct { + totalMetric metrics.Metric + freeMetric metrics.Metric + cpuUtilizationMetrics map[int]metrics.Metric +} + +func NewGopsutilMetricsProvider() *GopsutilMetricsProvider { + numCPU := runtime.NumCPU() + cpuUtilizationMetrics := make(map[int]metrics.Metric, numCPU) + for i := 0; i < numCPU; i++ { + cpuUtilizationMetrics[i] = types.NewGaugeMetric(fmt.Sprintf("CPUutilization%v", i+1)) + } + + return &GopsutilMetricsProvider{ + totalMetric: types.NewGaugeMetric("TotalMemory"), + freeMetric: types.NewGaugeMetric("FreeMemory"), + cpuUtilizationMetrics: cpuUtilizationMetrics, + } +} + +func (g *GopsutilMetricsProvider) GetMetrics() <-chan metrics.Metric { + result := make(chan metrics.Metric) + go func() { + defer close(result) + result <- g.totalMetric + result <- g.freeMetric + for _, metric := range g.cpuUtilizationMetrics { + result <- metric + } + }() + + return result +} + +func (g *GopsutilMetricsProvider) Update(ctx context.Context) error { + eg, ctx := errgroup.WithContext(ctx) + eg.Go(func() error { return g.updateMemoryMetrics(ctx) }) + eg.Go(func() error { return g.updateCPUMetrics(ctx) }) + + return eg.Wait() +} + +func (g *GopsutilMetricsProvider) updateMemoryMetrics(ctx context.Context) error { + memoryStats, err := mem.VirtualMemoryWithContext(ctx) + if err != nil { + return logger.WrapError("get memory stats", err) + } + + g.totalMetric.SetValue(float64(memoryStats.Total)) + logrus.Infof("Updated metric: %v. value: %v", g.totalMetric.GetName(), g.totalMetric.GetStringValue()) + + g.freeMetric.SetValue(float64(memoryStats.Free)) + logrus.Infof("Updated metric: %v. 
value: %v", g.freeMetric.GetName(), g.freeMetric.GetStringValue()) + + return nil +} + +func (g *GopsutilMetricsProvider) updateCPUMetrics(ctx context.Context) error { + cpuStats, err := cpu.PercentWithContext(ctx, time.Millisecond*100, true) + if err != nil { + return logger.WrapError("get cpu stats", err) + } + + for i, val := range cpuStats { + metric := g.cpuUtilizationMetrics[i] + metric.SetValue(val) + logrus.Infof("Updated metric: %v. value: %v", metric.GetName(), metric.GetStringValue()) + } + + return nil +} diff --git a/internal/metrics/provider/custom/customMetricsProvider.go b/internal/metrics/provider/custom/customMetricsProvider.go new file mode 100644 index 0000000..6815af7 --- /dev/null +++ b/internal/metrics/provider/custom/customMetricsProvider.go @@ -0,0 +1,46 @@ +package custom + +import ( + "context" + "math/rand" + + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" + + "github.com/sirupsen/logrus" +) + +type customMetricsProvider struct { + poolMetric metrics.Metric + randomMetric metrics.Metric +} + +func NewCustomMetricsProvider() metrics.MetricsProvider { + return &customMetricsProvider{ + poolMetric: types.NewCounterMetric("PollCount"), + randomMetric: types.NewGaugeMetric("RandomValue"), + } +} + +func (c *customMetricsProvider) GetMetrics() <-chan metrics.Metric { + result := make(chan metrics.Metric) + go func() { + defer close(result) + result <- c.poolMetric + result <- c.randomMetric + }() + + return result +} + +func (c *customMetricsProvider) Update(context.Context) error { + logrus.Info("Start collect custom metrics") + + c.poolMetric.SetValue(1) + logrus.Infof("Updated metric: %v. value: %v", c.poolMetric.GetName(), c.poolMetric.GetStringValue()) + + c.randomMetric.SetValue(rand.Float64()) + logrus.Infof("Updated metric: %v. 
value: %v", c.randomMetric.GetName(), c.randomMetric.GetStringValue()) + + return nil +} diff --git a/internal/metrics/provider/gopsutil/gopsutilMetricProvider.go b/internal/metrics/provider/gopsutil/gopsutilMetricProvider.go new file mode 100644 index 0000000..a75dad7 --- /dev/null +++ b/internal/metrics/provider/gopsutil/gopsutilMetricProvider.go @@ -0,0 +1,93 @@ +package gopsutil + +import ( + "context" + "fmt" + "runtime" + "time" + + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" + + "github.com/shirou/gopsutil/v3/cpu" + "github.com/shirou/gopsutil/v3/mem" + "github.com/sirupsen/logrus" + "golang.org/x/sync/errgroup" +) + +const cpuInterval = 100 * time.Millisecond + +// GopsutilMetricsProvider is a provider of Gopsutil metrics. +type GopsutilMetricsProvider struct { + totalMetric metrics.Metric + freeMetric metrics.Metric + cpuUtilizationMetrics []metrics.Metric +} + +// NewGopsutilMetricsProvider create new instance of GopsutilMetricsProvider. 
+func NewGopsutilMetricsProvider() *GopsutilMetricsProvider { + numCPU := runtime.NumCPU() + cpuUtilizationMetrics := make([]metrics.Metric, numCPU) + for i := 0; i < numCPU; i++ { + cpuUtilizationMetrics[i] = types.NewGaugeMetric(fmt.Sprintf("CPUutilization%v", i+1)) + } + + return &GopsutilMetricsProvider{ + totalMetric: types.NewGaugeMetric("TotalMemory"), + freeMetric: types.NewGaugeMetric("FreeMemory"), + cpuUtilizationMetrics: cpuUtilizationMetrics, + } +} + +func (g *GopsutilMetricsProvider) GetMetrics() <-chan metrics.Metric { + result := make(chan metrics.Metric) + go func() { + defer close(result) + result <- g.totalMetric + result <- g.freeMetric + for _, metric := range g.cpuUtilizationMetrics { + result <- metric + } + }() + + return result +} + +func (g *GopsutilMetricsProvider) Update(ctx context.Context) error { + eg, ctx := errgroup.WithContext(ctx) + eg.Go(func() error { return g.updateMemoryMetrics(ctx) }) + eg.Go(func() error { return g.updateCPUMetrics(ctx) }) + + return eg.Wait() +} + +func (g *GopsutilMetricsProvider) updateMemoryMetrics(ctx context.Context) error { + memoryStats, err := mem.VirtualMemoryWithContext(ctx) + if err != nil { + return logger.WrapError("get memory stats", err) + } + + g.totalMetric.SetValue(float64(memoryStats.Total)) + logrus.Infof("Updated metric: %v. value: %v", g.totalMetric.GetName(), g.totalMetric.GetStringValue()) + + g.freeMetric.SetValue(float64(memoryStats.Free)) + logrus.Infof("Updated metric: %v. value: %v", g.freeMetric.GetName(), g.freeMetric.GetStringValue()) + + return nil +} + +func (g *GopsutilMetricsProvider) updateCPUMetrics(ctx context.Context) error { + cpuStats, err := cpu.PercentWithContext(ctx, cpuInterval, true) + if err != nil { + return logger.WrapError("get cpu stats", err) + } + + for i, val := range cpuStats { + metric := g.cpuUtilizationMetrics[i] + metric.SetValue(val) + logrus.Infof("Updated metric: %v. 
value: %v", metric.GetName(), metric.GetStringValue()) + } + + return nil +} diff --git a/internal/metrics/provider/gopsutil/gopsutilMetricProvider_test.go b/internal/metrics/provider/gopsutil/gopsutilMetricProvider_test.go new file mode 100644 index 0000000..7edfe33 --- /dev/null +++ b/internal/metrics/provider/gopsutil/gopsutilMetricProvider_test.go @@ -0,0 +1,53 @@ +package gopsutil + +import ( + "context" + "fmt" + "runtime" + "strings" + "testing" + + "github.com/MlDenis/prometheus_wannabe/internal/test" + + "github.com/stretchr/testify/assert" +) + +func TestGopsutilMetricsProvider_GetMetrics(t *testing.T) { + expected := []string{ + "FreeMemory", + "TotalMemory", + } + for i := 1; i < runtime.NumCPU()+1; i++ { + expected = append(expected, fmt.Sprintf("CPUutilization%d", i)) + } + + provider := NewGopsutilMetricsProvider() + actual := test.ChanToArray(provider.GetMetrics()) + + assert.Len(t, expected, len(actual)) + for _, actualMetric := range actual { + assert.Contains(t, expected, actualMetric.GetName()) + assert.Equal(t, actualMetric.GetStringValue(), "0") + } +} + +func TestGopsutilMetricsProvider_Update(t *testing.T) { + ctx := context.Background() + provider := NewGopsutilMetricsProvider() + assert.NoError(t, provider.Update(ctx)) + + actual := test.ChanToArray(provider.GetMetrics()) + + cpuChecked := false + for _, actualMetric := range actual { + name := actualMetric.GetName() + if name == "FreeMemory" || name == "TotalMemory" { + assert.NotEqual(t, actualMetric.GetStringValue(), "0") + } + + if strings.HasPrefix(name, "CPUutilization") && !cpuChecked { + cpuChecked = actualMetric.GetStringValue() != "0" + } + } + assert.True(t, cpuChecked) +} diff --git a/internal/metrics/provider/runtime/runtimeMetricsProvider.go b/internal/metrics/provider/runtime/runtimeMetricsProvider.go new file mode 100644 index 0000000..045d97b --- /dev/null +++ b/internal/metrics/provider/runtime/runtimeMetricsProvider.go @@ -0,0 +1,91 @@ +package runtime + +import ( + 
"context" + "fmt" + "reflect" + "runtime" + + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" + + "github.com/sirupsen/logrus" +) + +type runtimeMetricsProviderConfig interface { + MetricsList() []string +} + +type runtimeMetricsProvider struct { + metrics []metrics.Metric +} + +func NewRuntimeMetricsProvider(config runtimeMetricsProviderConfig) metrics.MetricsProvider { + metricNames := config.MetricsList() + metricsList := make([]metrics.Metric, len(metricNames)) + for i, metricName := range metricNames { + metricsList[i] = types.NewGaugeMetric(metricName) + } + + return &runtimeMetricsProvider{metrics: metricsList} +} + +func (p *runtimeMetricsProvider) Update(context.Context) error { + logrus.Info("Start collect runtime metrics") + stats := runtime.MemStats{} + runtime.ReadMemStats(&stats) + + var err error + for _, metric := range p.metrics { + metricName := metric.GetName() + metricValue, metricErr := getFieldValue(&stats, metricName) + if metricErr != nil { + err = logger.WrapError(fmt.Sprintf("get %s runtime metric value", metricName), metricErr) + logrus.Error(err) + continue + } + + metric.SetValue(metricValue) + } + + return err +} + +func (p *runtimeMetricsProvider) GetMetrics() <-chan metrics.Metric { + result := make(chan metrics.Metric) + go func() { + defer close(result) + for _, metric := range p.metrics { + result <- metric + } + }() + + return result +} + +func getFieldValue(stats *runtime.MemStats, fieldName string) (float64, error) { + r := reflect.ValueOf(stats) + f := reflect.Indirect(r).FieldByName(fieldName) + + value, ok := convertValue(f) + if !ok { + return value, logger.WrapError(fmt.Sprintf("get field with name %s", fieldName), metrics.ErrFieldNameNotFound) + } + + return value, nil +} + +func convertValue(value reflect.Value) (float64, bool) { + if value.CanInt() { + return float64(value.Int()), true + } 
+ if value.CanUint() { + return float64(value.Uint()), true + } + if value.CanFloat() { + return value.Float(), true + } + + return 0, false +} diff --git a/internal/metrics/provider/runtime/runtimeMetricsProvider_test.go b/internal/metrics/provider/runtime/runtimeMetricsProvider_test.go new file mode 100644 index 0000000..3b07c30 --- /dev/null +++ b/internal/metrics/provider/runtime/runtimeMetricsProvider_test.go @@ -0,0 +1,88 @@ +package runtime + +import ( + "context" + "testing" + + "github.com/MlDenis/prometheus_wannabe/internal/test" + + "github.com/stretchr/testify/assert" +) + +type config struct { + metricNames []string +} + +func (c *config) MetricsList() []string { + return c.metricNames +} + +func TestRuntimeMetricsProvider_Update(t *testing.T) { + type expected struct { + expectError bool + expectMetrics []string + } + + tests := []struct { + name string + metricNames []string + expected expected + }{ + { + name: "empty_metrics_list", + metricNames: []string{}, + expected: expected{ + expectError: false, + expectMetrics: []string{}, + }, + }, { + name: "unknown_metric_name", + metricNames: []string{"UnknownMetricName"}, + expected: expected{ + expectError: true, + expectMetrics: []string{}, + }, + }, { + name: "correct_metrics_list", + metricNames: []string{"Alloc", "LastGC"}, + expected: expected{ + expectError: false, + expectMetrics: []string{"Alloc", "LastGC"}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + provider := NewRuntimeMetricsProvider(&config{metricNames: tt.metricNames}) + err := provider.Update(ctx) + + if tt.expected.expectError { + assert.Error(t, err) + } else { + actualMetrics := test.ChanToArray(provider.GetMetrics()) + assert.Equal(t, len(tt.expected.expectMetrics), len(actualMetrics)) + for _, actualMetric := range actualMetrics { + assert.Contains(t, tt.expected.expectMetrics, actualMetric.GetName()) + } + } + }) + } 
+} + +func TestRuntimeMetricsProvider_GetMetrics(t *testing.T) { + expectedMetrics := []string{"Alloc", "TotalAlloc"} + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + provider := NewRuntimeMetricsProvider(&config{metricNames: expectedMetrics}) + assert.NoErrorf(t, provider.Update(ctx), "fail to update metrics") + + actualMetrics := test.ChanToArray(provider.GetMetrics()) + assert.Len(t, actualMetrics, len(expectedMetrics)) + for _, actualMetric := range actualMetrics { + assert.Contains(t, expectedMetrics, actualMetric.GetName()) + assert.NotEqual(t, actualMetric.GetStringValue(), "0") + } +} diff --git a/internal/metrics/sendler/http/httpMetricsSendler.go b/internal/metrics/sendler/http/httpMetricsSendler.go new file mode 100644 index 0000000..3da5038 --- /dev/null +++ b/internal/metrics/sendler/http/httpMetricsSendler.go @@ -0,0 +1,172 @@ +package http + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "sync" + "time" + + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/model" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/sendler" + + "github.com/sirupsen/logrus" + "golang.org/x/sync/errgroup" +) + +type metricsPusherConfig interface { + ParallelLimit() int + MetricsServerURL() string + PushMetricsTimeout() time.Duration +} + +type httpMetricsPusher struct { + parallelLimit int + client *http.Client + metricsServerURL string + pushTimeout time.Duration + converter *model.MetricsConverter +} + +var bufPool = sync.Pool{ + New: func() interface{} { + return new(bytes.Buffer) + }, +} + +func NewMetricsPusher(config metricsPusherConfig, converter *model.MetricsConverter) (sendler.MetricsPusher, error) { + serverURL, err := normalizeURL(config.MetricsServerURL()) + if err != nil { + return nil, logger.WrapError("normalize url", err) + } + + return 
&httpMetricsPusher{ + parallelLimit: config.ParallelLimit(), + client: &http.Client{}, + metricsServerURL: serverURL.String(), + pushTimeout: config.PushMetricsTimeout(), + converter: converter, + }, nil +} + +func (p *httpMetricsPusher) Push(ctx context.Context, metricsChan <-chan metrics.Metric) error { + eg, ctx := errgroup.WithContext(ctx) + + for i := 0; i < p.parallelLimit; i++ { + eg.Go(func() error { + for { + select { + case metric, ok := <-metricsChan: + if !ok { + return nil + } + + err := p.pushMetrics(ctx, []metrics.Metric{metric}) + if err != nil { + return err + } + case <-ctx.Done(): + return ctx.Err() + } + } + }) + } + + return eg.Wait() +} + +func (p *httpMetricsPusher) pushMetrics(ctx context.Context, metricsList []metrics.Metric) error { + metricsCount := len(metricsList) + if metricsCount == 0 { + logrus.Info("Nothing to push") + } + logrus.Infof("Push %v metrics", metricsCount) + + pushCtx, cancel := context.WithTimeout(ctx, p.pushTimeout) + defer cancel() + + modelMetrics := make([]*model.Metrics, metricsCount) + for i, metric := range metricsList { + modelMetric, err := p.converter.ToModelMetric(metric) + if err != nil { + return logger.WrapError("create model request", err) + } + + modelMetrics[i] = modelMetric + } + + buf := bufPool.Get().(*bytes.Buffer) + defer bufPool.Put(buf) + buf.Reset() + + err := json.NewEncoder(buf).Encode(modelMetrics) + if err != nil { + return logger.WrapError("serialize model request", err) + } + + request, err := http.NewRequestWithContext(pushCtx, http.MethodPost, p.metricsServerURL+"/updates", buf) + if err != nil { + return logger.WrapError("create push request", err) + } + request.Header.Add("Content-Type", "application/json") + + response, err := p.client.Do(request) + if err != nil { + return logger.WrapError("push metrics", err) + } + + defer response.Body.Close() + + content, err := io.ReadAll(response.Body) + if err != nil { + return logger.WrapError("read response body", err) + } + + stringContent 
:= string(content) + if response.StatusCode != http.StatusOK { + logrus.Errorf("Unexpected response status code: %v %v", response.Status, stringContent) + return logger.WrapError(fmt.Sprintf("push metric: %s", stringContent), metrics.ErrUnexpectedStatusCode) + } + + for _, metric := range metricsList { + logrus.WithFields(logrus.Fields{ + "metric": metric.GetName(), + "value": metric.GetStringValue(), + "status": response.Status, + }).Info("Pushed metric") + metric.ResetState() + } + + return nil +} + +func normalizeURL(urlStr string) (*url.URL, error) { + if urlStr == "" { + return nil, logger.WrapError("normalize url", metrics.ErrEmptyURL) + } + + result, err := url.ParseRequestURI(urlStr) + if err != nil { + result, err = url.ParseRequestURI("http://" + urlStr) + if err != nil { + return nil, logger.WrapError("parse request url", err) + } + } + + if result.Scheme == "localhost" { + // =) + return normalizeURL("http://" + result.String()) + } + + if result.Scheme == "" { + result.Scheme = "http" + } + + return result, nil +} diff --git a/internal/metrics/sendler/http/httpMetricsSendler_test.go b/internal/metrics/sendler/http/httpMetricsSendler_test.go new file mode 100644 index 0000000..3fdfd47 --- /dev/null +++ b/internal/metrics/sendler/http/httpMetricsSendler_test.go @@ -0,0 +1,258 @@ +package http + +import ( + "context" + "encoding/json" + "hash" + "net/http" + "net/http/httptest" + "sync" + "testing" + "time" + + "github.com/MlDenis/prometheus_wannabe/internal/converter" + internalHash "github.com/MlDenis/prometheus_wannabe/internal/hash" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/model" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" + "github.com/MlDenis/prometheus_wannabe/internal/test" + + "github.com/stretchr/testify/assert" +) + +type testConf struct { + connectionString string + timeout time.Duration + signEnabled bool + key []byte + parallelLimit int +} + 
+type testMetric struct { + name string + metricType string + value float64 + hash []byte +} + +func TestHttpMetricsPusher_Push(t *testing.T) { + var ( + counterValue int64 = 100 + gaugeValue = 100.001 + lock = sync.Mutex{} + ) + + tests := []struct { + name string + metricsToPush []metrics.Metric + expectedRequests []model.Metrics + expectedErrorMessage string + responseStatusCode int + }{ + { + name: "empty_metrics_list", + metricsToPush: []metrics.Metric{}, + expectedRequests: []model.Metrics{}, + responseStatusCode: http.StatusOK, + }, + { + name: "unknown_metric_type", + metricsToPush: []metrics.Metric{ + &testMetric{metricType: "invalid_type"}, + }, + expectedErrorMessage: "unknown metric type: invalid_type", + }, + { + name: "wrong_status_code", + metricsToPush: []metrics.Metric{ + createCounterMetric("counterMetric1", counterValue), + }, + expectedErrorMessage: "failed to push metric: ", + responseStatusCode: http.StatusBadRequest, + }, + { + name: "simple_metrics", + metricsToPush: []metrics.Metric{ + createCounterMetric("counterMetric1", counterValue), + createGaugeMetric("gaugeMetric1", gaugeValue), + }, + expectedRequests: []model.Metrics{ + { + ID: "counterMetric1", + MType: "counter", + Delta: &counterValue, + }, { + ID: "gaugeMetric1", + MType: "gauge", + Value: &gaugeValue, + }, + }, + responseStatusCode: http.StatusOK, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + called := map[string]bool{} + for _, request := range tt.expectedRequests { + called[request.ID+request.MType] = false + } + + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + assert.Equal(t, "application/json", r.Header.Get("Content-Type")) + + defer r.Body.Close() + modelRequest := []*model.Metrics{} + err := json.NewDecoder(r.Body).Decode(&modelRequest) + assert.NoError(t, err) + for _, modelMetric := range modelRequest { + 
lock.Lock() + defer lock.Unlock() + + called[modelMetric.ID+modelMetric.MType] = true + } + + w.WriteHeader(tt.responseStatusCode) + })) + defer server.Close() + + conf := &testConf{ + connectionString: server.URL, + timeout: 10 * time.Second, + signEnabled: false, + key: nil, + parallelLimit: 10, + } + signer := internalHash.NewSigner(conf) + converter := model.NewMetricsConverter(conf, signer) + pusher, err := NewMetricsPusher(conf, converter) + assert.NoError(t, err) + + err = pusher.Push(ctx, test.ArrayToChan(tt.metricsToPush)) + + if tt.expectedErrorMessage != "" { + assert.ErrorContains(t, err, tt.expectedErrorMessage) + } + + for key, call := range called { + assert.True(t, call, "Metric was not pushed, %v", key) + } + }) + } +} + +func Test_URLNormalization(t *testing.T) { + tests := []struct { + name string + input string + expectedError string + expectedURL string + }{ + { + name: "empty_url", + input: "", + expectedError: "failed to normalize url: empty url string", + }, + { + name: "no_schema_no_port", + input: "127.0.0.1", + expectedURL: "http://127.0.0.1", + }, + { + name: "no_schema_port", + input: "127.0.0.1:1234", + expectedURL: "http://127.0.0.1:1234", + }, + { + name: "schema_port", + input: "ftp://127.0.0.1:1234", + expectedURL: "ftp://127.0.0.1:1234", + }, + { + name: "localhost", + input: "localhost:1234", + expectedURL: "http://localhost:1234", + }, + { + name: "valid", + input: "https://ya.ru", + expectedURL: "https://ya.ru", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + actual, err := normalizeURL(tt.input) + + if tt.expectedError != "" { + assert.Error(t, err) + assert.Equal(t, tt.expectedError, err.Error()) + } else { + assert.Equal(t, tt.expectedURL, actual.String()) + } + }) + } +} + +func createCounterMetric(name string, value int64) metrics.Metric { + metric := types.NewCounterMetric(name) + metric.SetValue(float64(value)) + return metric +} + +func createGaugeMetric(name string, value float64) 
metrics.Metric { + metric := types.NewGaugeMetric(name) + metric.SetValue(value) + return metric +} + +func (c *testConf) MetricsServerURL() string { + return c.connectionString +} + +func (c *testConf) PushMetricsTimeout() time.Duration { + return c.timeout +} + +func (t *testMetric) GetName() string { + return t.name +} + +func (t *testMetric) GetType() string { + return t.metricType +} + +func (t *testMetric) GetValue() float64 { + return t.value +} + +func (t *testMetric) GetStringValue() string { + return converter.FloatToString(t.value) +} + +func (t *testMetric) SetValue(value float64) float64 { + t.value = value + return value +} + +func (t *testMetric) ResetState() { +} + +func (t *testMetric) GetHash(hash.Hash) ([]byte, error) { + return t.hash, nil +} + +func (c *testConf) SignMetrics() bool { + return c.signEnabled +} + +func (c *testConf) GetKey() []byte { + return c.key +} + +func (c *testConf) ParallelLimit() int { + return c.parallelLimit +} diff --git a/internal/metrics/sendler/metricsSendler.go b/internal/metrics/sendler/metricsSendler.go new file mode 100644 index 0000000..1f94758 --- /dev/null +++ b/internal/metrics/sendler/metricsSendler.go @@ -0,0 +1,11 @@ +package sendler + +import ( + "context" + + "github.com/MlDenis/prometheus_wannabe/internal/metrics" +) + +type MetricsPusher interface { + Push(ctx context.Context, metrics <-chan metrics.Metric) error +} diff --git a/internal/metrics/sendler/metricsSendler_test.go b/internal/metrics/sendler/metricsSendler_test.go new file mode 100644 index 0000000..e8685ca --- /dev/null +++ b/internal/metrics/sendler/metricsSendler_test.go @@ -0,0 +1 @@ +package sendler diff --git a/internal/metrics/storage/db/converter.go b/internal/metrics/storage/db/converter.go new file mode 100644 index 0000000..2a880a9 --- /dev/null +++ b/internal/metrics/storage/db/converter.go @@ -0,0 +1,49 @@ +package db + +import ( + "database/sql" + "fmt" + + "github.com/MlDenis/prometheus_wannabe/internal/database" + 
"github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" +) + +func toDBRecord(metric metrics.Metric) *database.DBItem { + return &database.DBItem{ + MetricType: sql.NullString{String: metric.GetType(), Valid: true}, + Name: sql.NullString{String: metric.GetName(), Valid: true}, + Value: sql.NullFloat64{Float64: metric.GetValue(), Valid: true}, + } +} + +func fromDBRecord(record *database.DBItem) (metrics.Metric, error) { + if !record.MetricType.Valid { + return nil, logger.WrapError("read record", metrics.ErrInvalidRecordMetricType) + } + metricType := record.MetricType.String + + if !record.Name.Valid { + return nil, logger.WrapError("read record", metrics.ErrInvalidRecordMetricName) + } + metricName := record.Name.String + + if !record.Value.Valid { + return nil, logger.WrapError("read record", metrics.ErrInvalidRecordMetricValue) + } + value := record.Value.Float64 + + var metric metrics.Metric + switch metricType { + case "gauge": + metric = types.NewGaugeMetric(metricName) + case "counter": + metric = types.NewCounterMetric(metricName) + default: + return nil, logger.WrapError(fmt.Sprintf("read record with type '%s'", metricType), metrics.ErrUnknownMetricType) + } + + metric.SetValue(value) + return metric, nil +} diff --git a/internal/metrics/storage/db/dbStorage.go b/internal/metrics/storage/db/dbStorage.go new file mode 100644 index 0000000..37b901d --- /dev/null +++ b/internal/metrics/storage/db/dbStorage.go @@ -0,0 +1,102 @@ +package db + +import ( + "context" + "database/sql" + + "github.com/MlDenis/prometheus_wannabe/internal/converter" + "github.com/MlDenis/prometheus_wannabe/internal/database" + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/storage" +) + +type dbStorage struct { + dataBase 
database.DataBase +} + +func NewDBStorage(dataBase database.DataBase) storage.MetricsStorage { + return &dbStorage{dataBase: dataBase} +} + +func (d *dbStorage) AddMetricValues(ctx context.Context, metricsList []metrics.Metric) ([]metrics.Metric, error) { + dbRecords := make([]*database.DBItem, len(metricsList)) + for i, metric := range metricsList { + dbRecords[i] = toDBRecord(metric) + } + + err := d.dataBase.UpdateItems(ctx, dbRecords) + if err != nil { + return nil, logger.WrapError("update db record", err) + } + + return metricsList, nil +} + +func (d *dbStorage) GetMetricValues(ctx context.Context) (map[string]map[string]string, error) { + records, err := d.dataBase.ReadAllItems(ctx) + if err != nil { + return nil, logger.WrapError("read all db records", err) + } + + result := map[string]map[string]string{} + for _, record := range records { + if !record.MetricType.Valid { + return nil, logger.WrapError("read record", metrics.ErrInvalidRecordMetricType) + } + + metricType := record.MetricType.String + metricsByType, ok := result[metricType] + if !ok { + metricsByType = map[string]string{} + result[metricType] = metricsByType + } + + if !record.Name.Valid { + return nil, logger.WrapError("read record", metrics.ErrInvalidRecordMetricName) + } + metricName := record.Name.String + + if !record.Value.Valid { + return nil, logger.WrapError("read record", metrics.ErrInvalidRecordMetricValue) + } + + metricsByType[metricName] = converter.FloatToString(record.Value.Float64) + } + + return result, nil +} + +func (d *dbStorage) GetMetric(ctx context.Context, metricType string, metricName string) (metrics.Metric, error) { + result, err := d.dataBase.ReadItem(ctx, metricType, metricName) + if err != nil { + return nil, logger.WrapError("read db record", err) + } + + return fromDBRecord(result) +} + +func (d *dbStorage) Restore(ctx context.Context, metricValues map[string]map[string]string) error { + records := []*database.DBItem{} + for metricType, metricsByType := range 
metricValues { + for metricName, metricValue := range metricsByType { + value, err := converter.ToFloat64(metricValue) + if err != nil { + return logger.WrapError("parse metric value", err) + } + + records = append(records, &database.DBItem{ + MetricType: sql.NullString{String: metricType, Valid: true}, + Name: sql.NullString{String: metricName, Valid: true}, + Value: sql.NullFloat64{Float64: value, Valid: true}, + }) + } + } + + err := d.dataBase.UpdateItems(ctx, records) + if err != nil { + return logger.WrapError("update records", err) + } + + return nil +} diff --git a/internal/metrics/storage/db/dbStorage_test.go b/internal/metrics/storage/db/dbStorage_test.go new file mode 100644 index 0000000..3a49c63 --- /dev/null +++ b/internal/metrics/storage/db/dbStorage_test.go @@ -0,0 +1 @@ +package db diff --git a/internal/metrics/storage/file/fileStorage.go b/internal/metrics/storage/file/fileStorage.go new file mode 100644 index 0000000..d6474d3 --- /dev/null +++ b/internal/metrics/storage/file/fileStorage.go @@ -0,0 +1,238 @@ +package file + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "os" + "sync" + + "github.com/MlDenis/prometheus_wannabe/internal/converter" + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/storage" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" + + "github.com/sirupsen/logrus" +) + +const fileMode os.FileMode = 0o644 + +type storageRecord struct { + Type string `json:"types"` + Name string `json:"name"` + Value string `json:"value"` +} + +type storageRecords []*storageRecord + +type fileStorageConfig interface { + StoreFilePath() string +} + +type fileStorage struct { + filePath string + lock sync.Mutex +} + +func NewFileStorage(config fileStorageConfig) storage.MetricsStorage { + result := &fileStorage{ + filePath: config.StoreFilePath(), + } + + if _, err := 
os.Stat(result.filePath); err != nil && result.filePath != "" && errors.Is(err, os.ErrNotExist) { + logrus.Infof("Init storage file in %v", result.filePath) + err = result.writeRecordsToFile(storageRecords{}) + if err != nil { + logrus.Errorf("failed to init storage file: %v", err) + } + } + + return result +} + +func (f *fileStorage) AddMetricValues(ctx context.Context, metricsList []metrics.Metric) ([]metrics.Metric, error) { + return metricsList, f.updateMetrics(metricsList) +} + +func (f *fileStorage) GetMetric(ctx context.Context, metricType string, metricName string) (metrics.Metric, error) { + records, err := f.readRecordsFromFile(func(record *storageRecord) bool { + return record.Type == metricType && record.Name == metricName + }) + if err != nil { + return nil, logger.WrapError("read records from file", err) + } + if len(records) != 1 { + return nil, logger.WrapError(fmt.Sprintf("get metric with name '%s' and type '%s'", metricName, metricType), metrics.ErrMetricNotFound) + } + + return f.toMetric(*records[0]) +} + +func (f *fileStorage) GetMetricValues(context.Context) (map[string]map[string]string, error) { + records, err := f.readRecordsFromFile(func(record *storageRecord) bool { return true }) + if err != nil { + return nil, logger.WrapError("read records from file", err) + } + + result := map[string]map[string]string{} + for _, record := range records { + metricsByType, ok := result[record.Type] + if !ok { + metricsByType = map[string]string{} + result[record.Type] = metricsByType + } + + metricsByType[record.Name] = record.Value + } + + return result, nil +} + +func (f *fileStorage) Restore(ctx context.Context, metricValues map[string]map[string]string) error { + var records storageRecords + for metricType, metricsByType := range metricValues { + for metricName, metricValue := range metricsByType { + records = append(records, &storageRecord{ + Type: metricType, + Name: metricName, + Value: metricValue, + }) + } + } + + return 
f.writeRecordsToFile(records) +} + +func (f *fileStorage) updateMetrics(metricsList []metrics.Metric) error { + // Read and write + return f.workWithFile(os.O_CREATE|os.O_RDWR, func(fileStream *os.File) error { + f.lock.Lock() + defer f.lock.Unlock() + + metricsMap := map[string]metrics.Metric{} // contains? + for _, metric := range metricsList { + metricsMap[metric.GetType()+metric.GetName()] = metric + } + + records, err := f.readRecords(fileStream, func(record *storageRecord) bool { + _, found := metricsMap[record.Type+record.Name] + return !found + }) + if err != nil { + return logger.WrapError("read records", err) + } + + _, err = fileStream.Seek(0, io.SeekStart) + if err != nil { + return logger.WrapError("seek pointer", err) + } + err = fileStream.Truncate(0) + if err != nil { + return logger.WrapError("truncate file stream", err) + } + + for _, metric := range metricsList { + records = append(records, &storageRecord{ + Type: metric.GetType(), + Name: metric.GetName(), + Value: metric.GetStringValue(), + }) + } + + return f.writeRecords(fileStream, records) + }) +} + +func (f *fileStorage) readRecordsFromFile(isValid func(*storageRecord) bool) (storageRecords, error) { + // ReadOnly + return f.workWithFileResult(os.O_CREATE|os.O_RDONLY, func(fileStream *os.File) (storageRecords, error) { + return f.readRecords(fileStream, isValid) + }) +} + +func (f *fileStorage) readRecords(fileStream *os.File, isValid func(*storageRecord) bool) (storageRecords, error) { + var records storageRecords + err := json.NewDecoder(fileStream).Decode(&records) + if err != nil { + return nil, logger.WrapError("decode storage", err) + } + + result := storageRecords{} + for _, record := range records { + if isValid(record) { + result = append(result, record) + } + } + + return result, nil +} + +func (f *fileStorage) writeRecordsToFile(records storageRecords) error { + // WriteOnly + return f.workWithFile(os.O_CREATE|os.O_WRONLY, func(fileStream *os.File) error { + return 
f.writeRecords(fileStream, records) + }) +} + +func (f *fileStorage) writeRecords(fileStream *os.File, records storageRecords) error { + encoder := json.NewEncoder(fileStream) + encoder.SetIndent("", " ") + err := encoder.Encode(records) + if err != nil { + return logger.WrapError("write records", err) + } + + return nil +} + +func (f *fileStorage) workWithFile(flag int, work func(file *os.File) error) error { + _, err := f.workWithFileResult(flag, func(fileStream *os.File) (storageRecords, error) { + return nil, work(fileStream) + }) + return err +} + +func (f *fileStorage) workWithFileResult(flag int, work func(file *os.File) (storageRecords, error)) (storageRecords, error) { + if f.filePath == "" { + return nil, nil + } + + f.lock.Lock() + defer f.lock.Unlock() + + fileStream, err := os.OpenFile(f.filePath, flag, fileMode) + if err != nil { + return nil, logger.WrapError("open file", err) + } + defer func(fileStream *os.File) { + err = fileStream.Close() + if err != nil { + logrus.Errorf("failed to close file: %v", err) + } + }(fileStream) + + return work(fileStream) +} + +func (f *fileStorage) toMetric(record storageRecord) (metrics.Metric, error) { + var metric metrics.Metric + switch record.Type { + case "counter": + metric = types.NewCounterMetric(record.Name) + case "gauge": + metric = types.NewGaugeMetric(record.Name) + default: + return nil, logger.WrapError(fmt.Sprintf("convert to metric with type %s", record.Type), metrics.ErrUnknownMetricType) + } + + value, err := converter.ToFloat64(record.Value) + if err != nil { + return nil, logger.WrapError("parse record value", err) + } + + metric.SetValue(value) + return metric, nil +} diff --git a/internal/metrics/storage/file/fileStorage_test.go b/internal/metrics/storage/file/fileStorage_test.go new file mode 100644 index 0000000..9fda2cf --- /dev/null +++ b/internal/metrics/storage/file/fileStorage_test.go @@ -0,0 +1,236 @@ +package file + +import ( + "context" + "encoding/json" + "os" + "testing" + + 
"github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/test" + + "github.com/stretchr/testify/assert" +) + +type config struct { + filePath string +} + +func TestFileStorage_New(t *testing.T) { + tests := []struct { + name string + filePath string + }{ + { + name: "empty_path", + }, + { + name: "success", + filePath: os.TempDir() + "TestFileStorage_New", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + storage := NewFileStorage(&config{filePath: tt.filePath}) + assert.NotNil(t, storage) + + if tt.filePath != "" { + defer func(name string) { + _ = os.Remove(name) + }(tt.filePath) + + actualRecords := readRecords(t, tt.filePath) + assert.Empty(t, actualRecords) + } + }) + } +} + +func TestFileStorage_AddGaugeMetricValue(t *testing.T) { + tests := []struct { + name string + values []test.KeyValue + expecredRecords storageRecords + }{ + { + name: "one_value", + values: []test.KeyValue{ + {Key: "testMetric", Value: 100.001}, + }, + expecredRecords: storageRecords{ + {Type: "gauge", Name: "testMetric", Value: "100.001"}, + }, + }, + { + name: "many_values", + values: []test.KeyValue{ + {Key: "testMetric1", Value: 100.001}, + {Key: "testMetric2", Value: 200.002}, + {Key: "testMetric3", Value: 300.003}, + }, + expecredRecords: storageRecords{ + {Type: "gauge", Name: "testMetric1", Value: "100.001"}, + {Type: "gauge", Name: "testMetric2", Value: "200.002"}, + {Type: "gauge", Name: "testMetric3", Value: "300.003"}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + filePath := os.TempDir() + "TestFileStorage_AddGaugeMetricValue" + defer func(name string) { + _ = os.Remove(name) + }(filePath) + + storage := NewFileStorage(&config{filePath: filePath}) + + metricsList := make([]metrics.Metric, len(tt.values)) + for i, m := range tt.values { + metricsList[i] = test.CreateGaugeMetric(m.Key, m.Value) + } + + _, err := storage.AddMetricValues(context.Background(), 
metricsList) + assert.NoError(t, err) + + actualRecords := readRecords(t, filePath) + assert.Equal(t, tt.expecredRecords, actualRecords) + }) + } +} + +func TestFileStorage_AddCounterMetricValue(t *testing.T) { + tests := []struct { + name string + values []test.KeyValue + expectedRecords storageRecords + }{ + { + name: "one_value", + values: []test.KeyValue{ + {Key: "testMetric", Value: 100}, + }, + expectedRecords: storageRecords{ + {Type: "counter", Name: "testMetric", Value: "100"}, + }, + }, + { + name: "many_values", + values: []test.KeyValue{ + {Key: "testMetric1", Value: 100}, + {Key: "testMetric2", Value: 200}, + {Key: "testMetric3", Value: 300}, + }, + expectedRecords: storageRecords{ + {Type: "counter", Name: "testMetric1", Value: "100"}, + {Type: "counter", Name: "testMetric2", Value: "200"}, + {Type: "counter", Name: "testMetric3", Value: "300"}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + filePath := os.TempDir() + "TestFileStorage_AddCounterMetricValue" + defer func(name string) { + assert.NoError(t, os.Remove(name)) + }(filePath) + + storage := NewFileStorage(&config{filePath: filePath}) + + metricsList := make([]metrics.Metric, len(tt.values)) + for i, m := range tt.values { + metricsList[i] = test.CreateCounterMetric(m.Key, m.Value) + } + + _, err := storage.AddMetricValues(context.Background(), metricsList) + assert.NoError(t, err) + + actualRecords := readRecords(t, filePath) + assert.Equal(t, tt.expectedRecords, actualRecords) + }) + } +} + +func TestFileStorage_GetMetric(t *testing.T) { + expectedMetricType := "gauge" + expectedMetricName := "expectedMetricName" + expectedValue := float64(300) + + tests := []struct { + name string + stored storageRecords + expectedErrorMessage string + }{ + { + name: "empty_store", + stored: storageRecords{}, + expectedErrorMessage: "failed to get metric with name 'expectedMetricName' and type 'gauge': metric not found", + }, + { + name: "notFound", + stored: 
storageRecords{ + {Type: "counter", Name: "metricName", Value: "100"}, + }, + expectedErrorMessage: "failed to get metric with name 'expectedMetricName' and type 'gauge': metric not found", + }, + { + name: "success", + stored: storageRecords{ + {Type: "counter", Name: "metricName", Value: "100"}, + {Type: expectedMetricType, Name: expectedMetricName, Value: "300"}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + filePath := os.TempDir() + "TestFileStorage_GetMetricValue" + defer func(name string) { + _ = os.Remove(name) + }(filePath) + writeRecords(t, filePath, tt.stored) + + storage := NewFileStorage(&config{filePath: filePath}) + actualValue, err := storage.GetMetric(context.Background(), expectedMetricType, expectedMetricName) + + if tt.expectedErrorMessage == "" { + assert.Equal(t, expectedValue, actualValue.GetValue()) + } else { + assert.ErrorContains(t, err, tt.expectedErrorMessage) + } + }) + } +} + +func readRecords(t *testing.T, filePath string) storageRecords { + t.Helper() + _, err := os.Stat(filePath) + assert.NoError(t, err) + + content, err := os.ReadFile(filePath) + assert.NoError(t, err) + + records := storageRecords{} + err = json.Unmarshal(content, &records) + assert.NoError(t, err) + + return records +} + +func writeRecords(t *testing.T, filePath string, records storageRecords) { + t.Helper() + fileStream, err := os.OpenFile(filePath, os.O_WRONLY|os.O_CREATE, 0o644) + assert.NoError(t, err) + defer func(fileStream *os.File) { + _ = fileStream.Close() + }(fileStream) + + err = json.NewEncoder(fileStream).Encode(records) + assert.NoError(t, err) +} + +func (c *config) StoreFilePath() string { + return c.filePath +} diff --git a/internal/metrics/storage/memory/inMemoryStorage.go b/internal/metrics/storage/memory/inMemoryStorage.go new file mode 100644 index 0000000..756d1df --- /dev/null +++ b/internal/metrics/storage/memory/inMemoryStorage.go @@ -0,0 +1,124 @@ +package memory + +import ( + "context" + "fmt" + 
"sync" + + "github.com/MlDenis/prometheus_wannabe/internal/converter" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/storage" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" +) + +type inMemoryStorage struct { + metricsByType map[string]map[string]metrics.Metric + lock sync.RWMutex +} + +func NewInMemoryStorage() storage.MetricsStorage { + return &inMemoryStorage{ + metricsByType: map[string]map[string]metrics.Metric{}, + lock: sync.RWMutex{}, + } +} + +func (s *inMemoryStorage) AddMetricValues(ctx context.Context, metricList []metrics.Metric) ([]metrics.Metric, error) { + s.lock.Lock() + defer s.lock.Unlock() + + result := make([]metrics.Metric, len(metricList)) + + for i, metric := range metricList { + metricType := metric.GetType() + typedMetrics, ok := s.metricsByType[metricType] + if !ok { + typedMetrics = map[string]metrics.Metric{} + s.metricsByType[metricType] = typedMetrics + } + + metricName := metric.GetName() + currentMetric, ok := typedMetrics[metricName] + if ok { + currentMetric.SetValue(metric.GetValue()) + } else { + currentMetric = metric + typedMetrics[metricName] = currentMetric + } + result[i] = currentMetric + } + + return result, nil +} + +func (s *inMemoryStorage) GetMetricValues(context.Context) (map[string]map[string]string, error) { + s.lock.RLock() + defer s.lock.RUnlock() + + metricValues := map[string]map[string]string{} + for metricsType, metricsList := range s.metricsByType { + values := map[string]string{} + metricValues[metricsType] = values + + for metricName, metric := range metricsList { + values[metricName] = metric.GetStringValue() + } + } + + return metricValues, nil +} + +func (s *inMemoryStorage) GetMetric(ctx context.Context, metricType string, metricName string) (metrics.Metric, error) { + s.lock.RLock() + defer s.lock.RUnlock() + metricsByName, ok := s.metricsByType[metricType] + if !ok { + return nil, fmt.Errorf("get metric with type 
%s: %w", metricType, metrics.ErrMetricNotFound) + } + + metric, ok := metricsByName[metricName] + if !ok { + return nil, fmt.Errorf("metrics with name %v and types %v not found: %w", metricName, metricType, metrics.ErrMetricNotFound) + } + + return metric, nil +} + +func (s *inMemoryStorage) Restore(ctx context.Context, metricValues map[string]map[string]string) error { + s.lock.Lock() + defer s.lock.Unlock() + + s.metricsByType = map[string]map[string]metrics.Metric{} + + for metricType, metricsByType := range metricValues { + metricFactory := types.NewGaugeMetric + if metricType == "counter" { + metricFactory = types.NewCounterMetric + } else if metricType != "gauge" { + return fmt.Errorf("handle backup metric with type '%s': %w", metricType, metrics.ErrUnknownMetricType) + } + + for metricName, metricValue := range metricsByType { + value, err := converter.ToFloat64(metricValue) + if err != nil { + return fmt.Errorf("parse float metric value: %w", err) + } + + metricsList, ok := s.metricsByType[metricType] + if !ok { + metricsList = map[string]metrics.Metric{} + s.metricsByType[metricType] = metricsList + } + + currentMetric, ok := metricsList[metricName] + if !ok { + currentMetric = metricFactory(metricName) + metricsList[metricName] = currentMetric + } + + currentMetric.SetValue(value) + } + } + + return nil +} diff --git a/internal/metrics/storage/memory/inMemoryStorage_test.go b/internal/metrics/storage/memory/inMemoryStorage_test.go new file mode 100644 index 0000000..50acc6d --- /dev/null +++ b/internal/metrics/storage/memory/inMemoryStorage_test.go @@ -0,0 +1,347 @@ +package memory + +import ( + "context" + "testing" + + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/test" + + "github.com/stretchr/testify/assert" +) + +func TestInMemoryStorage_AddCounterMetricValue(t *testing.T) { + tests := []struct { + name string + counterMetrics []test.KeyValue + expected map[string]map[string]string + }{ + { 
+ name: "single_metric", + counterMetrics: []test.KeyValue{ + {Key: "metricName1", Value: 100}, + }, + expected: map[string]map[string]string{ + "counter": {"metricName1": "100"}, + }, + }, + { + name: "single_negative_metric", + counterMetrics: []test.KeyValue{ + {Key: "metricName1", Value: -100}, + }, + expected: map[string]map[string]string{ + "counter": {"metricName1": "-100"}, + }, + }, + { + name: "multi_metrics", + counterMetrics: []test.KeyValue{ + {Key: "metricName1", Value: 100}, + {Key: "metricName2", Value: 200}, + }, + expected: map[string]map[string]string{ + "counter": { + "metricName1": "100", + "metricName2": "200", + }, + }, + }, + { + name: "same_metrics", + counterMetrics: []test.KeyValue{ + {Key: "metricName1", Value: 100}, + {Key: "metricName1", Value: 200}, + }, + expected: map[string]map[string]string{ + "counter": {"metricName1": "300"}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + storage := NewInMemoryStorage() + + metricsList := make([]metrics.Metric, len(tt.counterMetrics)) + for i, m := range tt.counterMetrics { + metricsList[i] = test.CreateCounterMetric(m.Key, m.Value) + } + _, err := storage.AddMetricValues(context.Background(), metricsList) + assert.NoError(t, err) + + actual, _ := storage.GetMetricValues(context.Background()) + assert.Equal(t, tt.expected, actual) + }) + } +} + +func TestInMemoryStorage_AddGaugeMetricValue(t *testing.T) { + tests := []struct { + name string + gaugeMetrics []test.KeyValue + expected map[string]map[string]string + }{ + { + name: "single_metric", + gaugeMetrics: []test.KeyValue{ + {Key: "metricName1", Value: 100.001}, + }, + expected: map[string]map[string]string{ + "gauge": {"metricName1": "100.001"}, + }, + }, + { + name: "single_negative_metric", + gaugeMetrics: []test.KeyValue{ + {Key: "metricName1", Value: -100.001}, + }, + expected: map[string]map[string]string{ + "gauge": {"metricName1": "-100.001"}, + }, + }, + { + name: "multi_metrics", + gaugeMetrics: 
[]test.KeyValue{ + {Key: "metricName1", Value: 100.001}, + {Key: "metricName2", Value: 200.002}, + }, + expected: map[string]map[string]string{ + "gauge": { + "metricName1": "100.001", + "metricName2": "200.002", + }, + }, + }, + { + name: "same_metrics", + gaugeMetrics: []test.KeyValue{ + {Key: "metricName1", Value: 100.001}, + {Key: "metricName1", Value: 200.002}, + }, + expected: map[string]map[string]string{ + "gauge": {"metricName1": "200.002"}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + storage := NewInMemoryStorage() + + metricsList := make([]metrics.Metric, len(tt.gaugeMetrics)) + for i, m := range tt.gaugeMetrics { + metricsList[i] = test.CreateGaugeMetric(m.Key, m.Value) + } + _, err := storage.AddMetricValues(context.Background(), metricsList) + assert.NoError(t, err) + + actual, _ := storage.GetMetricValues(context.Background()) + assert.Equal(t, tt.expected, actual) + }) + } +} + +func TestInMemoryStorage_GetMetricValues(t *testing.T) { + tests := []struct { + name string + counterMetrics []test.KeyValue + gaugeMetrics []test.KeyValue + expected map[string]map[string]string + }{ + { + name: "no_metric", + expected: map[string]map[string]string{}, + }, + { + name: "all_metric", + counterMetrics: []test.KeyValue{ + {Key: "metricName2", Value: 300}, + {Key: "metricName1", Value: 100}, + {Key: "metricName3", Value: -400}, + {Key: "metricName1", Value: 200}, + }, + gaugeMetrics: []test.KeyValue{ + {Key: "metricName5", Value: 300.003}, + {Key: "metricName4", Value: 100.001}, + {Key: "metricName6", Value: -400.004}, + {Key: "metricName4", Value: 200.002}, + }, + expected: map[string]map[string]string{ + "counter": { + "metricName1": "300", + "metricName2": "300", + "metricName3": "-400", + }, + "gauge": { + "metricName4": "200.002", + "metricName5": "300.003", + "metricName6": "-400.004", + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + storage := NewInMemoryStorage() + + 
metricsList := make([]metrics.Metric, len(tt.counterMetrics)+len(tt.gaugeMetrics)) + for i, m := range tt.counterMetrics { + metricsList[i] = test.CreateCounterMetric(m.Key, m.Value) + } + for i, m := range tt.gaugeMetrics { + metricsList[len(tt.counterMetrics)+i] = test.CreateGaugeMetric(m.Key, m.Value) + } + _, err := storage.AddMetricValues(context.Background(), metricsList) + assert.NoError(t, err) + + actual, _ := storage.GetMetricValues(context.Background()) + assert.Equal(t, tt.expected, actual) + }) + } +} + +func TestInMemoryStorage_Restore(t *testing.T) { + tests := []struct { + name string + values map[string]map[string]string + expectedErrorMessage string + }{ + { + name: "unknown_metric_type", + expectedErrorMessage: "failed to handle backup metric with type 'unknownType': unknown metric type", + values: map[string]map[string]string{ + "unknownType": { + "metricName1": "300", + }, + }, + }, + { + name: "success", + values: map[string]map[string]string{ + "counter": { + "metricName1": "300", + "metricName2": "300", + "metricName3": "-400", + }, + "gauge": { + "metricName4": "200.002", + "metricName5": "300.003", + "metricName6": "-400.004", + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + storage := NewInMemoryStorage() + + actualError := storage.Restore(context.Background(), tt.values) + if tt.expectedErrorMessage == "" { + actual, _ := storage.GetMetricValues(context.Background()) + assert.Equal(t, tt.values, actual) + } else { + assert.ErrorContains(t, actualError, tt.expectedErrorMessage) + } + }) + } +} + +func TestInMemoryStorage_GetMetricValue(t *testing.T) { + tests := []struct { + name string + counterMetrics []test.KeyValue + gaugeMetrics []test.KeyValue + expectedOk bool + expectedCounters []test.KeyValue + expectedGauges []test.KeyValue + }{ + { + name: "empty_metrics", + counterMetrics: []test.KeyValue{}, + gaugeMetrics: []test.KeyValue{}, + expectedOk: false, + expectedCounters: 
[]test.KeyValue{{Key: "not_existed_metric", Value: 0}}, + expectedGauges: []test.KeyValue{{Key: "not_existed_metric", Value: 0}}, + }, + { + name: "metric_not_found", + counterMetrics: []test.KeyValue{ + {Key: "metricName1", Value: 100}, + {Key: "metricName2", Value: 300}, + {Key: "metricName3", Value: -400}, + }, + gaugeMetrics: []test.KeyValue{ + {Key: "metricName4", Value: 100.001}, + {Key: "metricName5", Value: 300.003}, + {Key: "metricName6", Value: -400.004}, + }, + expectedOk: false, + expectedCounters: []test.KeyValue{{Key: "not_existed_metric", Value: 0}}, + expectedGauges: []test.KeyValue{{Key: "not_existed_metric", Value: 0}}, + }, + { + name: "success_values", + counterMetrics: []test.KeyValue{ + {Key: "metricName1", Value: 100}, + {Key: "metricName2", Value: 300}, + {Key: "metricName3", Value: -400}, + }, + gaugeMetrics: []test.KeyValue{ + {Key: "metricName4", Value: 100.001}, + {Key: "metricName5", Value: 300.003}, + {Key: "metricName6", Value: -400.004}, + }, + expectedOk: true, + expectedCounters: []test.KeyValue{ + {Key: "metricName1", Value: 100}, + {Key: "metricName2", Value: 300}, + {Key: "metricName3", Value: -400}, + }, + expectedGauges: []test.KeyValue{ + {Key: "metricName4", Value: 100.001}, + {Key: "metricName5", Value: 300.003}, + {Key: "metricName6", Value: -400.004}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + storage := NewInMemoryStorage() + + metricsList := make([]metrics.Metric, len(tt.counterMetrics)+len(tt.gaugeMetrics)) + for i, m := range tt.counterMetrics { + metricsList[i] = test.CreateCounterMetric(m.Key, m.Value) + } + for i, m := range tt.gaugeMetrics { + metricsList[len(tt.counterMetrics)+i] = test.CreateGaugeMetric(m.Key, m.Value) + } + + _, err := storage.AddMetricValues(context.Background(), metricsList) + assert.NoError(t, err) + + for _, expectedCounter := range tt.expectedCounters { + actualValue, err := storage.GetMetric(context.Background(), "counter", expectedCounter.Key) + 
if tt.expectedOk { + assert.NoError(t, err) + assert.Equal(t, expectedCounter.Value, actualValue.GetValue()) + } else { + assert.Error(t, err) + } + } + + for _, expectedGauge := range tt.expectedGauges { + actualValue, err := storage.GetMetric(context.Background(), "gauge", expectedGauge.Key) + if tt.expectedOk { + assert.NoError(t, err) + assert.Equal(t, expectedGauge.Value, actualValue.GetValue()) + } else { + assert.Error(t, err) + } + } + }) + } +} diff --git a/internal/metrics/storage/metricsStorage.go b/internal/metrics/storage/metricsStorage.go new file mode 100644 index 0000000..e87fa06 --- /dev/null +++ b/internal/metrics/storage/metricsStorage.go @@ -0,0 +1,14 @@ +package storage + +import ( + "context" + + "github.com/MlDenis/prometheus_wannabe/internal/metrics" +) + +type MetricsStorage interface { + AddMetricValues(ctx context.Context, metric []metrics.Metric) ([]metrics.Metric, error) + GetMetricValues(ctx context.Context) (map[string]map[string]string, error) + GetMetric(ctx context.Context, metricType string, metricName string) (metrics.Metric, error) + Restore(ctx context.Context, metricValues map[string]map[string]string) error +} diff --git a/internal/metrics/storage/storageStratedy_test.go b/internal/metrics/storage/storageStratedy_test.go new file mode 100644 index 0000000..b5720f6 --- /dev/null +++ b/internal/metrics/storage/storageStratedy_test.go @@ -0,0 +1,631 @@ +package storage + +import ( + "context" + "testing" + + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/test" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" +) + +type configMock struct { + mock.Mock +} + +type metricStorageMock struct { + mock.Mock +} + +const ( + metricType = "metricType" + metricName = "metricName" + metricValue float64 = 100 +) + +func TestStorageStrategy_AddGaugeMetricValue(t *testing.T) { + tests := []struct { + name string + syncMode bool + inMemoryStorageError error 
+ backupStorageErrorError error + expectedResult []metrics.Metric + expectedError error + }{ + { + name: "noSync_inMemoryStorage_error", + syncMode: false, + inMemoryStorageError: test.ErrTest, + expectedError: test.ErrTest, + }, + { + name: "sync_inMemoryStorage_error", + syncMode: true, + inMemoryStorageError: test.ErrTest, + expectedError: test.ErrTest, + }, + { + name: "noSync_backupStorage_error", + syncMode: false, + backupStorageErrorError: test.ErrTest, + expectedResult: []metrics.Metric{test.CreateGaugeMetric("resultMetric", 100)}, + }, + { + name: "sync_backupStorage_error", + syncMode: true, + backupStorageErrorError: test.ErrTest, + expectedError: test.ErrTest, + }, + { + name: "noSync_success", + syncMode: false, + expectedResult: []metrics.Metric{test.CreateGaugeMetric("resultMetric", 100)}, + }, + { + name: "sync_success", + syncMode: true, + expectedResult: []metrics.Metric{test.CreateGaugeMetric("resultMetric", 100)}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx := context.Background() + + confMock := new(configMock) + inMemoryStorageMock := new(metricStorageMock) + backupStorageMock := new(metricStorageMock) + + metricsList := []metrics.Metric{test.CreateGaugeMetric(metricName, metricValue)} + + confMock.On("SyncMode").Return(tt.syncMode) + inMemoryStorageMock.On("AddMetricValues", ctx, metricsList).Return(tt.expectedResult, tt.inMemoryStorageError) + backupStorageMock.On("AddMetricValues", ctx, tt.expectedResult).Return(tt.expectedResult, tt.backupStorageErrorError) + + strategy := NewStorageStrategy(confMock, inMemoryStorageMock, backupStorageMock) + actualResult, actualError := strategy.AddMetricValues(ctx, metricsList) + + assert.Equal(t, tt.expectedResult, actualResult) + assert.ErrorIs(t, actualError, tt.expectedError) + + inMemoryStorageMock.AssertCalled(t, "AddMetricValues", ctx, metricsList) + + if tt.inMemoryStorageError == nil { + if tt.syncMode { + backupStorageMock.AssertCalled(t, 
"AddMetricValues", ctx, tt.expectedResult) + } else { + backupStorageMock.AssertNotCalled(t, "AddMetricValues", mock.Anything, mock.Anything) + } + } else { + backupStorageMock.AssertNotCalled(t, "AddMetricValues", mock.Anything, mock.Anything) + } + }) + } +} + +func TestStorageStrategy_AddCounterMetricValue(t *testing.T) { + tests := []struct { + name string + syncMode bool + inMemoryStorageError error + backupStorageErrorError error + expectedResult []metrics.Metric + expectedError error + }{ + { + name: "noSync_inMemoryStorage_error", + syncMode: false, + inMemoryStorageError: test.ErrTest, + expectedError: test.ErrTest, + }, + { + name: "sync_inMemoryStorage_error", + syncMode: true, + inMemoryStorageError: test.ErrTest, + expectedError: test.ErrTest, + }, + { + name: "noSync_backupStorage_error", + syncMode: false, + backupStorageErrorError: test.ErrTest, + expectedResult: []metrics.Metric{test.CreateCounterMetric("resultMetric", 100)}, + }, + { + name: "sync_backupStorage_error", + syncMode: true, + backupStorageErrorError: test.ErrTest, + expectedError: test.ErrTest, + }, + { + name: "noSync_success", + syncMode: false, + expectedResult: []metrics.Metric{test.CreateCounterMetric("resultMetric", 100)}, + }, + { + name: "sync_success", + syncMode: true, + expectedResult: []metrics.Metric{test.CreateCounterMetric("resultMetric", 100)}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + ctx := context.Background() + + confMock := new(configMock) + inMemoryStorageMock := new(metricStorageMock) + backupStorageMock := new(metricStorageMock) + + metricsList := []metrics.Metric{test.CreateCounterMetric(metricName, metricValue)} + + confMock.On("SyncMode").Return(tt.syncMode) + inMemoryStorageMock.On("AddMetricValues", ctx, metricsList).Return(tt.expectedResult, tt.inMemoryStorageError) + backupStorageMock.On("AddMetricValues", ctx, tt.expectedResult).Return(tt.expectedResult, tt.backupStorageErrorError) + + strategy := 
NewStorageStrategy(confMock, inMemoryStorageMock, backupStorageMock)
+			actualResult, actualError := strategy.AddMetricValues(ctx, metricsList)
+
+			assert.Equal(t, tt.expectedResult, actualResult)
+			assert.ErrorIs(t, actualError, tt.expectedError)
+
+			inMemoryStorageMock.AssertCalled(t, "AddMetricValues", ctx, metricsList)
+
+			if tt.inMemoryStorageError == nil {
+				if tt.syncMode {
+					backupStorageMock.AssertCalled(t, "AddMetricValues", ctx, tt.expectedResult)
+				} else {
+					backupStorageMock.AssertNotCalled(t, "AddMetricValues", mock.Anything, mock.Anything)
+				}
+			} else {
+				backupStorageMock.AssertNotCalled(t, "AddMetricValues", mock.Anything, mock.Anything)
+			}
+		})
+	}
+}
+
+func TestStorageStrategy_GetMetricValues(t *testing.T) {
+	result := map[string]map[string]string{}
+
+	tests := []struct {
+		name           string
+		syncMode       bool
+		storageResult  map[string]map[string]string
+		storageError   error
+		expectedResult map[string]map[string]string
+		expectedError  error
+	}{
+		{
+			name:          "noSync_error",
+			syncMode:      false,
+			storageError:  test.ErrTest,
+			expectedError: test.ErrTest,
+		},
+		{
+			name:          "sync_error",
+			syncMode:      true,
+			storageError:  test.ErrTest,
+			expectedError: test.ErrTest,
+		},
+		{
+			name:           "noSync_success",
+			syncMode:       false,
+			storageResult:  result,
+			expectedResult: result,
+		},
+		{
+			name:           "sync_success",
+			syncMode:       true,
+			storageResult:  result,
+			expectedResult: result,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			ctx := context.Background()
+
+			confMock := new(configMock)
+			inMemoryStorageMock := new(metricStorageMock)
+			backupStorageMock := new(metricStorageMock)
+
+			confMock.On("SyncMode").Return(tt.syncMode)
+			inMemoryStorageMock.On("GetMetricValues", ctx).Return(tt.storageResult, tt.storageError)
+			backupStorageMock.On("GetMetricValues", ctx).Return(tt.storageResult, tt.storageError)
+
+			strategy := NewStorageStrategy(confMock, inMemoryStorageMock, backupStorageMock)
+			actualResult, actualError :=
strategy.GetMetricValues(ctx)
+
+			assert.Equal(t, tt.expectedResult, actualResult)
+			assert.Equal(t, tt.expectedError, actualError)
+
+			inMemoryStorageMock.AssertCalled(t, "GetMetricValues", ctx)
+			backupStorageMock.AssertNotCalled(t, "GetMetricValues", mock.Anything)
+		})
+	}
+}
+
+func TestStorageStrategy_GetMetric(t *testing.T) {
+	resultMetric := test.CreateGaugeMetric(metricName, metricValue)
+	tests := []struct {
+		name           string
+		syncMode       bool
+		storageResult  metrics.Metric
+		storageError   error
+		expectedResult metrics.Metric
+		expectedError  error
+	}{
+		{
+			name:          "noSync_error",
+			syncMode:      false,
+			storageError:  test.ErrTest,
+			expectedError: test.ErrTest,
+		},
+		{
+			name:          "sync_error",
+			syncMode:      true,
+			storageError:  test.ErrTest,
+			expectedError: test.ErrTest,
+		},
+		{
+			name:           "noSync_success",
+			syncMode:       false,
+			storageResult:  resultMetric,
+			expectedResult: resultMetric,
+		},
+		{
+			name:           "sync_success",
+			syncMode:       true,
+			storageResult:  resultMetric,
+			expectedResult: resultMetric,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			ctx := context.Background()
+
+			confMock := new(configMock)
+			inMemoryStorageMock := new(metricStorageMock)
+			backupStorageMock := new(metricStorageMock)
+
+			confMock.On("SyncMode").Return(tt.syncMode)
+			inMemoryStorageMock.On("GetMetric", ctx, metricType, metricName).Return(tt.storageResult, tt.storageError)
+			backupStorageMock.On("GetMetric", ctx, metricType, metricName).Return(tt.storageResult, tt.storageError)
+
+			strategy := NewStorageStrategy(confMock, inMemoryStorageMock, backupStorageMock)
+			actualResult, actualError := strategy.GetMetric(ctx, metricType, metricName)
+
+			assert.Equal(t, tt.expectedResult, actualResult)
+			assert.Equal(t, tt.expectedError, actualError)
+
+			inMemoryStorageMock.AssertCalled(t, "GetMetric", ctx, metricType, metricName)
+			backupStorageMock.AssertNotCalled(t, "GetMetric", mock.Anything, mock.Anything, mock.Anything)
+		})
+	}
+}
+
+func
TestStorageStrategy_Restore(t *testing.T) {
+	values := map[string]map[string]string{}
+
+	tests := []struct {
+		name          string
+		syncMode      bool
+		storageError  error
+		expectedError error
+	}{
+		{
+			name:          "noSync_error",
+			syncMode:      false,
+			storageError:  test.ErrTest,
+			expectedError: test.ErrTest,
+		},
+		{
+			name:          "sync_error",
+			syncMode:      true,
+			storageError:  test.ErrTest,
+			expectedError: test.ErrTest,
+		},
+		{
+			name:     "noSync_success",
+			syncMode: false,
+		},
+		{
+			name:     "sync_success",
+			syncMode: true,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			ctx := context.Background()
+
+			confMock := new(configMock)
+			inMemoryStorageMock := new(metricStorageMock)
+			backupStorageMock := new(metricStorageMock)
+
+			confMock.On("SyncMode").Return(tt.syncMode)
+			inMemoryStorageMock.On("Restore", ctx, values).Return(tt.storageError)
+			backupStorageMock.On("Restore", ctx, values).Return(tt.storageError)
+
+			strategy := NewStorageStrategy(confMock, inMemoryStorageMock, backupStorageMock)
+			actualError := strategy.Restore(ctx, values)
+
+			assert.Equal(t, tt.expectedError, actualError)
+
+			inMemoryStorageMock.AssertCalled(t, "Restore", ctx, values)
+			backupStorageMock.AssertNotCalled(t, "Restore", mock.Anything, mock.Anything)
+		})
+	}
+}
+
+func TestStorageStrategy_CreateBackup(t *testing.T) {
+	values := map[string]map[string]string{}
+
+	tests := []struct {
+		name               string
+		syncMode           bool
+		currentStateValues map[string]map[string]string
+		currentStateError  error
+		restoreError       error
+		expectedError      error
+	}{
+		{
+			name:              "noSync_currentState_error",
+			syncMode:          false,
+			currentStateError: test.ErrTest,
+			expectedError:     test.ErrTest,
+		},
+		{
+			name:              "sync_currentState_error",
+			syncMode:          true,
+			currentStateError: test.ErrTest,
+			expectedError:     test.ErrTest,
+		},
+		{
+			name:               "noSync_restore_error",
+			syncMode:           false,
+			currentStateValues: values,
+			restoreError:       test.ErrTest,
+			expectedError:      test.ErrTest,
+		},
+		{
+			name: "sync_restore_error",
syncMode:           true,
+			currentStateValues: values,
+			restoreError:       test.ErrTest,
+			expectedError:      test.ErrTest,
+		},
+		{
+			name:               "noSync_success",
+			syncMode:           false,
+			currentStateValues: values,
+		},
+		{
+			name:               "sync_success",
+			syncMode:           true,
+			currentStateValues: values,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			ctx := context.Background()
+
+			confMock := new(configMock)
+			inMemoryStorageMock := new(metricStorageMock)
+			backupStorageMock := new(metricStorageMock)
+
+			confMock.On("SyncMode").Return(tt.syncMode)
+			inMemoryStorageMock.On("GetMetricValues", ctx).Return(tt.currentStateValues, tt.currentStateError)
+			backupStorageMock.On("Restore", ctx, tt.currentStateValues).Return(tt.restoreError)
+
+			strategy := NewStorageStrategy(confMock, inMemoryStorageMock, backupStorageMock)
+			actualError := strategy.CreateBackup(ctx)
+
+			assert.ErrorIs(t, actualError, tt.expectedError)
+
+			inMemoryStorageMock.AssertCalled(t, "GetMetricValues", ctx)
+
+			if tt.currentStateError == nil {
+				backupStorageMock.AssertCalled(t, "Restore", ctx, tt.currentStateValues)
+			} else {
+				backupStorageMock.AssertNotCalled(t, "Restore", mock.Anything, mock.Anything)
+			}
+		})
+	}
+}
+
+func TestStorageStrategy_RestoreFromBackup(t *testing.T) {
+	values := map[string]map[string]string{}
+
+	tests := []struct {
+		name               string
+		syncMode           bool
+		currentStateValues map[string]map[string]string
+		currentStateError  error
+		restoreError       error
+		expectedError      error
+	}{
+		{
+			name:              "noSync_currentState_error",
+			syncMode:          false,
+			currentStateError: test.ErrTest,
+			expectedError:     test.ErrTest,
+		},
+		{
+			name:              "sync_currentState_error",
+			syncMode:          true,
+			currentStateError: test.ErrTest,
+			expectedError:     test.ErrTest,
+		},
+		{
+			name:               "noSync_restore_error",
+			syncMode:           false,
+			currentStateValues: values,
+			restoreError:       test.ErrTest,
+			expectedError:      test.ErrTest,
+		},
+		{
+			name:               "sync_restore_error",
+			syncMode:           true,
+			currentStateValues: values,
+			restoreError:
test.ErrTest,
+			expectedError:      test.ErrTest,
+		},
+		{
+			name:               "noSync_success",
+			syncMode:           false,
+			currentStateValues: values,
+		},
+		{
+			name:               "sync_success",
+			syncMode:           true,
+			currentStateValues: values,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			ctx := context.Background()
+
+			confMock := new(configMock)
+			inMemoryStorageMock := new(metricStorageMock)
+			backupStorageMock := new(metricStorageMock)
+
+			confMock.On("SyncMode").Return(tt.syncMode)
+			backupStorageMock.On("GetMetricValues", ctx).Return(tt.currentStateValues, tt.currentStateError)
+			inMemoryStorageMock.On("Restore", ctx, tt.currentStateValues).Return(tt.restoreError)
+
+			strategy := NewStorageStrategy(confMock, inMemoryStorageMock, backupStorageMock)
+			actualError := strategy.RestoreFromBackup(ctx)
+
+			assert.ErrorIs(t, actualError, tt.expectedError)
+
+			backupStorageMock.AssertCalled(t, "GetMetricValues", ctx)
+
+			if tt.currentStateError == nil {
+				inMemoryStorageMock.AssertCalled(t, "Restore", ctx, tt.currentStateValues)
+			} else {
+				inMemoryStorageMock.AssertNotCalled(t, "Restore", mock.Anything, mock.Anything)
+			}
+		})
+	}
+}
+
+func TestStorageStrategy_Close(t *testing.T) {
+	values := map[string]map[string]string{}
+
+	tests := []struct {
+		name               string
+		syncMode           bool
+		currentStateValues map[string]map[string]string
+		currentStateError  error
+		restoreError       error
+		expectedError      error
+	}{
+		{
+			name:              "noSync_currentState_error",
+			syncMode:          false,
+			currentStateError: test.ErrTest,
+			expectedError:     test.ErrTest,
+		},
+		{
+			name:              "sync_currentState_error",
+			syncMode:          true,
+			currentStateError: test.ErrTest,
+			expectedError:     test.ErrTest,
+		},
+		{
+			name:               "noSync_restore_error",
+			syncMode:           false,
+			currentStateValues: values,
+			restoreError:       test.ErrTest,
+			expectedError:      test.ErrTest,
+		},
+		{
+			name:               "sync_restore_error",
+			syncMode:           true,
+			currentStateValues: values,
+			restoreError:       test.ErrTest,
+			expectedError:      test.ErrTest,
+		},
+		{
+			name:
"noSync_success",
+			syncMode:           false,
+			currentStateValues: values,
+		},
+		{
+			name:               "sync_success",
+			syncMode:           true,
+			currentStateValues: values,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			ctx := context.Background()
+
+			confMock := new(configMock)
+			inMemoryStorageMock := new(metricStorageMock)
+			backupStorageMock := new(metricStorageMock)
+
+			confMock.On("SyncMode").Return(tt.syncMode)
+			inMemoryStorageMock.On("GetMetricValues", ctx).Return(tt.currentStateValues, tt.currentStateError)
+			backupStorageMock.On("Restore", ctx, tt.currentStateValues).Return(tt.restoreError)
+
+			strategy := NewStorageStrategy(confMock, inMemoryStorageMock, backupStorageMock)
+			actualError := strategy.Close()
+
+			assert.ErrorIs(t, actualError, tt.expectedError)
+
+			inMemoryStorageMock.AssertCalled(t, "GetMetricValues", ctx)
+
+			if tt.currentStateError == nil {
+				backupStorageMock.AssertCalled(t, "Restore", ctx, tt.currentStateValues)
+			} else {
+				backupStorageMock.AssertNotCalled(t, "Restore", mock.Anything, mock.Anything)
+			}
+		})
+	}
+}
+
+func (c *configMock) SyncMode() bool {
+	args := c.Called()
+	return args.Bool(0)
+}
+
+func (s *metricStorageMock) GetMetric(ctx context.Context, metricType string, metricName string) (metrics.Metric, error) {
+	args := s.Called(ctx, metricType, metricName)
+	result := args.Get(0)
+	if result == nil {
+		return nil, args.Error(1)
+	}
+	return result.(metrics.Metric), args.Error(1)
+}
+
+func (s *metricStorageMock) AddMetricValues(ctx context.Context, metric []metrics.Metric) ([]metrics.Metric, error) {
+	args := s.Called(ctx, metric)
+	result := args.Get(0)
+	if result == nil {
+		return nil, args.Error(1)
+	}
+
+	return result.([]metrics.Metric), args.Error(1)
+}
+
+func (s *metricStorageMock) GetMetricValues(ctx context.Context) (map[string]map[string]string, error) {
+	args := s.Called(ctx)
+	return args.Get(0).(map[string]map[string]string), args.Error(1)
+}
+
+func (s *metricStorageMock) GetMetricValue(ctx
context.Context, metricType string, metricName string) (float64, error) { + args := s.Called(ctx, metricType, metricName) + return args.Get(0).(float64), args.Error(1) +} + +func (s *metricStorageMock) Restore(ctx context.Context, metricValues map[string]map[string]string) error { + args := s.Called(ctx, metricValues) + return args.Error(0) +} diff --git a/internal/metrics/storage/storageStrategy.go b/internal/metrics/storage/storageStrategy.go new file mode 100644 index 0000000..31423d8 --- /dev/null +++ b/internal/metrics/storage/storageStrategy.go @@ -0,0 +1,90 @@ +package storage + +import ( + "context" + "sync" + + "github.com/MlDenis/prometheus_wannabe/internal/logger" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" +) + +type storageStrategyConfig interface { + SyncMode() bool +} + +type StorageStrategy struct { + backupStorage MetricsStorage + inMemoryStorage MetricsStorage + syncMode bool + lock sync.RWMutex +} + +func NewStorageStrategy(config storageStrategyConfig, inMemoryStorage MetricsStorage, fileStorage MetricsStorage) *StorageStrategy { + return &StorageStrategy{ + backupStorage: fileStorage, + inMemoryStorage: inMemoryStorage, + syncMode: config.SyncMode(), + } +} + +func (s *StorageStrategy) AddMetricValues(ctx context.Context, metric []metrics.Metric) ([]metrics.Metric, error) { + s.lock.Lock() + defer s.lock.Unlock() + + result, err := s.inMemoryStorage.AddMetricValues(ctx, metric) + if err != nil { + return result, logger.WrapError("add metric values to memory storage", err) + } + + if s.syncMode { + _, err = s.backupStorage.AddMetricValues(ctx, result) + if err != nil { + return nil, logger.WrapError("add metric values to backup storage", err) + } + } + + return result, nil +} + +func (s *StorageStrategy) GetMetricValues(ctx context.Context) (map[string]map[string]string, error) { + s.lock.RLock() + defer s.lock.RUnlock() + + return s.inMemoryStorage.GetMetricValues(ctx) +} + +func (s *StorageStrategy) GetMetric(ctx 
context.Context, metricType string, metricName string) (metrics.Metric, error) { + s.lock.RLock() + defer s.lock.RUnlock() + + return s.inMemoryStorage.GetMetric(ctx, metricType, metricName) +} + +func (s *StorageStrategy) Restore(ctx context.Context, metricValues map[string]map[string]string) error { + s.lock.Lock() + defer s.lock.Unlock() + + return s.inMemoryStorage.Restore(ctx, metricValues) +} + +func (s *StorageStrategy) CreateBackup(ctx context.Context) error { + currentState, err := s.inMemoryStorage.GetMetricValues(ctx) + if err != nil { + return logger.WrapError("get metrics from memory storage", err) + } + + return s.backupStorage.Restore(ctx, currentState) +} + +func (s *StorageStrategy) RestoreFromBackup(ctx context.Context) error { + restoredState, err := s.backupStorage.GetMetricValues(ctx) + if err != nil { + return logger.WrapError("get metrics from backup storage", err) + } + + return s.inMemoryStorage.Restore(ctx, restoredState) +} + +func (s *StorageStrategy) Close() error { + return s.CreateBackup(context.Background()) // force backup +} diff --git a/internal/metrics/types/counterMetric.go b/internal/metrics/types/counterMetric.go new file mode 100644 index 0000000..9fb2db1 --- /dev/null +++ b/internal/metrics/types/counterMetric.go @@ -0,0 +1,61 @@ +package types + +import ( + "fmt" + "hash" + + "github.com/MlDenis/prometheus_wannabe/internal/converter" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" +) + +type counterMetric struct { + name string + value int64 +} + +func NewCounterMetric(name string) metrics.Metric { + return &counterMetric{ + name: name, + } +} + +func (m *counterMetric) GetType() string { + return "counter" +} + +func (m *counterMetric) GetName() string { + return m.name +} + +func (m *counterMetric) GetValue() float64 { + + return float64(m.value) +} + +func (m *counterMetric) GetStringValue() string { + + return converter.IntToString(m.value) +} + +func (m *counterMetric) SetValue(value float64) float64 { + 
return m.setValue(m.value + int64(value)) +} + +func (m *counterMetric) ResetState() { + m.setValue(0) +} + +func (m *counterMetric) GetHash(hash hash.Hash) ([]byte, error) { + + _, err := hash.Write([]byte(fmt.Sprintf("%s:counter:%d", m.name, m.value))) + if err != nil { + return nil, err + } + + return hash.Sum(nil), nil +} + +func (m *counterMetric) setValue(value int64) float64 { + m.value = value + return float64(m.value) +} diff --git a/internal/metrics/types/gaugeMetric.go b/internal/metrics/types/gaugeMetric.go new file mode 100644 index 0000000..eef3d83 --- /dev/null +++ b/internal/metrics/types/gaugeMetric.go @@ -0,0 +1,56 @@ +package types + +import ( + "fmt" + "hash" + + "github.com/MlDenis/prometheus_wannabe/internal/converter" + "github.com/MlDenis/prometheus_wannabe/internal/metrics" +) + +type gaugeMetric struct { + name string + value float64 +} + +func NewGaugeMetric(name string) metrics.Metric { + return &gaugeMetric{ + name: name, + } +} + +func (m *gaugeMetric) GetType() string { + return "gauge" +} + +func (m *gaugeMetric) GetName() string { + return m.name +} + +func (m *gaugeMetric) GetValue() float64 { + + return m.value +} + +func (m *gaugeMetric) GetStringValue() string { + + return converter.FloatToString(m.value) +} + +func (m *gaugeMetric) SetValue(value float64) float64 { + m.value = value + + return m.value +} + +func (m *gaugeMetric) ResetState() { +} + +func (m *gaugeMetric) GetHash(hash hash.Hash) ([]byte, error) { + + _, err := hash.Write([]byte(fmt.Sprintf("%s:gauge:%f", m.name, m.value))) + if err != nil { + return nil, err + } + return hash.Sum(nil), nil +} diff --git a/internal/test/errors.go b/internal/test/errors.go new file mode 100644 index 0000000..c2790ad --- /dev/null +++ b/internal/test/errors.go @@ -0,0 +1,5 @@ +package test + +import "errors" + +var ErrTest = errors.New("test error message") diff --git a/internal/test/helpers.go b/internal/test/helpers.go new file mode 100644 index 0000000..cc8cc10 --- /dev/null +++ 
b/internal/test/helpers.go @@ -0,0 +1,46 @@ +package test + +import ( + "github.com/MlDenis/prometheus_wannabe/internal/metrics" + "github.com/MlDenis/prometheus_wannabe/internal/metrics/types" +) + +type KeyValue struct { + Key string + Value float64 +} + +func CreateCounterMetric(name string, value float64) metrics.Metric { + return CreateMetric(types.NewCounterMetric, name, value) +} + +func CreateGaugeMetric(name string, value float64) metrics.Metric { + return CreateMetric(types.NewGaugeMetric, name, value) +} + +func CreateMetric(metricFactory func(string) metrics.Metric, name string, value float64) metrics.Metric { + metric := metricFactory(name) + metric.SetValue(value) + return metric +} + +func ArrayToChan[T any](items []T) <-chan T { + result := make(chan T) + go func() { + defer close(result) + for _, item := range items { + result <- item + } + }() + + return result +} + +func ChanToArray[T any](items <-chan T) []T { + result := []T{} + for item := range items { + result = append(result, item) + } + + return result +} diff --git a/internal/worker/hardWorker.go b/internal/worker/hardWorker.go new file mode 100644 index 0000000..efb1bb2 --- /dev/null +++ b/internal/worker/hardWorker.go @@ -0,0 +1,37 @@ +package worker + +import ( + "context" + "time" + + "github.com/sirupsen/logrus" +) + +type HardWorker struct { + workFunc func(ctx context.Context) error +} + +func NewHardWorker(workFunc func(ctx context.Context) error) HardWorker { + return HardWorker{ + workFunc: workFunc, + } +} + +func (w *HardWorker) StartWork(ctx context.Context, inv int) { + interval := time.Duration(inv) * time.Second + + ticker := time.NewTicker(interval) + defer ticker.Stop() + for { + select { + case <-ticker.C: + err := w.workFunc(ctx) + if err != nil { + logrus.Errorf("periodic worker error: %v", err) + } + case <-ctx.Done(): + logrus.Errorf("periodic worker canceled") + return + } + } +} diff --git a/internal/worker/hardWorker_test.go b/internal/worker/hardWorker_test.go 
new file mode 100644 index 0000000..eb9e1f8 --- /dev/null +++ b/internal/worker/hardWorker_test.go @@ -0,0 +1,43 @@ +package worker + +import ( + "context" + "testing" + + "github.com/MlDenis/prometheus_wannabe/internal/test" + + "github.com/stretchr/testify/assert" +) + +func TestPeriodicWorker_CloseContext(t *testing.T) { + wasCalled := false + ctx, cancel := context.WithCancel(context.Background()) + + worker := NewHardWorker(func(context.Context) error { + wasCalled = true + return nil + }) + + cancel() + worker.StartWork(ctx, 1) + assert.False(t, wasCalled) +} + +func TestPeriodicWorker_SuccessCall(t *testing.T) { + wasCalled := false + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + worker := NewHardWorker(func(context.Context) error { + if !wasCalled { + wasCalled = true + return test.ErrTest + } + + cancel() + return nil + }) + + worker.StartWork(ctx, 1) + assert.True(t, wasCalled) +} diff --git a/profiles/base.pprof b/profiles/base.pprof new file mode 100644 index 0000000..971077c Binary files /dev/null and b/profiles/base.pprof differ diff --git a/profiles/result.pprof b/profiles/result.pprof new file mode 100644 index 0000000..e4de47c Binary files /dev/null and b/profiles/result.pprof differ