diff --git a/.env b/.env
index 71c1871..a19e0c7 100644
--- a/.env
+++ b/.env
@@ -51,10 +51,13 @@ TEST_SERVICE_LOG_LEVEL=ERROR
 TEST_EVM_QUERY_SERVICE_URL=http://kava-validator:8545
 # TEST_REDIS_ENDPOINT_URL is an url of redis
 TEST_REDIS_ENDPOINT_URL=localhost:6379
+# Metrics tests look for metrics created within a given window of time.
+# TEST_EXTEND_METRIC_WINDOW_MS extends that window, which is useful when
+# tests fail because metrics hadn't finished being created within the window.
+TEST_EXTEND_METRIC_WINDOW_MS=100
 
 ##### Kava Node Config
-
 ##### Kava Proxy Config
 # What port the proxy service listens on
 PROXY_SERVICE_PORT=7777
@@ -150,7 +153,6 @@ HOSTNAME_TO_ACCESS_CONTROL_ALLOW_ORIGIN_VALUE_MAP=
 
 ##### Database Config
 POSTGRES_PASSWORD=password
-
 ##### Redis Config
 # for local development don't require a password to connect to the redis server
 ALLOW_EMPTY_PASSWORD=yes
diff --git a/main_test.go b/main_test.go
index d0d2167..18f589a 100644
--- a/main_test.go
+++ b/main_test.go
@@ -55,6 +55,7 @@ var (
 		}
 		return logger
 	}()
+	testExtendMetricWindowMs, _ = strconv.ParseInt(os.Getenv("TEST_EXTEND_METRIC_WINDOW_MS"), 10, 0)
 
 	proxyUnconfiguredUrl = os.Getenv("TEST_UNCONFIGURED_PROXY_URL")
 
@@ -88,7 +89,8 @@ var (
 // search for any request metrics between startTime and time.Now() for particular request methods
 // if testedmethods is empty, all metrics in timeframe are returned.
 func findMetricsInWindowForMethods(db database.PostgresClient, startTime time.Time, testedmethods []string) []database.ProxiedRequestMetric {
-	endTime := time.Now()
+	// add a small buffer into the future in case metrics are still being created
+	endTime := time.Now().Add(time.Duration(testExtendMetricWindowMs) * time.Millisecond)
 	var nextCursor int64
 	var proxiedRequestMetrics []database.ProxiedRequestMetric
 
@@ -151,7 +153,7 @@ func waitForMetricsInWindow(
 
 	// besides verification, waiting for the metrics ensures future tests don't fail b/c metrics are being processed
 	require.Eventually(t, func() bool {
-		metrics = findMetricsInWindowForMethods(db, startTime, []string{})
+		metrics = findMetricsInWindowForMethods(db, startTime, testedmethods)
 		return len(metrics) >= expected
 	}, timeout, time.Millisecond, fmt.Sprintf("failed to find %d metrics in %f seconds from start %s", expected, timeout.Seconds(), startTime))
 
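Note on the mechanic: TEST_EXTEND_METRIC_WINDOW_MS is parsed once at test startup and pushes the end of the metric search window into the future. Below is a minimal, self-contained sketch of that behavior; the env var name and the strconv.ParseInt call mirror the diff above, while the main function and the extendMs name are illustrative only.

// Sketch: how TEST_EXTEND_METRIC_WINDOW_MS widens the metric search window.
// Only the env var name and the ParseInt call come from the change above;
// everything else is illustrative and not part of the service's code.
package main

import (
	"fmt"
	"os"
	"strconv"
	"time"
)

func main() {
	// As in main_test.go: a missing or malformed value parses to 0,
	// which leaves the window unextended.
	extendMs, _ := strconv.ParseInt(os.Getenv("TEST_EXTEND_METRIC_WINDOW_MS"), 10, 0)

	startTime := time.Now()
	// ... proxied requests happen here; metrics are persisted asynchronously ...

	// The end of the search window is pushed extendMs milliseconds past "now"
	// so metrics still being written when the query runs are not missed.
	endTime := time.Now().Add(time.Duration(extendMs) * time.Millisecond)
	fmt.Printf("searching for metrics between %s and %s\n", startTime, endTime)
}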
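The waitForMetricsInWindow change also leans on testify's require.Eventually, which re-evaluates the condition on every tick until it returns true or the timeout elapses. A stripped-down sketch of that polling pattern follows; countRows stands in for findMetricsInWindowForMethods and is purely illustrative.

// Sketch of the polling pattern used by waitForMetricsInWindow: re-check a
// condition on an interval until it holds or the timeout is hit.
package main

import (
	"testing"
	"time"

	"github.com/stretchr/testify/require"
)

// countRows stands in for findMetricsInWindowForMethods; illustrative only.
func countRows() int { return 1 }

func TestEventuallySketch(t *testing.T) {
	expected := 1
	require.Eventually(t, func() bool {
		// The closure is re-evaluated each tick, so slow asynchronous
		// writes eventually satisfy the condition instead of failing fast.
		return countRows() >= expected
	}, 5*time.Second, time.Millisecond, "failed to find %d rows", expected)
}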