diff --git a/.github/workflows/benchmark.yaml b/.github/workflows/benchmark.yaml new file mode 100644 index 0000000000..14faf4801c --- /dev/null +++ b/.github/workflows/benchmark.yaml @@ -0,0 +1,98 @@ +# Copyright 2021 iLogtail Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: Benchmark + +on: + pull_request: + paths-ignore: + - 'docs/**' + - 'example_config/**' + - 'docker/**' + - 'k8s_template/**' + - 'changes/**' + - 'licenses/**' + - 'CHANGELOG.md' + push: + branches: + - main + - 1.* +jobs: + CI: + runs-on: ${{ matrix.runner }} + timeout-minutes: 60 + strategy: + matrix: + go-version: [ 1.19.10 ] + runner: [ ubuntu-latest ] + fail-fast: true + steps: + # Clean up space to prevent action from running out of disk space. 
+ - name: clean + if: matrix.runner == 'ubuntu-latest' + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf /opt/ghc + sudo rm -rf "/usr/local/share/boost" + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + sudo -E apt-get -qq autoremove --purge + sudo -E apt-get -qq clean + + - name: Check disk space + run: | + df -hT $PWD + + - name: Set up Go ${{ matrix.go-version }} + uses: actions/setup-go@v2 + with: + go-version: ${{ matrix.go-version }} + + - name: Check out code + uses: actions/checkout@v2 + with: + submodules: true + + - name: Update Docker-compose to v2 + run: | + sudo curl -SL https://github.com/docker/compose/releases/download/v2.7.0/docker-compose-linux-x86_64 -o /usr/local/bin/docker-compose + sudo chmod +x /usr/local/bin/docker-compose + + - name: System environment + run: | + uname -r + docker --version + go version + + - name: Run benchmark + env: + BUILD_LOGTAIL_UT: OFF + WITHOUTGDB: ON + run: make benchmark + + - name: Store benchmark result + uses: benchmark-action/github-action-benchmark@v1 + with: + name: benchmark + tool: "customSmallerIsBetter" + output-file-path: "test/benchmark/report/combined_benchmark.json" + github-token: ${{ secrets.GITHUB_TOKEN }} + auto-push: true + + result: + runs-on: ubuntu-latest + timeout-minutes: 60 + needs: [ CI ] + steps: + - name: Build Result + run: echo "Just to make the GitHub merge button green" \ No newline at end of file diff --git a/Makefile b/Makefile index dea4e8a51d..da1bea47c8 100644 --- a/Makefile +++ b/Makefile @@ -185,7 +185,7 @@ e2e-performance: clean docker gocdocker .PHONY: unittest_e2e_engine unittest_e2e_engine: clean gocdocker - cd test && go test $$(go list ./... | grep -Ev "engine|e2e") -coverprofile=../e2e-engine-coverage.txt -covermode=atomic -tags docker_ready + cd test && go test $$(go list ./... 
| grep -Ev "engine|e2e|benchmark") -coverprofile=../e2e-engine-coverage.txt -covermode=atomic -tags docker_ready .PHONY: unittest_plugin unittest_plugin: clean import_plugins @@ -209,6 +209,13 @@ unittest_pluginmanager: clean import_plugins go test $$(go list ./...|grep -Ev "telegraf|external|envconfig"| grep -E "plugin_main|pluginmanager") -coverprofile .coretestCoverage.txt mv ./plugins/input/prometheus/input_prometheus.go.bak ./plugins/input/prometheus/input_prometheus.go +# benchmark +.PHONY: benchmark +benchmark: clean gocdocker e2edocker + ./scripts/e2e.sh benchmark performance + ./scripts/benchmark_collect_result.sh + + .PHONY: all all: clean import_plugins ./scripts/gen_build_scripts.sh all $(GENERATED_HOME) $(VERSION) $(BUILD_REPOSITORY) $(OUT_DIR) $(DOCKER_BUILD_EXPORT_GO_ENVS) $(DOCKER_BUILD_COPY_GIT_CONFIGS) $(PLUGINS_CONFIG_FILE) $(GO_MOD_FILE) diff --git a/scripts/benchmark_collect_result.sh b/scripts/benchmark_collect_result.sh new file mode 100755 index 0000000000..1eeac303c7 --- /dev/null +++ b/scripts/benchmark_collect_result.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +# Define the input files and the output file +input_files=($(find test/benchmark/report -type f -name '*benchmark.json')) +output_file="test/benchmark/report/combined_benchmark.json" + +# Start the output file with an opening square bracket +rm -f "$output_file" +touch "$output_file" +echo '[' > "$output_file" + +# Loop through each input file +for i in "${!input_files[@]}"; do + # Read the file, remove the first and last line (the square brackets), and append to the output file + cat "${input_files[$i]}" | sed '1d;$d' >> "$output_file" + + # If this is not the last file, append a comma to separate the arrays + if [ $i -lt $((${#input_files[@]} - 1)) ]; then + echo ',' >> "$output_file" + fi +done + +# Finish the output file with a closing square bracket +echo ']' >> "$output_file" \ No newline at end of file diff --git 
a/test/benchmark/test_cases/performance_file_to_blackhole_filebeat/docker-compose.yaml b/test/benchmark/test_cases/performance_file_to_blackhole_filebeat/docker-compose.yaml index 8e12d4a340..9e90378fa9 100644 --- a/test/benchmark/test_cases/performance_file_to_blackhole_filebeat/docker-compose.yaml +++ b/test/benchmark/test_cases/performance_file_to_blackhole_filebeat/docker-compose.yaml @@ -1,22 +1,6 @@ version: '3.8' services: - elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:8.14.2 - environment: - - ELASTIC_PASSWORD=elastic - - discovery.type=single-node - - xpack.security.http.ssl.enabled=false - - xpack.license.self_generated.type=trial - ports: - - 9200:9200 - healthcheck: - test: ["CMD-SHELL", "curl -u elastic:elastic -s http://localhost:9200/_cluster/health | grep -q '\"status\":\"green\"'"] - interval: 10s - timeout: 5s - retries: 3 - restart: always - filebeat: image: docker.elastic.co/beats/filebeat:8.14.2 user: root @@ -24,9 +8,3 @@ services: - ./filebeat.yml:/usr/share/filebeat/filebeat.yml:ro - ./a.log:/home/filebeat/a.log:ro command: filebeat -e --strict.perms=false - environment: - - OUTPUT_ELASTICSEARCH_HOSTS=["elasticsearch:9200"] - depends_on: - elasticsearch: - condition: service_healthy - restart: always diff --git a/test/benchmark/test_cases/performance_file_to_blackhole_filebeat/filebeat.yml b/test/benchmark/test_cases/performance_file_to_blackhole_filebeat/filebeat.yml index 1c95c5ec77..e2de4b2806 100644 --- a/test/benchmark/test_cases/performance_file_to_blackhole_filebeat/filebeat.yml +++ b/test/benchmark/test_cases/performance_file_to_blackhole_filebeat/filebeat.yml @@ -7,11 +7,12 @@ filebeat.inputs: processors: - decode_json_fields: fields: ["message"] - process_array: false - max_depth: 1 - target: "" - overwrite_keys: false - add_error_key: true + target: "json" + - drop_event: + when: + not: + equals: + json.user-agent: "no-agent" -output.discard: - enabled: true +output.console: + pretty: true diff --git 
a/test/benchmark/test_cases/performance_file_to_blackhole_fluentbit/main.conf b/test/benchmark/test_cases/performance_file_to_blackhole_fluentbit/main.conf index baaf715b82..7436cae6b6 100644 --- a/test/benchmark/test_cases/performance_file_to_blackhole_fluentbit/main.conf +++ b/test/benchmark/test_cases/performance_file_to_blackhole_fluentbit/main.conf @@ -1,10 +1,16 @@ [SERVICE] - Parsers+File /tmp/parsers.conf + parsers_file /tmp/parsers.conf [INPUT] - Name tail - Path /home/fluentbit/a.log + name tail + path /home/fluentbit/a.log + parser json + +[FILTER] + name grep + match * + regex user-agent no-agent [OUTPUT] - Name stdout - Match non + name stdout + match * diff --git a/test/benchmark/test_cases/performance_file_to_blackhole_fluentbit/parsers.conf b/test/benchmark/test_cases/performance_file_to_blackhole_fluentbit/parsers.conf index 27cc2efc00..9229d0ab6a 100644 --- a/test/benchmark/test_cases/performance_file_to_blackhole_fluentbit/parsers.conf +++ b/test/benchmark/test_cases/performance_file_to_blackhole_fluentbit/parsers.conf @@ -1,3 +1,3 @@ [PARSER] - Name docker + Name json Format json \ No newline at end of file diff --git a/test/benchmark/test_cases/performance_file_to_blackhole_vector/docker-compose.yaml b/test/benchmark/test_cases/performance_file_to_blackhole_vector/docker-compose.yaml index 3763d9a3e6..c133cb69ec 100644 --- a/test/benchmark/test_cases/performance_file_to_blackhole_vector/docker-compose.yaml +++ b/test/benchmark/test_cases/performance_file_to_blackhole_vector/docker-compose.yaml @@ -5,5 +5,4 @@ services: image: timberio/vector:0.39.0-debian volumes: - ./vector.yaml:/etc/vector/vector.yaml - - ./a.log:/home/vector-log/a.log - restart: always \ No newline at end of file + - ./a.log:/home/vector-log/a.log \ No newline at end of file diff --git a/test/benchmark/test_cases/performance_file_to_blackhole_vector/vector.yaml b/test/benchmark/test_cases/performance_file_to_blackhole_vector/vector.yaml index cd13fc3489..c108a3bdc8 100644 
--- a/test/benchmark/test_cases/performance_file_to_blackhole_vector/vector.yaml +++ b/test/benchmark/test_cases/performance_file_to_blackhole_vector/vector.yaml @@ -4,8 +4,25 @@ sources: include: - /home/vector-log/*.log +transforms: + transform_json: + type: remap + inputs: + - input_file + source: |- + . = parse_json!(string!(.message)) + filter_agent: + type: filter + inputs: + - transform_json + condition: + type: "vrl" + source: ."user-agent" == "no-agent" + sinks: - my_sink_id: - type: blackhole + output_std: + type: console inputs: - - input_file \ No newline at end of file + - filter_agent + encoding: + codec: json \ No newline at end of file diff --git a/test/engine/setup/monitor/monitor.go b/test/engine/setup/monitor/monitor.go index 3d65521df6..214c47ddf2 100644 --- a/test/engine/setup/monitor/monitor.go +++ b/test/engine/setup/monitor/monitor.go @@ -3,10 +3,8 @@ package monitor import ( "context" "fmt" - "log" "os" "path/filepath" - "strings" "sync/atomic" "time" @@ -53,38 +51,21 @@ func StopMonitor(ctx context.Context) (context.Context, error) { func monitoring(client *client.Client, containerName string) { // create csv file - reportDir := filepath.Join(config.CaseHome, "report") - reportDir, err := filepath.Abs(reportDir) - if err != nil { - log.Fatalf("Failed to get absolute path: %s", err) - } - if _, err = os.Stat(reportDir); os.IsNotExist(err) { - // 文件夹不存在,创建文件夹 - err = os.MkdirAll(reportDir, 0750) - if err != nil { - log.Fatalf("Failed to create folder: %s", err) - } - } + root, _ := filepath.Abs(".") + reportDir := root + "/report/" + benchmarkFile := reportDir + config.CaseName + "_benchmark.json" // new ticker ticker := time.NewTicker(interval * time.Second) defer ticker.Stop() // read from cadvisor per interval seconds request := &v1.ContainerInfoRequest{NumStats: 10} - monitorStatistic := NewMonitorStatistic() + monitorStatistic := NewMonitorStatistic(config.CaseName) for { select { case <-stopCh: isMonitoring.Store(false) - var builder 
strings.Builder - builder.WriteString("Metric,Value\n") - builder.WriteString(fmt.Sprintf("%s,%f\n", "CPU Usage Max(%)", monitorStatistic.cpu.maxVal)) - builder.WriteString(fmt.Sprintf("%s,%f\n", "CPU Usage Avg(%)", monitorStatistic.cpu.avgVal)) - builder.WriteString(fmt.Sprintf("%s,%f\n", "Memory Usage Max(MB)", monitorStatistic.mem.maxVal)) - builder.WriteString(fmt.Sprintf("%s,%f\n", "Memory Usage Avg(MB)", monitorStatistic.mem.avgVal)) - err = os.WriteFile(filepath.Join(reportDir, "monitor.csv"), []byte(builder.String()), 0600) - if err != nil { - log.Default().Printf("Failed to write monitor result: %s", err) - } + bytes, _ := monitorStatistic.MarshalJSON() + _ = os.WriteFile(benchmarkFile, bytes, 0600) return case <-ticker.C: // 获取容器信息 diff --git a/test/engine/setup/monitor/statistic.go b/test/engine/setup/monitor/statistic.go index 442628dfe6..efa5eb54ba 100644 --- a/test/engine/setup/monitor/statistic.go +++ b/test/engine/setup/monitor/statistic.go @@ -1,6 +1,9 @@ package monitor import ( + "encoding/json" + "fmt" + v1 "github.com/google/cadvisor/info/v1" ) @@ -25,13 +28,15 @@ func (s *Info) Add(val float64) { } type Statistic struct { + name string cpu Info mem Info lastStat *v1.ContainerStats } -func NewMonitorStatistic() *Statistic { +func NewMonitorStatistic(name string) *Statistic { return &Statistic{ + name: name, cpu: Info{}, mem: Info{}, lastStat: nil, @@ -57,3 +62,28 @@ func calculateCPUUsageRate(lastStat, stat *v1.ContainerStats) float64 { cpuUsageRateTotal := float64(cpuUsageTotal) * 100 / float64(stat.Timestamp.Sub(lastStat.Timestamp).Nanoseconds()) return cpuUsageRateTotal } + +type StatisticItem struct { + Name string `json:"name"` + Value float64 `json:"value"` + Unit string `json:"unit"` +} + +func (m *Statistic) MarshalJSON() ([]byte, error) { + items := []StatisticItem{ + {"CPU_Usage_Max-" + m.name, m.cpu.maxVal, "%"}, + {"CPU_Usage_Avg-" + m.name, m.cpu.avgVal, "%"}, + {"Memory_Usage_Max-" + m.name, m.mem.maxVal, "MB"}, + 
{"Memory_Usage_Avg-" + m.name, m.mem.avgVal, "MB"}, + } + + // Serialize the slice to JSON + jsonData, err := json.MarshalIndent(items, "", " ") + if err != nil { + fmt.Println("Error serializing statistics:", err) + return nil, err + } + + // Output the JSON string + return jsonData, nil +}