Add templates for report use case
JuanDGiraldoM committed Jan 25, 2024
1 parent 2fbaf93 commit 34c21bb
Showing 5 changed files with 161 additions and 103 deletions.
6 changes: 4 additions & 2 deletions lib/domain/model/config/response.ex
@@ -8,8 +8,10 @@ defmodule DistributedPerformanceAnalyzer.Domain.Model.Config.Response do
constructor do
field(:status, :integer, constructor: &is_integer/1)
field(:message, String.t(), constructor: &is_string/1)
field(:headers, :list, constructor: &is_list/1)
field(:time, :integer, constructor: &is_integer/1)
field(:elapsed, :integer, constructor: &is_integer/1)
field(:timestamp, :integer, constructor: &is_integer/1)
field(:connection_time, :integer, constructor: &is_integer/1)
field(:content_type, String.t(), constructor: &is_string/1)
field(:received_bytes, :integer, constructor: &is_integer/1)
end
end
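
Assuming the `constructor` macro generates a `new/1` that returns `{:ok, struct}` on valid input (an assumption based on how the model is declared above, not confirmed by this diff), constructing the updated Response might look like this sketch with illustrative values:

    alias DistributedPerformanceAnalyzer.Domain.Model.Config.Response

    {:ok, response} =
      Response.new(
        status: 200,
        message: "OK",
        headers: [{"content-type", "application/json"}],
        elapsed: 123,
        timestamp: 1_706_140_800_000,
        connection_time: 12,
        content_type: "application/json",
        received_bytes: 512
      )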
1 change: 1 addition & 0 deletions lib/domain/model/user/user.ex
@@ -7,6 +7,7 @@ defmodule DistributedPerformanceAnalyzer.Domain.Model.User do
"""

constructor do
field(:id, String.t(), constructor: &is_string/1)
field(:request, Request.t(), constructor: &Request.new/1)
field(:dataset_name, :atomics | String.t())
end
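
With the new :id field, building a User might look like the following sketch (again assuming a generated new/1; `request` stands for an existing Request struct):

    {:ok, user} =
      DistributedPerformanceAnalyzer.Domain.Model.User.new(
        id: "user-1",
        request: request,
        dataset_name: "dataset.csv"
      )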
112 changes: 112 additions & 0 deletions lib/domain/use_cases/report_use_case.ex
@@ -0,0 +1,112 @@
defmodule DistributedPerformanceAnalyzer.Domain.UseCase.ReportUseCase do
@moduledoc """
Report use case.
Called by every module that needs to print information to output files or logs.
"""
use Task
require Logger

alias DistributedPerformanceAnalyzer.Config.AppConfig
alias DistributedPerformanceAnalyzer.Domain.Model.RequestResult

alias DistributedPerformanceAnalyzer.Domain.UseCase.{
Config.ConfigUseCase,
MetricsAnalyzerUseCase
}

alias DistributedPerformanceAnalyzer.Utils.DataTypeUtils

@report_exporter Application.compile_env!(AppConfig.get_app_name(), :report_exporter)
@valid_extensions ["csv"]
@path_report_jmeter "config/jmeter.csv"
@path_csv_report "config/result.csv"

def init(sorted_curve, total_data) do
start = DataTypeUtils.start_time()
Logger.info("Generating report...")

resume_total_data(total_data)

if ConfigUseCase.get(:jmeter_report, true) do
tasks = [
Task.async(fn -> generate_jmeter_report(sorted_curve) end),
Task.async(fn -> generate_csv_report(sorted_curve) end)
]

Task.await_many(tasks, :infinity)
else
generate_csv_report(sorted_curve)
end

Logger.info("Report generated in #{DataTypeUtils.duration_time(start)}ms...")
end

def resume_total_data([steps_count, total_success_count, total_error_count, total_duration]) do
~s(
Total success requests count: #{total_success_count}
Total failed requests count: #{total_error_count}
Total steps: #{steps_count}
Total duration: #{total_duration} seconds\n)
|> IO.puts()
end

def generate_csv_report(sorted_curve) do
sorted_curve
|> Enum.map(
&"#{&1.concurrency}, #{&1.throughput}, #{&1.min_latency}, #{&1.avg_latency}, #{&1.max_latency}, #{&1.p90_latency}, #{&1.p95_latency}, #{&1.p99_latency}, #{&1.http_avg_latency}, #{&1.http_max_latency}, #{&1.success_count}, #{&1.redirect_count}, #{&1.bad_request_count}, #{&1.server_error_count}, #{&1.http_error_count}, #{&1.protocol_error_count}, #{&1.invocation_error_count}, #{&1.nil_conn_count}, #{&1.error_conn_count}, #{&1.error_count}, #{&1.total_count}"
)
|> export_report(
@path_csv_report,
"concurrency, throughput, min latency (ms), mean latency (ms), max latency (ms), p90 latency (ms), p95 latency (ms), p99 latency (ms), http_mean_latency, http_max_latency, 2xx requests, 3xx requests, 4xx requests, 5xx requests, http_errors, protocol_errors, invocation_errors, nil_connection_errors, connection_errors, total_errors, total_requests",
true
)
end

def generate_jmeter_report(sorted_curve) do
sorted_curve
|> Enum.reduce([], &Enum.concat(&1.requests, &2))
|> Enum.sort(fn req_a, req_b -> req_a.time_stamp < req_b.time_stamp end)
|> Enum.map(fn %RequestResult{
start: _start,
time_stamp: time_stamp,
label: label,
thread_name: thread_name,
grp_threads: grp_threads,
all_threads: all_threads,
url: url,
elapsed: elapsed,
response_code: response_code,
failure_message: failure_message,
sent_bytes: sent_bytes,
latency: latency,
idle_time: idle_time,
connect: connect,
received_bytes: received_bytes,
content_type: content_type
} ->
"#{time_stamp},#{elapsed},#{label},#{response_code},#{MetricsAnalyzerUseCase.response_for_code(response_code)},#{thread_name},#{content_type},#{MetricsAnalyzerUseCase.success?(response_code)},#{MetricsAnalyzerUseCase.with_failure(response_code, failure_message)},#{received_bytes},#{sent_bytes},#{grp_threads},#{all_threads},#{url},#{latency},#{idle_time},#{connect}"
end)
|> export_report(
@path_report_jmeter,
"timeStamp,elapsed,label,responseCode,responseMessage,threadName,dataType,success,failureMessage,bytes,sentBytes,grpThreads,allThreads,URL,Latency,IdleTime,Connect",
false
)
end

def export_report(data, file, header, print) do
start = DataTypeUtils.start_time()
# String.ends_with?/2 accepts a list of suffixes, so this covers every valid extension
if String.ends_with?(file, @valid_extensions) do
@report_exporter.save_csv(data, file, header, print)
else
{:error, "invalid report file extension"}
end

Logger.info("#{file} exported in #{DataTypeUtils.duration_time(start)}ms...")
end
end
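
For reference, init/2 takes the per-step results sorted by concurrency plus a positional four-element list. A hedged usage sketch (the numbers are illustrative; `sorted_curve` stands for the list of step-result structs whose fields generate_csv_report/1 reads):

    alias DistributedPerformanceAnalyzer.Domain.UseCase.ReportUseCase

    # total_data is [steps_count, total_success_count, total_error_count, total_duration]
    ReportUseCase.init(sorted_curve, [5, 4_980, 20, 300])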
110 changes: 9 additions & 101 deletions lib/domain/use_cases/reports/report_use_case.ex
@@ -1,112 +1,20 @@
defmodule DistributedPerformanceAnalyzer.Domain.UseCase.Reports.ReportUseCase do
@moduledoc """
Use case report
the report use case is called by all modules that need
to print information to outgoing files or logs
Provides functions for generating reports based on the results of each step
"""
use Task
require Logger

alias DistributedPerformanceAnalyzer.Config.AppConfig
alias DistributedPerformanceAnalyzer.Domain.Model.RequestResult

alias DistributedPerformanceAnalyzer.Domain.UseCase.{
Config.ConfigUseCase,
MetricsAnalyzerUseCase
}

alias DistributedPerformanceAnalyzer.Utils.DataTypeUtils

@report_exporter Application.compile_env!(AppConfig.get_app_name(), :report_exporter)
@valid_extensions ["csv"]
@path_report_jmeter "config/jmeter.csv"
@path_csv_report "config/result.csv"

def init(sorted_curve, total_data) do
start = DataTypeUtils.start_time()
Logger.info("Generating report...")

resume_total_data(total_data)

if ConfigUseCase.get(:jmeter_report, true) do
tasks = [
Task.async(fn -> generate_jmeter_report(sorted_curve) end),
Task.async(fn -> generate_csv_report(sorted_curve) end)
]

Task.await_many(tasks, :infinity)
else
generate_csv_report(sorted_curve)
end
# TODO: init, create tables (step results)

Logger.info("Report generated in #{DataTypeUtils.duration_time(start)}ms...")
def start_step_collector(id) do
# TODO: Create mnesia tables
# (if jmeter true, ordered set too)
end

def resume_total_data([steps_count, total_success_count, total_error_count, total_duration]) do
~s(
Total success requests count: #{total_success_count}
Total failed requests count: #{total_error_count}
Total steps: #{steps_count}
Total duration: #{total_duration} seconds\n)
|> IO.puts()
def consolidate_step(id) do
# TODO: Get all results from mnesia, save results to step results table
end

def generate_csv_report(sorted_curve) do
sorted_curve
|> Enum.map(
&"#{&1.concurrency}, #{&1.throughput}, #{&1.min_latency}, #{&1.avg_latency}, #{&1.max_latency}, #{&1.p90_latency}, #{&1.p95_latency}, #{&1.p99_latency}, #{&1.http_avg_latency}, #{&1.http_max_latency}, #{&1.success_count}, #{&1.redirect_count}, #{&1.bad_request_count}, #{&1.server_error_count}, #{&1.http_error_count}, #{&1.protocol_error_count}, #{&1.invocation_error_count}, #{&1.nil_conn_count}, #{&1.error_conn_count}, #{&1.error_count}, #{&1.total_count}"
)
|> export_report(
@path_csv_report,
"concurrency, throughput, min latency (ms), mean latency (ms), max latency (ms), p90 latency (ms), p95 latency (ms), p99 latency (ms), http_mean_latency, http_max_latency, 2xx requests, 3xx requests, 4xx requests, 5xx requests, http_errors, protocol_errors, invocation_errors, nil_connection_errors, connection_errors, total_errors, total_requests",
true
)
end

def generate_jmeter_report(sorted_curve) do
sorted_curve
|> Enum.reduce([], &Enum.concat(&1.requests, &2))
|> Enum.sort(fn req_a, req_b -> req_a.time_stamp < req_b.time_stamp end)
|> Enum.map(fn %RequestResult{
start: _start,
time_stamp: time_stamp,
label: label,
thread_name: thread_name,
grp_threads: grp_threads,
all_threads: all_threads,
url: url,
elapsed: elapsed,
response_code: response_code,
failure_message: failure_message,
sent_bytes: sent_bytes,
latency: latency,
idle_time: idle_time,
connect: connect,
received_bytes: received_bytes,
content_type: content_type
} ->
"#{time_stamp},#{elapsed},#{label},#{response_code},#{MetricsAnalyzerUseCase.response_for_code(response_code)},#{thread_name},#{content_type},#{MetricsAnalyzerUseCase.success?(response_code)},#{MetricsAnalyzerUseCase.with_failure(response_code, failure_message)},#{received_bytes},#{sent_bytes},#{grp_threads},#{all_threads},#{url},#{latency},#{idle_time},#{connect}"
end)
|> export_report(
@path_report_jmeter,
"timeStamp,elapsed,label,responseCode,responseMessage,threadName,dataType,success,failureMessage,bytes,sentBytes,grpThreads,allThreads,URL,Latency,IdleTime,Connect",
false
)
end

def export_report(data, file, header, print) do
start = DataTypeUtils.start_time()
report_format = String.ends_with?(file, Enum.at(@valid_extensions, 0))

case report_format do
true ->
@report_exporter.save_csv(data, file, header, print)

false ->
{:error, "invalid report extensions type"}
end

Logger.info("#{file} exported in #{DataTypeUtils.duration_time(start)}ms...")
def save_response(id, %Response{} = response) do
# TODO: Sort response type (success or error) and save response to mnesia (if jmeter true, parse and save)
end
end
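
The TODOs above point toward per-step Mnesia storage. As one possible reading, a minimal sketch of start_step_collector/1 (table name, attributes, copy type and the use of :ordered_set for the JMeter case are all assumptions, not the project's settled design; :mnesia must already be started):

    def start_step_collector(id) do
      table = String.to_atom("step_results_#{id}")

      # :ordered_set keeps rows sorted by the key (:timestamp), which the JMeter export needs
      {:atomic, :ok} =
        :mnesia.create_table(table,
          attributes: [:timestamp, :response],
          type: :ordered_set,
          ram_copies: [node()]
        )
    end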
35 changes: 35 additions & 0 deletions lib/infrastructure/driven_adapters/jmeter/parser.ex
@@ -0,0 +1,35 @@
defmodule DistributedPerformanceAnalyzer.Infrastructure.Adapters.JMeter.Parser do
@moduledoc """
Provides functions for parsing JMeter results
"""

alias DistributedPerformanceAnalyzer.Domain.Model.Config.Response
alias DistributedPerformanceAnalyzer.Domain.Model.User
alias DistributedPerformanceAnalyzer.Utils.DataTypeUtils

# Note: assumes a Parser behaviour module is defined elsewhere in the project
@behaviour Parser

# Template: "timeStamp,elapsed,label,responseCode,responseMessage,threadName,dataType,success,failureMessage,bytes,sentBytes,grpThreads,allThreads,URL,Latency,IdleTime,Connect",

@impl true
def parse(%Response{} = response) do
%{
status: status,
message: message,
headers: headers,
elapsed: elapsed,
timestamp: timestamp,
connection_time: connection_time,
content_type: content_type,
received_bytes: received_bytes
} = response

# TODO: complete info. Note that label, response_code, response_message,
# thread_name, data_type, success, failure_message, bytes, sent_bytes,
# grp_threads, all_threads, url, latency, idle_time and connect are not yet
# bound from `response`, so this clause will not compile until that mapping
# is implemented.
result =
~s|#{elapsed},#{label},#{response_code},#{sanitize(response_message)},#{thread_name},#{data_type},#{success},#{sanitize(failure_message)},#{bytes},#{sent_bytes},#{grp_threads},#{all_threads},#{url},#{latency},#{idle_time},#{connect}|

{:ok, {timestamp, result}}
end

defp sanitize(input), do: String.replace(input, ",", ";")
end
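
Since JMeter CSV rows are comma-delimited, sanitize/1 rewrites commas in free-text fields to semicolons. The helper is private, so here is the transformation shown via its String.replace/3 body:

    String.replace("text/plain, charset=utf-8", ",", ";")
    #=> "text/plain; charset=utf-8"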
