diff --git a/Cargo.lock b/Cargo.lock index c2077db5b..74755ac4c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -168,6 +168,14 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" +[[package]] +name = "context-checks" +version = "0.1.0" +dependencies = [ + "log", + "proxy-wasm 0.2.1", +] + [[package]] name = "core-foundation-sys" version = "0.8.4" @@ -589,6 +597,15 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +[[package]] +name = "root-context-checks" +version = "0.1.0" +dependencies = [ + "context-checks", + "log", + "proxy-wasm 0.2.1", +] + [[package]] name = "rust-sdk-ver-zero-one" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index 0aa52f776..82b36c1f7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,6 +11,8 @@ members = [ "t/lib/proxy-wasm-tests/rust-sdk-ver-zero-one", "t/lib/proxy-wasm-tests/benchmarks", "t/lib/proxy-wasm-tests/instance-lifecycle", + "t/lib/proxy-wasm-tests/contexts", + "t/lib/proxy-wasm-tests/contexts/crates/root", ] exclude = [ "lib/ngx-wasm-rs", diff --git a/src/common/proxy_wasm/ngx_proxy_wasm.c b/src/common/proxy_wasm/ngx_proxy_wasm.c index f3c1d9e81..5d49f3462 100644 --- a/src/common/proxy_wasm/ngx_proxy_wasm.c +++ b/src/common/proxy_wasm/ngx_proxy_wasm.c @@ -501,6 +501,7 @@ action2rc(ngx_proxy_wasm_ctx_t *pwctx, case NGX_HTTP_REWRITE_PHASE: case NGX_HTTP_ACCESS_PHASE: case NGX_HTTP_CONTENT_PHASE: + case NGX_WASM_BACKGROUND_PHASE: ngx_log_debug6(NGX_LOG_DEBUG_WASM, pwctx->log, 0, "proxy_wasm pausing in \"%V\" phase " "(filter: %l/%l, step: %d, action: %d, " @@ -683,6 +684,7 @@ ngx_proxy_wasm_run_step(ngx_proxy_wasm_exec_t *pwexec, ngx_int_t rc; ngx_proxy_wasm_err_e ecode; ngx_proxy_wasm_action_e action = NGX_PROXY_WASM_ACTION_CONTINUE; + ngx_proxy_wasm_exec_t *out; ngx_proxy_wasm_ctx_t *pwctx = 
pwexec->parent; ngx_proxy_wasm_filter_t *filter = pwexec->filter; #if (NGX_DEBUG) @@ -700,10 +702,12 @@ ngx_proxy_wasm_run_step(ngx_proxy_wasm_exec_t *pwexec, if (pwexec->ictx == NULL || pwexec->ictx->instance->trapped) { #endif ecode = ngx_proxy_wasm_create_context(filter, pwctx, pwexec->id, - pwexec, NULL); + pwexec, &out); if (ecode != NGX_PROXY_WASM_ERR_NONE) { return ecode; } + + pwexec = out; #if 1 } #endif @@ -842,6 +846,9 @@ get_instance(ngx_proxy_wasm_filter_t *filter, dd("get instance in store: %p", store); + /* store initialized */ + ngx_wasm_assert(store->pool); + for (q = ngx_queue_head(&store->busy); q != ngx_queue_sentinel(&store->busy); q = ngx_queue_next(q)) @@ -988,9 +995,6 @@ ngx_proxy_wasm_create_context(ngx_proxy_wasm_filter_t *filter, ictx = in->ictx; } else { - /* store initialized */ - ngx_wasm_assert(store->pool); - ictx = get_instance(filter, store, log); if (ictx == NULL) { goto error; @@ -1187,10 +1191,15 @@ ngx_proxy_wasm_create_context(ngx_proxy_wasm_filter_t *filter, pwexec->started = 1; } - } - if (out) { - *out = pwexec; + if (out) { + *out = pwexec; + } + + } else { + if (out) { + *out = rexec; + } } return NGX_PROXY_WASM_ERR_NONE; @@ -1232,10 +1241,8 @@ ngx_proxy_wasm_on_done(ngx_proxy_wasm_exec_t *pwexec) { ngx_wavm_instance_t *instance; ngx_proxy_wasm_filter_t *filter = pwexec->filter; -#if 0 #ifdef NGX_WASM_HTTP ngx_http_proxy_wasm_dispatch_t *call; -#endif #endif instance = ngx_proxy_wasm_pwexec2instance(pwexec); @@ -1244,25 +1251,13 @@ ngx_proxy_wasm_on_done(ngx_proxy_wasm_exec_t *pwexec) "filter %l/%l finalizing context", pwexec->index + 1, pwexec->parent->nfilters); -#if 0 - /** - * Currently, dispatches are synchronous hence will always - * have been executed when on_done is invoked. 
- */ #ifdef NGX_WASM_HTTP call = pwexec->call; if (call) { - ngx_log_debug3(NGX_LOG_DEBUG_WASM, pwexec->log, 0, - "proxy_wasm \"%V\" filter (%l/%l) " - "cancelling HTTP dispatch", - pwexec->filter->name, pwexec->index + 1, - pwexec->parent->nfilters); - ngx_http_proxy_wasm_dispatch_destroy(call); pwexec->call = NULL; } -#endif #endif (void) ngx_wavm_instance_call_funcref(instance, diff --git a/src/common/proxy_wasm/ngx_proxy_wasm_host.c b/src/common/proxy_wasm/ngx_proxy_wasm_host.c index a5be0a1a3..a3b53ceef 100644 --- a/src/common/proxy_wasm/ngx_proxy_wasm_host.c +++ b/src/common/proxy_wasm/ngx_proxy_wasm_host.c @@ -26,21 +26,44 @@ static ngx_int_t ngx_proxy_wasm_hfuncs_no_http(ngx_wavm_instance_t *instance, static ngx_chain_t * ngx_proxy_wasm_get_buffer_helper(ngx_wavm_instance_t *instance, - ngx_proxy_wasm_buffer_type_e buf_type, unsigned *none) + ngx_proxy_wasm_buffer_type_e buf_type, unsigned *none, char **trapmsg) { #ifdef NGX_WASM_HTTP ngx_chain_t *cl; ngx_http_wasm_req_ctx_t *rctx; ngx_http_request_t *r; + ngx_proxy_wasm_ctx_t *pwctx; +#endif + ngx_proxy_wasm_exec_t *pwexec; - rctx = ngx_http_proxy_wasm_get_rctx(instance); - r = rctx->r; + pwexec = ngx_proxy_wasm_instance2pwexec(instance); +#ifdef NGX_WASM_HTTP + pwctx = pwexec->parent; #endif switch (buf_type) { #ifdef NGX_WASM_HTTP case NGX_PROXY_WASM_BUFFER_HTTP_REQUEST_BODY: + + /* check context */ + + switch (pwctx->step) { + case NGX_PROXY_WASM_STEP_REQ_HEADERS: + case NGX_PROXY_WASM_STEP_REQ_BODY: + case NGX_PROXY_WASM_STEP_LOG: + break; + default: + *trapmsg = "can only get request body during " + "\"on_request_body\", \"on_log\""; + return NULL; + } + + /* get */ + + rctx = ngx_http_proxy_wasm_get_rctx(instance); + r = rctx->r; + if (r->request_body == NULL || r->request_body->bufs == NULL) { @@ -61,6 +84,23 @@ ngx_proxy_wasm_get_buffer_helper(ngx_wavm_instance_t *instance, return r->request_body->bufs; case NGX_PROXY_WASM_BUFFER_HTTP_RESPONSE_BODY: + + /* check context */ + + switch (pwctx->step) 
{ + case NGX_PROXY_WASM_STEP_RESP_BODY: + case NGX_PROXY_WASM_STEP_LOG: + break; + default: + *trapmsg = "can only get response body during " + "\"on_response_body\", \"on_log\""; + return NULL; + } + + /* get */ + + rctx = ngx_http_proxy_wasm_get_rctx(instance); + cl = rctx->resp_chunk; if (cl == NULL) { /* no body */ @@ -74,9 +114,21 @@ ngx_proxy_wasm_get_buffer_helper(ngx_wavm_instance_t *instance, { ngx_wasm_http_reader_ctx_t *reader; ngx_http_proxy_wasm_dispatch_t *call; - ngx_proxy_wasm_exec_t *pwexec; - pwexec = ngx_proxy_wasm_instance2pwexec(instance); + /* check context */ + + switch (pwctx->step) { + case NGX_PROXY_WASM_STEP_DISPATCH_RESPONSE: + case NGX_PROXY_WASM_STEP_LOG: + break; + default: + *trapmsg = "can only get dispatch response body during " + "\"on_http_dispatch_response\""; + return NULL; + } + + /* get */ + call = pwexec->call; if (call == NULL) { return NULL; @@ -210,8 +262,9 @@ ngx_proxy_wasm_hfuncs_set_tick_period(ngx_wavm_instance_t *instance, ngx_wasm_assert(rexec->root_id == NGX_PROXY_WASM_ROOT_CTX_ID); if (rexec->root_id != NGX_PROXY_WASM_ROOT_CTX_ID) { - /* ignore */ - return ngx_proxy_wasm_result_ok(rets); + return ngx_proxy_wasm_result_trap(rexec, + "can only set tick_period in " + "root context", rets, NGX_WAVM_OK); } if (ngx_exiting) { @@ -254,6 +307,7 @@ ngx_proxy_wasm_hfuncs_get_buffer(ngx_wavm_instance_t *instance, { size_t offset, max_len, len, chunk_len; unsigned none = 0; + char *trapmsg = NULL; u_char *start = NULL; ngx_chain_t *cl = NULL; ngx_buf_t *buf; @@ -282,8 +336,14 @@ ngx_proxy_wasm_hfuncs_get_buffer(ngx_wavm_instance_t *instance, break; default: - cl = ngx_proxy_wasm_get_buffer_helper(instance, buf_type, &none); + cl = ngx_proxy_wasm_get_buffer_helper(instance, buf_type, &none, + &trapmsg); if (cl == NULL) { + if (trapmsg) { + return ngx_proxy_wasm_result_trap(pwexec, trapmsg, + rets, NGX_WAVM_BAD_USAGE); + } + if (none) { return ngx_proxy_wasm_result_notfound(rets); } @@ -376,9 +436,10 @@ 
ngx_proxy_wasm_hfuncs_set_buffer(ngx_wavm_instance_t *instance, ngx_wavm_ptr_t *buf_data; ngx_http_wasm_req_ctx_t *rctx; ngx_proxy_wasm_exec_t *pwexec; + ngx_proxy_wasm_ctx_t *pwctx; - rctx = ngx_http_proxy_wasm_get_rctx(instance); pwexec = ngx_proxy_wasm_instance2pwexec(instance); + pwctx = pwexec->parent; offset = args[1].of.i32; max = args[2].of.i32; @@ -400,6 +461,26 @@ ngx_proxy_wasm_hfuncs_set_buffer(ngx_wavm_instance_t *instance, #ifdef NGX_WASM_HTTP case NGX_PROXY_WASM_BUFFER_HTTP_REQUEST_BODY: + + /* check context */ + + switch (pwctx->step) { + case NGX_PROXY_WASM_STEP_REQ_HEADERS: + case NGX_PROXY_WASM_STEP_REQ_BODY: + break; + default: + return ngx_proxy_wasm_result_trap(pwexec, + "can only set request body " + "during \"on_request_body\"", + rets, NGX_WAVM_BAD_USAGE); + } + + /* set */ + + rctx = ngx_http_proxy_wasm_get_rctx(instance); + + ngx_wasm_assert(rctx); + if (offset == 0 && max == 0 && buf_len > 0) { rc = ngx_http_wasm_prepend_req_body(rctx, &s); @@ -407,15 +488,29 @@ ngx_proxy_wasm_hfuncs_set_buffer(ngx_wavm_instance_t *instance, rc = ngx_http_wasm_set_req_body(rctx, &s, offset, max); } - if (rc == NGX_ABORT) { + ngx_wasm_assert(rc != NGX_ABORT); + break; + + case NGX_PROXY_WASM_BUFFER_HTTP_RESPONSE_BODY: + + /* check context */ + + switch (pwctx->step) { + case NGX_PROXY_WASM_STEP_RESP_BODY: + break; + default: return ngx_proxy_wasm_result_trap(pwexec, - "cannot set request body", + "can only set response body " + "during \"on_response_body\"", rets, NGX_WAVM_BAD_USAGE); } - break; + /* set */ + + rctx = ngx_http_proxy_wasm_get_rctx(instance); + + ngx_wasm_assert(rctx); - case NGX_PROXY_WASM_BUFFER_HTTP_RESPONSE_BODY: if (offset == 0 && max == 0 && buf_len > 0) { rc = ngx_http_wasm_prepend_resp_body(rctx, &s); @@ -423,12 +518,7 @@ ngx_proxy_wasm_hfuncs_set_buffer(ngx_wavm_instance_t *instance, rc = ngx_http_wasm_set_resp_body(rctx, &s, offset, max); } - if (rc == NGX_ABORT) { - return ngx_proxy_wasm_result_trap(pwexec, - "cannot set response 
body", - rets, NGX_WAVM_BAD_USAGE); - } - + ngx_wasm_assert(rc != NGX_ABORT); break; #endif diff --git a/src/http/proxy_wasm/ngx_http_proxy_wasm_dispatch.c b/src/http/proxy_wasm/ngx_http_proxy_wasm_dispatch.c index 6b5c37243..41d967a87 100644 --- a/src/http/proxy_wasm/ngx_http_proxy_wasm_dispatch.c +++ b/src/http/proxy_wasm/ngx_http_proxy_wasm_dispatch.c @@ -155,7 +155,6 @@ ngx_http_proxy_wasm_dispatch(ngx_proxy_wasm_exec_t *pwexec, /* rctx or fake request */ if (rctx == NULL) { - ngx_wasm_assert(pwexec->in_tick); ngx_wasm_assert(pwexec->root_id == NGX_PROXY_WASM_ROOT_CTX_ID); ngx_wasm_assert(pwexec->parent->id == NGX_PROXY_WASM_ROOT_CTX_ID); @@ -825,6 +824,11 @@ ngx_http_proxy_wasm_dispatch_resume_handler(ngx_wasm_socket_tcp_t *sock) /* save step */ step = pwexec->parent->step; +#ifdef NGX_WASM_HTTP + pwexec->parent->phase = ngx_wasm_phase_lookup(&ngx_http_wasm_subsystem, + NGX_WASM_BACKGROUND_PHASE); +#endif + ecode = ngx_proxy_wasm_run_step(pwexec, NGX_PROXY_WASM_STEP_DISPATCH_RESPONSE); if (ecode != NGX_PROXY_WASM_ERR_NONE) { @@ -838,6 +842,12 @@ ngx_http_proxy_wasm_dispatch_resume_handler(ngx_wasm_socket_tcp_t *sock) /* resume current step if unfinished */ rc = ngx_proxy_wasm_resume(pwexec->parent, pwexec->parent->phase, step); + if (rc == NGX_AGAIN) { + goto done; + + } else if (rc != NGX_OK) { + goto error; + } } else { /* another call was setup during the callback */ diff --git a/t/03-proxy_wasm/hfuncs/114-proxy_set_http_request_body.t b/t/03-proxy_wasm/hfuncs/114-proxy_set_http_request_body.t index 1c31f21b1..8d63af23a 100644 --- a/t/03-proxy_wasm/hfuncs/114-proxy_set_http_request_body.t +++ b/t/03-proxy_wasm/hfuncs/114-proxy_set_http_request_body.t @@ -254,10 +254,11 @@ HelloWorld qr/from_request_headers from_request_body [\s\S]+500 Internal Server Error/ ---- grep_error_log eval: qr/(.*?cannot set|\[.*?failed resuming).*/ +--- grep_error_log eval: qr/(.*?can only set|\[.*?failed resuming).*/ --- grep_error_log_out eval -qr/.*?host trap \(bad usage\): 
cannot set request body.* +qr/.*?host trap \(bad usage\): can only set request body during.* \[info\] .*? \*\d+ .*? filter chain failed resuming: previous error \(instance trapped\).*? subrequest: "\/response_headers".* +\[info\] .*? \*\d+ .*? filter chain failed resuming: previous error \(instance trapped\).*? request: "GET \/t.* \z/ --- no_error_log [alert] diff --git a/t/03-proxy_wasm/hfuncs/116-proxy_set_http_response_body.t b/t/03-proxy_wasm/hfuncs/116-proxy_set_http_response_body.t index 15ce88076..a49616de1 100644 --- a/t/03-proxy_wasm/hfuncs/116-proxy_set_http_response_body.t +++ b/t/03-proxy_wasm/hfuncs/116-proxy_set_http_response_body.t @@ -133,7 +133,7 @@ on_response_body, 0 bytes, eof: true.*/ location /t { echo_subrequest GET /subrequest; - proxy_wasm hostcalls 'on=response_headers \ + proxy_wasm hostcalls 'on=log \ test=/t/log/response_body'; } --- response_headers @@ -344,13 +344,13 @@ Transfer-Encoding: chunked Content-Length: --- response_body eval qr/500 Internal Server Error/ ---- grep_error_log eval: qr/(.*?cannot set|\[.*?failed resuming).*/ +--- grep_error_log eval: qr/(.*?can only set|\[.*?failed resuming).*/ --- grep_error_log_out eval -qr/.*?host trap \(bad usage\): cannot set response body.* +qr/.*?host trap \(bad usage\): can only set response body during.* \[info\] .*? \*\d+ .*? filter chain failed resuming: previous error \(instance trapped\).*? subrequest: "\/request_headers".* -.*?host trap \(bad usage\): cannot set response body.* +.*?host trap \(bad usage\): can only set response body during.* \[info\] .*? \*\d+ .*? filter chain failed resuming: previous error \(instance trapped\).*? 
subrequest: "\/response_headers".* -.*?host trap \(bad usage\): cannot set response body.*/ +.*?host trap \(bad usage\): can only set response body during.*/ --- no_error_log [alert] [stderr] diff --git a/t/03-proxy_wasm/hfuncs/root-context/001-set_tick_period.t b/t/03-proxy_wasm/hfuncs/root-context/001-set_tick_period.t new file mode 100644 index 000000000..2bee4dc7e --- /dev/null +++ b/t/03-proxy_wasm/hfuncs/root-context/001-set_tick_period.t @@ -0,0 +1,85 @@ +# vim:set ft= ts=4 sts=4 sw=4 et fdm=marker: + +use strict; +use lib '.'; +use t::TestWasm; + +skip_hup(); +skip_no_debug(); +skip_valgrind(); + +plan tests => repeat_each() * (blocks() * 4); + +run_tests(); + +__DATA__ + +=== TEST 1: proxy_wasm contexts - set_tick_period on_vm_start +--- main_config + wasm { + module root_context_checks $TEST_NGINX_CRATES_DIR/root_context_checks.wasm 'set_tick_period'; + } +--- config + location /t { + proxy_wasm root_context_checks; + return 200; + } +--- ignore_response_body +--- error_log +set_tick_period status: 0 +--- no_error_log +[error] +[crit] + + + +=== TEST 2: proxy_wasm contexts - set_tick_period on_configure +--- wasm_modules: root_context_checks +--- config + location /t { + proxy_wasm root_context_checks 'on_configure=set_tick_period'; + return 200; + } +--- ignore_response_body +--- error_log +set_tick_period status: 0 +--- no_error_log +[error] +[crit] + + + +=== TEST 3: proxy_wasm contexts - set_tick_period on_tick +--- wasm_modules: root_context_checks +--- config + location /t { + proxy_wasm root_context_checks 'on_tick=set_tick_period'; + return 200; + } +--- ignore_response_body +--- error_log +[error] +tick_period already set +--- no_error_log +[crit] + + + +=== TEST 4: proxy_wasm contexts - set_tick_period on_http_dispatch_response +--- wasm_modules: root_context_checks +--- config + location /t { + proxy_wasm root_context_checks 'on_http_dispatch_response=set_tick_period \ + host=127.0.0.1:$TEST_NGINX_SERVER_PORT'; + return 200; + } + + location 
/dispatch { + return 200; + } +--- ignore_response_body +--- error_log +set_tick_period status: 0 +--- no_error_log +[error] +[crit] diff --git a/t/03-proxy_wasm/hfuncs/root-context/002-get_buffer_bytes.t b/t/03-proxy_wasm/hfuncs/root-context/002-get_buffer_bytes.t new file mode 100644 index 000000000..4153fbb86 --- /dev/null +++ b/t/03-proxy_wasm/hfuncs/root-context/002-get_buffer_bytes.t @@ -0,0 +1,175 @@ +# vim:set ft= ts=4 sts=4 sw=4 et fdm=marker: + +use strict; +use lib '.'; +use t::TestWasm; + +skip_hup(); +skip_no_debug(); +skip_valgrind(); + +plan tests => repeat_each() * (blocks() * 4); + +run_tests(); + +__DATA__ + +=== TEST 1: proxy_wasm contexts - get_buffer_bytes on_vm_start (request_body_buffer) +'daemon off' must be set to check exit_code is 2 +Valgrind mode already writes 'daemon off' +HUP mode does not catch the worker exit_code +--- skip_eval: 4: $ENV{TEST_NGINX_USE_HUP} == 1 +--- main_config eval +qq{ + wasm { + module root_context_checks $ENV{TEST_NGINX_CRATES_DIR}/root_context_checks.wasm 'get_request_body_buffer'; + } +}.($ENV{TEST_NGINX_USE_VALGRIND} ? '' : 'daemon off;') +--- config + location /t { + proxy_wasm root_context_checks; + return 200; + } +--- ignore_response_body +--- error_log +[error] +[emerg] +can only get request body during "on_request_body", "on_log" +--- no_error_log +--- must_die: 2 + + + +=== TEST 2: proxy_wasm contexts - get_buffer_bytes on_vm_start (response_body_buffer) +'daemon off' must be set to check exit_code is 2 +Valgrind mode already writes 'daemon off' +HUP mode does not catch the worker exit_code +--- skip_eval: 4: $ENV{TEST_NGINX_USE_HUP} == 1 +--- main_config eval +qq{ + wasm { + module root_context_checks $ENV{TEST_NGINX_CRATES_DIR}/root_context_checks.wasm 'get_response_body_buffer'; + } +}.($ENV{TEST_NGINX_USE_VALGRIND} ? 
'' : 'daemon off;') +--- config + location /t { + proxy_wasm root_context_checks; + return 200; + } +--- ignore_response_body +--- error_log +[error] +[emerg] +can only get response body during "on_response_body", "on_log" +--- no_error_log +--- must_die: 2 + + + +=== TEST 3: proxy_wasm contexts - get_buffer_bytes on_configure (request_body_buffer) +'daemon off' must be set to check exit_code is 2 +Valgrind mode already writes 'daemon off' +HUP mode does not catch the worker exit_code +--- skip_eval: 4: $ENV{TEST_NGINX_USE_HUP} == 1 +--- main_config eval +qq{ + wasm { + module root_context_checks $ENV{TEST_NGINX_CRATES_DIR}/root_context_checks.wasm; + } +}.($ENV{TEST_NGINX_USE_VALGRIND} ? '' : 'daemon off;') +--- config + location /t { + proxy_wasm root_context_checks 'on_configure=get_request_body_buffer'; + return 200; + } + +--- ignore_response_body +--- error_log +[error] +[emerg] +can only get request body during "on_request_body", "on_log" +--- must_die: 2 + + + +=== TEST 4: proxy_wasm contexts - get_buffer_bytes on_configure (response_body_buffer) +'daemon off' must be set to check exit_code is 2 +Valgrind mode already writes 'daemon off' +HUP mode does not catch the worker exit_code +--- skip_eval: 4: $ENV{TEST_NGINX_USE_HUP} == 1 +--- main_config eval +qq{ + wasm { + module root_context_checks $ENV{TEST_NGINX_CRATES_DIR}/root_context_checks.wasm; + } +}.($ENV{TEST_NGINX_USE_VALGRIND} ? 
'' : 'daemon off;') +--- config + location /t { + proxy_wasm root_context_checks 'on_configure=get_response_body_buffer'; + return 200; + } +--- ignore_response_body +--- error_log +[error] +[emerg] +can only get response body during "on_response_body", "on_log" +--- must_die: 2 + + + +=== TEST 5: proxy_wasm contexts - get_buffer_bytes on_tick +--- wasm_modules: root_context_checks +--- config + location /t { + proxy_wasm root_context_checks 'on_tick=get_request_body_buffer'; + proxy_wasm root_context_checks 'on_tick=get_response_body_buffer'; + proxy_wasm root_context_checks 'on_tick=get_dispatch_response_body_buffer'; + return 200; + } +--- ignore_response_body +--- error_log +can only get request body during "on_request_body", "on_log" +can only get response body during "on_response_body", "on_log" +can only get dispatch response body during "on_http_dispatch_response" + + + +=== TEST 6: proxy_wasm contexts - get_buffer_bytes on_http_dispatch_response (request_body_buffer) +--- wasm_modules: root_context_checks +--- config + location /t { + proxy_wasm root_context_checks 'on_http_dispatch_response=get_request_body_buffer \ + host=127.0.0.1:$TEST_NGINX_SERVER_PORT'; + return 200; + } + + location /dispatch { + return 200; + } +--- ignore_response_body +--- error_log +[error] +can only get request body during "on_request_body", "on_log" +--- no_error_log +[crit] + + + +=== TEST 7: proxy_wasm contexts - get_buffer_bytes on_http_dispatch_response (response_body_buffer) +--- wasm_modules: root_context_checks +--- config + location /t { + proxy_wasm root_context_checks 'on_http_dispatch_response=get_response_body_buffer \ + host=127.0.0.1:$TEST_NGINX_SERVER_PORT'; + return 200; + } + + location /dispatch { + return 200; + } +--- ignore_response_body +--- error_log +[error] +can only get response body during "on_response_body", "on_log" +--- no_error_log +[crit] diff --git a/t/03-proxy_wasm/hfuncs/root-context/003-set_buffer_bytes.t 
b/t/03-proxy_wasm/hfuncs/root-context/003-set_buffer_bytes.t new file mode 100644 index 000000000..564206e90 --- /dev/null +++ b/t/03-proxy_wasm/hfuncs/root-context/003-set_buffer_bytes.t @@ -0,0 +1,174 @@ +# vim:set ft= ts=4 sts=4 sw=4 et fdm=marker: + +use strict; +use lib '.'; +use t::TestWasm; + +skip_hup(); +skip_no_debug(); +skip_valgrind(); + +plan tests => repeat_each() * (blocks() * 4); + +run_tests(); + +__DATA__ + +=== TEST 1: proxy_wasm contexts - set_buffer_bytes on_vm_start (request_body_buffer) +'daemon off' must be set to check exit_code is 2 +Valgrind mode already writes 'daemon off' +HUP mode does not catch the worker exit_code +--- skip_eval: 4: $ENV{TEST_NGINX_USE_HUP} == 1 +--- main_config eval +qq{ + wasm { + module root_context_checks $ENV{TEST_NGINX_CRATES_DIR}/root_context_checks.wasm 'set_request_body_buffer'; + } +}.($ENV{TEST_NGINX_USE_VALGRIND} ? '' : 'daemon off;') +--- config + location /t { + proxy_wasm root_context_checks; + return 200; + } +--- ignore_response_body +--- error_log +[error] +[emerg] +can only set request body during "on_request_body" +--- no_error_log +--- must_die: 2 + + + +=== TEST 2: proxy_wasm contexts - set_buffer_bytes on_vm_start (response_body_buffer) +'daemon off' must be set to check exit_code is 2 +Valgrind mode already writes 'daemon off' +HUP mode does not catch the worker exit_code +--- skip_eval: 4: $ENV{TEST_NGINX_USE_HUP} == 1 +--- main_config eval +qq{ + wasm { + module root_context_checks $ENV{TEST_NGINX_CRATES_DIR}/root_context_checks.wasm 'set_response_body_buffer'; + } +}.($ENV{TEST_NGINX_USE_VALGRIND} ? 
'' : 'daemon off;') +--- config + location /t { + proxy_wasm root_context_checks; + return 200; + } +--- ignore_response_body +--- error_log +[error] +[emerg] +can only set response body during "on_response_body" +--- no_error_log +--- must_die: 2 + + + +=== TEST 3: proxy_wasm contexts - set_buffer_bytes on_configure (request_body_buffer) +'daemon off' must be set to check exit_code is 2 +Valgrind mode already writes 'daemon off' +HUP mode does not catch the worker exit_code +--- skip_eval: 4: $ENV{TEST_NGINX_USE_HUP} == 1 +--- main_config eval +qq{ + wasm { + module root_context_checks $ENV{TEST_NGINX_CRATES_DIR}/root_context_checks.wasm; + } +}.($ENV{TEST_NGINX_USE_VALGRIND} ? '' : 'daemon off;') +--- config + location /t { + proxy_wasm root_context_checks 'on_configure=set_request_body_buffer'; + return 200; + } + +--- ignore_response_body +--- error_log +[error] +[emerg] +can only set request body during "on_request_body" +--- must_die: 2 + + + +=== TEST 4: proxy_wasm contexts - set_buffer_bytes on_configure (response_body_buffer) +'daemon off' must be set to check exit_code is 2 +Valgrind mode already writes 'daemon off' +HUP mode does not catch the worker exit_code +--- skip_eval: 4: $ENV{TEST_NGINX_USE_HUP} == 1 +--- main_config eval +qq{ + wasm { + module root_context_checks $ENV{TEST_NGINX_CRATES_DIR}/root_context_checks.wasm; + } +}.($ENV{TEST_NGINX_USE_VALGRIND} ? 
'' : 'daemon off;') +--- config + location /t { + proxy_wasm root_context_checks 'on_configure=set_response_body_buffer'; + return 200; + } +--- ignore_response_body +--- error_log +[error] +[emerg] +can only set response body during "on_response_body" +--- must_die: 2 + + + +=== TEST 5: proxy_wasm contexts - set_buffer_bytes on_tick +--- wasm_modules: root_context_checks +--- config + location /t { + proxy_wasm root_context_checks 'on_tick=set_request_body_buffer'; + proxy_wasm root_context_checks 'on_tick=set_response_body_buffer'; + return 200; + } +--- ignore_response_body +--- error_log +[error] +can only set request body during "on_request_body" +can only set response body during "on_response_body" + + + +=== TEST 6: proxy_wasm contexts - set_buffer_bytes on_http_dispatch_response (request_body_buffer) +--- wasm_modules: root_context_checks +--- config + location /t { + proxy_wasm root_context_checks 'on_http_dispatch_response=set_request_body_buffer \ + host=127.0.0.1:$TEST_NGINX_SERVER_PORT'; + return 200; + } + + location /dispatch { + return 200; + } +--- ignore_response_body +--- error_log +[error] +can only set request body during "on_request_body" +--- no_error_log +[crit] + + + +=== TEST 7: proxy_wasm contexts - set_buffer_bytes on_http_dispatch_response (response_body_buffer) +--- wasm_modules: root_context_checks +--- config + location /t { + proxy_wasm root_context_checks 'on_http_dispatch_response=set_response_body_buffer \ + host=127.0.0.1:$TEST_NGINX_SERVER_PORT'; + return 200; + } + + location /dispatch { + return 200; + } +--- ignore_response_body +--- error_log +[error] +can only set response body during "on_response_body" +--- no_error_log +[crit] diff --git a/t/04-openresty/ffi/200-proxy_wasm_and_lua_sanity.t b/t/04-openresty/ffi/200-proxy_wasm_and_lua_sanity.t index 161bbed3c..60a18aaf4 100644 --- a/t/04-openresty/ffi/200-proxy_wasm_and_lua_sanity.t +++ b/t/04-openresty/ffi/200-proxy_wasm_and_lua_sanity.t @@ -245,7 +245,7 @@ 
qr/on_http_call_response \(id: \d+, status: 200, headers: 5, body_bytes: \d+, tr --- grep_error_log eval: qr/\*\d+.*?\[proxy-wasm\].*?(resuming|freeing).*/ --- grep_error_log_out eval qr/\A\*\d+ .*? filter 1\/1 resuming "on_request_headers" step in "rewrite" phase[^#*]* -\*\d+ .*? filter 1\/1 resuming "on_dispatch_response" step in "access" phase[^#*]* +\*\d+ .*? filter 1\/1 resuming "on_dispatch_response" step in "background" phase[^#*]* \*\d+ .*? filter 1\/1 resuming "on_response_headers" step in "header_filter" phase[^#*]* \*\d+ .*? filter 1\/1 resuming "on_response_body" step in "body_filter" phase[^#*]* \*\d+ .*? filter 1\/1 resuming "on_log" step in "log" phase[^#*]* @@ -308,7 +308,7 @@ qr/on_http_call_response \(id: \d+, status: 200, headers: 5, body_bytes: \d+, tr --- grep_error_log eval: qr/\*\d+.*?\[proxy-wasm\].*?(resuming|freeing).*/ --- grep_error_log_out eval qr/\A\*\d+ .*? filter 1\/1 resuming "on_request_headers" step in "rewrite" phase[^#*]* -\*\d+ .*? filter 1\/1 resuming "on_dispatch_response" step in "access" phase[^#*]* +\*\d+ .*? filter 1\/1 resuming "on_dispatch_response" step in "background" phase[^#*]* \*\d+ .*? filter 1\/1 resuming "on_response_headers" step in "header_filter" phase[^#*]* \*\d+ .*? filter 1\/1 resuming "on_response_body" step in "body_filter" phase[^#*]* \*\d+ .*? filter 1\/1 resuming "on_response_body" step in "body_filter" phase[^#*]* @@ -377,7 +377,7 @@ qr/\A\*\d+ .*? filter 1\/1 resuming "on_request_headers" step in "rewrite" phase --- grep_error_log eval: qr/\*\d+.*?\[proxy-wasm\].*?(resuming|freeing).*/ --- grep_error_log_out eval qr/\A\*\d+ .*? filter 1\/1 resuming "on_request_headers" step in "rewrite" phase[^#*]* -\*\d+ .*? filter 1\/1 resuming "on_dispatch_response" step in "access" phase[^#*]* +\*\d+ .*? filter 1\/1 resuming "on_dispatch_response" step in "background" phase[^#*]* \*\d+ .*? filter chain failed resuming: previous error \(dispatch failure\)[^#*]* \*\d+ .*? 
filter freeing context #\d+ \(1\/1\)[^#*]*\Z/ diff --git a/t/05-others/010-client_connection_abort.t b/t/05-others/010-client_connection_abort.t index d49637f5a..71111b674 100644 --- a/t/05-others/010-client_connection_abort.t +++ b/t/05-others/010-client_connection_abort.t @@ -4,12 +4,9 @@ use strict; use lib '.'; use t::TestWasm; -if ($ENV{TEST_NGINX_USE_HUP}) { - plan(skip_all => "unavailable in HUP mode"); +skip_hup(); -} else { - plan tests => repeat_each() * (blocks() * 4); -} +plan tests => repeat_each() * (blocks() * 4); run_tests(); diff --git a/t/05-others/011-upstream_connection_abort.t b/t/05-others/011-upstream_connection_abort.t index 86703b0c7..1a17fd98a 100644 --- a/t/05-others/011-upstream_connection_abort.t +++ b/t/05-others/011-upstream_connection_abort.t @@ -4,14 +4,10 @@ use strict; use lib '.'; use t::TestWasm; +skip_hup(); skip_valgrind(); -if ($ENV{TEST_NGINX_USE_HUP}) { - plan(skip_all => "unavailable in HUP mode"); - -} else { - plan tests => repeat_each() * (blocks() * 4); -} +plan tests => repeat_each() * (blocks() * 4); run_tests(); diff --git a/t/TestWasm.pm b/t/TestWasm.pm index 14638bec4..9493a58e2 100644 --- a/t/TestWasm.pm +++ b/t/TestWasm.pm @@ -31,6 +31,7 @@ our @EXPORT = qw( skip_no_debug skip_no_tinygo skip_valgrind + skip_hup ); $ENV{TEST_NGINX_USE_HUP} ||= 0; @@ -64,6 +65,12 @@ sub skip_valgrind (@) { } } +sub skip_hup { + if ($ENV{TEST_NGINX_USE_HUP} == 1) { + plan(skip_all => "skipped in HUP mode"); + } +} + sub skip_no_debug { if ($nginxV !~ m/--with-debug/) { plan(skip_all => "--with-debug required (NGX_BUILD_DEBUG=1)"); diff --git a/t/lib/proxy-wasm-tests/contexts/Cargo.toml b/t/lib/proxy-wasm-tests/contexts/Cargo.toml new file mode 100644 index 000000000..a1f3f5c7a --- /dev/null +++ b/t/lib/proxy-wasm-tests/contexts/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "context-checks" +version = "0.1.0" +authors = ["Thibault Charbonnier "] +edition = "2018" + +[dependencies] +proxy-wasm = "0.2" +log = "0.4" diff --git 
a/t/lib/proxy-wasm-tests/contexts/crates/root/Cargo.toml b/t/lib/proxy-wasm-tests/contexts/crates/root/Cargo.toml
new file mode 100644
index 000000000..beed79e32
--- /dev/null
+++ b/t/lib/proxy-wasm-tests/contexts/crates/root/Cargo.toml
@@ -0,0 +1,13 @@
+[package]
+name = "root-context-checks"
+version = "0.1.0"
+authors = ["Thibault Charbonnier <thibaultcha@me.com>"]
+edition = "2018"
+
+[lib]
+crate-type = ["cdylib"]
+
+[dependencies]
+proxy-wasm = "0.2"
+log = "0.4"
+context-checks = { path = "../.." }
diff --git a/t/lib/proxy-wasm-tests/contexts/crates/root/src/lib.rs b/t/lib/proxy-wasm-tests/contexts/crates/root/src/lib.rs
new file mode 100644
index 000000000..f905b1214
--- /dev/null
+++ b/t/lib/proxy-wasm-tests/contexts/crates/root/src/lib.rs
@@ -0,0 +1,136 @@
+use context_checks::*;
+use log::*;
+use proxy_wasm::{traits::*, types::*};
+use std::collections::HashMap;
+use std::time::Duration;
+
+pub struct TestRoot {
+    pub config: HashMap<String, String>,
+}
+
+pub trait TestContext {
+    fn get_config(&self, name: &str) -> Option<&str>;
+    fn check_host_function(&self, name: &str);
+}
+
+impl Context for dyn TestContext {}
+impl TestContext for TestRoot {
+    fn get_config(&self, name: &str) -> Option<&str> {
+        self.config.get(name).map(|s| s.as_str())
+    }
+
+    fn check_host_function(&self, name: &str) {
+        match name {
+            "set_tick_period" => set_tick_period(),
+            "set_request_body_buffer" => set_request_body_buffer(),
+            "set_response_body_buffer" => set_response_body_buffer(),
+            "get_request_body_buffer" => get_request_body_buffer(),
+            "get_response_body_buffer" => get_response_body_buffer(),
+            "get_dispatch_response_body_buffer" => get_dispatch_response_body_buffer(),
+            _ => (),
+        }
+    }
+}
+
+struct TestContextHttp;
+impl Context for TestContextHttp {}
+impl HttpContext for TestContextHttp {}
+
+proxy_wasm::main! {{
+    proxy_wasm::set_log_level(LogLevel::Trace);
+    proxy_wasm::set_root_context(|_| -> Box<dyn RootContext> {
+        Box::new(TestRoot {
+            config: HashMap::new(),
+        })
+    });
+}}
+
+impl RootContext for TestRoot {
+    fn on_vm_start(&mut self, _: usize) -> bool {
+        info!("on_vm_start");
+
+        if let Some(config) = self.get_vm_configuration() {
+            match std::str::from_utf8(&config) {
+                Ok(text) => {
+                    info!("vm config: {}", text);
+                    self.check_host_function(text);
+                }
+                _ => info!("cannot parse vm config"),
+            }
+        }
+
+        true
+    }
+
+    fn on_configure(&mut self, _: usize) -> bool {
+        info!("on_configure");
+
+        if let Some(config_bytes) = self.get_plugin_configuration() {
+            let config_str = String::from_utf8(config_bytes).unwrap();
+            self.config = config_str
+                .split_whitespace()
+                .filter_map(|s| s.split_once('='))
+                .map(|(k, v)| (k.to_string(), v.to_string()))
+                .collect();
+        } else {
+            self.config = HashMap::new();
+        }
+
+        if self.get_config("on_tick").is_some() {
+            self.set_tick_period(Duration::from_millis(100));
+        }
+
+        if let Some(name) = self.get_config("on_configure") {
+            self.check_host_function(name)
+        }
+
+        if self.get_config("on_http_dispatch_response").is_some() {
+            let _ = self.dispatch_http_call(
+                self.get_config("host").unwrap_or(""),
+                vec![(":path", "/dispatch"), (":method", "GET")],
+                None,
+                vec![],
+                Duration::from_secs(0),
+            );
+        }
+
+        true
+    }
+
+    fn on_tick(&mut self) {
+        info!("on_tick",);
+
+        if let Some(name) = self.get_config("on_tick") {
+            self.check_host_function(name)
+        }
+    }
+
+    fn get_type(&self) -> Option<ContextType> {
+        Some(ContextType::HttpContext)
+    }
+
+    fn create_http_context(&self, _: u32) -> Option<Box<dyn HttpContext>> {
+        Some(Box::new(TestContextHttp {}))
+    }
+}
+
+impl Context for TestRoot {
+    fn on_http_call_response(
+        &mut self,
+        token_id: u32,
+        nheaders: usize,
+        body_size: usize,
+        ntrailers: usize,
+    ) {
+        let status = self.get_http_call_response_header(":status");
+
+        info!(
+            "on_root_http_call_response (id: {}, status: {}, headers: {}, body_bytes: {}, trailers: {})",
+            token_id, status.unwrap_or("".to_string()), nheaders, body_size, ntrailers
+        );
+
+        if let Some(name) = self.get_config("on_http_dispatch_response") {
+            self.check_host_function(name)
+        }
+    }
+}
diff --git a/t/lib/proxy-wasm-tests/contexts/src/lib.rs b/t/lib/proxy-wasm-tests/contexts/src/lib.rs
new file mode 100644
index 000000000..08e061cda
--- /dev/null
+++ b/t/lib/proxy-wasm-tests/contexts/src/lib.rs
@@ -0,0 +1,123 @@
+use log::*;
+use proxy_wasm::types::*;
+use std::ptr::null_mut;
+
+#[allow(improper_ctypes)]
+extern "C" {
+    fn proxy_set_tick_period_milliseconds(period: u32) -> Status;
+}
+
+pub fn set_tick_period() {
+    unsafe {
+        let status = proxy_set_tick_period_milliseconds(10000);
+        info!("set_tick_period status: {}", status as u32);
+    }
+}
+
+#[allow(improper_ctypes)]
+extern "C" {
+    fn proxy_set_buffer_bytes(
+        buffer_type: u32,
+        start: usize,
+        size: usize,
+        buffer_data: *const u8,
+        buffer_size: usize,
+    ) -> Status;
+}
+
+pub fn set_request_body_buffer() {
+    let v = String::default();
+
+    unsafe {
+        let status = proxy_set_buffer_bytes(
+            BufferType::HttpRequestBody as u32,
+            0,
+            0,
+            v.as_ptr(),
+            v.len(),
+        );
+
+        info!("set_request_body_buffer status: {}", status as u32);
+    }
+}
+
+pub fn set_response_body_buffer() {
+    let v = String::default();
+
+    unsafe {
+        let status = proxy_set_buffer_bytes(
+            BufferType::HttpResponseBody as u32,
+            0,
+            0,
+            v.as_ptr(),
+            v.len(),
+        );
+
+        info!("set_response_body_buffer status: {}", status as u32);
+    }
+}
+
+#[allow(improper_ctypes)]
+extern "C" {
+    fn proxy_get_buffer_bytes(
+        buffer_type: u32,
+        start: usize,
+        max_size: usize,
+        return_buffer_data: *mut *mut u8,
+        return_buffer_size: *mut usize,
+    ) -> Status;
+}
+
+pub fn get_request_body_buffer() {
+    let mut return_data: *mut u8 = null_mut();
+    let mut return_size: usize = 0;
+
+    unsafe {
+        let status = proxy_get_buffer_bytes(
+            BufferType::HttpRequestBody as u32,
+            0,
+            0,
+            &mut return_data,
+            &mut return_size,
+        );
+
+        info!("get_request_body_buffer status: {}", status as u32);
+    }
+}
+
+pub fn get_response_body_buffer() {
+    let mut return_data: *mut u8 = null_mut();
+    let mut return_size: usize = 0;
+
+    unsafe {
+        let status = proxy_get_buffer_bytes(
+            BufferType::HttpResponseBody as u32,
+            0,
+            0,
+            &mut return_data,
+            &mut return_size,
+        );
+
+        info!("get_response_body_buffer status: {}", status as u32);
+    }
+}
+
+pub fn get_dispatch_response_body_buffer() {
+    let mut return_data: *mut u8 = null_mut();
+    let mut return_size: usize = 0;
+
+    unsafe {
+        let status = proxy_get_buffer_bytes(
+            BufferType::HttpCallResponseBody as u32,
+            0,
+            0,
+            &mut return_data,
+            &mut return_size,
+        );
+
+        info!(
+            "get_dispatch_response_body_buffer status: {}",
+            status as u32
+        );
+    }
+}