diff --git a/lib/resty/wasmx/proxy_wasm.lua b/lib/resty/wasmx/proxy_wasm.lua
index b771c2d70..523a449a3 100644
--- a/lib/resty/wasmx/proxy_wasm.lua
+++ b/lib/resty/wasmx/proxy_wasm.lua
@@ -160,7 +160,7 @@ function _M.load(c_plan)
     end
 
     if get_request() then
-        -- FFI GC: hold a reference tied to the request lifecycle so users
+        -- ffi_gc: hold a reference tied to the request lifecycle so users
         -- don't have to (like our test suite).
         if not ngx.ctx[_M] then
            ngx.ctx[_M] = {}
diff --git a/src/common/lua/ngx_wasm_lua.c b/src/common/lua/ngx_wasm_lua.c
index 49824ca61..3c87d8491 100644
--- a/src/common/lua/ngx_wasm_lua.c
+++ b/src/common/lua/ngx_wasm_lua.c
@@ -27,33 +27,33 @@ static const char *WASM_LUA_ENTRY_SCRIPT = ""
     "end\n";
 
 
-unsigned
-ngx_wasm_lua_running(ngx_wasm_subsys_env_t *env)
-{
-    ngx_wasm_lua_ctx_t  *entry_lctx = env->entry_lctx;
-
-    if (entry_lctx == NULL) {
-        return 0;
-    }
-
-    return !ngx_queue_empty(&entry_lctx->sub_ctxs);
-}
-
-
 static void
 destroy_thread(ngx_wasm_lua_ctx_t *lctx)
 {
     ngx_log_debug2(NGX_LOG_DEBUG_WASM, ngx_cycle->log, 0,
                    "wasm freeing lua%sthread (lctx: %p)",
-                   lctx->entry ? " entry " : " ", lctx);
+                   lctx->entry ? " entry " : " user ", lctx);
 
     ngx_pfree(lctx->pool, lctx->cache_key);
     ngx_pfree(lctx->pool, lctx);
 }
 
 
+static ngx_inline unsigned
+entry_thread_empty(ngx_wasm_subsys_env_t *env)
+{
+    ngx_wasm_lua_ctx_t  *entry_lctx = env->entry_lctx;
+
+    if (entry_lctx == NULL) {
+        return 1;
+    }
+
+    return ngx_queue_empty(&entry_lctx->sub_ctxs);
+}
+
+
 static void
-thread_cleanup_handler(void *data)
+entry_thread_cleanup_handler(void *data)
 {
     ngx_wasm_lua_ctx_t  *lctx = data;
 
@@ -87,9 +87,8 @@ entry_thread_start(ngx_wasm_subsys_env_t *env)
 
     if (entry_lctx == NULL) {
         /**
-         * In OpenResty, all user threads *must* be attached to a parent
-         * coroutine, so we create a "fake" one simulating a
-         * *_by_lua_block context.
+         * In OpenResty, all uthreads *must* be attached to a parent coroutine,
+         * so we create a "fake" one simulating a *_by_lua_block context.
          */
         dd("creating entry thread");
 
@@ -216,10 +215,6 @@ thread_init(ngx_wasm_lua_ctx_t *lctx)
 #endif
 
     if (!lctx->entry) {
-        /**
-         * Attach new user thread to entry_ctx for OpenResty's internals;
-         * entry_ctx can be a *_by_lua_block context or our fake entry_ctx.
-         */
         coctx->parent_co_ctx = &ctx->entry_co_ctx;
     }
 
@@ -266,7 +261,7 @@ thread_handle_rc(ngx_wasm_lua_ctx_t *lctx, ngx_int_t rc)
 
     } else {
         /* thread is dead, determine state by checking its return value placed
-         * on the stack by OpenResty's user thread implementation */
+         * on the stack by OpenResty's uthread implementation */
         ngx_wa_assert(lua_isboolean(lctx->co, 1));
 
         lctx->co_ctx->co_status = NGX_HTTP_LUA_CO_DEAD;
@@ -281,6 +276,28 @@ thread_handle_rc(ngx_wasm_lua_ctx_t *lctx, ngx_int_t rc)
     switch (rc) {
     case NGX_AGAIN:
         dd("wasm lua thread yield");
+        ngx_wa_assert(lctx->yielded);
+
+        if (lctx->entry) {
+            /* find the pending sleep timer to cancel at pool cleanup */
+            sentinel = ngx_event_timer_rbtree.sentinel;
+            root = ngx_event_timer_rbtree.root;
+
+            if (root != sentinel) {
+                for (node = ngx_rbtree_min(root, sentinel);
+                     node;
+                     node = ngx_rbtree_next(&ngx_event_timer_rbtree, node))
+                {
+                    ev = ngx_rbtree_data(node, ngx_event_t, timer);
+
+                    if (ev->data == entry_lctx->co_ctx) {
+                        entry_lctx->ev = ev;
+                        break;
+                    }
+                }
+            }
+        }
+
         ngx_wasm_yield(env);
         break;
     case NGX_OK:
@@ -288,12 +305,17 @@ thread_handle_rc(ngx_wasm_lua_ctx_t *lctx, ngx_int_t rc)
         ngx_wa_assert(thread_is_dead(lctx));
 
         if (!lctx->entry) {
+            lctx->finished = 1;
+            ngx_queue_remove(&lctx->q);
+
+            if (entry_thread_empty(env)) {
+                /* last yielding thread finished */
+                ngx_wasm_continue(env);
+            }
+
             if (lctx->success_handler) {
                 (void) lctx->success_handler(lctx);
             }
-
-            lctx->finished = 1;
-            ngx_queue_remove(&lctx->q);
         }
 
         break;
@@ -302,13 +324,12 @@ thread_handle_rc(ngx_wasm_lua_ctx_t *lctx, ngx_int_t rc)
         ngx_wasm_error(env);
 
         if (!lctx->entry) {
+            lctx->finished = 1;
+            ngx_queue_remove(&lctx->q);
+
             if (lctx->error_handler) {
                 (void) lctx->error_handler(lctx);
-                goto done;
             }
-
-            lctx->finished = 1;
-            ngx_queue_remove(&lctx->q);
         }
 
         break;
@@ -320,38 +341,14 @@ thread_handle_rc(ngx_wasm_lua_ctx_t *lctx, ngx_int_t rc)
 
         break;
     }
 
-    if (lctx->entry) {
-        /* find the pending sleep timer to cancel at pool cleanup */
-        sentinel = ngx_event_timer_rbtree.sentinel;
-        root = ngx_event_timer_rbtree.root;
-
-        if (root == sentinel) {
-            goto done;
-        }
-
-        for (node = ngx_rbtree_min(root, sentinel);
-             node;
-             node = ngx_rbtree_next(&ngx_event_timer_rbtree, node))
-        {
-            ev = ngx_rbtree_data(node, ngx_event_t, timer);
-
-            if (ev->data == entry_lctx->co_ctx) {
-                entry_lctx->ev = ev;
-                break;
-            }
-        }
-    }
+    ngx_wa_assert(rc == NGX_OK
+                  || rc == NGX_AGAIN
+                  || rc == NGX_ERROR);
 
     if (lctx->finished) {
         destroy_thread(lctx);
     }
 
-done:
-
-    ngx_wa_assert(rc == NGX_OK
-                  || rc == NGX_AGAIN
-                  || rc == NGX_ERROR);
-
     return rc;
 }
@@ -365,7 +362,7 @@ thread_resume(ngx_wasm_lua_ctx_t *lctx)
     ngx_log_debug4(NGX_LOG_DEBUG_WASM, lctx->log, 0,
                    "wasm resuming lua%sthread "
                    "(lctx: %p, L: %p, co: %p)",
-                   lctx->entry ? " entry " : " ",
+                   lctx->entry ? " entry " : " user ",
                    lctx, lctx->L, lctx->co);
 
     switch (env->subsys->kind) {
@@ -391,7 +388,7 @@ thread_resume(ngx_wasm_lua_ctx_t *lctx)
     }
 
     dd("lua%sthread resume handler rc: %ld",
-       lctx->entry ? " entry " : " ", rc);
+       lctx->entry ? " entry " : " user ", rc);
 
     return thread_handle_rc(lctx, rc);
 }
@@ -433,10 +430,14 @@ ngx_wasm_lua_thread_new(const char *tag, const char *src,
             goto error;
         }
 
-        cln->handler = thread_cleanup_handler;
+        cln->handler = entry_thread_cleanup_handler;
         cln->data = lctx;
     }
 
+    ngx_log_debug2(NGX_LOG_DEBUG_WASM, lctx->log, 0,
+                   "wasm creating new lua%sthread (lctx: %p)",
+                   lctx->entry ? " entry " : " user ", lctx);
+
     /* Lua VM + thread */
 
     switch (env->subsys->kind) {
@@ -536,7 +537,7 @@ ngx_wasm_lua_thread_run(ngx_wasm_lua_ctx_t *lctx)
 
     ngx_log_debug4(NGX_LOG_DEBUG_WASM, lctx->log, 0,
                    "wasm running lua%sthread (lctx: %p, L: %p, co: %p)",
-                   lctx->entry ? " entry " : " ",
+                   lctx->entry ? " entry " : " user ",
                    lctx, lctx->L, lctx->co);
 
     if (env->entry_lctx == NULL) {
@@ -582,12 +583,7 @@ ngx_wasm_lua_thread_run(ngx_wasm_lua_ctx_t *lctx)
 
     dd("lua_run_thread rc: %ld", rc);
 
-    if (rc == NGX_AGAIN
-        && !lctx->entry
-        && !lua_isboolean(lctx->co, 1))
-    {
-        /* thread is yielded, stash the co_ctx->data pointer and place our
-         * lctx instead, we will swap it back before resuming */
+    if (rc == NGX_AGAIN && !lua_isboolean(lctx->co, 1)) {
         lctx->yielded = 1;
     }
 
@@ -595,16 +591,7 @@ ngx_wasm_lua_thread_run(ngx_wasm_lua_ctx_t *lctx)
         ngx_queue_insert_tail(&entry_lctx->sub_ctxs, &lctx->q);
     }
 
-    rc = thread_handle_rc(lctx, rc);
-
-#if (NGX_DEBUG)
-    if (rc == NGX_OK) {
-        ngx_wa_assert(!lctx->yielded);
-        ngx_wa_assert(lctx->finished);
-    }
-#endif
-
-    return rc;
+    return thread_handle_rc(lctx, rc);
 }
 
 
@@ -627,8 +614,8 @@ ngx_wasm_lua_resume(ngx_wasm_subsys_env_t *env)
 
     dd("enter");
 
-    if (!ngx_wasm_lua_running(env)) {
-        goto done;
+    if (entry_thread_empty(env)) {
+        return NGX_OK;
     }
 
     ctx = ngx_http_get_module_ctx(env->ctx.rctx->r, ngx_http_lua_module);
@@ -672,14 +659,14 @@ ngx_wasm_lua_resume(ngx_wasm_subsys_env_t *env)
         ngx_wa_assert(coctx != &ctx->entry_co_ctx);
     }
 
-    dd("resuming%slctx: %p", lctx->entry ? " entry " : " ", lctx);
+    dd("resuming%slctx: %p", lctx->entry ? " entry " : " user ", lctx);
 
     rc = thread_resume(lctx);
     if (rc == NGX_ERROR) {
         return NGX_ERROR;
     }
 
-done:
+    dd("rc: %ld, state: %d", rc, env->state);
 
     switch (env->state) {
     case NGX_WASM_STATE_YIELD:
diff --git a/src/common/lua/ngx_wasm_lua.h b/src/common/lua/ngx_wasm_lua.h
index f7ed35285..71236b9de 100644
--- a/src/common/lua/ngx_wasm_lua.h
+++ b/src/common/lua/ngx_wasm_lua.h
@@ -27,7 +27,6 @@ struct ngx_wasm_lua_ctx_s {
     ngx_wasm_lua_handler_pt    success_handler;
     ngx_event_t               *ev;    /* entry lctx sleep event */
     void                      *data;
-    void                      *data2;
 
     const char                *code;
     u_char                    *cache_key;
@@ -55,7 +54,6 @@ struct ngx_wasm_lua_ctx_s {
 };
 
 
-unsigned ngx_wasm_lua_running(ngx_wasm_subsys_env_t *env);
 ngx_wasm_lua_ctx_t *ngx_wasm_lua_thread_new(const char *tag, const char *src,
     ngx_wasm_subsys_env_t *env, ngx_log_t *log, void *data,
     ngx_wasm_lua_handler_pt success_handler,
diff --git a/src/common/ngx_wasm_socket_tcp.c b/src/common/ngx_wasm_socket_tcp.c
index cd0f3cf00..3f69d9103 100644
--- a/src/common/ngx_wasm_socket_tcp.c
+++ b/src/common/ngx_wasm_socket_tcp.c
@@ -400,7 +400,6 @@ ngx_wasm_socket_tcp_connect(ngx_wasm_socket_tcp_t *sock)
                    "wasm tcp socket resolving...");
 
     rc = resolver_pt(rslv_ctx);
-
     if (rc != NGX_OK && rc != NGX_AGAIN) {
         ngx_log_debug0(NGX_LOG_DEBUG_WASM, sock->log, 0,
                        "wasm tcp socket resolver failed before query");
@@ -510,6 +509,10 @@ ngx_wasm_socket_resolve_handler(ngx_resolver_ctx_t *ctx)
 
     /* connect */
 
+    /* Note: the Lua bridge may have finished all threads and
+     * resumed continuation, but we still need to hold the yield */
+    ngx_wasm_yield(sock->env);
+
     ngx_wasm_socket_tcp_connect_peer(sock);
 
     return;
diff --git a/src/http/ngx_http_wasm_module.c b/src/http/ngx_http_wasm_module.c
index d20fc3c31..b9d64c4b0 100644
--- a/src/http/ngx_http_wasm_module.c
+++ b/src/http/ngx_http_wasm_module.c
@@ -771,6 +771,7 @@ ngx_http_wasm_rewrite_handler(ngx_http_request_t *r)
         goto done;
     }
 
+#if 0
     if (rctx->fake_request) {
         /* Our wasm lua entry thread sleep handler is resuming on
          * ngx_http_core_run_phases as we are in a tick or background phase; we
@@ -779,6 +780,7 @@ ngx_http_wasm_rewrite_handler(ngx_http_request_t *r)
         rc = NGX_DONE;
         goto done;
     }
+#endif
 #endif
 
     rc = ngx_wasm_ops_resume(&rctx->opctx, NGX_HTTP_REWRITE_PHASE);
@@ -1137,7 +1139,6 @@ ngx_http_wasm_wev_handler(ngx_http_request_t *r)
         rctx->in_wev = 1;
 
     } else if (rctx->in_wev) {
-        /* TODO */
         return;
     }
 
diff --git a/src/wasm/ngx_wasm_core_host.c b/src/wasm/ngx_wasm_core_host.c
index 3dfd93649..becd4b0ec 100644
--- a/src/wasm/ngx_wasm_core_host.c
+++ b/src/wasm/ngx_wasm_core_host.c
@@ -113,6 +113,35 @@ ngx_wasm_hfuncs_test_lua_error(ngx_wavm_instance_t *instance,
 
     return NGX_WAVM_OK;
 }
+
+
+ngx_int_t
+ngx_wasm_hfuncs_test_lua_sleep(ngx_wavm_instance_t *instance,
+    wasm_val_t args[], wasm_val_t rets[])
+{
+    ngx_wasm_lua_ctx_t       *lctx;
+#if (NGX_WASM_HTTP)
+    ngx_http_wasm_req_ctx_t  *rctx = instance->data;
+#endif
+    static const char        *SCRIPT_NAME = "sleep_lua_chunk";
+    static const char        *SCRIPT = "for i = 1, 2 do\n"
+                                       "    print('sleeping for 250ms')\n"
+                                       "    ngx.sleep(0.25)\n"
+                                       "end";
+
+    lctx = ngx_wasm_lua_thread_new(SCRIPT_NAME,
+                                   SCRIPT,
+                                   &rctx->env,
+                                   instance->log,
+                                   NULL, NULL, NULL);
+    if (lctx == NULL) {
+        return NGX_WAVM_ERROR;
+    }
+
+    (void) ngx_wasm_lua_thread_run(lctx);
+
+    return NGX_WAVM_OK;
+}
 #endif
 
 
@@ -138,6 +167,11 @@ static ngx_wavm_host_func_def_t  ngx_wasm_core_hfuncs[] = {
       &ngx_wasm_hfuncs_test_lua_error,
       NULL,
       NULL },
+
+    { ngx_string("ngx_wasm_lua_test_sleep"),
+      &ngx_wasm_hfuncs_test_lua_sleep,
+      NULL,
+      NULL },
 #endif
 
     ngx_wavm_hfunc_null
diff --git a/src/wasm/ngx_wasm_ops.c b/src/wasm/ngx_wasm_ops.c
index 65bac3ae9..4c4af5721 100644
--- a/src/wasm/ngx_wasm_ops.c
+++ b/src/wasm/ngx_wasm_ops.c
@@ -160,6 +160,7 @@ ngx_wasm_ops_plan_load(ngx_wasm_ops_plan_t *plan, ngx_log_t *log)
                 break;
             case NGX_WASM_OP_CALL:
                 op->handler = &ngx_wasm_op_call_handler;
+                op->conf.call.idx = j;
                 op->conf.call.funcref =
                     ngx_wavm_module_func_lookup(op->module,
                                                 &op->conf.call.func_name);
@@ -254,9 +255,9 @@ ngx_wasm_ops_resume(ngx_wasm_op_ctx_t *ctx, ngx_uint_t phaseidx)
     dd("enter (phaseidx: %ld, phase: \"%.*s\")",
       phaseidx, (int) phase->name.len, phase->name.data);
 
+#if 0
    /* check last phase */
 
-#if 0
     switch (phaseidx) {
     default:
         if (ctx->last_phase
@@ -299,6 +300,10 @@ ngx_wasm_ops_resume(ngx_wasm_op_ctx_t *ctx, ngx_uint_t phaseidx)
                   || rc == NGX_AGAIN
                   || rc == NGX_DONE);
 
+    if (phase != ctx->last_phase) {
+        ctx->cur_idx = 0;
+    }
+
     ctx->last_phase = phase;
 
     dd("ops resume: setting last phase to \"%.*s\" (%ld)",
@@ -318,11 +323,17 @@ ngx_wasm_op_call_handler(ngx_wasm_op_ctx_t *opctx, ngx_wasm_phase_t *phase,
     ngx_wasm_op_t *op)
 {
     ngx_int_t             rc;
+    ngx_uint_t            idx;
     ngx_wavm_instance_t  *instance;
     ngx_wavm_funcref_t   *funcref;
 
     ngx_wa_assert(op->code == NGX_WASM_OP_CALL);
 
+    idx = op->conf.call.idx;
+
+    dd("enter (op: %p, cur_idx: %ld, op idx: %ld)",
+       op, opctx->cur_idx, idx);
+
     funcref = op->conf.call.funcref;
     if (funcref == NULL) {
         ngx_wasm_log_error(NGX_LOG_ERR, opctx->log, 0,
@@ -332,6 +343,21 @@ ngx_wasm_op_call_handler(ngx_wasm_op_ctx_t *opctx, ngx_wasm_phase_t *phase,
         return NGX_ERROR;
     }
 
+    if (opctx->cur_idx > idx) {
+        return NGX_DECLINED;
+    }
+
+    if (opctx->cur_idx == idx) {
+        switch (opctx->env->state) {
+        case NGX_WASM_STATE_YIELD:
+            return NGX_AGAIN;
+        case NGX_WASM_STATE_ERROR:
+            return NGX_ERROR;
+        default:
+            break;
+        }
+    }
+
     ngx_log_debug3(NGX_LOG_DEBUG_WASM, opctx->log, 0,
                    "wasm ops calling \"%V.%V\" in \"%V\" phase",
                    &op->module->name, &funcref->name, &phase->name);
@@ -346,12 +372,16 @@ ngx_wasm_op_call_handler(ngx_wasm_op_ctx_t *opctx, ngx_wasm_phase_t *phase,
 
     ngx_wavm_instance_destroy(instance);
 
+    opctx->cur_idx++;
+
     if (rc == NGX_ERROR || rc == NGX_ABORT) {
         return NGX_ERROR;
     }
 
     ngx_wa_assert(rc == NGX_OK);
 
+    dd("ops state: %d", opctx->env->state);
+
     switch (opctx->env->state) {
     case NGX_WASM_STATE_YIELD:
         return NGX_AGAIN;
diff --git a/src/wasm/ngx_wasm_ops.h b/src/wasm/ngx_wasm_ops.h
index f26567741..a1f2b79e6 100644
--- a/src/wasm/ngx_wasm_ops.h
+++ b/src/wasm/ngx_wasm_ops.h
@@ -38,6 +38,7 @@ typedef struct {
 
 
 typedef struct {
+    ngx_uint_t             idx;
     ngx_str_t              func_name;
     ngx_wavm_funcref_t    *funcref;
 } ngx_wasm_op_call_t;
@@ -103,6 +104,7 @@ struct ngx_wasm_op_ctx_s {
     ngx_wasm_ops_plan_t       *plan;
     ngx_wasm_phase_t          *last_phase;
     void                      *data;
+    ngx_uint_t                 cur_idx;
 
     union {
         ngx_wasm_op_call_ctx_t    call;
diff --git a/t/03-proxy_wasm/hfuncs/133-proxy_dispatch_http_edge_cases.t b/t/03-proxy_wasm/hfuncs/133-proxy_dispatch_http_edge_cases.t
index 31d1bdbde..c8ac4634d 100644
--- a/t/03-proxy_wasm/hfuncs/133-proxy_dispatch_http_edge_cases.t
+++ b/t/03-proxy_wasm/hfuncs/133-proxy_dispatch_http_edge_cases.t
@@ -207,9 +207,9 @@ called 2 times
         echo fail;
     }
 --- response_headers_like
-pwm-call-1: dispatch 1
-pwm-call-2: dispatch 2
-pwm-call-3: dispatch 3
+pwm-call-0: dispatch 1
+pwm-call-1: dispatch 2
+pwm-call-2: dispatch 3
 --- response_body
 called 3 times
 --- no_error_log
diff --git a/t/04-openresty/lua-bridge/001-sanity.t b/t/04-openresty/lua-bridge/001-sanity.t
index 7a3631fa3..16c7e3db3 100644
--- a/t/04-openresty/lua-bridge/001-sanity.t
+++ b/t/04-openresty/lua-bridge/001-sanity.t
@@ -68,7 +68,23 @@ qr/\[info\] .*? arg: argument/
 
 
 
-=== TEST 4: Lua bridge - Lua chunk can error after yielding
+=== TEST 4: Lua bridge - Lua chunk can yield
+--- config
+    location /t {
+        wasm_call rewrite ngx_lua_tests test_lua_sleep;
+        return 200;
+    }
+--- error_log eval
+[
+    qr/\[notice\] .*? sleeping for 250ms/,
+    qr/\[notice\] .*? sleeping for 250ms/
+]
+--- no_error_log
+[crit]
+
+
+
+=== TEST 5: Lua bridge - Lua chunk can error after yielding
 --- valgrind
 --- load_nginx_modules: ngx_http_echo_module
 --- http_config
diff --git a/t/04-openresty/lua-bridge/002-proxy_wasm_lua_resolver_sanity.t b/t/04-openresty/lua-bridge/002-proxy_wasm_lua_resolver_sanity.t
index 4c4578c01..01e0cc698 100644
--- a/t/04-openresty/lua-bridge/002-proxy_wasm_lua_resolver_sanity.t
+++ b/t/04-openresty/lua-bridge/002-proxy_wasm_lua_resolver_sanity.t
@@ -554,7 +554,7 @@ qq{
         echo ok;
     }
 --- response_headers_like
-pwm-call-id: ((1, 0)|(0, 1))
+pwm-call-id: \d, \d
 --- response_body
 ok
 --- no_error_log
@@ -593,7 +593,7 @@ qq{
         echo ok;
     }
 --- response_headers_like
-pwm-call-id: ((1, 0)|(0, 1))
+pwm-call-id: \d, \d
 --- response_body
 ok
 --- no_error_log
@@ -618,7 +618,7 @@ ok
         echo ok;
     }
 --- response_headers_like
-pwm-call-id: ((1, 0)|(0, 1))
+pwm-call-id: \d, \d
 --- response_body
 ok
 --- no_error_log
@@ -671,7 +671,7 @@ TODO: also test with no_postpone
         echo ok;
     }
 --- response_headers_like
-pwm-call-id: ((1, 0)|(0, 1))
+pwm-call-id: \d, \d
 --- response_body
 ok
 --- no_error_log
@@ -696,7 +696,7 @@ ok
         echo ok;
     }
 --- response_headers_like
-pwm-call-id: ((1, 0)|(0, 1))
+pwm-call-id: \d, \d
 --- response_body
 ok
 --- no_error_log
@@ -748,7 +748,74 @@ TODO: also test with no_postpone
         echo ok;
     }
 --- response_headers_like
-pwm-call-id: ((1, 0)|(0, 1))
+pwm-call-id: \d, \d
+--- response_body
+ok
+--- no_error_log
+[error]
+[crit]
+
+
+
+=== TEST 21: proxy_wasm - proxy_wasm_lua_resolver, subsequent multiple calls in rewrite
+--- timeout eval: $::ExtTimeout
+--- load_nginx_modules: ngx_http_echo_module
+--- wasm_modules: hostcalls
+--- config
+    location /t {
+        proxy_wasm_lua_resolver on;
+        proxy_wasm hostcalls 'on=request_headers \
+                              test=/t/dispatch_http_call \
+                              hosts=httpbin.org,example.com \
+                              path=/headers \
+                              ncalls=2';
+        proxy_wasm hostcalls 'on=request_body \
+                              test=/t/dispatch_http_call \
+                              hosts=httpbin.org,example.com \
+                              path=/headers \
+                              ncalls=2';
+        echo ok;
+    }
+--- request
+GET /t
+
+Hello world
+--- response_headers_like
+pwm-call-id: \d, \d, \d, \d
+--- response_body
+ok
+--- no_error_log
+[error]
+[crit]
+
+
+
+=== TEST 22: proxy_wasm - proxy_wasm_lua_resolver, subsequent multiple calls in access
+--- timeout eval: $::ExtTimeout
+--- load_nginx_modules: ngx_http_echo_module
+--- wasm_modules: hostcalls
+--- config
+    location /t {
+        proxy_wasm_lua_resolver on;
+        proxy_wasm_request_headers_in_access on;
+        proxy_wasm hostcalls 'on=request_headers \
+                              test=/t/dispatch_http_call \
+                              hosts=httpbin.org,example.com \
+                              path=/headers \
+                              ncalls=2';
+        proxy_wasm hostcalls 'on=request_body \
+                              test=/t/dispatch_http_call \
+                              hosts=httpbin.org,example.com \
+                              path=/headers \
+                              ncalls=2';
+        echo ok;
+    }
+--- request
+GET /t
+
+Hello world
+--- response_headers_like
+pwm-call-id: \d, \d, \d, \d
 --- response_body
 ok
 --- no_error_log
diff --git a/t/lib/ngx-lua-tests/src/lua_bridge.rs b/t/lib/ngx-lua-tests/src/lua_bridge.rs
index 7d883bacf..a99a1e702 100644
--- a/t/lib/ngx-lua-tests/src/lua_bridge.rs
+++ b/t/lib/ngx-lua-tests/src/lua_bridge.rs
@@ -2,6 +2,7 @@ extern "C" {
     fn ngx_wasm_lua_test_argsrets();
     fn ngx_wasm_lua_test_bad_chunk();
     fn ngx_wasm_lua_test_error();
+    fn ngx_wasm_lua_test_sleep();
 }
 
 #[no_mangle]
@@ -18,3 +19,8 @@ pub fn test_bad_lua_chunk() {
 pub fn test_lua_error() {
     unsafe { ngx_wasm_lua_test_error() }
 }
+
+#[no_mangle]
+pub fn test_lua_sleep() {
+    unsafe { ngx_wasm_lua_test_sleep() }
+}
diff --git a/t/lib/proxy-wasm-tests/hostcalls/src/filter.rs b/t/lib/proxy-wasm-tests/hostcalls/src/filter.rs
index 879f2102e..9f771c645 100644
--- a/t/lib/proxy-wasm-tests/hostcalls/src/filter.rs
+++ b/t/lib/proxy-wasm-tests/hostcalls/src/filter.rs
@@ -25,8 +25,6 @@ impl Context for TestHttp {
 
         self.add_http_response_header("pwm-call-id", token_id.to_string().as_str());
 
-        info!("op: {}", op);
-
         match op {
             "trap" => panic!("trap!"),
             "log_request_properties" => {
@@ -94,8 +92,8 @@ impl Context for TestHttp {
             .map_or(1, |v| v.parse().expect("bad n_sync_calls value"));
 
         if self.n_sync_calls < again {
-            self.send_http_dispatch(self.n_sync_calls);
             self.n_sync_calls += 1;
+            self.send_http_dispatch(self.n_sync_calls - 1);
             return;
         }