bug: fix dataset leaking workspace directories
ibuildthecloud committed Nov 6, 2024
1 parent 1c2d1de commit 2e26a10
Showing 6 changed files with 47 additions and 5 deletions.
8 changes: 5 additions & 3 deletions pkg/engine/http.go
@@ -5,9 +5,11 @@ import (
 	"encoding/json"
 	"fmt"
 	"io"
+	"maps"
 	"net/http"
 	"net/url"
 	"os"
+	"slices"
 	"strings"
 
 	"github.com/gptscript-ai/gptscript/pkg/types"
@@ -75,9 +77,9 @@ func (e *Engine) runHTTP(ctx context.Context, prg *types.Program, tool types.Too
 		return nil, err
 	}
 
-	for _, env := range e.Env {
-		if strings.HasPrefix(env, "GPTSCRIPT_WORKSPACE_") {
-			req.Header.Add("X-GPTScript-Env", env)
+	for _, k := range slices.Sorted(maps.Keys(envMap)) {
+		if strings.HasPrefix(k, "GPTSCRIPT_WORKSPACE_") {
+			req.Header.Add("X-GPTScript-Env", k+"="+envMap[k])
 		}
 	}
 
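The behavioral change in this file is how workspace environment variables are forwarded to HTTP tools: instead of scanning the raw `e.Env` slice, the loop walks an `envMap` and sorts its keys, so the `X-GPTScript-Env` headers are emitted as `KEY=value` pairs in a deterministic order. A minimal sketch of the pattern, assuming an illustrative `envMap` (the real one is built earlier in `runHTTP`):

```go
package main

import (
	"fmt"
	"maps"
	"net/http"
	"slices"
	"strings"
)

func main() {
	// Illustrative values; in the engine this map is derived from the tool's environment.
	envMap := map[string]string{
		"GPTSCRIPT_WORKSPACE_DIR": "/tmp/ws-123",
		"GPTSCRIPT_WORKSPACE_ID":  "ws-123",
		"PATH":                    "/usr/bin",
	}

	req, _ := http.NewRequest(http.MethodGet, "http://example.invalid", nil)

	// slices.Sorted(maps.Keys(...)) (Go 1.23+) yields the keys in sorted order,
	// so header order no longer depends on Go's randomized map iteration.
	for _, k := range slices.Sorted(maps.Keys(envMap)) {
		if strings.HasPrefix(k, "GPTSCRIPT_WORKSPACE_") {
			req.Header.Add("X-GPTScript-Env", k+"="+envMap[k])
		}
	}

	fmt.Println(req.Header.Values("X-GPTScript-Env"))
	// [GPTSCRIPT_WORKSPACE_DIR=/tmp/ws-123 GPTSCRIPT_WORKSPACE_ID=ws-123]
}
```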
25 changes: 24 additions & 1 deletion pkg/loader/loader.go
@@ -29,6 +29,18 @@ import (
 
 const CacheTimeout = time.Hour
 
+var Remap = map[string]string{}
+
+func init() {
+	remap := os.Getenv("GPTSCRIPT_TOOL_REMAP")
+	for _, pair := range strings.Split(remap, ",") {
+		k, v, ok := strings.Cut(pair, "=")
+		if ok {
+			Remap[k] = v
+		}
+	}
+}
+
 type source struct {
 	// Content The content of the source
 	Content []byte
@@ -68,8 +80,19 @@ func openFile(path string) (io.ReadCloser, bool, error) {
 }
 
 func loadLocal(base *source, name string) (*source, bool, error) {
+	var remapped bool
+	if !strings.HasPrefix(name, ".") {
+		for k, v := range Remap {
+			if strings.HasPrefix(name, k) {
+				name = v + name[len(k):]
+				remapped = true
+				break
+			}
+		}
+	}
+
 	filePath := name
-	if !filepath.IsAbs(name) {
+	if !remapped && !filepath.IsAbs(name) {
 		// We want to keep all strings in / format, and only convert to platform specific when reading
 		// This is why we use path instead of filepath.
 		filePath = path.Join(base.Path, name)
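Alongside the dataset fix, the loader gains a `GPTSCRIPT_TOOL_REMAP` override: comma-separated `prefix=replacement` pairs that rewrite non-relative tool references before resolution, with a remapped name also skipping the usual join against the base path. A standalone sketch of the parsing and rewriting; `parseRemap`, `remapName`, and the example remap value are illustrative, not names from the source:

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// parseRemap mirrors the loader's init(): split on commas, then on the first "=".
func parseRemap(raw string) map[string]string {
	remap := map[string]string{}
	for _, pair := range strings.Split(raw, ",") {
		if k, v, ok := strings.Cut(pair, "="); ok {
			remap[k] = v
		}
	}
	return remap
}

// remapName applies the first matching prefix; relative names (starting with ".")
// are never remapped, matching the guard in loadLocal.
func remapName(name string, remap map[string]string) (string, bool) {
	if strings.HasPrefix(name, ".") {
		return name, false
	}
	for k, v := range remap {
		if strings.HasPrefix(name, k) {
			return v + name[len(k):], true
		}
	}
	return name, false
}

func main() {
	// Illustrative: point a remote tool reference at a local checkout during development.
	os.Setenv("GPTSCRIPT_TOOL_REMAP", "github.com/example/tool=/home/me/tool")

	remap := parseRemap(os.Getenv("GPTSCRIPT_TOOL_REMAP"))
	name, remapped := remapName("github.com/example/tool/sub/tool.gpt", remap)
	fmt.Println(name, remapped) // /home/me/tool/sub/tool.gpt true
}
```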
2 changes: 1 addition & 1 deletion pkg/openai/client.go
@@ -617,7 +617,7 @@ func (c *Client) call(ctx context.Context, request openai.ChatCompletionRequest,
 		}
 		partialMessage = appendMessage(partialMessage, response)
 		if partial != nil {
-			if time.Since(start) > 500*time.Millisecond {
+			if time.Since(start) > 100*time.Millisecond {
 				last = last[:0]
 				partial <- types.CompletionStatus{
 					CompletionID: transactionID,
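The OpenAI client change is a single constant: partial completion updates are now flushed to the subscriber once at least 100ms (previously 500ms) have elapsed, so streamed progress reaches callers more often. A rough sketch of that time-based throttle, using illustrative names rather than the client's actual fields:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	updates := make(chan string, 1)
	done := make(chan struct{})
	go func() {
		for u := range updates {
			fmt.Println("partial:", u)
		}
		close(done)
	}()

	start := time.Now()
	for i := 0; i < 30; i++ {
		time.Sleep(10 * time.Millisecond) // stand-in for receiving one streamed chunk
		// Forward a partial update only if enough time has passed since the last one.
		if time.Since(start) > 100*time.Millisecond {
			updates <- fmt.Sprintf("chunk %d", i)
			start = time.Now()
		}
	}
	close(updates)
	<-done
}
```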
3 changes: 3 additions & 0 deletions pkg/runner/input.go
@@ -15,6 +15,9 @@ func (r *Runner) handleInput(callCtx engine.Context, monitor Monitor, env []stri
 	}
 
 	for _, inputToolRef := range inputToolRefs {
+		if callCtx.Program.ToolSet[inputToolRef.ToolID].IsNoop() {
+			continue
+		}
 		data := map[string]any{}
 		_ = json.Unmarshal([]byte(input), &data)
 		data["input"] = input
3 changes: 3 additions & 0 deletions pkg/runner/output.go
@@ -76,6 +76,9 @@ func (r *Runner) handleOutput(callCtx engine.Context, monitor Monitor, env []str
 	}
 
 	for _, outputToolRef := range outputToolRefs {
+		if callCtx.Program.ToolSet[outputToolRef.ToolID].IsNoop() {
+			continue
+		}
 		inputData, err := argsForFilters(callCtx.Program, outputToolRef, startState, map[string]any{
 			"output":       output,
 			"continuation": continuation,
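Both runner loops (input filters above, output filters here) gain the same guard: a filter tool whose resolved tool is a no-op is skipped instead of being invoked with marshalled arguments. `IsNoop` is taken from the diff, but the simplified definition below (a tool with no instructions) is only an assumption to keep the sketch self-contained:

```go
package main

import "fmt"

// Tool is a trimmed-down stand-in for the gptscript tool type.
type Tool struct {
	Name         string
	Instructions string
}

// IsNoop assumes "no instructions means nothing to run"; the real definition
// lives in the gptscript types package and may differ.
func (t Tool) IsNoop() bool {
	return t.Instructions == ""
}

func main() {
	toolSet := map[string]Tool{
		"filter-redact":      {Name: "redact", Instructions: "#!/bin/sh\nsed s/secret/XXX/g"},
		"filter-placeholder": {Name: "placeholder"}, // no instructions: nothing to do
	}

	for id, tool := range toolSet {
		if tool.IsNoop() {
			// Skipping avoids a pointless invocation for a filter that does nothing.
			fmt.Println("skipping no-op filter:", id)
			continue
		}
		fmt.Println("running filter:", id, "->", tool.Name)
	}
}
```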
11 changes: 11 additions & 0 deletions pkg/sdkserver/datasets.go
@@ -4,6 +4,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"net/http"
+	"strings"
 
 	gcontext "github.com/gptscript-ai/gptscript/pkg/context"
 	"github.com/gptscript-ai/gptscript/pkg/gptscript"
@@ -39,6 +40,12 @@ func (r datasetRequest) opts(o gptscript.Options) gptscript.Options {
 		Monitor: o.Monitor,
 		Runner:  o.Runner,
 	}
+	for _, e := range r.Env {
+		v, ok := strings.CutPrefix(e, "GPTSCRIPT_WORKSPACE_ID=")
+		if ok {
+			opts.Workspace = v
+		}
+	}
 	return opts
 }
 
@@ -61,6 +68,7 @@ func (s *server) listDatasets(w http.ResponseWriter, r *http.Request) {
 		writeError(logger, w, http.StatusInternalServerError, fmt.Errorf("failed to initialize gptscript: %w", err))
 		return
 	}
+	defer g.Close(false)
 
 	prg, err := loader.Program(r.Context(), s.getDatasetTool(req), "List Datasets", loader.Options{
 		Cache: g.Cache,
@@ -118,6 +126,7 @@ func (s *server) addDatasetElements(w http.ResponseWriter, r *http.Request) {
 		writeError(logger, w, http.StatusInternalServerError, fmt.Errorf("failed to initialize gptscript: %w", err))
 		return
 	}
+	defer g.Close(false)
 
 	var args addDatasetElementsArgs
 	if err := json.Unmarshal([]byte(req.Input), &args); err != nil {
@@ -177,6 +186,7 @@ func (s *server) listDatasetElements(w http.ResponseWriter, r *http.Request) {
 		writeError(logger, w, http.StatusInternalServerError, fmt.Errorf("failed to initialize gptscript: %w", err))
 		return
 	}
+	defer g.Close(false)
 
 	var args listDatasetElementsArgs
 	if err := json.Unmarshal([]byte(req.Input), &args); err != nil {
@@ -239,6 +249,7 @@ func (s *server) getDatasetElement(w http.ResponseWriter, r *http.Request) {
 		writeError(logger, w, http.StatusInternalServerError, fmt.Errorf("failed to initialize gptscript: %w", err))
 		return
 	}
+	defer g.Close(false)
 
 	var args getDatasetElementArgs
 	if err := json.Unmarshal([]byte(req.Input), &args); err != nil {
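This file is the fix named in the commit message. Each dataset handler builds a per-request gptscript instance; previously the handlers never closed it and never passed along the caller's workspace, so a request could create a fresh workspace directory and leave it behind. Now `datasetRequest.opts` copies `GPTSCRIPT_WORKSPACE_ID` from the request env into `opts.Workspace`, and each of the four handlers defers `g.Close(false)`. A rough sketch of that lifecycle; the `instance` type and handler below are simplified stand-ins, and the exact meaning of the boolean passed to `Close` is not modelled here:

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// instance is a simplified stand-in for the per-request gptscript object.
type instance struct {
	workspace string
	ownsDir   bool
}

func newInstance(workspace string) *instance {
	if workspace != "" {
		// Reusing the caller's workspace: nothing new is created on disk.
		return &instance{workspace: workspace}
	}
	// No workspace supplied: make a private temp dir. This is what piled up
	// when handlers never closed their instances.
	dir, _ := os.MkdirTemp("", "gptscript-workspace-")
	return &instance{workspace: dir, ownsDir: true}
}

// Close releases the instance; the boolean mirrors g.Close(false) in the diff.
func (i *instance) Close(_ bool) {
	if i.ownsDir {
		os.RemoveAll(i.workspace) // remove only a directory this instance created itself
	}
	fmt.Println("closed instance for workspace:", i.workspace)
}

// workspaceFromEnv mirrors the new loop in datasetRequest.opts.
func workspaceFromEnv(env []string) string {
	var ws string
	for _, e := range env {
		if v, ok := strings.CutPrefix(e, "GPTSCRIPT_WORKSPACE_ID="); ok {
			ws = v
		}
	}
	return ws
}

func handleListDatasets(env []string) {
	g := newInstance(workspaceFromEnv(env))
	defer g.Close(false) // the fix: always release the per-request instance

	fmt.Println("listing datasets in:", g.workspace)
}

func main() {
	handleListDatasets([]string{"GPTSCRIPT_WORKSPACE_ID=/tmp/ws-abc123"})
}
```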
