# Description

Used to test ai sfn.

## Usage

```sh
$ yomo test-prompt -h
Test LLM prompt

Usage:
  yomo test-prompt [flags]

Aliases:
  test-prompt, p

Flags:
  -a, --ai-server string       LLM API server address (default "http://localhost:8000")
  -h, --help                   help for test-prompt
      --sfn strings            sfn source directory
  -s, --system-prompt string   system prompt (default "You are a very helpful assistant. Your job is to choose the best possible action to solve the user question or task. Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous. If you don't know the answer, stop the conversation by saying \"no func call\"")
  -u, --user-prompt string     user prompt
```

Test [llm-sfn-get-weather](https://github.com/yomorun/yomo/tree/master/example/10-ai/llm-sfn-get-weather/main.go):

```sh
$ yomo p --sfn ./llm-sfn-get-weather -u "What's the difference between the weather in Beijing and New York?"
ℹ️ --------------------------------------------------------
ℹ️ Run AI SFN on directory: .
ℹ️ Register AI function success
ℹ️ Invoke LLM API "http://localhost:8000/invoke"
ℹ️ >> LLM API Request
ℹ️ Messages:
ℹ️   System: You are a very helpful assistant. Your job is to choose the best possible action to solve the user question or task. Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.
ℹ️   User: What's the difference between the weather in Beijing and New York?
ℹ️ << LLM API Response
ℹ️ Invoke functions[2]:
ℹ️   [call_LUD1TeeQhnh7DgOpEWBPEvub] tag: 17, name: get-weather, arguments: {"city_name": "Beijing"}, result: [Beijing] temperature: 25°C
ℹ️   [call_Ml5GMJNoJflFvloAIVFfn9eo] tag: 17, name: get-weather, arguments: {"city_name": "New York"}, result: [New York] temperature: 30°C
ℹ️ Finish Reason: stop
ℹ️ Content: The current temperature in Beijing is 25°C, while the temperature in New York is 30°C. This means that New York is currently 5°C warmer than Beijing.
```

---------

Co-authored-by: C.C <[email protected]>
Commit 8b24008, 1 parent (1d7d54e), showing 9 changed files with 273 additions and 18 deletions.
The new `test-prompt` command implementation (new file, 226 lines):
```go
/*
Copyright © 2021 Allegro Networks

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cli

import (
    "bufio"
    "bytes"
    "encoding/json"
    "fmt"
    "net/http"
    "os"
    "os/exec"
    "strings"
    "syscall"
    "time"

    "github.com/spf13/cobra"
    "github.com/yomorun/yomo/ai"
    "github.com/yomorun/yomo/pkg/log"

    // serverless registrations
    _ "github.com/yomorun/yomo/cli/serverless/deno"
    _ "github.com/yomorun/yomo/cli/serverless/golang"
    _ "github.com/yomorun/yomo/cli/serverless/wasm"
)

var (
    sfnDir       []string
    userPrompt   string
    systemPrompt string
    aiServerAddr string
)

// testPromptCmd represents the test-prompt command for LLM functions.
// The source code of the LLM function is located in sfnDir.
var testPromptCmd = &cobra.Command{
    Use:     "test-prompt",
    Aliases: []string{"p"},
    Short:   "Test LLM prompt",
    Long:    "Test LLM prompt",
    Run: func(cmd *cobra.Command, args []string) {
        // sfn source directory
        if len(sfnDir) == 0 {
            sfnDir = append(sfnDir, ".")
        }
        for _, dir := range sfnDir {
            // run sfn
            log.InfoStatusEvent(os.Stdout, "--------------------------------------------------------")
            log.InfoStatusEvent(os.Stdout, "Attaching LLM function in directory: %v", dir)
            cmd := exec.Command("go", "run", ".")
            cmd.Dir = dir
            env := os.Environ()
            env = append(env, "YOMO_LOG_LEVEL=info")
            cmd.Env = env
            // cmd.Stdout = io.Discard
            // cmd.Stderr = io.Discard
            cmd.SysProcAttr = &syscall.SysProcAttr{
                Setpgid: true,
            }
            stdout, err := cmd.StdoutPipe()
            if err != nil {
                log.FailureStatusEvent(os.Stdout, "Failed to attach LLM function in directory: %v, error: %v", dir, err)
                continue
            }
            defer stdout.Close()
            outputReader := bufio.NewReader(stdout)
            // read outputReader
            output := make(chan string)
            defer close(output)
            go func(outputReader *bufio.Reader, output chan string) {
                for {
                    line, err := outputReader.ReadString('\n')
                    if err != nil {
                        break
                    }
                    if len(line) > 0 {
                        output <- line
                    }
                }
            }(outputReader, output)
            // start cmd
            if err := cmd.Start(); err != nil {
                log.FailureStatusEvent(os.Stdout, "Failed to run LLM function in directory: %v, error: %v", dir, err)
                continue
            } else {
                defer func(cmd *exec.Cmd) {
                    pgid, err := syscall.Getpgid(cmd.Process.Pid)
                    if err == nil {
                        syscall.Kill(-pgid, syscall.SIGTERM)
                    } else {
                        cmd.Process.Kill()
                    }
                }(cmd)
            }
            // wait for the sfn to be ready
            for {
                select {
                case out := <-output:
                    // log.InfoStatusEvent(os.Stdout, "AI SFN Output: %s", out)
                    if len(out) > 0 && strings.Contains(out, "register ai function success") {
                        log.InfoStatusEvent(os.Stdout, "Register LLM function success")
                        goto REQUEST
                    }
                case <-time.After(5 * time.Second):
                    log.FailureStatusEvent(os.Stdout, "Connect to zipper failed, please check whether the zipper is running")
                    os.Exit(1)
                }
            }
            // invoke llm api
            // request
        REQUEST:
            apiEndpoint := fmt.Sprintf("%s/invoke", aiServerAddr)
            log.InfoStatusEvent(os.Stdout, `Invoking LLM API "%s"`, apiEndpoint)
            invokeReq := ai.InvokeRequest{
                IncludeCallStack: true, // include call stack
                Prompt:           userPrompt,
            }
            reqBuf, err := json.Marshal(invokeReq)
            if err != nil {
                log.FailureStatusEvent(os.Stdout, "Failed to marshal invoke request: %v", err)
                continue
            }
            // invoke api endpoint
            log.InfoStatusEvent(os.Stdout, ">> LLM API Request")
            log.InfoStatusEvent(os.Stdout, "Messages:")
            log.InfoStatusEvent(os.Stdout, "\tSystem: %s", systemPrompt)
            log.InfoStatusEvent(os.Stdout, "\tUser: %s", userPrompt)
            resp, err := http.Post(apiEndpoint, "application/json", bytes.NewBuffer(reqBuf))
            if err != nil {
                log.FailureStatusEvent(os.Stdout, "Failed to invoke llm api: %v", err)
                continue
            }
            defer resp.Body.Close()
            // response
            log.InfoStatusEvent(os.Stdout, "<< LLM API Response")
            // failed to invoke llm api
            if resp.StatusCode != http.StatusOK {
                var errorResp ai.ErrorResponse
                err := json.NewDecoder(resp.Body).Decode(&errorResp)
                if err != nil {
                    log.FailureStatusEvent(os.Stdout, "Failed to decode LLM API response: %v", err)
                    continue
                }
                log.FailureStatusEvent(os.Stdout, "Failed to invoke LLM API: %s", errorResp.Error)
                continue
            }
            // successfully invoked the LLM API
            var invokeResp ai.InvokeResponse
            if err := json.NewDecoder(resp.Body).Decode(&invokeResp); err != nil {
                log.FailureStatusEvent(os.Stdout, "Failed to decode LLM API response: %v", err)
                continue
            }
            // tool calls
            for tag, tcs := range invokeResp.ToolCalls {
                toolCallCount := len(tcs)
                if toolCallCount > 0 {
                    log.InfoStatusEvent(os.Stdout, "Invoking functions[%d]:", toolCallCount)
                    for _, tc := range tcs {
                        if invokeResp.ToolMessages == nil {
                            log.InfoStatusEvent(os.Stdout,
                                "\t[%s] tag: %d, name: %s, arguments: %s",
                                tc.ID,
                                tag,
                                tc.Function.Name,
                                tc.Function.Arguments,
                            )
                        } else {
                            log.InfoStatusEvent(os.Stdout,
                                "\t[%s] tag: %d, name: %s, arguments: %s\n🌟 result: %s",
                                tc.ID,
                                tag,
                                tc.Function.Name,
                                tc.Function.Arguments,
                                getToolCallResult(tc, invokeResp.ToolMessages),
                            )
                        }
                    }
                }
            }
            // finish reason
            log.InfoStatusEvent(os.Stdout, "Finish Reason: %s", invokeResp.FinishReason)
            log.InfoStatusEvent(os.Stdout, "Final Content: \n🤖 %s", invokeResp.Content)
        }
    },
}

func getToolCallResult(tc *ai.ToolCall, tms []ai.ToolMessage) string {
    result := ""
    for _, tm := range tms {
        if tm.ToolCallId == tc.ID {
            result = tm.Content
        }
    }
    return result
}

func init() {
    rootCmd.AddCommand(testPromptCmd)

    testPromptCmd.Flags().StringSliceVarP(&sfnDir, "sfn", "", []string{}, "sfn source directory")
    testPromptCmd.Flags().StringVarP(&userPrompt, "user-prompt", "u", "", "user prompt")
    testPromptCmd.MarkFlagRequired("user-prompt")
    testPromptCmd.Flags().StringVarP(
        &systemPrompt,
        "system-prompt",
        "s",
        `You are a very helpful assistant. Your job is to choose the best possible action to solve the user question or task. Don't make assumptions about what values to plug into functions. Ask for clarification if a user request is ambiguous.`,
        "system prompt",
    )
    testPromptCmd.Flags().StringVarP(&aiServerAddr, "ai-server", "a", "http://localhost:8000", "LLM API server address")

    runViper = bindViper(testPromptCmd)
}
```