autog

package module
v0.0.12 Latest
Warning

This package is not in the latest version of its module.

Go to latest
Published: Apr 1, 2024 License: Apache-2.0 Imports: 5 Imported by: 0

README

AutoG

AutoG is a lightweight, comprehensive, and flexible Agent development framework

  • Lightweight: Developed in pure Go language, zero third-party dependencies.
  • Comprehensive: Fully-featured, includes a prompt framework, RAG, model interfacing interfaces, supports long-term and short-term memory, planning, action, and reflection capabilities, etc.
  • Flexible: A functional + react framework, capable of implementing multi-Agent interactions and dynamic state graphs and control flows through the capabilities of Future functions.
Examples

See ExampleOpenAiChatAgent in example_openai_test.go and ExampleOllamaChatAgent in example_ollama_test.go.

    // Step 1. A complete agent that supports continuous chat conversations
    (&autog.Agent{}).Prompt(system, longHistory, shortHistory).
        ReadQuestion(nil, input, output).
        AskLLM(openai, true). // stream = true
        WaitResponse(nil).
        Action(nil).
        Reflection(nil, 3).
        Summarize(nil, summary, prefix, true) // force = true

See ExampleOpenAiRag in example_openai_test.go and ExampleOllamaRag in example_ollama_test.go.

    // Step 1. Create a RAG with a memory vector database
    memDB, _ := rag.NewMemDatabase()
    memRag := &autog.Rag{ Database: memDB, EmbeddingModel: openai }

    // Step 2. Split `docstring` into chunks, and save to database
    splitter := &rag.TextSplitter{ChunkSize: 100}
    memRag.Indexing(cxt, "/doc", docstring, splitter, false)

    // Step 3. Search database by question `what is AutoG?`
    scoredss, _ := memRag.Retrieval(cxt, "/doc", []string{"what is AutoG?"}, 3)
    for _, scoreds := range scoredss {
        for _, scored := range scoreds {
            fmt.Printf("Score:%f\n", scored.Score)
            fmt.Printf("Content:[%s]\n", scored.Chunk.GetContent())
        }
    }

AutoG是一个轻量、完整、灵活的Agent开发框架

  • 轻量:纯Go语言开发,零第三方依赖。
  • 完整:功能齐全,包含提示工程框架,RAG,模型对接接口,支持长短期记忆、计划、行动和反思能力等。
  • 灵活:函数式+响应式框架,可通过Future函数的能力,实现多Agent交互以及动态的状态图和控制流。
样例

See ExampleOpenAiChatAgent in example_openai_test.go and ExampleOllamaChatAgent in example_ollama_test.go.

    // 步骤 1. 一个完整的支持连续聊天对话的智能体
    (&autog.Agent{}).Prompt(system, longHistory, shortHistory).
        ReadQuestion(nil, input, output).
        AskLLM(openai, true). // stream = true
        WaitResponse(nil).
        Action(nil).
        Reflection(nil, 3).
        Summarize(nil, summary, prefix, true) // force = true

See ExampleOpenAiRag in example_openai_test.go and ExampleOllamaRag in example_ollama_test.go.

    // 步骤 1. 创建一个RAG并初始化,使其使用内存向量数据库
    memDB, _ := rag.NewMemDatabase()
    memRag := &autog.Rag{ Database: memDB, EmbeddingModel: openai }

    // 步骤 2. 将 `docstring` 分割成小块,并保存到数据库
    splitter := &rag.TextSplitter{ChunkSize: 100}
    memRag.Indexing(cxt, "/doc", docstring, splitter, false)

    // 步骤 3. 用问题 `what is AutoG?` 去检索向量数据库
    scoredss, _ := memRag.Retrieval(cxt, "/doc", []string{"what is AutoG?"}, 3)
    for _, scoreds := range scoredss {
        for _, scored := range scoreds {
            fmt.Printf("Score:%f\n", scored.Score)
            fmt.Printf("Content:[%s]\n", scored.Chunk.GetContent())
        }
    }

Documentation

Index

Constants

View Source
const (
	VerboseNone int = iota
	VerboseShowSending
	VerboseShowReceiving
)
View Source
const (
	ROLE_SYSTEM    string = "system"
	ROLE_USER      string = "user"
	ROLE_ASSISTANT string = "assistant"
)
View Source
const (
	DOCUMENT_PATH_NONE = ""
)

Variables

This section is empty.

Functions

func IsValidRole

func IsValidRole(role string) bool

func OutputSummaryContent

func OutputSummaryContent(output StreamReader, contentbuf *strings.Builder, delta string)

func OutputSummaryError

func OutputSummaryError(output StreamReader, contentbuf *strings.Builder, status LLMStatus, errstr string)

Types

type Action

type Action struct {
	Name    string
	Desc    string
	NeedRun func(content string) (need bool)
	Check   func(content string) (ok bool, err string, payload interface{})
	Run     func(content string, payload interface{}) (ok bool, err string)
}

type Agent

type Agent struct {
	Prompts              []*PromptItem
	Request              string
	Context              context.Context
	Input                *Input
	Output               *Output
	LongHistoryMessages  []ChatMessage
	ShortHistoryMessages []ChatMessage
	PromptMessages       []ChatMessage
	LLM                  LLM
	Stream               bool
	ResponseStatus       LLMStatus
	ResponseMessage      ChatMessage
	ReflectionContent    string
	AgentStage           AgentStage
	CanDoAction          bool
	DoAction             *DoAction
	CanDoReflection      bool
	DoReflection         *DoReflection
}

func (*Agent) Action

func (a *Agent) Action(doAct *DoAction) *Agent

func (*Agent) AskLLM

func (a *Agent) AskLLM(llm LLM, stream bool) *Agent

func (*Agent) AskReflection

func (a *Agent) AskReflection(reflection string) *Agent

func (*Agent) GetLongHistory

func (a *Agent) GetLongHistory() []ChatMessage

func (*Agent) GetShortHistory

func (a *Agent) GetShortHistory() []ChatMessage

func (*Agent) Prompt

func (a *Agent) Prompt(prompts ...*PromptItem) *Agent

func (*Agent) ReadQuestion

func (a *Agent) ReadQuestion(cxt context.Context, input *Input, output *Output) *Agent

func (*Agent) Reflection

func (a *Agent) Reflection(doRef *DoReflection, retry int) *Agent

func (*Agent) StreamDelta added in v0.0.12

func (a *Agent) StreamDelta(contentbuf *strings.Builder, delta string)

func (*Agent) StreamEnd added in v0.0.12

func (a *Agent) StreamEnd(contentbuf *strings.Builder)

func (*Agent) StreamError added in v0.0.12

func (a *Agent) StreamError(contentbuf *strings.Builder, status LLMStatus, errstr string)

func (*Agent) StreamStart added in v0.0.12

func (a *Agent) StreamStart() *strings.Builder

func (*Agent) Summarize

func (a *Agent) Summarize(cxt context.Context, summary *PromptItem, prefix *PromptItem, force bool) *Agent

func (*Agent) WaitResponse

func (a *Agent) WaitResponse(cxt context.Context) *Agent

type AgentStage

type AgentStage int
const (
	AsReadQuestion AgentStage = iota
	AsAskLLM
	AsAskReflection
	AsWaitResponse
	AsAction
	AsReflection
	AsSummarize
)

type ChatMessage

type ChatMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

func (*ChatMessage) String

func (cm *ChatMessage) String() string

type Chunk

type Chunk interface {
	GetIndex() int
	SetIndex(index int)
	GetPath() string
	SetPath(path string)
	GetQuery() string
	SetQuery(query string)
	GetByteStart() int
	SetByteStart(i int)
	GetByteEnd() int
	SetByteEnd(i int)
	GetContent() string
	SetContent(content string)
	GetPayload() interface{}
	SetPayload(payload interface{})
	GetEmbedding() Embedding
	SetEmbedding(embed Embedding)
}

type Database

type Database interface {
	AppendChunks(path string, payload interface{}, chunks []Chunk) error
	SaveChunks(path string, payload interface{}, chunks []Chunk) error
	SearchChunks(path string, embeds []Embedding, topk int) ([]ScoredChunks, error)
}

type DoAction

type DoAction struct {
	Do func(content string) (ok bool, reflection string)
}

type DoReflection

type DoReflection struct {
	Do func(reflection string, retry int)
}

type Embedding

type Embedding []float64

func (*Embedding) String

func (e *Embedding) String(d int) string

type EmbeddingModel

type EmbeddingModel interface {
	Embeddings(cxt context.Context, dimensions int, texts []string) ([]Embedding, error)
}

type EmbeddingStage added in v0.0.10

type EmbeddingStage int
const (
	EmbeddingStageIndexing EmbeddingStage = iota
	EmbeddingStageRetrieval
)

type Input

type Input struct {
	ReadContent func() string
}

type LLM

type LLM interface {
	InitLLM() error

	CalcTokens(cxt context.Context, content string) int
	SendMessages(cxt context.Context, msgs []ChatMessage) (LLMStatus, ChatMessage)
	SendMessagesStream(cxt context.Context, msgs []ChatMessage, reader StreamReader) (LLMStatus, ChatMessage)

	CalcTokensByWeakModel(cxt context.Context, content string) int
	SendMessagesByWeakModel(cxt context.Context, msgs []ChatMessage) (LLMStatus, ChatMessage)
	SendMessagesStreamByWeakModel(cxt context.Context, msgs []ChatMessage, reader StreamReader) (LLMStatus, ChatMessage)
}

type LLMStatus

type LLMStatus int
const (
	LLM_STATUS_OK LLMStatus = iota
	LLM_STATUS_USER_CANCELED
	LLM_STATUS_EXCEED_CONTEXT
	LLM_STATUS_BED_REQUEST
	LLM_STATUS_BED_RESPONSE
	LLM_STATUS_BED_MESSAGE
	LLM_STATUS_UNKNOWN_ERROR
)

type Output

type Output struct {
	WriteContent func(stage AgentStage, stream StreamStage, buf *strings.Builder, str string)
}

type ParserFunction

type ParserFunction func(path string, payload interface{}) ([]Chunk, error)

type PromptItem

type PromptItem struct {
	Name        string
	GetMessages func(query string) []ChatMessage
	GetPrompt   func(query string) (role string, prompt string)
}

type Rag

type Rag struct {
	Database            Database
	EmbeddingModel      EmbeddingModel
	EmbeddingBatch      int
	EmbeddingRoutines   int
	EmbeddingDimensions int
	EmbeddingCallback   func(stage EmbeddingStage, texts []string, embeds []Embedding, i, j int, finished, tried int, err error) bool
}

func (*Rag) Embeddings

func (r *Rag) Embeddings(cxt context.Context, stage EmbeddingStage, texts []string) ([]Embedding, error)

func (*Rag) Indexing

func (r *Rag) Indexing(cxt context.Context, path string, payload interface{}, splitter Splitter, overwrite bool) error

func (*Rag) Retrieval

func (r *Rag) Retrieval(cxt context.Context, path string, queries []string, topk int) ([]ScoredChunks, error)

type ScoredChunk

type ScoredChunk struct {
	Chunk Chunk
	Score float64
}

type ScoredChunks

type ScoredChunks []*ScoredChunk

type Splitter

type Splitter interface {
	GetParser() ParserFunction
}

type StreamReader

type StreamReader interface {
	StreamStart() *strings.Builder
	StreamDelta(contentbuf *strings.Builder, delta string)
	StreamError(contentbuf *strings.Builder, status LLMStatus, errstr string)
	StreamEnd(contentbuf *strings.Builder)
}

type StreamStage added in v0.0.12

type StreamStage int
const (
	StreamStageStart StreamStage = iota
	StreamStageDelta
	StreamStageError
	StreamStageEnd
)

type Summary

type Summary struct {
	Cxt              context.Context
	StreamReader     StreamReader
	StreamBuffer     *strings.Builder
	LLM              LLM
	PromptSummary    string
	PromptPrefix     string
	DisableStream    bool
	MinSummaryTokens int

	MinSplit int
	MaxDepth int
}

func (*Summary) AskLLM

func (s *Summary) AskLLM(msgs []ChatMessage) (LLMStatus, ChatMessage)

func (*Summary) InitSummary

func (s *Summary) InitSummary() error

func (*Summary) Summarize

func (s *Summary) Summarize(longHistory []ChatMessage, shortHistory []ChatMessage, force bool) (LLMStatus, []ChatMessage)

func (*Summary) SummarizeOnce

func (s *Summary) SummarizeOnce(msgs []ChatMessage) (LLMStatus, []ChatMessage)

func (*Summary) SummarizeSplit

func (s *Summary) SummarizeSplit(force bool, msgs []ChatMessage, depth int) (LLMStatus, []ChatMessage)

func (*Summary) TokenizeMessages

func (s *Summary) TokenizeMessages(messages []ChatMessage) ([]TokenizedMessage, int)

type TokenizedMessage

type TokenizedMessage struct {
	Tokens int
}

Directories

Path Synopsis

Jump to

Keyboard shortcuts

? : This menu
/ : Search site
f or F : Jump to
y or Y : Canonical URL