From d11b312c5741ecb5e98a66eb1e91cd814df1582d Mon Sep 17 00:00:00 2001
From: Tony Worm <1390600+verdverm@users.noreply.github.com>
Date: Mon, 8 May 2023 05:37:27 -0400
Subject: [PATCH] hof/chat: mvp for chatgpt & llm features (#201)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
🎉
---
.hof/shadow/cli/cmd/hof/cmd/chat.go | 95 ++++++
.hof/shadow/cli/cmd/hof/cmd/root.go | 1 +
.hof/shadow/cli/cmd/hof/flags/chat.go | 9 +
cmd/hof/cmd/chat.go | 99 ++++++
cmd/hof/cmd/root.go | 1 +
cmd/hof/flags/chat.go | 9 +
design/cmds/chat.cue | 48 +++
design/main.cue | 1 +
docs/code/hof-schemas/dm/fields/common.cue | 7 +-
docs/code/hof-schemas/dm/sql/chat-etl.cue | 61 ++++
docs/code/hof-schemas/dm/sql/dm.cue | 3 +-
docs/code/hof-schemas/dm/sql/fields.cue | 9 +-
docs/code/hof-schemas/hof.cue | 10 +
.../code-generation/template-writing/scope.md | 51 ++++
flow/chat/cmds.cue | 108 +++++++
flow/chat/prompts/test.cue | 8 +
flow/run.go | 2 +
flow/tags.go | 15 +
flow/tasker/tasker.go | 8 +-
flow/tasks/api/call.go | 14 +-
go.mod | 1 +
go.sum | 2 +
lib/chat/cmd/cmd.go | 288 ++++++++++++++++++
lib/chat/cmd/prompt.go | 79 +++++
lib/chat/openai.go | 91 ++++++
lib/hof/find.go | 5 +
lib/hof/hof.go | 10 +
lib/structural/input.go | 6 +-
schema/dm/fields/common.cue | 7 +-
schema/dm/sql/chat-etl.cue | 61 ++++
schema/dm/sql/dm.cue | 3 +-
schema/dm/sql/fields.cue | 9 +-
schema/hof.cue | 10 +
33 files changed, 1114 insertions(+), 17 deletions(-)
create mode 100644 .hof/shadow/cli/cmd/hof/cmd/chat.go
create mode 100644 .hof/shadow/cli/cmd/hof/flags/chat.go
create mode 100644 cmd/hof/cmd/chat.go
create mode 100644 cmd/hof/flags/chat.go
create mode 100644 design/cmds/chat.cue
create mode 100644 docs/code/hof-schemas/dm/sql/chat-etl.cue
create mode 100644 docs/content/code-generation/template-writing/scope.md
create mode 100644 flow/chat/cmds.cue
create mode 100644 flow/chat/prompts/test.cue
create mode 100644 lib/chat/cmd/cmd.go
create mode 100644 lib/chat/cmd/prompt.go
create mode 100644 lib/chat/openai.go
create mode 100644 schema/dm/sql/chat-etl.cue
diff --git a/.hof/shadow/cli/cmd/hof/cmd/chat.go b/.hof/shadow/cli/cmd/hof/cmd/chat.go
new file mode 100644
index 000000000..45175336b
--- /dev/null
+++ b/.hof/shadow/cli/cmd/hof/cmd/chat.go
@@ -0,0 +1,95 @@
+package cmd
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/spf13/cobra"
+
+ "github.com/hofstadter-io/hof/cmd/hof/flags"
+
+ "github.com/hofstadter-io/hof/cmd/hof/ga"
+)
+
+var chatLong = `Use chat to work with hof features or from modules you import.
+Module authors can provide custom prompts for their schemas.
+
+Currently, only ChatGPT is supported. You can use any of the
+gpt-3.5 or gpt-4 models. The flag should match OpenAI API options.
+
+Set OPENAI_API_KEY`
+
+func init() {
+
+ ChatCmd.Flags().StringVarP(&(flags.ChatFlags.Model), "model", "M", "gpt-3.5-turbo", "LLM model to use [gpt-3.5-turbo,gpt-4]")
+ ChatCmd.Flags().StringVarP(&(flags.ChatFlags.Prompt), "prompt", "P", "", "path to the system prompt, the first message in the chat")
+ ChatCmd.Flags().StringVarP(&(flags.ChatFlags.Outfile), "outfile", "O", "", "path to write the output to")
+}
+
+func ChatRun(args []string) (err error) {
+
+ // you can safely comment this print out
+ fmt.Println("not implemented")
+
+ return err
+}
+
+var ChatCmd = &cobra.Command{
+
+ Use: "chat [args]",
+
+ Short: "Co-design with AI (alpha)",
+
+ Long: chatLong,
+
+ Run: func(cmd *cobra.Command, args []string) {
+
+ ga.SendCommandPath(cmd.CommandPath())
+
+ var err error
+
+ // Argument Parsing
+
+ err = ChatRun(args)
+ if err != nil {
+ fmt.Println(err)
+ os.Exit(1)
+ }
+ },
+}
+
+func init() {
+ extra := func(cmd *cobra.Command) bool {
+
+ return false
+ }
+
+ ohelp := ChatCmd.HelpFunc()
+ ousage := ChatCmd.UsageFunc()
+
+ help := func(cmd *cobra.Command, args []string) {
+
+ ga.SendCommandPath(cmd.CommandPath() + " help")
+
+ if extra(cmd) {
+ return
+ }
+ ohelp(cmd, args)
+ }
+ usage := func(cmd *cobra.Command) error {
+ if extra(cmd) {
+ return nil
+ }
+ return ousage(cmd)
+ }
+
+ thelp := func(cmd *cobra.Command, args []string) {
+ help(cmd, args)
+ }
+ tusage := func(cmd *cobra.Command) error {
+ return usage(cmd)
+ }
+ ChatCmd.SetHelpFunc(thelp)
+ ChatCmd.SetUsageFunc(tusage)
+
+}
diff --git a/.hof/shadow/cli/cmd/hof/cmd/root.go b/.hof/shadow/cli/cmd/hof/cmd/root.go
index 960740984..97245f1f0 100644
--- a/.hof/shadow/cli/cmd/hof/cmd/root.go
+++ b/.hof/shadow/cli/cmd/hof/cmd/root.go
@@ -129,6 +129,7 @@ func RootInit() {
RootCmd.AddCommand(FlowCmd)
RootCmd.AddCommand(FmtCmd)
RootCmd.AddCommand(ModCmd)
+ RootCmd.AddCommand(ChatCmd)
RootCmd.AddCommand(RunCmd)
RootCmd.AddCommand(FeedbackCmd)
diff --git a/.hof/shadow/cli/cmd/hof/flags/chat.go b/.hof/shadow/cli/cmd/hof/flags/chat.go
new file mode 100644
index 000000000..ccbf02d41
--- /dev/null
+++ b/.hof/shadow/cli/cmd/hof/flags/chat.go
@@ -0,0 +1,9 @@
+package flags
+
+type ChatFlagpole struct {
+ Model string
+ Prompt string
+ Outfile string
+}
+
+var ChatFlags ChatFlagpole
diff --git a/cmd/hof/cmd/chat.go b/cmd/hof/cmd/chat.go
new file mode 100644
index 000000000..04a621b4b
--- /dev/null
+++ b/cmd/hof/cmd/chat.go
@@ -0,0 +1,99 @@
+package cmd
+
+import (
+ "fmt"
+ "os"
+
+ "github.com/spf13/cobra"
+
+ "github.com/hofstadter-io/hof/cmd/hof/flags"
+
+ "github.com/hofstadter-io/hof/cmd/hof/ga"
+
+ "github.com/hofstadter-io/hof/lib/chat/cmd"
+)
+
+var chatLong = `Use chat to work with hof features or from modules you import.
+Module authors can provide custom prompts for their schemas.
+
+Currently, only ChatGPT is supported. You can use any of the
+gpt-3.5 or gpt-4 models. The flag should match OpenAI API options.
+
+Set OPENAI_API_KEY`
+
+func init() {
+
+ ChatCmd.Flags().StringVarP(&(flags.ChatFlags.Model), "model", "M", "gpt-3.5-turbo", "LLM model to use [gpt-3.5-turbo,gpt-4]")
+ ChatCmd.Flags().StringVarP(&(flags.ChatFlags.Prompt), "prompt", "P", "", "path to the system prompt, the first message in the chat")
+ ChatCmd.Flags().StringVarP(&(flags.ChatFlags.Outfile), "outfile", "O", "", "path to write the output to")
+}
+
+func ChatRun(args []string) (err error) {
+
+ // you can safely comment this print out
+ // fmt.Println("not implemented")
+
+ err = cmd.Run(args, flags.RootPflags, flags.ChatFlags)
+
+ return err
+}
+
+var ChatCmd = &cobra.Command{
+
+ Use: "chat [args]",
+
+ Short: "Co-design with AI (alpha)",
+
+ Long: chatLong,
+
+ Run: func(cmd *cobra.Command, args []string) {
+
+ ga.SendCommandPath(cmd.CommandPath())
+
+ var err error
+
+ // Argument Parsing
+
+ err = ChatRun(args)
+ if err != nil {
+ fmt.Println(err)
+ os.Exit(1)
+ }
+ },
+}
+
+func init() {
+ extra := func(cmd *cobra.Command) bool {
+
+ return false
+ }
+
+ ohelp := ChatCmd.HelpFunc()
+ ousage := ChatCmd.UsageFunc()
+
+ help := func(cmd *cobra.Command, args []string) {
+
+ ga.SendCommandPath(cmd.CommandPath() + " help")
+
+ if extra(cmd) {
+ return
+ }
+ ohelp(cmd, args)
+ }
+ usage := func(cmd *cobra.Command) error {
+ if extra(cmd) {
+ return nil
+ }
+ return ousage(cmd)
+ }
+
+ thelp := func(cmd *cobra.Command, args []string) {
+ help(cmd, args)
+ }
+ tusage := func(cmd *cobra.Command) error {
+ return usage(cmd)
+ }
+ ChatCmd.SetHelpFunc(thelp)
+ ChatCmd.SetUsageFunc(tusage)
+
+}
diff --git a/cmd/hof/cmd/root.go b/cmd/hof/cmd/root.go
index 04548e8a5..d1997cb3e 100644
--- a/cmd/hof/cmd/root.go
+++ b/cmd/hof/cmd/root.go
@@ -130,6 +130,7 @@ func RootInit() {
RootCmd.AddCommand(FlowCmd)
RootCmd.AddCommand(FmtCmd)
RootCmd.AddCommand(ModCmd)
+ RootCmd.AddCommand(ChatCmd)
RootCmd.AddCommand(RunCmd)
RootCmd.AddCommand(FeedbackCmd)
diff --git a/cmd/hof/flags/chat.go b/cmd/hof/flags/chat.go
new file mode 100644
index 000000000..ccbf02d41
--- /dev/null
+++ b/cmd/hof/flags/chat.go
@@ -0,0 +1,9 @@
+package flags
+
+type ChatFlagpole struct {
+ Model string
+ Prompt string
+ Outfile string
+}
+
+var ChatFlags ChatFlagpole
diff --git a/design/cmds/chat.cue b/design/cmds/chat.cue
new file mode 100644
index 000000000..895c320cc
--- /dev/null
+++ b/design/cmds/chat.cue
@@ -0,0 +1,48 @@
+package cmds
+
+import (
+ "github.com/hofstadter-io/hofmod-cli/schema"
+)
+
+#ChatCommand: schema.#Command & {
+ Name: "chat"
+ Usage: "chat [args]"
+ Short: "Co-design with AI (alpha)"
+ Long: #ChatRootHelp
+
+ Flags: [...schema.#Flag] & [ {
+ Name: "model"
+ Type: "string"
+ Default: "\"gpt-3.5-turbo\""
+ Help: "LLM model to use [gpt-3.5-turbo,gpt-4]"
+ Long: "model"
+ Short: "M"
+ },
+ {
+ Name: "prompt"
+ Type: "string"
+ Default: "\"\""
+ Help: "path to the system prompt, the first message in the chat"
+ Long: "prompt"
+ Short: "P"
+ },
+ {
+ Name: "outfile"
+ Type: "string"
+ Default: "\"\""
+ Help: "path to write the output to"
+ Long: "outfile"
+ Short: "O"
+ },
+ ]
+}
+
+#ChatRootHelp: #"""
+ Use chat to work with hof features or from modules you import.
+ Module authors can provide custom prompts for their schemas.
+
+ Currently, only ChatGPT is supported. You can use any of the
+ gpt-3.5 or gpt-4 models. The flag should match OpenAI API options.
+
+ Set OPENAI_API_KEY
+ """#
diff --git a/design/main.cue b/design/main.cue
index eb424518e..83c6ea5d3 100644
--- a/design/main.cue
+++ b/design/main.cue
@@ -35,6 +35,7 @@ import (
cmds.#ModCommand,
// beta commands
+ cmds.#ChatCommand,
cmds.#RunCommand,
// additional commands
diff --git a/docs/code/hof-schemas/dm/fields/common.cue b/docs/code/hof-schemas/dm/fields/common.cue
index 470ef6370..05528e1ac 100644
--- a/docs/code/hof-schemas/dm/fields/common.cue
+++ b/docs/code/hof-schemas/dm/fields/common.cue
@@ -14,9 +14,10 @@ DataTypes: ID |
ID: UUID & {Default: "" | *"uuid_generate_v4()"}
Field: {
- Name: string
- Type: string
- Reln?: string
+ Name: string
+ Plural: string | *"\(Name)s"
+ Type: string
+ Reln?: string
}
UUID: Field & {
diff --git a/docs/code/hof-schemas/dm/sql/chat-etl.cue b/docs/code/hof-schemas/dm/sql/chat-etl.cue
new file mode 100644
index 000000000..89c796049
--- /dev/null
+++ b/docs/code/hof-schemas/dm/sql/chat-etl.cue
@@ -0,0 +1,61 @@
+package sql
+
+import (
+ "strings"
+
+ "github.com/hofstadter-io/hof/schema/dm/fields"
+)
+
+ChatETL: {
+ // input models model fields value nested map (relns)
+ Original: Models: [string]: [string]: string | {[string]: string}
+ Datamodel: Models: {
+ for m, M in Original.Models {
+ (m): {
+ Name: m
+ Fields: {
+ for f, F in M {
+ // regular field
+ if !strings.HasPrefix(f, "$") {
+ (f): {
+ Name: f
+ [
+ if F == "string" {Varchar},
+ if F == "int" {fields.Int},
+ if F == "bool" {fields.Bool},
+ if F == "float" {fields.Float},
+ if F == "uuid" {fields.UUID},
+ if F == "datetime" {fields.Datetime},
+ if F == "email" {fields.Email},
+ if F == "password" {fields.Password},
+ if F == "url" {Varchar},
+ "UNKNOWN TYPE: \(F)" & false,
+ ][0]
+ }
+ }
+
+ // $relations
+ if f == "$relations" {
+ for f2, F2 in F {
+ (f2): {
+ fields.UUID
+ Name: f2
+ Relation: {
+ Name: F2.name
+ Type: F2.type
+ Other: F2.model
+ }
+ }
+ }
+ }
+
+ // special fields
+ if f == "id" {
+ (f): SQL: PrimaryKey: true
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/docs/code/hof-schemas/dm/sql/dm.cue b/docs/code/hof-schemas/dm/sql/dm.cue
index 2c1aeb0c7..d2e7429be 100644
--- a/docs/code/hof-schemas/dm/sql/dm.cue
+++ b/docs/code/hof-schemas/dm/sql/dm.cue
@@ -32,7 +32,8 @@ Model: M={
// $hof: History: ...
// for easy access
- Name: M.$hof.metadata.name
+ Name: M.$hof.metadata.name
+ Plural: string | *"\(Name)s"
// These are the fields of a model
// they can map onto database columnts and form fields
diff --git a/docs/code/hof-schemas/dm/sql/fields.cue b/docs/code/hof-schemas/dm/sql/fields.cue
index ebc079ced..80c5fab30 100644
--- a/docs/code/hof-schemas/dm/sql/fields.cue
+++ b/docs/code/hof-schemas/dm/sql/fields.cue
@@ -5,7 +5,7 @@ import (
)
CommonFields: {
- ID: fields.UUID & {Default: "" | *"uuid_generate_v4()"}
+ ID: fields.UUID & {Default: string | *"uuid_generate_v4()"}
CreatedAt: fields.Datetime
UpdatedAt: fields.Datetime
}
@@ -14,6 +14,11 @@ SoftDelete: {
DeletedAt: fields.Datetime
}
+PrimaryKey: fields.UUID & {
+ Default: string | *"uuid_generate_v4()"
+ SQL: PrimaryKey: true
+}
+
Varchar: F=fields.String & {
- sqlType: "varchar(\(F.Length))"
+ SQL: Type: "character varying(\(F.Length))"
}
diff --git a/docs/code/hof-schemas/hof.cue b/docs/code/hof-schemas/hof.cue
index 9125d66ce..20fd759d0 100644
--- a/docs/code/hof-schemas/hof.cue
+++ b/docs/code/hof-schemas/hof.cue
@@ -37,6 +37,8 @@ Hof: {
// hof/gen
gen?: {
+ root: bool | *false
+
// name of the generator
name: string | *""
@@ -47,6 +49,8 @@ Hof: {
// hof/flow, used for both flows & tasks
flow?: {
+ root: bool | *false
+
// name of the flow or task
name: string | *""
@@ -55,6 +59,12 @@ Hof: {
// TODO, maybe we make this "flow" for flows?
op: string | *"flow"
}
+
+ chat?: {
+ root: bool | *false
+ name: string | *""
+ type: string | *""
+ }
}
}
diff --git a/docs/content/code-generation/template-writing/scope.md b/docs/content/code-generation/template-writing/scope.md
new file mode 100644
index 000000000..e64dfd3a3
--- /dev/null
+++ b/docs/content/code-generation/template-writing/scope.md
@@ -0,0 +1,51 @@
+---
+title: "Variables & Scopes"
+weight: 21
+---
+
+{{}}
+The underlying Go text/template system has semantics which
+will impact how variables are scoped and can be accessed.
+This page will help you understand and work through these.
+{{}}
+
+
+
+## Range and With statements
+
+A range or with statement will hide any `.Value` paths
+because it creates a new scope.
+To access variables outside, assign them to `$Value`
+like in this example.
+
+We often recommend that you capture your important
+variables at the start of a template or partial
+as a best practice you should adopt.
+
+```
+{{ $DM := .DM }}
+{{ $M := .Model }}
+CREATE TABLE {{ snake $M.Plural }} (
+{{ range $K, $F := $M.Fields }}
+{{ if ne $K "$hof" }}
+{{ template "sql/create-field.sql" (dict "Field" $F "Model" $M "DM" $DM) }}
+{{ end }}
+{{ end }}
+);
+```
+
+## Passing variables to sub-templates (partials)
+
+In the same way, a call to a partial template will create a new scope.
+Your top-level or saved variables will not be accessible.
+These partial templates only accept one value,
+so to pass multiple, we use the `dict` helper.
+You can see this in the above example.
+
+The partial template referenced:
+
+```
+{{ snake .Field.Name }} {{ if .Field.sqlType -}}
+{{ .Field.sqlType }}{{ else }}{{ .Field.Type -}}{{end}}{{ with .Field.Default }} DEFAULT {{.}}{{ end }},
+{{ if .Field.SQL.PrimaryKey }}constraint {{ .Model.Plural }}_pkey primary key ({{ snake .Field.Name }}),{{ end }}
+```
diff --git a/flow/chat/cmds.cue b/flow/chat/cmds.cue
new file mode 100644
index 000000000..064043aac
--- /dev/null
+++ b/flow/chat/cmds.cue
@@ -0,0 +1,108 @@
+package chat
+
+import (
+ "encoding/json"
+ "list"
+
+ "github.com/hofstadter-io/hof/flow/chat/prompts"
+)
+
+models: MakeCall & {
+ @flow(gpt/list)
+ path: "/v1/models"
+
+ etl: {
+ resp: {}
+ out: list.SortStrings([ for _, M in resp.data {M.id}])
+ }
+}
+
+info: MakeCall & {
+ @flow(gpt/info)
+ path: "/v1/models/gpt-3.5-turbo"
+}
+
+call: {
+ @flow(gpt/call)
+ MakeCall
+ method: "POST"
+ path: "/v1/completions"
+ data: {
+ model: "text-davinci-003"
+ prompt: prompts.BlueSky[0].content
+ }
+}
+
+ask: {
+ @flow(gpt/ask)
+ MakeCall
+ question: string @tag(question)
+ method: "POST"
+ path: "/v1/chat/completions"
+ data: {
+ model: "gpt-3.5-turbo"
+ messages: [{
+ role: "user"
+ content: question
+ }]
+ temperature: 1.0
+ }
+ etl: {
+ resp: {}
+ out: resp.choices[0].message.content
+ }
+}
+
+chat: {
+ @flow(gpt/chat)
+ MakeCall
+ method: "POST"
+ path: "/v1/chat/completions"
+ data: {
+ model: "gpt-3.5-turbo"
+ messages: prompts.BlueSky
+ }
+}
+
+MakeCall: {
+ method: string | *"GET"
+ path: string
+ data: {}
+ etl: {
+ resp: {}
+ out: _ | *resp
+ }
+
+ steps: {
+
+ env: {
+ @task(os.Getenv)
+ OPENAI_API_KEY: string
+ }
+
+ call: {
+ @task(api.Call)
+ req: {
+ host: "https://api.openai.com"
+ headers: {
+ "Content-Type": "application/json"
+ Authorization: "Bearer \(env.OPENAI_API_KEY)"
+ }
+
+ "method": method
+ "path": path
+ "data": data
+ }
+ resp: {
+ body: _
+ }
+ }
+
+ filter: etl & {resp: call.resp.body}
+
+ out: {
+ @task(os.Stdout)
+ text: json.Indent(json.Marshal(filter.out), "", " ") + "\n"
+ }
+ }
+}
diff --git a/flow/chat/prompts/test.cue b/flow/chat/prompts/test.cue
new file mode 100644
index 000000000..7b4504fe9
--- /dev/null
+++ b/flow/chat/prompts/test.cue
@@ -0,0 +1,8 @@
+package prompts
+
+BlueSky: [{
+ role: "user"
+ content: ##"""
+ Why is the sky blue?
+ """##
+}]
diff --git a/flow/run.go b/flow/run.go
index 08ffb40e8..437337253 100644
--- a/flow/run.go
+++ b/flow/run.go
@@ -55,6 +55,8 @@ func run(entrypoints []string, opts *flags.RootPflagpole, popts *flags.FlowFlagp
popts.Flow = append(popts.Flow, flowArgs...)
opts.Tags = append(opts.Tags, tagArgs...)
+ // fmt.Println("args:", popts.Flow, opts.Tags)
+
// load in CUE files
root, err := structural.LoadCueInputs(entrypoints, ctx, nil)
if err != nil {
diff --git a/flow/tags.go b/flow/tags.go
index 53616a69d..8aa4703d4 100644
--- a/flow/tags.go
+++ b/flow/tags.go
@@ -67,6 +67,21 @@ func injectSecrets(val cue.Value, tags []string) (cue.Value, error) {
tagPaths[arg] = v.Path()
}
+ return false
+ }
+ if attr.Name() == "tag" {
+ if attr.NumArgs() == 0 {
+ err = fmt.Errorf("@tag() has no inner args at %s", v.Path())
+ errs = append(errs, err)
+ return false
+ }
+ // TODO, better options &| UX here
+ arg, _ := attr.String(0)
+ _, ok := tagMap[arg]
+ if ok {
+ tagPaths[arg] = v.Path()
+ }
+
return false
}
}
diff --git a/flow/tasker/tasker.go b/flow/tasker/tasker.go
index d4eeb59d8..9e08b8223 100644
--- a/flow/tasker/tasker.go
+++ b/flow/tasker/tasker.go
@@ -10,6 +10,8 @@ import (
"github.com/hofstadter-io/hof/flow/task"
)
+var debug = false
+
func NewTasker(ctx *hofcontext.Context) cueflow.TaskFunc {
// This function implements the Runner interface.
// It parses Cue values, you will see all of them recursively
@@ -45,9 +47,9 @@ func NewTasker(ctx *hofcontext.Context) cueflow.TaskFunc {
}
func maybeTask(ctx *hofcontext.Context, val cue.Value, attr cue.Attribute) (cueflow.Runner, error) {
- //if ctx.DebugTasks {
- //fmt.Println("task?:", attr)
- //}
+ if debug {
+ fmt.Println("task?:", attr)
+ }
taskId, err := attr.String(0)
if err != nil {
diff --git a/flow/tasks/api/call.go b/flow/tasks/api/call.go
index 54f94c095..c93d5c11e 100644
--- a/flow/tasks/api/call.go
+++ b/flow/tasks/api/call.go
@@ -16,6 +16,8 @@ import (
- catch / retry on failed connection
*/
+var debug = false
+
type call struct {
}
@@ -192,10 +194,12 @@ func buildRequest(val cue.Value) (R *gorequest.SuperAgent, err error) {
data := req.LookupPath(cue.ParsePath("data"))
if data.Exists() {
- err := data.Decode(&R.Data)
+ d := map[string]any{}
+ err := data.Decode(&d)
if err != nil {
return R, err
}
+ R = R.Send(d)
}
timeout := req.LookupPath(cue.ParsePath("timeout"))
@@ -274,6 +278,14 @@ func makeRequest(R *gorequest.SuperAgent) (gorequest.Response, error) {
}
}()
+ if debug {
+ s, err := R.Clone().AsCurlCommand()
+ if err != nil {
+ return nil, err
+ }
+ fmt.Println("CURL:", s)
+ }
+
resp, body, errs := R.End()
if len(errs) != 0 && resp == nil {
diff --git a/go.mod b/go.mod
index b635cecf3..ec6894c7e 100644
--- a/go.mod
+++ b/go.mod
@@ -34,6 +34,7 @@ require (
github.com/parnurzeal/gorequest v0.2.16
github.com/pkg/errors v0.9.1
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
+ github.com/sashabaranov/go-openai v1.9.1
github.com/sergi/go-diff v1.3.1
github.com/spf13/cobra v1.7.0
github.com/spf13/viper v1.15.0
diff --git a/go.sum b/go.sum
index e28142a2e..2b37916c9 100644
--- a/go.sum
+++ b/go.sum
@@ -354,6 +354,8 @@ github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncj
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI=
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs=
+github.com/sashabaranov/go-openai v1.9.1 h1:3N52HkJKo9Zlo/oe1AVv5ZkCOny0ra58/ACvAxkN3MM=
+github.com/sashabaranov/go-openai v1.9.1/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
diff --git a/lib/chat/cmd/cmd.go b/lib/chat/cmd/cmd.go
new file mode 100644
index 000000000..7d893a3b0
--- /dev/null
+++ b/lib/chat/cmd/cmd.go
@@ -0,0 +1,288 @@
+package cmd
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+
+ "github.com/sashabaranov/go-openai"
+
+ "github.com/hofstadter-io/hof/cmd/hof/flags"
+ "github.com/hofstadter-io/hof/lib/chat"
+ // "github.com/hofstadter-io/hof/lib/runtime"
+)
+
+func Run(args []string, rflags flags.RootPflagpole, cflags flags.ChatFlagpole) error {
+ if len(args) == 0 {
+ return fmt.Errorf("no input provided")
+ }
+
+ // fmt.Printf("lib/chat.Run: %v %v %v\n", args, rflags, cflags)
+
+ cmd, rest := args[0], args[1:]
+ switch cmd {
+ case "dm", "data", "datamodel":
+ // fmt.Println("embed:", rest)
+ if len(rest) == 0 {
+ return fmt.Errorf("no input for datamodel provided")
+ }
+ return dmCall(rest, rflags, cflags)
+
+ case "embed":
+ // fmt.Println("embed:", rest)
+ if len(rest) == 0 {
+ return fmt.Errorf("no input for embedding provided")
+ }
+ return embedCall(rest, rflags, cflags)
+
+ default:
+ return chatCall(args, rflags, cflags)
+
+ }
+
+ // load our cue, for future use
+ /*
+ R, err := runtime.New(extra, rflags)
+ if err != nil {
+ return err
+ }
+ err = R.Load()
+ if err != nil {
+ return err
+ }
+ */
+
+ // load code
+ /*
+ cbytes, err := os.ReadFile(jsonfile)
+ if err != nil {
+ return err
+ }
+ code := string(cbytes)
+
+ // possibly load inst
+ if strings.HasPrefix(inst, "./") {
+ ibytes, err := os.ReadFile(inst)
+ if err != nil {
+ return err
+ }
+ inst = string(ibytes)
+ }
+
+ // make call
+ resp, err := chat.OpenaiChat(code, inst, cflags.Model)
+ if err != nil {
+ return err
+ }
+
+ // write code
+ fmt.Println(resp)
+ err = os.WriteFile(jsonfile, []byte(resp), 0644)
+ if err != nil {
+ return err
+ }
+ */
+}
+
+func chatCall(args []string, rflags flags.RootPflagpole, cflags flags.ChatFlagpole) error {
+ lines := []string{}
+
+ for _, arg := range args {
+ if arg == "-" {
+ s, err := io.ReadAll(os.Stdin)
+ if err != nil {
+ return err
+ }
+ lines = append(lines, string(s))
+ } else if info, err := os.Lstat(arg); err == nil && !info.IsDir() {
+ bs, err := os.ReadFile(arg)
+ if err != nil {
+ return err
+ }
+ s := string(bs)
+ lines = append(lines, s)
+ } else {
+ lines = append(lines, arg)
+ }
+
+ }
+
+ m := strings.Join(lines, "\n")
+ msgs := make([]openai.ChatCompletionMessage,0)
+ if cflags.Prompt != "" {
+ bs, err := os.ReadFile(cflags.Prompt)
+ if err != nil {
+ return err
+ }
+ s := string(bs)
+ msg := openai.ChatCompletionMessage{
+ Role: "system",
+ Content: s,
+ }
+ msgs = append(msgs, msg)
+ }
+ msg := openai.ChatCompletionMessage{
+ Role: "user",
+ Content: m,
+ }
+ msgs = append(msgs, msg)
+ resp, err := chat.OpenaiChat(msgs, cflags.Model)
+ fmt.Println(resp)
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+func dmCall(args []string, rflags flags.RootPflagpole, cflags flags.ChatFlagpole) error {
+ lines := []string{}
+
+ sysMsg := dmPretextString
+
+ // construct inputs, we append the first file-like input
+ // to the pretext as our model, and output the result to the same
+ file := ""
+ for _, arg := range args {
+ if arg == "-" {
+ b, err := io.ReadAll(os.Stdin)
+ if err != nil {
+ return err
+ }
+ s := string(b)
+ if file == "" {
+ file = "-"
+ sysMsg += s
+ } else {
+ lines = append(lines, s)
+ }
+ } else if info, err := os.Lstat(arg); err == nil && !info.IsDir() {
+ bs, err := os.ReadFile(arg)
+ if err != nil {
+ return err
+ }
+ s := string(bs)
+ if file == "" {
+ file = arg
+ sysMsg += s
+ } else {
+ lines = append(lines, s)
+ }
+ } else {
+ lines = append(lines, arg)
+ }
+ }
+
+ // create the user message, by starting with the current or first datamodel
+ usrMsg := ""
+ if file == "" {
+ // no file, probably the first iteration?
+ file = "dm.cue"
+ _, err := os.Stat(file)
+ if err != nil {
+ // fmt.Println(err)
+ // not found, new dm most likely
+ usrMsg = dmStartingJSON
+ } else {
+ bs, err := os.ReadFile("dm.cue")
+ if err != nil {
+ return err
+ }
+ usrMsg = string(bs)
+ }
+ }
+
+ usrMsg += strings.Join(lines, "\n")
+
+ if rflags.Verbosity > 0 {
+ fmt.Println(sysMsg)
+ fmt.Println(usrMsg)
+ fmt.Printf("\nlength: %d\n\n", len(sysMsg) + len(usrMsg))
+ }
+
+ // make our chat messages
+ msgs := []openai.ChatCompletionMessage {
+ // system message
+ {
+ Role: "system",
+ Content: sysMsg,
+ },
+ // user instructions
+ {
+ Role: "user",
+ Content: usrMsg,
+ },
+ }
+
+ // make the call
+ resp, err := chat.OpenaiChat(msgs, cflags.Model)
+ if err != nil {
+ return err
+ }
+
+ //
+ // fixes
+ //
+ // remove any triple ticks, they keep showing up despite the prompt...
+ resp += "\n"
+ resp = strings.Replace(resp, "```\n", "\n", -1)
+ resp = strings.Replace(resp, "```json", "", -1)
+
+ // add a new line for writing
+ resp = strings.TrimSpace(resp) + "\n"
+
+ // Print the final model
+ fmt.Println(resp)
+ // also write the file
+ if file != "-" {
+ if cflags.Outfile != "" {
+ file = cflags.Outfile
+ }
+ err := os.WriteFile(file, []byte(resp), 0644)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func embedCall(args []string, rflags flags.RootPflagpole, cflags flags.ChatFlagpole) error {
+ inputs := []string{}
+ for _, R := range args {
+ bs, err := os.ReadFile(R)
+ if err != nil {
+ return err
+ }
+ s := string(bs)
+ inputs = append(inputs, s)
+ }
+
+ var req openai.EmbeddingRequest
+ apiKey := os.Getenv("OPENAI_API_KEY")
+ client := openai.NewClient(apiKey)
+
+ req.Model = openai.AdaEmbeddingV2
+ req.Input = inputs
+
+ ctx := context.Background()
+ resp, err := client.CreateEmbeddings(ctx, req)
+ if err != nil {
+ return err
+ }
+
+ D := map[string]any{}
+ for _, d := range resp.Data {
+ f := args[d.Index]
+ D[f] = d.Embedding
+ }
+
+ bs, err := json.Marshal(D)
+ if err != nil {
+ return err
+ }
+ fmt.Println(string(bs))
+
+ return nil
+}
diff --git a/lib/chat/cmd/prompt.go b/lib/chat/cmd/prompt.go
new file mode 100644
index 000000000..003d7d3b2
--- /dev/null
+++ b/lib/chat/cmd/prompt.go
@@ -0,0 +1,79 @@
+package cmd
+
+var dmUserPrefix = `
+From now on, only output the changes to the datamodel. Be sure to include the parent fields so the changes appear at the right level.
+`
+
+var dmStartingJSON = `
+Datamodel: {
+ @datamodel()
+ Models: {
+ User: {
+ id: "uuid"
+ created_at: "datetime"
+ updated_at: "datetime"
+ deleted_at: "datetime"
+ email: "email"
+ active: "bool"
+ verified: "bool"
+ }
+ }
+}
+`
+
+var dmPretextString = `
+TYPES: [string, int, bool, float, uuid, datetime, email, url]
+RELATIONS: [belongs-to, has-one, has-many, many-to-many]
+
+SCHEMA: """
+Datamodel: {
+ @datamodel()
+ Models: {
+ <ModelName>: {
+ <fieldName>: "<type>"
+ <fieldName>: "<type>"
+ $relations: {
+ <relationName>: {
+ type: "<relation-type>"
+ model: "<ModelName>"
+ }
+ <relationName>: {
+ ...
+ }
+ }
+ }
+ <ModelName>: {
+ ...
+ }
+ }
+}
+"""
+
+Your task is to modify the original JSON object according to the instructions.
+The JSON should conform to the SCHEMA and follow the GUIDELINES.
+Use the following guidelines when performing the task.
+
+GUIDELINES:
+- The Datamodel is a loose representation of a SQL database, models are tables and fields are columns.
+- The Datamodel is composed of Models, Models are composed of fields and $relations.
+- Do NOT add extra models the user does not ask for.
+- Do NOT place a Model within another Model. You may only modify them to add $relations.
+- You are allowed to make assumptions about the need for new models or fields if the instructions seem to imply their need.
+- <type> must come from the TYPES list,
+- The common database fields are id, created_at, updated_at, and deleted_at.
+- When adding a new model, include the common database fields, unless instructed otherwise.
+- If the instructions do not specify the field type, you should make your best guess.
+- You should try to keep the number of models concise and not introduce unnecessary duplication of information.
+- If a field can be calculated by a SQL query on another table, don't add it.
+- <relation-type> must come from the RELATIONS list.
+- Models can have relations between them. If you make a relation, there must be a model for both sides.
+- If a user has something, this implies a new Model and Relation. It is up to you to determine the correct relation type.
+- "has-many" and "many-to-many" relations should be named as the plural of the model they refer to.
+- "many-to-many" relations require an extra model to hold the linking information.
+- Remove quotes from keys, unless they contain unusual characters.
+- You should only output the results with no explanation, extra labels, or other words.
+
+When you are done generating the results, reconsider the above instructions and ensure
+the results are valid for the SCHEMA and GUIDELINES.
+
+`
diff --git a/lib/chat/openai.go b/lib/chat/openai.go
new file mode 100644
index 000000000..63702298d
--- /dev/null
+++ b/lib/chat/openai.go
@@ -0,0 +1,91 @@
+package chat
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/sashabaranov/go-openai"
+
+ // "github.com/hofstadter-io/hof/lib/templates"
+)
+
+var apiKey string
+
+func init() {
+ apiKey = os.Getenv("OPENAI_API_KEY")
+ if apiKey == "" {
+ // fmt.Println("OPENAI_API_KEY environment var is missing\nVisit https://platform.openai.com/account/api-keys to get one")
+ }
+}
+
+func OpenaiChat(messages []openai.ChatCompletionMessage, model string) (string, error) {
+ // override default model in interactive | chat mode
+ if !(strings.HasPrefix(model, "gpt-3") || strings.HasPrefix(model, "gpt-4")) {
+ return "", fmt.Errorf("must use a chat compatible model (gpt-3.5* or gpt-4*), got: %q", model)
+ }
+
+ client := openai.NewClient(apiKey)
+
+ // initial req setup
+ var req openai.ChatCompletionRequest
+
+ req.Model = model
+ req.N = 1
+ req.MaxTokens = 2500
+ req.Temperature = 0.042
+ req.TopP = 0.69
+ req.Messages = messages
+
+
+ ctx := context.Background()
+ resp, err := client.CreateChatCompletion(ctx, req)
+ if err != nil {
+ return "", err
+ }
+ R := resp.Choices
+ final := R[0].Message.Content
+
+ return final, nil
+
+ // add our message to the conversation
+ /* TODO, once we are interactive
+ msg = openai.ChatCompletionMessage{
+ Role: "assistant",
+ Content: final,
+ }
+ req.Messages = append(req.Messages, msg)
+ */
+}
+
+func OpenaiEmbedding(inputs []string) (string, error) {
+
+ client := openai.NewClient(apiKey)
+
+ // initial req setup
+ var req openai.EmbeddingRequest
+
+ req.Model = openai.AdaEmbeddingV2
+ req.Input = inputs
+
+ ctx := context.Background()
+ resp, err := client.CreateEmbeddings(ctx, req)
+ if err != nil {
+ return "", err
+ }
+ D := resp.Data
+ final := fmt.Sprint(D[0].Embedding)
+
+ return final, nil
+
+ // add our message to the conversation
+ /* TODO, once we are interactive
+ msg = openai.ChatCompletionMessage{
+ Role: "assistant",
+ Content: final,
+ }
+ req.Messages = append(req.Messages, msg)
+ */
+}
+
diff --git a/lib/hof/find.go b/lib/hof/find.go
index 440670fd7..8f8e02048 100644
--- a/lib/hof/find.go
+++ b/lib/hof/find.go
@@ -106,6 +106,11 @@ func FindHofs(value cue.Value) (roots []*Node[any], err error) {
stack.Hof.Flow.Task = ac
stack.Hof.Flow.Name = label
+ case "chat":
+ stack.Hof.Chat.Root = true
+ stack.Hof.Chat.Name = label
+ stack.Hof.Chat.Type = ac
+
default:
found = false
}
diff --git a/lib/hof/hof.go b/lib/hof/hof.go
index 4e2dcb3e1..35fd51ebc 100644
--- a/lib/hof/hof.go
+++ b/lib/hof/hof.go
@@ -23,6 +23,7 @@ type Hof struct {
Datamodel Datamodel
Gen Gen
Flow Flow
+ Chat Chat
// any extra config, set by users
Extra map[string]any
@@ -89,6 +90,15 @@ type Gen struct {
*/
type Flow struct {
Root bool
+ Name string
Task string
+}
+
+/*
+ hof/chat configuration
+*/
+type Chat struct {
+ Root bool
Name string
+ Type string
}
diff --git a/lib/structural/input.go b/lib/structural/input.go
index b986861eb..eaf8c949c 100644
--- a/lib/structural/input.go
+++ b/lib/structural/input.go
@@ -24,6 +24,8 @@ type Input struct {
Value cue.Value
}
+var debug = false
+
// Loads the entrypoints using the context provided
// returns the value from the load after validating it
func LoadCueInputs(entrypoints []string, ctx *cue.Context, cfg *load.Config) (cue.Value, error) {
@@ -75,7 +77,9 @@ func LoadCueInputs(entrypoints []string, ctx *cue.Context, cfg *load.Config) (cu
bi.AddSyntax(F)
default:
- fmt.Println("unknown encoding for", f.Filename, f.Encoding)
+ if debug {
+ fmt.Println("unknown encoding for", f.Filename, f.Encoding)
+ }
}
}
diff --git a/schema/dm/fields/common.cue b/schema/dm/fields/common.cue
index 470ef6370..05528e1ac 100644
--- a/schema/dm/fields/common.cue
+++ b/schema/dm/fields/common.cue
@@ -14,9 +14,10 @@ DataTypes: ID |
ID: UUID & {Default: "" | *"uuid_generate_v4()"}
Field: {
- Name: string
- Type: string
- Reln?: string
+ Name: string
+ Plural: string | *"\(Name)s"
+ Type: string
+ Reln?: string
}
UUID: Field & {
diff --git a/schema/dm/sql/chat-etl.cue b/schema/dm/sql/chat-etl.cue
new file mode 100644
index 000000000..89c796049
--- /dev/null
+++ b/schema/dm/sql/chat-etl.cue
@@ -0,0 +1,61 @@
+package sql
+
+import (
+ "strings"
+
+ "github.com/hofstadter-io/hof/schema/dm/fields"
+)
+
+// ChatETL maps a loosely-typed model description (string type names and a
+// "$relations" map, as produced by the chat flow) onto this package's
+// Datamodel/Models schema.
+ChatETL: {
+	// input models model fields value nested map (relns)
+	Original: Models: [string]: [string]: string | {[string]: string}
+	// derived output: one sql Model per entry in Original.Models
+	Datamodel: Models: {
+		for m, M in Original.Models {
+			(m): {
+				Name: m
+				Fields: {
+					for f, F in M {
+						// regular field
+						if !strings.HasPrefix(f, "$") {
+							(f): {
+								Name: f
+								// pick the schema for the string type name;
+								// the final entry is bottom (false), so an
+								// unknown type name fails unification loudly
+								[
+									if F == "string" {Varchar},
+									if F == "int" {fields.Int},
+									if F == "bool" {fields.Bool},
+									if F == "float" {fields.Float},
+									if F == "uuid" {fields.UUID},
+									if F == "datetime" {fields.Datetime},
+									if F == "email" {fields.Email},
+									if F == "password" {fields.Password},
+									if F == "url" {Varchar},
+									"UNKNOWN TYPE: \(F)" & false,
+								][0]
+							}
+						}
+
+						// $relations: each entry becomes a UUID foreign-key
+						// field carrying the relation metadata
+						if f == "$relations" {
+							for f2, F2 in F {
+								(f2): {
+									fields.UUID
+									Name: f2
+									Relation: {
+										Name: F2.name
+										Type: F2.type
+										Other: F2.model
+									}
+								}
+							}
+						}
+
+						// special fields
+						if f == "id" {
+							(f): SQL: PrimaryKey: true
+						}
+					}
+				}
+			}
+		}
+	}
+}
diff --git a/schema/dm/sql/dm.cue b/schema/dm/sql/dm.cue
index 2c1aeb0c7..d2e7429be 100644
--- a/schema/dm/sql/dm.cue
+++ b/schema/dm/sql/dm.cue
@@ -32,7 +32,8 @@ Model: M={
// $hof: History: ...
// for easy access
- Name: M.$hof.metadata.name
+ Name: M.$hof.metadata.name
+ Plural: string | *"\(Name)s"
// These are the fields of a model
// they can map onto database columnts and form fields
diff --git a/schema/dm/sql/fields.cue b/schema/dm/sql/fields.cue
index ebc079ced..80c5fab30 100644
--- a/schema/dm/sql/fields.cue
+++ b/schema/dm/sql/fields.cue
@@ -5,7 +5,7 @@ import (
)
CommonFields: {
- ID: fields.UUID & {Default: "" | *"uuid_generate_v4()"}
+ ID: fields.UUID & {Default: string | *"uuid_generate_v4()"}
CreatedAt: fields.Datetime
UpdatedAt: fields.Datetime
}
@@ -14,6 +14,11 @@ SoftDelete: {
DeletedAt: fields.Datetime
}
+PrimaryKey: fields.UUID & {
+ Default: string | *"uuid_generate_v4()"
+ SQL: PrimaryKey: true
+}
+
Varchar: F=fields.String & {
- sqlType: "varchar(\(F.Length))"
+ SQL: Type: "character varying(\(F.Length))"
}
diff --git a/schema/hof.cue b/schema/hof.cue
index 9125d66ce..20fd759d0 100644
--- a/schema/hof.cue
+++ b/schema/hof.cue
@@ -37,6 +37,8 @@ Hof: {
// hof/gen
gen?: {
+ root: bool | *false
+
// name of the generator
name: string | *""
@@ -47,6 +49,8 @@ Hof: {
// hof/flow, used for both flows & tasks
flow?: {
+ root: bool | *false
+
// name of the flow or task
name: string | *""
@@ -55,6 +59,12 @@ Hof: {
// TODO, maybe we make this "flow" for flows?
op: string | *"flow"
}
+
+ chat?: {
+ root: bool | *false
+ name: string | *""
+ type: string | *""
+ }
}
}