diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..b97af66 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,27 @@ +--- +name: Bug report +about: Create a report to help me improve +title: '' +labels: bug +assignees: '' + +--- + +**Describe the bug** +*A clear and concise description of what the bug is.* + +**Console Output** +*If applicable, add console output.* + +**tlm Version** +*Which tlm version are you running? Type `tlm version` to get the version* + +e.g. `tlm 1.0 (windows/amd64)` + + +**Platform Information (please complete the following information):** +*Which operating system are you running on?* + + +**Additional context** +*Add any other context about the problem here.* diff --git a/.github/ISSUE_TEMPLATE/custom.md b/.github/ISSUE_TEMPLATE/custom.md new file mode 100644 index 0000000..48d5f81 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/custom.md @@ -0,0 +1,10 @@ +--- +name: Custom issue template +about: Describe this issue template's purpose here. 
+title: '' +labels: '' +assignees: '' + +--- + + diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index b4f6e2f..36cf6cf 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -1,10 +1,9 @@ -name: Go +name: ci -on: [push] +on: [push, pull_request] jobs: build: - runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -14,6 +13,14 @@ jobs: with: go-version: 1.21 + - name: Cache Go modules + uses: actions/cache@v3 + with: + path: | + ~/.cache/go-build + ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + - name: Display Go version run: go version @@ -27,5 +34,48 @@ jobs: uses: actions/upload-artifact@v4 with: name: dist - path: | - dist + path: dist/ + retention-days: 1 + e2e: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: dist + + - name: Cache pip dependencies + uses: actions/cache@v3 + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('e2e/requirements.txt') }} + + - name: Install tlm + run: | + mv $(cat VERSION)/tlm_$(cat VERSION)_linux_amd64 /usr/local/bin/tlm + chmod +x /usr/local/bin/tlm + tlm help + + - name: Set up Python 3.11 + uses: actions/setup-python@v3 + with: + python-version: 3.11 + + - name: Install dependencies + run: pip install -r e2e/requirements.txt + + - name: Run Tests wo/ Ollama + run: robot --outputdir dist --name tlm --include no-ollama tests/ + working-directory: e2e/ + + - name: Archive e2e artifacts + uses: actions/upload-artifact@v4 + if: always() + with: + name: e2e-report + path: e2e/dist/ + + needs: + - build diff --git a/.gitignore b/.gitignore index 7b381fb..d155e2e 100644 --- a/.gitignore +++ b/.gitignore @@ -20,4 +20,8 @@ # Go workspace file go.work .idea -dist \ No newline at end of file +.repomix/ +dist/ +.venv/ +.vscode/ +__pycache__/ \ No newline at end of file diff --git a/LICENSE b/LICENSE index 261eeb9..9db919b 100644 --- a/LICENSE 
+++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [yyyy] [name of copyright owner] + Copyright 2024 Yusuf Can Bayrak Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/README.md b/README.md index 61a5fb9..74c03ea 100644 --- a/README.md +++ b/README.md @@ -1,25 +1,34 @@ -# tlm - Local terminal companion, powered by CodeLLaMa. +# tlm - Local CLI Copilot, powered by Ollama. πŸ’»πŸ¦™ -tlm is your CLI companion which requires nothing except your workstation. It uses most efficient and powerful [CodeLLaMa](https://ai.meta.com/blog/code-llama-large-language-model-coding/) in your local environment to provide you the best possible command line suggestions. +![Latest Build](https://img.shields.io/github/actions/workflow/status/yusufcanb/tlm/build.yaml?style=for-the-badge&logo=github) +[![Sonar Quality Gate](https://img.shields.io/sonar/quality_gate/yusufcanb_tlm?server=https%3A%2F%2Fsonarcloud.io&style=for-the-badge&logo=sonar)](https://sonarcloud.io/project/overview?id=yusufcanb_tlm) +[![Latest Release](https://img.shields.io/github/v/release/yusufcanb/tlm?display_name=release&style=for-the-badge&logo=github&link=https%3A%2F%2Fgithub.com%2Fyusufcanb%2Ftlm%2Freleases)](https://github.com/yusufcanb/tlm/releases) -![Suggest](./assets/suggest.gif) +tlm is your CLI companion which requires nothing except your workstation. It uses most efficient and powerful open-source models like [Llama 3.3](https://ollama.com/library/llama3.3), [Phi4](https://ollama.com/library/phi4), [DeepSeek-R1](https://ollama.com/library/deepseek-r1), [Qwen](https://ollama.com/library/qwen2.5-coder) of your choice in your local environment to provide you the best possible command line assistance. 
-![Explain](./assets/explain.gif) +| Get a suggestion | Explain a command | +| -------------------------------- | -------------------------------- | +| ![Suggest](./assets/suggest.gif) | ![Explain](./assets/explain.gif) | -![Config](./assets/config.gif) +| Ask with context (One-liner RAG) | Configure your favorite model | +| -------------------------------- | ------------------------------ | +| ![Ask](./assets/ask.gif) | ![Config](./assets/config.gif) | ## Features -- πŸ’Έ No API Key (Subscription) is required. (ChatGPT, Github Copilot, Azure OpenAI, etc.) +- πŸ’Έ No API Key (Subscription) is required. (ChatGPT, Claude, Github Copilot, Azure OpenAI, etc.) - πŸ“‘ No internet connection is required. - πŸ’» Works on macOS, Linux and Windows. -- πŸ‘©πŸ»β€πŸ’» Automatic shell detection. - +- πŸ‘©πŸ»β€πŸ’» Automatic shell detection. (Powershell, Bash, Zsh) + - πŸš€ One liner generation and command explanation. +- πŸ–Ί No-brainer RAG (Retrieval Augmented Generation) + +- 🧠 Experiment any model. ([Llama3](https://ollama.com/library/llama3.3), [Phi4](https://ollama.com/library/phi4), [DeepSeek-R1](https://ollama.com/library/deepseek-r1), [Qwen](https://ollama.com/library/qwen2.5-coder)) with parameters of your choice. ## Installation @@ -28,65 +37,142 @@ Installation can be done in two ways; - [Installation script](#installation-script) (recommended) - [Go Install](#go-install) -### Prerequisites +### Installation Script + +Installation script is the recommended way to install tlm. +It will recognize which platform and architecture to download and will execute the install command for you. -[Ollama](https://ollama.com/) is needed to download to necessary models. -It can be downloaded with the following methods on different platforms. 
+#### Linux and macOS; -- On Linux and macOS; +Download and execute the installation script by using the following command; -```bash -curl -fsSL https://ollama.com/install.sh | sh +```bash +curl -fsSL https://raw.githubusercontent.com/yusufcanb/tlm/1.2/install.sh | sudo -E bash ``` -- On Windows; +#### Windows (Powershell 5.5 or higher) + +Download and execute the installation script by using the following command; + +```powershell +Invoke-RestMethod -Uri https://raw.githubusercontent.com/yusufcanb/tlm/1.2/install.ps1 | Invoke-Expression +``` -Download instructions can be followed at the following link: [https://ollama.com/download](https://ollama.com/download) +### Go Install -- Or using official Docker images 🐳; +If you have Go 1.22 or higher installed on your system, you can easily use the following command to install tlm; ```bash -# CPU Only -docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama +go install github.com/yusufcanb/tlm@1.2 +``` -# With GPU (Nvidia only) -docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama" +You're ready! Check installation by using the following command; + +```bash +tlm ``` -### Installation Script +## Usage -Installation script is the recommended way to install tlm. -It will recognize the which platform and architecture to download and will execute install command for you. +``` +$ tlm +NAME: + tlm - terminal copilot, powered by open-source models. + +USAGE: + tlm suggest "" + tlm s --model=qwen2.5-coder:1.5b --style=stable "" + + tlm explain "" # explain a command + tlm e --model=llama3.2:1b --style=balanced "" # explain a command with a overrided model + + tlm ask "" # ask a question + tlm ask --context . --include *.md "" # ask a question with a context + +VERSION: + 1.2 + +COMMANDS: + ask, a Asks a question (beta) + suggest, s Suggests a command. + explain, e Explains a command. 
+ config, c Configures language model, style and shell + version, v Prints tlm version. + help, h Shows a list of commands or help for one command + +GLOBAL OPTIONS: + --help, -h show help + --version, -v print the version +``` -#### Linux and macOS; +### Ask - Ask something with or without context +Ask a question with context. Here is an example question with a context of this repositories Go files under ask package. -Download and execute the installation script by using the following command; +``` +$ tlm ask --help +NAME: + tlm ask - Asks a question (beta) + +USAGE: + tlm ask "" # ask a question + tlm ask --context . --include *.md "" # ask a question with a context + +OPTIONS: + --context value, -c value context directory path + --include value, -i value [ --include value, -i value ] include patterns. e.g. --include=*.txt or --include=*.txt,*.md + --exclude value, -e value [ --exclude value, -e value ] exclude patterns. e.g. --exclude=**/*_test.go or --exclude=*.pyc,*.pyd + --interactive, --it enable interactive chat mode (default: false) + --model value, -m value override the model for command suggestion. (default: qwen2 5-coder:3b) + --help, -h show help +``` + +### Suggest - Get Command by Prompt -```bash -curl -fsSL https://raw.githubusercontent.com/yusufcanb/tlm/main/install.sh | sudo bash -E ``` +$ tlm suggest --help +NAME: + tlm suggest - Suggests a command. -#### Windows (Powershell 5.1 or higher) +USAGE: + tlm suggest + tlm suggest --model=llama3.2:1b + tlm suggest --model=llama3.2:1b --style= -Download and execute the installation script by using the following command; +DESCRIPTION: + suggests a command for given prompt. -```powershell -Invoke-RestMethod -Uri https://raw.githubusercontent.com/yusufcanb/tlm/main/install.ps1 | Invoke-Expression -``` +COMMANDS: + help, h Shows a list of commands or help for one command -### Go Install +OPTIONS: + --model value, -m value override the model for command suggestion. 
(default: qwen2.5-coder:3b) + --style value, -s value override the style for command suggestion. (default: balanced) + --help, -h show help +``` -If you have Go 1.21 or higher installed on your system, you can easily use the following command to install tlm; +### Explain - Explain a Command -```bash -go install github.com/yusufcanb/tlm@latest ``` +$ tlm explain --help +NAME: + tlm explain - Explains a command. -Check installation by using the following command; +USAGE: + tlm explain + tlm explain --model=llama3.2:1b + tlm explain --model=llama3.2:1b --style= -```bash -tlm help +DESCRIPTION: + explains given shell command. + +COMMANDS: + help, h Shows a list of commands or help for one command + +OPTIONS: + --model value, -m value override the model for command suggestion. (default: qwen2.5-coder:3b) + --style value, -s value override the style for command suggestion. (default: balanced) + --help, -h show help ``` ## Uninstall @@ -95,4 +181,12 @@ On Linux and macOS; ```bash rm /usr/local/bin/tlm +rm ~/.tlm.yml +``` + +On Windows; + +```powershell +Remove-Item -Recurse -Force "C:\Users\$env:USERNAME\AppData\Local\Programs\tlm" +Remove-Item -Force "$HOME\.tlm.yml" ``` diff --git a/VERSION b/VERSION index d9090c1..ea710ab 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -1.0-rc3 \ No newline at end of file +1.2 \ No newline at end of file diff --git a/app/Modelfile.explain b/app/Modelfile.explain deleted file mode 100644 index a760c74..0000000 --- a/app/Modelfile.explain +++ /dev/null @@ -1,8 +0,0 @@ -FROM codellama:7b - -PARAMETER temperature 0.25 -PARAMETER top_p 0.2 -PARAMETER top_k 25 -PARAMETER seed 42 - -SYSTEM You are a command line application which helps user to get brief explanations for shell commands. You will be explaining given executable shell command to user with shortest possible explanation. If given input is not a shell command, you will respond with "I can only explain shell commands. Please provide a shell command to explain". 
You will never respond any question out of shell command explanation context. \ No newline at end of file diff --git a/app/Modelfile.suggest b/app/Modelfile.suggest deleted file mode 100644 index 85751ff..0000000 --- a/app/Modelfile.suggest +++ /dev/null @@ -1,8 +0,0 @@ -FROM codellama:7b - -PARAMETER temperature 0.1 -PARAMETER top_p 0.5 -PARAMETER top_k 40 -PARAMETER seed 1 - -SYSTEM You are software program specifically for Command Line Interface usage. User will ask you some thing that can be convertible to a UNIX or Windows command. You won't provide information or explanations and your output will be just an executable shell command inside three backticks. \ No newline at end of file diff --git a/app/app.go b/app/app.go deleted file mode 100644 index 73d8e6c..0000000 --- a/app/app.go +++ /dev/null @@ -1,66 +0,0 @@ -package app - -import ( - _ "embed" - "fmt" - ollama "github.com/jmorganca/ollama/api" - "github.com/yusufcanb/tlm/config" - "github.com/yusufcanb/tlm/explain" - "github.com/yusufcanb/tlm/install" - "github.com/yusufcanb/tlm/suggest" - "runtime" - - "github.com/urfave/cli/v2" -) - -//go:embed Modelfile.explain -var explainModelfile string - -//go:embed Modelfile.suggest -var suggestModelfile string - -type TlmApp struct { - App *cli.App - - explainModelfile string - suggestModelfile string -} - -func New(version string) *TlmApp { - con := config.New() - con.LoadOrCreateConfig() - - o, _ := ollama.ClientFromEnvironment() - sug := suggest.New(o, suggestModelfile) - exp := explain.New(o, explainModelfile) - ins := install.New(o, suggestModelfile, explainModelfile) - - cliApp := &cli.App{ - Name: "tlm", - Usage: "local terminal companion powered by CodeLLaMa.", - Version: version, - HideHelpCommand: true, - Action: func(c *cli.Context) error { - return cli.ShowAppHelp(c) - }, - Commands: []*cli.Command{ - sug.Command(), - exp.Command(), - ins.Command(), - con.Command(), - { - Name: "version", - Aliases: []string{"v"}, - Usage: "print version.", - 
Action: func(c *cli.Context) error { - fmt.Printf("tlm version %s %s/%s", version, runtime.GOOS, runtime.GOARCH) - return nil - }, - }, - }, - } - - return &TlmApp{ - App: cliApp, - } -} diff --git a/assets/ask.gif b/assets/ask.gif new file mode 100644 index 0000000..2c2ee2b Binary files /dev/null and b/assets/ask.gif differ diff --git a/assets/config.gif b/assets/config.gif index 765b6b6..12519d2 100644 Binary files a/assets/config.gif and b/assets/config.gif differ diff --git a/assets/explain.gif b/assets/explain.gif index e54f135..3a41097 100644 Binary files a/assets/explain.gif and b/assets/explain.gif differ diff --git a/assets/suggest.gif b/assets/suggest.gif index df090f4..9ec2394 100644 Binary files a/assets/suggest.gif and b/assets/suggest.gif differ diff --git a/assets/tapes/ask.tape b/assets/tapes/ask.tape new file mode 100644 index 0000000..5ff4185 --- /dev/null +++ b/assets/tapes/ask.tape @@ -0,0 +1,19 @@ +Output ask.gif + +Set Shell zsh +Set Theme "Cyberdyne" + +Set Width 1200 +Set Height 600 +Set FontSize 22 + +Hide +Type "source ~/.zshrc && clear" +Enter +Hide + +Show +Type "tlm ask --context . 
--include=pkg/ask/**/*.go 'briefly explain tlm ask command'" +Sleep 500ms +Enter +Sleep 15s diff --git a/assets/tapes/config.tape b/assets/tapes/config.tape index d5ec658..b5182d5 100644 --- a/assets/tapes/config.tape +++ b/assets/tapes/config.tape @@ -1,37 +1,49 @@ Output config.gif +Set Shell zsh Set Theme "Cyberdyne" Set Width 1200 Set Height 600 Set FontSize 22 +Hide +Type "source ~/.zshrc && clear" +Enter +Hide + +Show Type "tlm config" -Sleep 500ms +Sleep 250ms Enter Sleep 2s -# host +Down Sleep 500ms -Enter -Sleep 1s -# shell -Up +Down Sleep 500ms -Enter -Sleep 1s -# suggest -Sleep 500ms -Up -Sleep 500ms -Enter -Sleep 1s +Down +Sleep 750ms -# explain -Sleep 500ms Down -Sleep 3s -Enter +Sleep 750ms + +Down +Sleep 750ms + +Down +Sleep 300ms + +Down +Sleep 300ms + +Down +Sleep 300ms + +Down +Sleep 300ms + + Sleep 2s diff --git a/assets/tapes/explain.tape b/assets/tapes/explain.tape index 052b916..e58cbf3 100644 --- a/assets/tapes/explain.tape +++ b/assets/tapes/explain.tape @@ -1,12 +1,18 @@ Output explain.gif +Set Shell zsh Set Theme "Cyberdyne" Set Width 1200 -Set Height 600 -Set FontSize 32 +Set Height 650 +Set FontSize 22 -Type "tlm e 'mvn build -T 8 -DskipTests \!container'" -Sleep 250ms +Hide +Type "source ~/.zshrc && clear" Enter -Sleep 20s + +Show +Type 'tlm explain "sed -r s/(foo)(bar)/\2\1/; s/\b([a-z]+)\b/\U\1/g; /baz/d\ in > out"' +Sleep 500ms +Enter +Sleep 5s diff --git a/assets/tapes/suggest.tape b/assets/tapes/suggest.tape index ee0d884..49e7f69 100644 --- a/assets/tapes/suggest.tape +++ b/assets/tapes/suggest.tape @@ -1,11 +1,17 @@ Output suggest.gif +Set Shell zsh Set Theme "Cyberdyne" Set Width 1200 -Set Height 600 -Set FontSize 32 +Set Height 650 +Set FontSize 22 +Hide +Type "source ~/.zshrc && clear" +Enter + +Show Type "tlm suggest 'list all network interfaces but only their ip addresses'" Sleep 250ms Enter diff --git a/build.ps1 b/build.ps1 index 78d7650..ed3bf0d 100644 --- a/build.ps1 +++ b/build.ps1 @@ -17,13 +17,14 @@ New-Item -ItemType 
Directory -Path "dist" # Build Function (Helper) Function Build-Target($os, $version, $arch) { $outputName = "${appName}_${version}_${os}_${arch}" + $sha1 = (git rev-parse --short HEAD).Trim() if ($os -eq "windows") { $outputName += ".exe" } Write-Output "Building for $os/$arch (version: $version) -> $outputName" # Invokes the Go toolchain (assumes it's in the PATH) - go build -o "dist/$version/$outputName" "main.go" + go build -o "dist/$version/$outputName" -ldflags "-X main.sha1ver=$sha1" "main.go" } # Build for each target OS diff --git a/build.sh b/build.sh index ef4dc88..cb96466 100644 --- a/build.sh +++ b/build.sh @@ -6,6 +6,7 @@ build() { app_name=$2 version=$3 arch=$4 + sha1=$(git rev-parse --short HEAD | tr -d '\n') # Determine output filename with optional .exe extension output_name="${app_name}_${version}_${os}_${arch}" @@ -14,7 +15,7 @@ build() { fi echo "Building for $os/$arch (version: $version) -> $output_name" - CGO_ENABLED=0 GOOS=$os GOARCH=$arch go build -o "dist/${version}/${output_name}" main.go + CGO_ENABLED=0 GOOS=$os GOARCH=$arch go build -o "dist/${version}/${output_name}" -ldflags "-X main.sha1ver=$sha1" main.go } # Operating systems to target diff --git a/config/api.go b/config/api.go deleted file mode 100644 index e45ba90..0000000 --- a/config/api.go +++ /dev/null @@ -1,58 +0,0 @@ -package config - -import ( - "fmt" - "github.com/spf13/viper" - "github.com/yusufcanb/tlm/shell" - "log" - "os" - "path" -) - -var defaultLLMHost = "http://localhost:11434" - -func isExists(path string) bool { - if _, err := os.Stat(path); os.IsNotExist(err) { - return false - } - return true -} - -func (c *Config) LoadOrCreateConfig() { - viper.SetConfigName(".tlm") - viper.SetConfigType("yaml") - viper.AddConfigPath("$HOME") - - homeDir, err := os.UserHomeDir() - if err != nil { - log.Fatal(err) - } - - configPath := path.Join(homeDir, ".tlm.yaml") - if !isExists(configPath) { - viper.Set("shell", shell.GetShell()) - - viper.Set("llm.host", defaultLLMHost) - 
viper.Set("llm.suggestion", "balanced") - viper.Set("llm.explain", "balanced") - - err := os.Setenv("OLLAMA_HOST", defaultLLMHost) - if err != nil { - fmt.Printf(shell.Err()+" error writing config file, %s", err) - } - - if err := viper.WriteConfigAs(path.Join(homeDir, ".tlm.yaml")); err != nil { - fmt.Printf(shell.Err()+" error writing config file, %s", err) - } - } - - err = viper.ReadInConfig() - if err != nil { - log.Fatalf("Error reading config file, %s", err) - } - - err = os.Setenv("OLLAMA_HOST", viper.GetString("llm.host")) - if err != nil { - fmt.Printf(shell.Err()+" %s", err) - } -} diff --git a/config/cli.go b/config/cli.go deleted file mode 100644 index d62c0a2..0000000 --- a/config/cli.go +++ /dev/null @@ -1,54 +0,0 @@ -package config - -import ( - "fmt" - "github.com/spf13/viper" - "github.com/urfave/cli/v2" - "github.com/yusufcanb/tlm/shell" -) - -func (c *Config) Action(_ *cli.Context) error { - var err error - - form := ConfigForm{ - host: viper.GetString("llm.host"), - explain: viper.GetString("llm.explain"), - suggest: viper.GetString("llm.suggest"), - } - - err = form.Run() - if err != nil { - return err - } - - viper.Set("shell", form.shell) - viper.Set("llm.host", form.host) - viper.Set("llm.explain", form.explain) - viper.Set("llm.suggest", form.suggest) - - err = viper.WriteConfig() - if err != nil { - return err - } - - fmt.Println(shell.Ok() + " configuration saved") - return nil -} - -func (c *Config) Command() *cli.Command { - return &cli.Command{ - Name: "config", - Aliases: []string{"c"}, - Usage: "configure preferences.", - Action: c.Action, - Subcommands: []*cli.Command{ - { - Name: "set", - Usage: "set configuration", - Action: func(context *cli.Context) error { - return nil - }, - }, - }, - } -} diff --git a/config/config.go b/config/config.go deleted file mode 100644 index 31d2d0c..0000000 --- a/config/config.go +++ /dev/null @@ -1,8 +0,0 @@ -package config - -type Config struct { -} - -func New() *Config { - return &Config{} -} 
diff --git a/config/form.go b/config/form.go deleted file mode 100644 index b73159e..0000000 --- a/config/form.go +++ /dev/null @@ -1,53 +0,0 @@ -package config - -import "github.com/charmbracelet/huh" - -type ConfigForm struct { - form *huh.Form - - host string - shell string - explain string - suggest string -} - -func (c *ConfigForm) Run() error { - c.form = huh.NewForm( - huh.NewGroup( - - huh.NewInput(). - Title("Ollama"). - Value(&c.host), - - huh.NewSelect[string](). - Title("Default Shell (Windows)"). - Options( - huh.NewOption("Windows Powershell", "powershell"), - huh.NewOption("Windows Command Prompt", "cmd"), - ). - Value(&c.shell), - - huh.NewSelect[string](). - Title("Suggestion Preference"). - Description("Sets preference for command suggestions"). - Options( - huh.NewOption("Stable", "stable"), - huh.NewOption("Balanced", "balanced"), - huh.NewOption("Creative", "creative"), - ). - Value(&c.explain), - - huh.NewSelect[string](). - Title("Explain Preference"). - Description("Sets preference for command explanations"). - Options( - huh.NewOption("Stable", "stable"), - huh.NewOption("Balanced", "balanced"), - huh.NewOption("Creative", "creative"), - ). 
- Value(&c.suggest), - ), - ) - - return c.form.WithTheme(huh.ThemeBase16()).Run() -} diff --git a/e2e/requirements.txt b/e2e/requirements.txt new file mode 100644 index 0000000..a5fb0c0 Binary files /dev/null and b/e2e/requirements.txt differ diff --git a/e2e/tests/ask.robot b/e2e/tests/ask.robot new file mode 100644 index 0000000..bcb1cc8 --- /dev/null +++ b/e2e/tests/ask.robot @@ -0,0 +1,107 @@ +*** Settings *** +Library Collections +Library OperatingSystem +Resource ../tlm.resource + +Suite Setup Run Command tlm config set llm.model ${model} +Suite Teardown Run Command tlm config set llm.model ${model} + +Test Tags command=ask + +Name tlm ask + + +*** Variables *** +${model} qwen2.5-coder:3b + + +*** Test Cases *** +tlm ask + ${rc} ${output}= Run Command tlm ask + Verify Help Command Output ${rc} ${output} + +tlm ask + ${rc} ${output}= Run Command tlm ask "Why the sky is blue? Name the concept." + + Should Be Equal As Integers ${rc} 0 + Should Contain ${output} Rayleigh scatte + + ${rc} ${output}= Run Command tlm a "Why the sky is blue? Name the concept." + + Should Be Equal As Integers ${rc} 0 + Should Contain ${output} Rayleigh scattering + +tlm ask --context= --include= + ${rc} ${output}= Run Command tlm ask --context=. --include=**/*.robot "explain provided context" + ${expected_file_list}= Create List tests/ask.robot tests/suggest.robot tests/help.robot + + Verify Ask Command Output With Context + ... ${rc} + ... ${output} + ... ${expected_file_list} + +tlm ask --context= --exclude= + ${rc} ${output}= Run Command tlm ask --context=. --exclude=**/*.robot "explain provided context" + ${expected_file_list}= Create List tlm.resource tlm_lib.py requirements.txt + Verify Ask Command Output With Context + ... ${rc} + ... ${output} + ... 
${expected_file_list} + +tlm ask (no ollama) + [Tags] no-ollama + + # Test that the command fails when OLLAMA_HOST is not set + Remove Environment Variable OLLAMA_HOST + ${rc} ${output}= Run Command tlm ask "What is the meaning of life?" + Should Not Be Equal As Integers ${rc} 0 + Should Contain ${output} (err) + Should Contain + ... ${output} + ... OLLAMA_HOST environment variable is not set + + # Test the command fails when OLLAMA_HOST is set but not reachable + Set Environment Variable OLLAMA_HOST http://localhost:11434 + ${rc} ${output}= Run Command tlm ask "What is the meaning of life?" + + Should Not Be Equal As Integers ${rc} 0 + Should Contain ${output} (err) + Should Contain + ... ${output} + ... Ollama connection failed. Please check your Ollama if it's running or configured correctly. + +tlm ask (non-exist model) + ${model}= Set Variable non-exist-model:1b + Run Command tlm config set llm.model ${model} + + ${rc} ${output}= Run Command tlm ask 'What is the meaning of life?' + Should Not Be Equal As Integers ${rc} 0 + Should Contain ${output} model "${model}" not found, try pulling it first + + +*** Keywords *** +Verify Ask Command Output With Context + [Arguments] ${rc} ${output} ${expected_file_list} + + Should Be Equal As Numbers ${rc} 0 + + FOR ${file} IN @{expected_file_list} + Should Contain ${output} ${file} + END + + Should Contain ${output} Context Summary: + Should Contain ${output} Total Files: + Should Contain ${output} Total Chars: + Should Contain ${output} Total Tokens: + +Verify Help Command Output + [Arguments] ${rc} ${output} + + Should Not Be Equal As Numbers ${rc} 0 + + Should Contain ${output} NAME: + Should Contain ${output} tlm ask - Asks a question + + Should Contain ${output} USAGE: + Should Contain ${output} tlm ask "" + Should Contain ${output} tlm ask --context . 
--include *.md "" diff --git a/e2e/tests/config.robot b/e2e/tests/config.robot new file mode 100644 index 0000000..abd57d6 --- /dev/null +++ b/e2e/tests/config.robot @@ -0,0 +1,42 @@ +*** Settings *** +Library Collections +Library OperatingSystem +Resource ../tlm.resource + +Test Setup Remove Config File + +Test Tags command=explain + +Name tlm explain + + +*** Variables *** +${model} qwen2.5-coder:1.5b +${style} balanced + + +*** Test Cases *** +tlm config + ${rc} ${output}= Run Hanging Command And Verify Output tlm config "ls -all" + Should Contain ${output} + ... Sets a default model from the list of all available models. + ... Use `ollama pull ` to download new models. + + Should Contain ${output} + ... Sets a default model from the list of all available models. + ... Use `ollama pull ` to download new models. + +tlm config ls + ${rc} ${output}= Run Command tlm config ls + +tlm config set + ${rc} ${output}= Run Command tlm config set llm.model ${model} + +tlm config get + ${rc} ${output}= Run Command tlm config get llm.model + + +*** Keywords *** +Remove Config File + ${HOME_DIR}= Get Environment Variable HOME + Remove File path=${HOME_DIR}/.tlm.yml diff --git a/e2e/tests/explain.robot b/e2e/tests/explain.robot new file mode 100644 index 0000000..bb42fc7 --- /dev/null +++ b/e2e/tests/explain.robot @@ -0,0 +1,34 @@ +*** Settings *** +Library Collections +Library OperatingSystem +Resource ../tlm.resource + +Suite Setup Run Command tlm config set llm.model ${model} +Suite Teardown Run Command tlm config set llm.model ${model} + +Test Tags command=explain + +Name tlm explain + + +*** Variables *** +${model} qwen2.5-coder:1.5b +${model2} llama3.2:1b +${style} balanced + + +*** Test Cases *** +tlm explain + ${rc} ${output}= Run Command tlm explain "ls -all" + Should Contain ${output} list ignore_case=True + Should Contain ${output} file ignore_case=True + +tlm explain --model= --style=