From f287830583f2d7d8dc9de805341f1775a94995cc Mon Sep 17 00:00:00 2001
From: pancake
Date: Thu, 19 Oct 2023 15:28:48 +0200
Subject: [PATCH] Improve README, move doc/ and examples/ files, and fix main
 repl logic

---
 README.md                | 21 +++++++++++++--
 r2ai.pdf => doc/r2ai.pdf | Bin
 .../conversation.r2.js   |  0
 main.py                  | 24 ++++++++++--------
 4 files changed, 32 insertions(+), 13 deletions(-)
 rename r2ai.pdf => doc/r2ai.pdf (100%)
 rename conversation.r2.js => examples/conversation.r2.js (100%)

diff --git a/README.md b/README.md
index e4b39fb..c873edd 100644
--- a/README.md
+++ b/README.md
@@ -8,17 +8,34 @@
 •
 ```
-Run r2ai in local, no google bard or chatgpt. Just use your CPU/GPU/NPU and interact with r2 using natural language.
+Run r2ai locally, without internet access or leaking any data. Use your CPU/GPU/NPU and interact with r2 using natural language.
+
+The current implementation is based on `llama.cpp` and the default model is `CodeLlama-CherryPop`. --pancake
 
 ## Installation
 
 ```
-pip3 install rich inquirer python-dotenv openai litellm tokentrim
+pip3 install rich inquirer llama-cpp-python tokentrim
 r2pm -i r2ai
 ```
 
+## Execution
+
+There are four different ways to run `r2ai`:
+
+* Standalone and interactive: `r2pm -r r2ai`
+* Batch mode: `r2ai '-r act as a calculator' '3+3=?'`
+* From radare2 (requires `r2pm -ci rlang-python`): `r2 -c 'r2ai -h'`
+* Using r2pipe: `#!pipe python main.py`
+
+## Scripting
+
+You can interact with r2ai from standalone Python, from r2pipe via r2 (keeping a global state), or using the JavaScript interpreter embedded inside `radare2`.
+
+* [examples/conversation.r2.js](examples/conversation.r2.js) - load two models and make them talk to each other
+
 ## Development/Testing
 
 Just run `make` .. or well `python main.py /path/to/file`
diff --git a/r2ai.pdf b/doc/r2ai.pdf
similarity index 100%
rename from r2ai.pdf
rename to doc/r2ai.pdf
diff --git a/conversation.r2.js b/examples/conversation.r2.js
similarity index 100%
rename from conversation.r2.js
rename to examples/conversation.r2.js
diff --git a/main.py b/main.py
index b3e4571..5f73c7c 100755
--- a/main.py
+++ b/main.py
@@ -9,7 +9,6 @@ try:
 except:
     pass
 
-import builtins
 import traceback
 import r2ai
 from r2ai.utils import slurp
@@ -17,6 +16,8 @@ from r2ai.utils import slurp
 r2 = None
 have_rlang = False
 have_r2pipe = False
+within_r2 = False
+
 try:
     import r2lang
     have_rlang = True
@@ -71,7 +72,7 @@ ai.model = "TheBloke/llama2-7b-chat-codeCherryPop-qLoRA-GGUF"
 # ai.model = "models/models/mistral-7b-v0.1.Q4_K_M.gguf"
 #interpreter.model = "models/models/mistral-7b-instruct-v0.1.Q2_K.gguf"
 #interpreter.model = "TheBloke/Mistral-7B-Instruct-v0.1-GGUF"
-#builtins.print("TheBloke/Mistral-7B-Instruct-v0.1-GGUF")
+#print("TheBloke/Mistral-7B-Instruct-v0.1-GGUF")
 
 dir_path = os.path.dirname(os.path.realpath(__file__))
 model_path = dir_path + "/" + ai.model
@@ -103,9 +104,9 @@ def runline(usertext):
     if usertext == "":
         return
     if usertext.startswith("?") or usertext.startswith("-h"):
-        builtins.print(help_message)
+        print(help_message)
     elif usertext.startswith("clear") or usertext.startswith("-k"):
-        builtins.print("\x1b[2J\x1b[0;0H\r")
+        print("\x1b[2J\x1b[0;0H\r")
     elif usertext.startswith("-M"):
         r2ai.models()
     elif usertext.startswith("-m"):
@@ -113,7 +114,7 @@ def runline(usertext):
         if len(words) > 1:
             ai.model = words[1]
         else:
-            builtins.print(ai.model)
+            print(ai.model)
     elif usertext == "reset" or usertext.startswith("-R"):
         ai.reset()
     elif usertext == "-q" or usertext == "exit":
@@ -189,19 +190,19 @@ def runline(usertext):
             que = words[1]
         else:
             que = input("[Query]>> ")
input("[Query]>> ") - tag = "CODE" # CODE, TEXT, .. + tag = "CODE" # TEXT, .. ai.chat("Human: " + que + ":\n[" + tag + "]\n" + res + "\n[/" + tag + "]\n") elif usertext[0] == "!": # Deprecate. we have -c now if r2 is None: - builtins.print("r2 is not available") + print("r2 is not available") elif usertext[1] == "!": res = r2_cmd(usertext[2:]) que = input("[Query]>> ") ai.chat("Q: " + que + ":\n[INPUT]\n"+ res+"\n[/INPUT]\n") # , return_messages=True) else: - builtins.print(r2_cmd(usertext[1:])) + print(r2_cmd(usertext[1:])) elif usertext.startswith("-"): - builtins.print("Unknown flag. See 'r2ai -h' for help") + print("Unknown flag. See 'r2ai -h' for help") else: ai.chat(usertext) # r2ai.load(res) @@ -238,11 +239,12 @@ if have_r2pipe: try: if "R2PIPE_IN" in os.environ.keys(): r2 = r2pipe.open() + within_r2 = True else: file = sys.argv[1] if len(sys.argv) > 1 else "/bin/ls" r2 = r2pipe.open(file) except: - print("error") + traceback.print_exc() if have_rlang: def r2ai_rlang_plugin(a): @@ -269,5 +271,5 @@ elif len(sys.argv) > 1: for arg in sys.argv[1:]: runline(arg) r2ai_repl() -elif not have_r2pipe and "R2PIPE_IN" not in os.environ: +elif not within_r2: r2ai_repl()