/* Terminal OpenAI chat client: REPL with markdown rendering, plugins, and a CGI web server helper. */
#include "openai.h"
#include "markdown.h"
#include "plugin.h"
#include "line.h"
#include <locale.h>
#include <stdio.h>
2025-01-04 07:35:39 +00:00
char * get_prompt_from_args(int c, char **argv) {
2025-01-04 05:00:03 +00:00
char * prompt = malloc(1024*1024 + 1);
prompt[0] = 0;
2025-01-04 07:35:39 +00:00
for(int i = 1; i < c; i++) {
2025-01-04 05:00:03 +00:00
if(argv[i][0] == '-')
break;
strncat(prompt, argv[i], 1024*1024);
2025-01-04 07:35:39 +00:00
if(i < c - 1) {
2025-01-04 05:00:03 +00:00
strncat(prompt, " ", 1024*1024);
2025-01-04 07:35:39 +00:00
} else {
2025-01-04 05:00:03 +00:00
strncat(prompt, ".", 1024*1024);
}
}
2025-01-04 07:35:39 +00:00
if(!*prompt) {
2025-01-04 05:00:03 +00:00
free(prompt);
return NULL;
}
return prompt;
}
/*
 * One-shot mode: if argv carries a prompt, send it, render the model's
 * reply as ANSI markdown, and return true. Returns false when argv holds
 * no prompt so the caller can fall back to the interactive REPL.
 */
bool try_prompt(int argc, char *argv[]) {
    char *prompt = get_prompt_from_args(argc, argv);
    if (prompt == NULL)
        return false;
    char *response = openai_chat("user", prompt);
    parse_markdown_to_ansi(response);
    printf("\n");
    free(response);
    free(prompt);
    return true;
}
void help();
void render(char *);
/*
 * Start a blocking CGI-capable HTTP server in the current directory
 * (python3's http.server). Returns when the server process exits.
 */
void serve() {
    render("Starting server. *Put executables in a dir named cgi-bin and they will behave as webpages.*");
    int ignored = system("python3 -m http.server --cgi");
    (void)ignored;  /* best-effort: exit status intentionally unused */
}
/* Render a markdown string to the terminal as ANSI, followed by a blank line. */
void render(char *content) {
    parse_markdown_to_ansi(content);
    putchar('\n');
    putchar('\n');
}
/*
 * Interactive read-eval-print loop. Built-in commands (matched by prefix,
 * so e.g. "exit anything" also quits):
 *   !<code>   run through the plugin interpreter
 *   exit      quit
 *   help      show help text
 *   serve     start the CGI web server
 *   spar <p>  let the model converse with itself, seeded with <p>
 *   ls/list   shell out to ls
 * Anything else is sent to the model and the reply rendered.
 */
void repl() {
    line_init();
    setbuf(stdout, NULL);  /* unbuffered so the prompt appears immediately */
    char *line;
    char *previous_line = NULL;
    while ((line = line_read("> "))) {
        /* Empty input repeats the previous command. */
        if (!*line)
            line = previous_line;
        if (!line || !*line)
            continue;
        /* NOTE(review): lines returned by line_read are never freed here;
         * ownership semantics of line_read should be confirmed. */
        previous_line = line;
        if (line[0] == '!') {
            plugin_run(line + 1);
            continue;
        }
        if (!strncmp(line, "exit", 4)) {
            exit(0);
        }
        if (!strncmp(line, "help", 4)) {
            help();
            continue;
        }
        if (!strncmp(line, "serve", 5)) {
            serve();
            /* BUG FIX: without this continue, "serve" fell through and was
             * also sent to the model as a chat message after the server exited. */
            continue;
        }
        if (!strncmp(line, "spar ", 5)) {
            /* Model talks to itself forever; only exiting the process stops it.
             * First response points into `line`, later ones are heap-allocated
             * by openai_chat and intentionally leaked (loop never ends). */
            char *response = line + 5;
            while (true) {
                render(response);
                sleep(2);
                response = openai_chat("user", response);
            }
        }
        if (!strncmp(line, "ls", 2) || !strncmp(line, "list", 4)) {
            int offset = !strncmp(line, "list", 4) ? 4 : 2;
            char *command = malloc(strlen(line) + 42);  /* "ls " + args + slack */
            if (command == NULL)
                continue;  /* OOM: original would have crashed here */
            strcpy(command, "ls ");
            strcat(command, line + offset);
            int res = system(command);
            (void)res;
            free(command);
            continue;
        }
        line_add_history(line);
        char *response = openai_chat("user", line);
        render(response);
        free(response);
    }
}
2025-01-04 07:35:39 +00:00
void help() {
2025-01-04 05:00:03 +00:00
char help_text[1024*1024] = {0};
char * template = "# Help\n"
"Written by retoor@molodetz.nl.\n\n"
"## Features\n"
" - navigate trough history using `arrows`.\n"
" - navigate trough history with **recursive search** using `ctrl+r`.\n"
" - **inception with python** for *incomming* and *outgoing* content.\n"
" - markdown and **syntax highlighting**.\n"
" - **execute python commands** with prefixing `!`\n"
" - list files of current workdirectory using `ls`.\n"
" - type `serve` to start a webserver with directory listing. Easy for network transfers.\n\n"
"## Configuration\n"
" - model temperature is %f.\n"
" - model name is %s.\n"
" - max tokens is %d.\n\n"
"## In development\n"
" - **google search** and actions with those results.\n"
" - **reminders**.\n"
" - predefined **templates** for **reviewing** / **refactoring** so you can personalize.\n";
2025-01-04 07:35:39 +00:00
sprintf(help_text,template,prompt_temperature,prompt_model,prompt_max_tokens);
render(help_text);
2025-01-04 05:00:03 +00:00
}
/*
 * Read the whole file at `path` and feed its contents to the model as a
 * system message. Silently returns on any failure (missing file, empty
 * file, I/O or allocation error) — context files are optional.
 */
void openai_include(char * path) {
    FILE * file = fopen(path, "r");
    if (file == NULL) {
        return;
    }
    if (fseek(file, 0, SEEK_END) != 0) {
        fclose(file);
        return;
    }
    long size = ftell(file);
    if (size < 0) {
        fclose(file);
        return;
    }
    fseek(file, 0, SEEK_SET);
    /* +1 for the NUL terminator: the original allocated only `size` bytes
     * and then wrote buffer[read] = 0 one past the end — heap overflow. */
    char * buffer = malloc((size_t)size + 1);
    if (buffer == NULL) {
        fclose(file);
        return;
    }
    size_t nread = fread(buffer, 1, (size_t)size, file);
    fclose(file);  /* original leaked both file and buffer on the read==0 path */
    if (nread == 0) {
        free(buffer);
        return;
    }
    buffer[nread] = 0;
    openai_system(buffer);
    free(buffer);
}
/*
 * One-time startup: initialize line editing, seed the model with a system
 * prompt describing the user's locale and persona, and load optional
 * extra context from context.txt.
 */
void init() {
    line_init();
    /* Queries the current locale without changing it.
     * NOTE(review): setlocale(LC_ALL, "") may have been intended here to
     * adopt the environment's locale — confirm. */
    const char *locale = setlocale(LC_ALL, NULL);
    if (locale == NULL)
        locale = "unknown";  /* passing NULL to %s is undefined behavior */
    char payload[4096] = {0};
    /* snprintf: bounded even though the template is small. */
    snprintf(payload, sizeof payload, "User locale is %s. User lang is %s.\n"
        "You are Retoor. Use a lot of markdown in response.\n"
        "Be confident and short in answers.\n"
        "You divide things by zero if you have to."
        , locale, locale);
    printf("%s","Loading...");
    openai_system(payload);
    openai_include("context.txt");
    printf("%s", "\rLoaded! Type help for features.\n");  /* typo fix: was "feautures" */
}
/*
 * Entry point: initialize, then either answer a one-shot prompt from
 * argv or drop into the interactive REPL.
 */
int main(int argc, char *argv[]) {
    init();
    if (!try_prompt(argc, argv)) {
        repl();
    }
    return 0;
}