#ifndef R_H
#define R_H
#include "auth.h"
#include "malloc.h"
#include "utils.h"
#include <stdbool.h>
#include <string.h>
// Runtime verbosity flag; defaults to quiet.
// NOTE(review): these are non-static definitions in a header — including this
// header from more than one translation unit will produce duplicate-symbol
// link errors. Confirm the header is included exactly once, or move the
// definitions into a .c file and declare them `extern` here.
bool is_verbose = false;

// Default build (neither RD nor OLLAMA defined): plain OpenAI endpoints.
#ifndef RD
#ifndef OLLAMA
char *models_api_url = "https://api.openai.com/v1/models";
// char *completions_api_url = "https://ollama.molodetz.nl/v1/chat/completions";
char *completions_api_url = "https://api.openai.com/v1/chat/completions";
char *advanced_model = "gpt-4o-mini";
char *fast_model = "gpt-3.5-turbo";
#endif 
#endif

// RD build: completions go to DeepInfra, but the models listing still points
// at OpenAI — presumably intentional; verify against the RD deployment.
#ifdef RD

char *models_api_url = "https://api.openai.com/v1/models";
char *completions_api_url = "https://api.deepinfra.com/v1/openai/chat/completions";
//char *advanced_model = "meta-llama/Meta-Llama-3.1-8B-Instruct";
char *advanced_model = "google/gemini-1.5-flash";
char *fast_model = "Qwen/Qwen2.5-Coder-32B-Instruct";

#endif
// OLLAMA build: self-hosted ollama proxy with small local qwen models.
#ifdef OLLAMA
char *models_api_url = "https://ollama.molodetz.nl/v1/models";
char *completions_api_url = "https://ollama.molodetz.nl/v1/chat/completions";
char *advanced_model = "qwen2.5:3b";
char *fast_model = "qwen2.5:0.5b";
#endif

// Currently selected model: a heap-owned copy managed by set_prompt_model()
// and get_prompt_model(); NULL until first use.
// NOTE(review): a file-scope identifier starting with an underscore is in the
// implementation-reserved namespace (C11 7.1.3) — consider renaming.
char *_model = NULL;

// Path of the local database file.
// NOTE(review): "~" is not expanded by the C library — confirm callers expand
// it (e.g. via $HOME) before passing this path to fopen/sqlite.
#define DB_FILE "~/.r.db"
// Token budget sent with a single prompt request.
static int prompt_max_tokens = 10000;
// Sampling temperature sent with completion requests.
#define PROMPT_TEMPERATURE 0.1

/*
 * Select the model used for prompt requests, replacing any previous
 * selection with a heap-owned copy of `model`.
 *
 * model: model identifier to copy; may be NULL to clear the selection, in
 *        which case get_prompt_model() falls back to a default.
 *
 * Fix: the original called strdup(model) unconditionally — strdup(NULL) is
 * undefined behavior. Also dropped the redundant NULL guard before free():
 * free(NULL) is a no-op per the C standard.
 */
void set_prompt_model(const char *model) {
  free(_model);
  _model = (model != NULL) ? strdup(model) : NULL;
}

/*
 * Return the model name to use for prompts. If no model has been selected
 * via set_prompt_model(), lazily install a default: the advanced model when
 * authenticating with an API key, the fast model otherwise. The returned
 * pointer is owned by this module — callers must not free it.
 */
const char *get_prompt_model() {
  if (_model == NULL) {
    const char *fallback =
        (auth_type == AUTH_TYPE_API_KEY) ? advanced_model : fast_model;
    _model = strdup(fallback);
  }
  return _model;
}

#endif