# Festi AI

TODO

## Usage
Connect to OpenAI:

```php
$prompt = (new Prompt())
    ->addCondition("What is the capital of France?")
    ->addCondition("Please provide a brief explanation.");

$modelId = "gpt-3.5-turbo";
$apiKey  = "sk-proj-XXX";
$orgId   = "org-XXX";

$config = new AiConfig($modelId, $apiKey, $orgId);

$connector = new OpenAiService($config, new OpenAiClient($config));
$connector->connect();

$response = $connector->ask($prompt);
var_dump($response->getAnswer()->getContent());
```
Connect to a local model served by Ollama:

```php
$config = new AiConfig(
    modelId: 'phi3:latest',
    apiKey: null, // No API key is needed for a local setup
    endpoint: 'http://localhost:11434/v1/'
);

$connector = new OllamaService($config, new OllamaClient($config));
$connector->connect();

$response = $connector->ask($prompt);
var_dump($response->getAnswer()->getContent());
```
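Both connectors take the same `AiConfig` and `Prompt`, so the backend can be chosen at runtime. Below is a minimal sketch assuming the classes shown above; the environment variable names (`FESTI_AI_LOCAL`, `OPENAI_API_KEY`, `OPENAI_ORG_ID`) are illustrative, not part of the library:

```php
// Illustrative only: the environment variable names are made up for this sketch.
$useLocal = getenv('FESTI_AI_LOCAL') === '1';

$config = $useLocal
    ? new AiConfig(modelId: 'phi3:latest', apiKey: null, endpoint: 'http://localhost:11434/v1/')
    : new AiConfig('gpt-3.5-turbo', getenv('OPENAI_API_KEY'), getenv('OPENAI_ORG_ID'));

$connector = $useLocal
    ? new OllamaService($config, new OllamaClient($config))
    : new OpenAiService($config, new OpenAiClient($config));

$connector->connect();
$response = $connector->ask($prompt);
var_dump($response->getAnswer()->getContent());
```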
## Load and save a prompt to a JSON file
```php
$prompt = new Prompt();
$prompt->addCondition("Fix the following code style issues:")
    ->addCondition("- Do not change business logic.")
    ->setTemplate("Here are the conditions:\n{conditions}\n{snippets}")
    ->setRole("Developer")
    ->setResultFormat("Code output only");

// Save to file
$prompt->saveToFile('prompt.json');

// Load from file
$loadedPrompt = Prompt::loadFromFile('prompt.json');
echo $loadedPrompt;
```
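A prompt loaded from disk can be sent to any connector configured as in the Usage section above; for example:

```php
// Reuse a connector ($connector) that was configured and connected earlier.
$loadedPrompt = Prompt::loadFromFile('prompt.json');

$response = $connector->ask($loadedPrompt);
var_dump($response->getAnswer()->getContent());
```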
## Ollama

Useful commands for managing a local Ollama instance:
```bash
# List the models installed locally
ollama list

# Download (if needed) and run the phi3 model
ollama run phi3

# Check that the OpenAI-compatible API is reachable
curl http://localhost:11434/v1/models

# Send a completion request directly to the local endpoint
curl -X POST http://localhost:11434/v1/completions \
  -H "Content-Type: application/json" \
  -d '{
    "model": "phi3:latest",
    "prompt": "What is the capital of France?"
  }'
```
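The same completion request can also be issued from plain PHP with the `curl` extension, which is a quick way to verify that the endpoint the library talks to is reachable. A minimal sketch, using the endpoint and model name from the examples above:

```php
// Plain ext-curl request against the local Ollama endpoint, without the library.
$payload = json_encode([
    'model'  => 'phi3:latest',
    'prompt' => 'What is the capital of France?',
]);

$ch = curl_init('http://localhost:11434/v1/completions');
curl_setopt_array($ch, [
    CURLOPT_POST           => true,
    CURLOPT_POSTFIELDS     => $payload,
    CURLOPT_HTTPHEADER     => ['Content-Type: application/json'],
    CURLOPT_RETURNTRANSFER => true,
]);

$body = curl_exec($ch);
curl_close($ch);

var_dump(json_decode($body, true));
```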