1. Download the cognesy/instructor-php library from this page (or install it with Composer: composer require cognesy/instructor-php). Choose the "require" download type so dependencies are resolved for you.
2. Extract the ZIP file and open index.php.
3. Add this code to index.php:
<?php
require_once('vendor/autoload.php');
cognesy/instructor-php example snippets
use Cognesy\Instructor\Instructor;

// Step 0: Create .env file in your project root:
// OPENAI_API_KEY=your_api_key

// Step 1: Define target data structure(s)
class Person {
    public string $name;
    public int $age;
}

// Step 2: Provide content to process
$text = "His name is Jason and he is 28 years old.";

// Step 3: Use Instructor to run LLM inference
$person = (new Instructor)->respond(
    messages: $text,
    responseModel: Person::class,
);

// Step 4: Work with structured response data
assert($person instanceof Person); // true
assert($person->name === 'Jason'); // true
assert($person->age === 28); // true

echo $person->name; // Jason
echo $person->age; // 28

var_dump($person);
// Person {
//     name: "Jason",
//     age: 28
// }
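A hedged sketch of the same pattern with a nested response model. The Address and Resident classes and the "lives in Warsaw" text below are illustrative additions, not part of the original snippet; Instructor documents support for nested data structures, but verify the details against the current instructor-php docs.

class Address {
    public string $city;
    public string $country;
}

class Resident {
    public string $name;
    public int $age;
    public Address $address; // nested object extracted from the same text
}

$text = "Jason is 28 and lives in Warsaw, Poland.";

// Same respond() call as above; only the response model changes.
$resident = (new Instructor)->respond(
    messages: $text,
    responseModel: Resident::class,
);

echo $resident->address->city; // e.g. Warsaw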
// Separate snippet: validation with Symfony constraint attributes (Person is redefined here)
use Symfony\Component\Validator\Constraints as Assert;

class Person {
    public string $name;

    #[Assert\PositiveOrZero]
    public int $age;
}

$text = "His name is Jason, he is -28 years old.";

$person = (new Instructor)->respond(
    messages: [['role' => 'user', 'content' => $text]],
    responseModel: Person::class,
);

// if the resulting object does not validate, Instructor throws an exception
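If you want to recover from that failure instead of letting the script halt, a minimal sketch is to wrap the call in try/catch. Catching \Exception here is an assumption; the concrete exception class depends on the instructor-php version, so check the library's docs for the precise type to catch.

try {
    $person = (new Instructor)->respond(
        messages: [['role' => 'user', 'content' => $text]],
        responseModel: Person::class,
    );
    echo $person->name;
} catch (\Exception $e) {
    // The extracted data did not pass validation (or the request failed); handle it here.
    error_log('Extraction failed: ' . $e->getMessage());
}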
// Separate snippet: maxRetries lets the LLM self-correct when validation fails
use Symfony\Component\Validator\Constraints as Assert;

class Person {
    #[Assert\Length(min: 3)]
    public string $name;

    #[Assert\PositiveOrZero]
    public int $age;
}

$text = "His name is JX, aka Jason, he is -28 years old.";

$person = (new Instructor)->respond(
    messages: [['role' => 'user', 'content' => $text]],
    responseModel: Person::class,
    maxRetries: 3,
);

// if all of the LLM's attempts to self-correct the results fail, Instructor throws an exception
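The same respond() API can also be pointed at a wrapper class to extract a list of objects. This is a sketch under the assumption that instructor-php resolves PHPDoc-typed array properties such as Person[]; the PersonList class and sample text are hypothetical, so confirm the recommended way to extract collections (for example, any dedicated sequence helper) in the library's documentation.

class PersonList {
    /** @var Person[] every person mentioned in the text */
    public array $people;
}

$text = "Jason is 28, his sister Anna is 31, and their friend Omar is 25.";

$list = (new Instructor)->respond(
    messages: [['role' => 'user', 'content' => $text]],
    responseModel: PersonList::class,
);

foreach ($list->people as $person) {
    echo $person->name . ' is ' . $person->age . PHP_EOL;
}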