Fix form AI creation bug + use GPT-4 Turbo with JSON mode

Julien Nahum 2024-01-29 10:25:00 +01:00
parent 28e55574e6
commit 381824183c
5 changed files with 35 additions and 14 deletions


@@ -227,11 +227,12 @@ class GenerateTemplate extends Command
->setAiModel('gpt-3.5-turbo-16k')
->useStreaming()
->setSystemMessage('You are an assistant helping to generate forms.');
$completer->completeChat([
$completer->expectsJson()->completeChat([
["role" => "user", "content" => Str::of(self::FORM_STRUCTURE_PROMPT)->replace('[REPLACE]', $this->argument('prompt'))->toString()]
], 6000);
$formData = $completer->getArray();
$completer->doesNotExpectJson();
$formDescriptionPrompt = Str::of(self::FORM_DESCRIPTION_PROMPT)->replace('[REPLACE]', $this->argument('prompt'))->toString();
$formShortDescription = $completer->completeChat([
["role" => "user", "content" => Str::of(self::FORM_SHORT_DESCRIPTION_PROMPT)->replace('[REPLACE]', $this->argument('prompt'))->toString()]
@@ -240,6 +241,7 @@ class GenerateTemplate extends Command
$formShortDescription = Str::of($formShortDescription)->replaceMatches('/^"(.*)"$/', '$1')->toString();
// Get industry & types
$completer->expectsJson();
$industry = $this->getIndustries($completer, $this->argument('prompt'));
$types = $this->getTypes($completer, $this->argument('prompt'));
@@ -247,10 +249,12 @@ class GenerateTemplate extends Command
$relatedTemplates = $this->getRelatedTemplates($industry, $types);
// Now get description and QAs
$completer->doesNotExpectJson();
$formDescription = $completer->completeChat([
["role" => "user", "content" => $formDescriptionPrompt]
])->getHtml();
$completer->expectsJson();
$formCoverKeywords = $completer->completeChat([
["role" => "user", "content" => $formDescriptionPrompt],
["role" => "assistant", "content" => $formDescription],
@@ -263,6 +267,7 @@ class GenerateTemplate extends Command
["role" => "assistant", "content" => $formDescription],
["role" => "user", "content" => self::FORM_QAS_PROMPT]
])->getArray();
$completer->doesNotExpectJson();
$formTitle = $completer->completeChat([
["role" => "user", "content" => $formDescriptionPrompt],
["role" => "assistant", "content" => $formDescription],
@@ -306,7 +311,6 @@ class GenerateTemplate extends Command
private function getIndustries(GptCompleter $completer, string $formPrompt): array
{
$industriesString = Template::getAllIndustries()->pluck('slug')->join(', ');
return $completer->completeChat([
["role" => "user", "content" => Str::of(self::FORM_INDUSTRY_PROMPT)
->replace('[REPLACE]', $formPrompt)
@@ -318,7 +322,6 @@ class GenerateTemplate extends Command
private function getTypes(GptCompleter $completer, string $formPrompt): array
{
$typesString = Template::getAllTypes()->pluck('slug')->join(', ');
return $completer->completeChat([
["role" => "user", "content" => Str::of(self::FORM_TYPES_PROMPT)
->replace('[REPLACE]', $formPrompt)
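
The pattern across this file: the command now toggles JSON mode around each request, calling expectsJson() before completions that are decoded with getArray() (form structure, industries, types, cover keywords, Q&As) and doesNotExpectJson() before the free-text ones (short description, description HTML, title). A minimal sketch of that toggle, reusing only the GptCompleter calls visible in this diff; the prompts below are placeholders, not the command's real prompt constants:

$completer = (new GptCompleter(config('services.openai.api_key')))
    ->setSystemMessage('You are an assistant helping to generate forms.');

// Structured output: enable JSON mode, then decode the reply as an array.
$formData = $completer->expectsJson()->completeChat([
    ["role" => "user", "content" => "Return the requested form structure as a JSON object."],
])->getArray();

// Free-text output: switch JSON mode off again before asking for prose.
$completer->doesNotExpectJson();
$formDescription = $completer->completeChat([
    ["role" => "user", "content" => "Write a short HTML description of the form."],
])->getHtml();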


@@ -39,7 +39,8 @@ class GenerateAiForm implements ShouldQueue
$completer = (new GptCompleter(config('services.openai.api_key')))
->useStreaming()
->setSystemMessage('You are a robot helping to generate forms.');
->setSystemMessage('You are a robot helping to generate forms.')
->expectsJson();
try {
$completer->completeChat([


@@ -14,13 +14,14 @@ use OpenAI\Exceptions\ErrorException;
*/
class GptCompleter
{
const AI_MODEL = 'gpt-4';
const AI_MODEL = 'gpt-4-turbo-preview';
protected Client $openAi;
protected mixed $result;
protected array $completionInput;
protected ?string $systemMessage;
protected bool $expectsJson = false;
protected int $tokenUsed = 0;
protected bool $useStreaming = false;
@@ -47,8 +48,23 @@ class GptCompleter
return $this;
}
public function completeChat(array $messages, int $maxTokens = 4096, float $temperature = 0.81): self
public function expectsJson(): self
{
$this->expectsJson = true;
return $this;
}
public function doesNotExpectJson(): self
{
$this->expectsJson = false;
return $this;
}
public function completeChat(array $messages, int $maxTokens = 4096, float $temperature = 0.81, ?bool $exceptJson = null): self
{
if (!is_null($exceptJson)) {
$this->expectsJson = $exceptJson;
}
$this->computeChatCompletion($messages, $maxTokens, $temperature)
->queryCompletion();
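
expectsJson is now a stateful flag on the completer: expectsJson() and doesNotExpectJson() are fluent setters, and completeChat() additionally accepts an optional last argument (named $exceptJson in this commit) that overwrites the flag before the request is sent. A short usage sketch under those assumptions; the message arrays are placeholders:

// Stateful flag: completeChat() calls from here on request JSON output.
$completer->expectsJson();
$industries = $completer->completeChat($industryMessages)->getArray();

// Inline override: the optional last argument sets the same flag before the request
// (and leaves it set), here switching back to plain-text output.
$formDescription = $completer->completeChat($descriptionMessages, 4096, 0.81, false)->getHtml();
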
@@ -129,6 +145,12 @@ class GptCompleter
'temperature' => $temperature
];
if ($this->expectsJson) {
$completionInput['response_format'] = [
'type' => 'json_object'
];
}
$this->completionInput = $completionInput;
return $this;
}
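
With the flag set, computeChatCompletion() adds response_format => ['type' => 'json_object'] to the payload, i.e. OpenAI's JSON mode; gpt-4-turbo-preview supports it, and the API rejects JSON-mode requests whose messages never mention JSON. A sketch of the equivalent raw request with the openai-php client that GptCompleter wraps (the model, prompt, and decoding below are illustrative only, and assume a client version that forwards response_format):

// Illustrative only: placeholders throughout; the json_decode step mirrors what
// getArray() is expected to do with a JSON-mode reply.
$client = \OpenAI::client(config('services.openai.api_key'));
$response = $client->chat()->create([
    'model' => 'gpt-4-turbo-preview',
    'messages' => [
        // JSON mode requires the word "JSON" to appear somewhere in the messages.
        ['role' => 'system', 'content' => 'You are an assistant helping to generate forms. Reply with a JSON object.'],
        ['role' => 'user', 'content' => 'Generate a simple contact form.'],
    ],
    'max_tokens' => 4096,
    'temperature' => 0.81,
    'response_format' => ['type' => 'json_object'],
]);
$formData = json_decode($response->choices[0]->message->content, true);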


@@ -280,14 +280,9 @@ export default {
form_slug: response.form.slug
})
this.displayFormModificationAlert(response)
useRouter().push({
name: 'forms-show',
params: {
slug: this.createdForm.slug,
new_form: response.users_first_form
}
})
useRouter().push({ name: 'forms-slug-show-share', params: { slug: this.createdFormSlug, new_form: response.users_first_form } })
}).catch((error) => {
console.error(error)
if (error.response && error.response.status === 422) {
this.validationErrorResponse = error.response
this.showValidationErrors()


@@ -64,7 +64,7 @@ export const useCrisp = () => {
function pushEvent(event, data = {}) {
if (!crisp) return
crisp.pushEvent(event, data)
crisp.session.pushEvent(event, data)
}
function setSegments(segments, overwrite = false) {