<?php
require 'examples/boot.php';

use Cognesy\Addons\Chat\ChatFactory;
use Cognesy\Addons\Chat\ContinuationCriteria\ResponseContentCheck;
use Cognesy\Addons\Chat\ContinuationCriteria\StepsLimit;
use Cognesy\Addons\Chat\Data\ChatState;
use Cognesy\Addons\Chat\Data\Collections\ChatStateProcessors;
use Cognesy\Addons\Chat\Data\Collections\ContinuationCriteria;
use Cognesy\Addons\Chat\Data\Collections\Participants;
use Cognesy\Addons\Chat\Participants\LLMParticipant;
use Cognesy\Addons\Chat\Participants\ScriptedParticipant;
use Cognesy\Addons\Chat\Processors\AccumulateTokenUsage;
use Cognesy\Addons\Chat\Processors\AppendStateMessages;
use Cognesy\Addons\Chat\Processors\MoveMessagesToBuffer;
use Cognesy\Addons\Chat\Processors\SummarizeBuffer;
use Cognesy\Addons\Chat\Utils\SummarizeMessages;
use Cognesy\Events\Dispatchers\EventDispatcher;
use Cognesy\Events\Event;
use Cognesy\Messages\Messages;
use Cognesy\Polyglot\Inference\LLMProvider;

$events = new EventDispatcher();

// Scripted "user" side of the conversation — replays a fixed list of messages
$student = new ScriptedParticipant(
    name: 'student',
    messages: [
        'Help me get better sales results.',
        'What should I do next?',
        'Give me one more actionable tip.',
        'How could I apply this in practice?',
        'What are some common pitfalls to avoid?',
        'Any final advice?',
        '', // Empty string to signal end of conversation
    ],
);

// LLM-backed expert participant answering the scripted questions
$expert = new LLMParticipant(
    name: 'expert',
    llmProvider: LLMProvider::using('openai'),
    systemPrompt: 'You are a helpful assistant explaining Challenger Sale. Be very brief (one sentence), pragmatic and focused on practical bizdev problems.',
);

// Build a Chat with summary + buffer processors and an assistant participant
$chat = ChatFactory::default(
    participants: new Participants($student, $expert),
    continuationCriteria: new ContinuationCriteria(
        new StepsLimit(12),
        new ResponseContentCheck(fn($lastResponse) => $lastResponse !== ''),
    ),
    stepProcessors: new ChatStateProcessors(
        new AccumulateTokenUsage(),
        new AppendStateMessages(),
        new MoveMessagesToBuffer(maxTokens: 1024, bufferSection: 'buffer', events: $events),
        new SummarizeBuffer(
            maxBufferTokens: 128,
            maxSummaryTokens: 512,
            bufferSection: 'buffer',
            summarySection: 'summary',
            summarizer: new SummarizeMessages(llm: LLMProvider::using('openai')),
            events: $events,
        ),
    ),
    events: $events,
)->wiretap(fn(Event $e) => $e->print());

// Seed the conversation with context loaded from summary.md
$context = "# CONTEXT\n\n" . file_get_contents(__DIR__ . '/summary.md');
$state = (new ChatState)->withMessages(Messages::fromString(content: $context, role: 'system'));

// Run the chat turn by turn until a continuation criterion stops it
while ($chat->hasNextTurn($state)) {
    $state = $chat->nextTurn($state);
    $step = $state->currentStep();
    $name = $step?->participantName() ?? 'unknown';
    $content = trim($step?->outputMessage()->toString() ?? '');
    echo "\n--- Step " . $state->stepCount() . " ($name) ---\n";
    echo $content . "\n";
}