Commit 42bde8d7 authored by Marcus Johansson
Issue #3453595 by Marcus_Johansson: Groq LLM Provider

parent 5a5b3032
Pipeline #204746 passed with warnings
Showing with 337 additions and 2 deletions
api_key: ''
provider_groq.settings:
  type: mapping
  label: 'Groq Settings'
  mapping:
    api_key:
      type: string
      label: 'API Key'
      required: true
chat:
  input:
    description: 'Input provided to the model.'
    type: 'array'
    default:
      - { role: "system", content: "You are a helpful assistant." }
      - { role: "user", content: "Introduce yourself!" }
    required: true
  authentication:
    description: 'Groq API Key.'
    type: 'string'
    default: ''
    required: true
  configuration:
    max_tokens:
      label: 'Max Tokens'
      description: 'The maximum number of tokens that can be generated in the chat completion.'
      type: 'integer'
      default: 1024
      required: false
    temperature:
      label: 'Temperature'
      description: 'Sampling temperature between 0 and 2. Higher values mean more random output.'
      type: 'float'
      default: 1
      required: false
      constraints:
        min: 0
        max: 2
        step: 0.1
    frequency_penalty:
      label: 'Frequency Penalty'
      description: 'Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far.'
      type: 'float'
      default: 0
      required: false
      constraints:
        min: -2
        max: 2
        step: 0.1
    presence_penalty:
      label: 'Presence Penalty'
      description: 'Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far.'
      type: 'float'
      default: 0
      required: false
      constraints:
        min: -2
        max: 2
        step: 0.1
    top_p:
      label: 'Top P'
      description: 'An alternative to sampling with temperature, called nucleus sampling.'
      type: 'float'
      default: 1
      required: false
      constraints:
        min: 0
        max: 1
        step: 0.1
    stream:
      label: 'Stream'
      description: 'Enable or disable streaming of generated tokens.'
      type: 'boolean'
      default: false
      required: false
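These defaults mirror Groq's OpenAI-compatible chat completion parameters. As a rough sketch of what they amount to at request time (the model ID is a placeholder, and the exact array is assembled by GroqProvider::chat() further down), the resulting request body looks roughly like this:

<?php

// Illustrative only: the request body produced when the defaults above are
// left unchanged. 'example-groq-model' is a placeholder model ID.
$payload = [
  'model' => 'example-groq-model',
  'messages' => [
    ['role' => 'system', 'content' => 'You are a helpful assistant.'],
    ['role' => 'user', 'content' => 'Introduce yourself!'],
  ],
  'max_tokens' => 1024,
  'temperature' => 1,
  'frequency_penalty' => 0,
  'presence_penalty' => 0,
  'top_p' => 1,
  'stream' => FALSE,
];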
name: Groq Provider
description: Enables the use of Groq as a provider for the AI module.
package: AI Providers
type: module
core_version_requirement: ^10.3 || ^11
configure: provider_groq.settings_form
dependencies:
  - ai:ai
  - key:key
provider_groq.settings_menu:
  title: "Groq Configuration"
  description: "Set up Groq"
  route_name: provider_groq.settings_form
  parent: ai.admin_providers
provider_groq.settings_form:
  path: '/admin/config/ai/providers/groq'
  defaults:
    _form: '\Drupal\provider_groq\Form\GroqConfigForm'
    _title: 'Set up Groq Authentication'
  requirements:
    _permission: 'administer ai providers'
<?php

namespace Drupal\provider_groq\Form;

use Drupal\Core\Form\ConfigFormBase;
use Drupal\Core\Form\FormStateInterface;

/**
 * Configure Groq API access.
 */
class GroqConfigForm extends ConfigFormBase {

  /**
   * Config settings.
   */
  const CONFIG_NAME = 'provider_groq.settings';

  /**
   * {@inheritdoc}
   */
  public function getFormId() {
    return 'groq_settings';
  }

  /**
   * {@inheritdoc}
   */
  protected function getEditableConfigNames() {
    return [
      static::CONFIG_NAME,
    ];
  }

  /**
   * {@inheritdoc}
   */
  public function buildForm(array $form, FormStateInterface $form_state) {
    $config = $this->config(static::CONFIG_NAME);

    $form['api_key'] = [
      '#type' => 'key_select',
      '#title' => $this->t('Groq API Key'),
      '#description' => $this->t('The API Key. Can be found on <a href="https://console.groq.com/keys">https://console.groq.com/keys</a>.'),
      '#default_value' => $config->get('api_key'),
    ];

    return parent::buildForm($form, $form_state);
  }

  /**
   * {@inheritdoc}
   */
  public function submitForm(array &$form, FormStateInterface $form_state) {
    // Save the selected key to the module configuration.
    $this->config(static::CONFIG_NAME)
      ->set('api_key', $form_state->getValue('api_key'))
      ->save();

    parent::submitForm($form, $form_state);
  }

}
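The form stores only the machine name of a key entity from the Key module, never the secret itself. A minimal sketch of how that name is resolved to the actual API key later on, assuming the Key module's key.repository service (this mirrors loadApiKey() in the provider below):

<?php

// Sketch: resolve the stored key name into its secret value via the Key module.
$key_name = \Drupal::config('provider_groq.settings')->get('api_key');
$api_key = \Drupal::service('key.repository')->getKey($key_name)->getKeyValue();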
<?php

namespace Drupal\provider_groq\Plugin\AiProvider;

use Drupal\ai\Attribute\AiProvider;
use Drupal\ai\Base\AiProviderClientBase;
use Drupal\ai\OperationType\Chat\ChatInput;
use Drupal\ai\OperationType\Chat\ChatInterface;
use Drupal\ai\OperationType\Chat\ChatMessage;
use Drupal\ai\OperationType\Chat\ChatOutput;
use Drupal\Core\Config\ImmutableConfig;
use Drupal\Core\Plugin\ContainerFactoryPluginInterface;
use Drupal\Core\StringTranslation\TranslatableMarkup;
use OpenAI\Client;
use Symfony\Component\Yaml\Yaml;

/**
 * Plugin implementation of the 'groq' provider.
 */
#[AiProvider(
  id: 'groq',
  label: new TranslatableMarkup('Groq'),
)]
class GroqProvider extends AiProviderClientBase implements
  ContainerFactoryPluginInterface,
  ChatInterface {

  /**
   * The OpenAI Client for API calls.
   *
   * @var \OpenAI\Client|null
   */
  protected $client;

  /**
   * The API key.
   *
   * @var string
   */
  protected string $apiKey = '';

  /**
   * {@inheritdoc}
   */
  public function getConfiguredModels(?string $operation_type = NULL): array {
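    // Groq exposes an OpenAI-compatible models endpoint; the response carries
    // the available models in a 'data' array, which is keyed here by model ID.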
    $response = $this->getClient()->models()->list()->toArray();
    $models = [];
    if (isset($response['data'])) {
      foreach ($response['data'] as $model) {
        $models[$model['id']] = $model['id'];
      }
    }
    return $models;
  }

  /**
   * {@inheritdoc}
   */
  public function isUsable(?string $operation_type = NULL): bool {
    // If no API key is configured, the provider is not usable.
    if (!$this->getConfig()->get('api_key')) {
      return FALSE;
    }
    // If an operation type is given, it is usable only when Groq supports it.
    if ($operation_type) {
      return in_array($operation_type, $this->getSupportedOperationTypes());
    }
    return TRUE;
  }

  /**
   * {@inheritdoc}
   */
  public function getSupportedOperationTypes(): array {
    return [
      'chat',
    ];
  }
  /**
   * {@inheritdoc}
   */
  public function getConfig(): ImmutableConfig {
    return $this->configFactory->get('provider_groq.settings');
  }

  /**
   * {@inheritdoc}
   */
  public function getApiDefinition(): array {
    // Load the API definition defaults shipped with the module.
    $definition = Yaml::parseFile($this->moduleHandler->getModule('provider_groq')->getPath() . '/definitions/api_defaults.yml');
    return $definition;
  }

  /**
   * {@inheritdoc}
   */
  public function getModelSettings(string $model_id): array {
    return [];
  }

  /**
   * {@inheritdoc}
   */
  public function setAuthentication(mixed $authentication): void {
    // Set the new API key and reset the client.
    $this->apiKey = $authentication;
    $this->client = NULL;
  }

  /**
   * Gets the raw client.
   *
   * This is the client for inference.
   *
   * @return \OpenAI\Client
   *   The OpenAI client.
   */
  public function getClient(): Client {
    $this->loadClient();
    return $this->client;
  }

  /**
   * Loads the Groq client with authentication if not initialized.
   */
  protected function loadClient(): void {
    if (!$this->client) {
      if (!$this->apiKey) {
        $this->setAuthentication($this->loadApiKey());
      }
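      // Groq's API is OpenAI-compatible, so the OpenAI PHP client is reused
      // and simply pointed at the Groq base URI.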
      $this->client = \OpenAI::factory()
        ->withApiKey($this->apiKey)
        ->withBaseUri('https://api.groq.com/openai/v1')
        ->withHttpClient($this->httpClient)
        ->make();
    }
  }
  /**
   * {@inheritdoc}
   */
  public function chat(array|ChatInput $input, string $model_id, array $tags = []): ChatOutput {
    $this->loadClient();
    // Normalize the input if needed.
    $chat_input = $input;
    if ($input instanceof ChatInput) {
      $chat_input = [];
      foreach ($input->getMessages() as $message) {
        $chat_input[] = [
          'role' => $message->getRole(),
          'content' => $message->getMessage(),
        ];
      }
    }
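    // The runtime configuration (max_tokens, temperature, etc.) is merged on
    // top of the model and normalized messages to form the request payload.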
    $payload = [
      'model' => $model_id,
      'messages' => $chat_input,
    ] + $this->configuration;

    $response = $this->client->chat()->create($payload);

    $message = new ChatMessage($response['choices'][0]['message']['role'], $response['choices'][0]['message']['content']);
    return new ChatOutput($message, $response, []);
  }

  /**
   * Load the API key from the Key module.
   *
   * @return string
   *   The API key.
   */
  protected function loadApiKey(): string {
    return $this->keyRepository->getKey($this->getConfig()->get('api_key'))->getKeyValue();
  }

}
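A minimal usage sketch for the provider above, assuming the AI module's provider plugin manager is available as the 'ai.provider' service and that an API key has been configured; the model ID is a placeholder:

<?php

// Sketch: run a chat completion through the Groq provider.
// The 'ai.provider' service name and getNormalized() on the output are
// assumptions based on the AI module's API; the model ID is a placeholder.
use Drupal\ai\OperationType\Chat\ChatInput;
use Drupal\ai\OperationType\Chat\ChatMessage;

$provider = \Drupal::service('ai.provider')->createInstance('groq');
$input = new ChatInput([
  new ChatMessage('user', 'Introduce yourself!'),
]);
$output = $provider->chat($input, 'example-groq-model');
$reply = $output->getNormalized()->getMessage();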
@@ -15,7 +15,6 @@ use Drupal\Core\Config\ImmutableConfig;
use Drupal\Core\Plugin\ContainerFactoryPluginInterface;
use Drupal\Core\StringTranslation\TranslatableMarkup;
use Drupal\provider_lmstudio\LmStudioControlApi;
use Drupal\provider_ollama\OllamaControlApi;
use OpenAI\Client;
use Symfony\Component\DependencyInjection\ContainerInterface;
use Symfony\Component\Yaml\Yaml;
@@ -151,7 +150,6 @@ class LmStudioProvider extends AiProviderClientBase implements
    return $this->controlApi;
  }

  /**
   * Loads the Ollama Client with hostname and port.
   */