Commit 5a5b3032 authored by Marcus Johansson

Issue #3453592 by Marcus_Johansson: LM Studio LLM Provider

parent 42cc0cde
Pipeline #204694 passed with warnings
Showing 529 additions and 9 deletions
# config/install/provider_lmstudio.settings.yml
host_name: ''
port: null
# config/schema/provider_lmstudio.schema.yml
provider_lmstudio.settings:
  type: mapping
  label: 'LM Studio Settings'
  mapping:
    host_name:
      type: string
      label: 'Host Name'
      required: true
    port:
      type: integer
      label: 'Port'
      required: false
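Once saved through the settings form below, these values can be read back anywhere through the standard Drupal config API. A minimal sketch; the example values mirror the form placeholders:

<?php

// Read the LM Studio connection settings (sketch; values are examples).
$config = \Drupal::config('provider_lmstudio.settings');
$host = $config->get('host_name'); // E.g. 'http://127.0.0.1'.
$port = $config->get('port');      // E.g. 1234; may be NULL for ports 80/443.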
# definitions/api_defaults.yml
chat:
  input:
    description: 'Input provided to the model.'
    type: 'array'
    default:
      - { role: "system", content: "You are a helpful assistant." }
      - { role: "user", content: "Introduce yourself!" }
    required: true
  configuration:
    max_tokens:
      label: 'Max Tokens'
      description: 'The maximum number of tokens that can be generated in the chat completion.'
      type: 'integer'
      default: 1024
      required: false
    temperature:
      label: 'Temperature'
      description: 'Sampling temperature, between 0 and 2. Higher values mean more random output.'
      type: 'float'
      default: 1
      required: false
      constraints:
        min: 0
        max: 2
        step: 0.1
    frequency_penalty:
      label: 'Frequency Penalty'
      description: 'Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far.'
      type: 'float'
      default: 0
      required: false
      constraints:
        min: -2
        max: 2
        step: 0.1
    presence_penalty:
      label: 'Presence Penalty'
      description: 'Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far.'
      type: 'float'
      default: 0
      required: false
      constraints:
        min: -2
        max: 2
        step: 0.1
    top_p:
      label: 'Top P'
      description: 'An alternative to sampling with temperature, called nucleus sampling.'
      type: 'float'
      default: 1
      required: false
      constraints:
        min: 0
        max: 1
        step: 0.1
    stream:
      label: 'Stream'
      description: 'Enable/disable streaming of generated tokens.'
      type: 'boolean'
      default: false
      required: false
embeddings:
  input:
    description: 'The text to generate an embedding for.'
    type: 'string'
    default: 'Once upon a time in London.'
    required: true
  configuration:
    dimension:
      label: 'Vector Dimension'
      type: 'integer'
      default: 1536
      required: false
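These defaults become the provider's runtime configuration and are merged directly into the request payload by chat() and embeddings() further down. A sketch of overriding a few of them per instance, assuming the provider exposes the standard plugin setConfiguration() setter (an assumption, not shown in this commit):

<?php

// Override some of the defaults defined above for one provider instance.
// setConfiguration() is the standard Drupal plugin setter; assumed here.
$provider->setConfiguration([
  'temperature' => 0.2, // Less random output.
  'max_tokens' => 512,  // Cap the completion length.
  'stream' => FALSE,
]);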
# provider_lmstudio.info.yml
name: LM Studio Provider
description: This enables the use of LM Studio for the AI module.
package: AI Providers
type: module
core_version_requirement: ^10.3 || ^11
configure: provider_lmstudio.settings_form
dependencies:
  - ai:ai
# provider_lmstudio.links.menu.yml
provider_lmstudio.settings_menu:
  title: "LM Studio Configuration"
  description: "Setup LM Studio"
  route_name: provider_lmstudio.settings_form
  parent: ai.admin_providers
# provider_lmstudio.routing.yml
provider_lmstudio.settings_form:
  path: '/admin/config/ai/providers/lmstudio'
  defaults:
    _form: '\Drupal\provider_lmstudio\Form\LmStudioConfigForm'
    _title: 'Setup LM Studio Authentication'
  requirements:
    _permission: 'administer ai providers'
# provider_lmstudio.services.yml
services:
  provider_lmstudio.control_api:
    class: Drupal\provider_lmstudio\LmStudioControlApi
    arguments: ['@http_client']
<?php

namespace Drupal\provider_lmstudio\Form;

use Drupal\Core\Form\ConfigFormBase;
use Drupal\Core\Form\FormStateInterface;

/**
 * Configure LM Studio API access.
 */
class LmStudioConfigForm extends ConfigFormBase {

  /**
   * Config settings.
   */
  const CONFIG_NAME = 'provider_lmstudio.settings';

  /**
   * {@inheritdoc}
   */
  public function getFormId() {
    return 'lmstudio_settings';
  }

  /**
   * {@inheritdoc}
   */
  protected function getEditableConfigNames() {
    return [
      static::CONFIG_NAME,
    ];
  }

  /**
   * {@inheritdoc}
   */
  public function buildForm(array $form, FormStateInterface $form_state) {
    $config = $this->config(static::CONFIG_NAME);

    $form['host_name'] = [
      '#type' => 'textfield',
      '#title' => $this->t('Host Name'),
      '#description' => $this->t('The host name for the API, including protocol.'),
      '#required' => TRUE,
      '#default_value' => $config->get('host_name'),
      '#attributes' => [
        'placeholder' => 'http://127.0.0.1',
      ],
    ];

    $form['port'] = [
      '#type' => 'textfield',
      '#title' => $this->t('Port'),
      '#description' => $this->t('The port number for the API. Can be left empty if 80 or 443.'),
      '#default_value' => $config->get('port'),
      '#attributes' => [
        'placeholder' => '1234',
      ],
    ];

    return parent::buildForm($form, $form_state);
  }

  /**
   * {@inheritdoc}
   */
  public function submitForm(array &$form, FormStateInterface $form_state) {
    // Save the configuration.
    $this->config(static::CONFIG_NAME)
      ->set('host_name', $form_state->getValue('host_name'))
      ->set('port', $form_state->getValue('port'))
      ->save();
    parent::submitForm($form, $form_state);
  }

}
<?php

namespace Drupal\provider_lmstudio;

use GuzzleHttp\Client;

/**
 * LM Studio Control API.
 */
class LmStudioControlApi {

  /**
   * The http client.
   */
  protected Client $client;

  /**
   * The base host.
   */
  protected string $baseHost;

  /**
   * Constructs a new LM Studio AI object.
   *
   * @param \GuzzleHttp\Client $client
   *   Http client.
   */
  public function __construct(Client $client) {
    $this->client = $client;
  }

  /**
   * Sets connect data.
   *
   * @param string $baseUrl
   *   The base url.
   */
  public function setConnectData($baseUrl) {
    $this->baseHost = $baseUrl;
  }

  /**
   * Get all models in LM Studio.
   *
   * @return array
   *   The response.
   */
  public function getModels() {
    $result = json_decode($this->makeRequest("v1/models", [], 'GET'), TRUE);
    return $result;
  }

  /**
   * Make LM Studio call.
   *
   * @param string $path
   *   The path.
   * @param array $query_string
   *   The query string.
   * @param string $method
   *   The method.
   * @param array|string $body
   *   Data to attach if POST/PUT/PATCH; JSON encoded before sending.
   * @param array $options
   *   Extra headers.
   *
   * @return string|object
   *   The return response.
   */
  protected function makeRequest($path, array $query_string = [], $method = 'GET', $body = '', array $options = []) {
    // Don't wait too long.
    $options['connect_timeout'] = 120;
    $options['read_timeout'] = 120;
    $options['timeout'] = 120;
    // JSON unless it's multipart.
    if (empty($options['multipart'])) {
      $options['headers']['Content-Type'] = 'application/json';
    }
    if ($body) {
      $options['body'] = json_encode($body);
    }
    $new_url = rtrim($this->baseHost, '/') . '/' . $path;
    $new_url .= count($query_string) ? '?' . http_build_query($query_string) : '';
    $res = $this->client->request($method, $new_url, $options);
    return $res->getBody();
  }

}
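As a usage sketch, the control API can be exercised through the service registered in provider_lmstudio.services.yml; the host and port below are example values taken from the form placeholders:

<?php

// List the models currently loaded in a local LM Studio instance (sketch).
/** @var \Drupal\provider_lmstudio\LmStudioControlApi $api */
$api = \Drupal::service('provider_lmstudio.control_api');
$api->setConnectData('http://127.0.0.1:1234');
$models = $api->getModels();
foreach ($models['data'] ?? [] as $model) {
  // Each entry is an OpenAI-compatible model description.
  print $model['id'] . PHP_EOL;
}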
<?php

namespace Drupal\provider_lmstudio\Plugin\AiProvider;

use Drupal\ai\Attribute\AiProvider;
use Drupal\ai\Base\AiProviderClientBase;
use Drupal\ai\OperationType\Chat\ChatInput;
use Drupal\ai\OperationType\Chat\ChatInterface;
use Drupal\ai\OperationType\Chat\ChatMessage;
use Drupal\ai\OperationType\Chat\ChatOutput;
use Drupal\ai\OperationType\Embeddings\EmbeddingsInput;
use Drupal\ai\OperationType\Embeddings\EmbeddingsInterface;
use Drupal\ai\OperationType\Embeddings\EmbeddingsOutput;
use Drupal\Core\Config\ImmutableConfig;
use Drupal\Core\Plugin\ContainerFactoryPluginInterface;
use Drupal\Core\StringTranslation\TranslatableMarkup;
use Drupal\provider_lmstudio\LmStudioControlApi;
use OpenAI\Client;
use Symfony\Component\DependencyInjection\ContainerInterface;
use Symfony\Component\Yaml\Yaml;

/**
 * Plugin implementation of the 'lmstudio' provider.
 */
#[AiProvider(
  id: 'lmstudio',
  label: new TranslatableMarkup('LM Studio'),
)]
class LmStudioProvider extends AiProviderClientBase implements
  ContainerFactoryPluginInterface,
  ChatInterface,
  EmbeddingsInterface {

  /**
   * The OpenAI Client for API calls.
   *
   * @var \OpenAI\Client|null
   */
  protected $client;

  /**
   * The control API.
   *
   * @var \Drupal\provider_lmstudio\LmStudioControlApi
   */
  protected $controlApi;

  /**
   * Dependency Injection for the LM Studio Control API.
   */
  public static function create(ContainerInterface $container, array $configuration, $plugin_id, $plugin_definition) {
    $instance = parent::create($container, $configuration, $plugin_id, $plugin_definition);
    $instance->controlApi = $container->get('provider_lmstudio.control_api');
    $instance->controlApi->setConnectData($instance->getBaseHost());
    return $instance;
  }

  /**
   * {@inheritdoc}
   */
  public function getConfiguredModels(string $operation_type = NULL): array {
    $this->loadClient();
    $response = $this->controlApi->getModels();

    $models = [];
    if (isset($response['data'])) {
      foreach ($response['data'] as $model) {
        $models[$model['id']] = $model['id'];
      }
    }

    return $models;
  }

  /**
   * {@inheritdoc}
   */
  public function isUsable(string $operation_type = NULL): bool {
    // Without a configured host there is nothing to connect to.
    if (!$this->getBaseHost()) {
      return FALSE;
    }
    // If it's one of the operation types that LM Studio supports, it's usable.
    if ($operation_type) {
      return in_array($operation_type, $this->getSupportedOperationTypes());
    }
    return TRUE;
  }

  /**
   * {@inheritdoc}
   */
  public function getSupportedOperationTypes(): array {
    return [
      'chat',
      'embeddings',
    ];
  }

  /**
   * {@inheritdoc}
   */
  public function getConfig(): ImmutableConfig {
    return $this->configFactory->get('provider_lmstudio.settings');
  }

  /**
   * {@inheritdoc}
   */
  public function getApiDefinition(): array {
    // Load the API defaults shipped with the module.
    $definition = Yaml::parseFile($this->moduleHandler->getModule('provider_lmstudio')->getPath() . '/definitions/api_defaults.yml');
    return $definition;
  }

  /**
   * {@inheritdoc}
   */
  public function getModelSettings(string $model_id): array {
    return [];
  }

  /**
   * {@inheritdoc}
   */
  public function setAuthentication(mixed $authentication): void {
    // LM Studio needs no authentication, so just reset the client.
    $this->client = NULL;
  }

  /**
   * Gets the raw client.
   *
   * This is the client for inference.
   *
   * @return \OpenAI\Client
   *   The OpenAI client.
   */
  public function getClient(): Client {
    $this->loadClient();
    return $this->client;
  }

  /**
   * Get control client.
   *
   * This is the client for controlling the LM Studio API.
   *
   * @return \Drupal\provider_lmstudio\LmStudioControlApi
   *   The control client.
   */
  public function getControlClient(): LmStudioControlApi {
    return $this->controlApi;
  }

  /**
   * Loads the LM Studio client with hostname and port.
   */
  protected function loadClient(): void {
    if (!$this->client) {
      $host = $this->getBaseHost();
      $host .= '/v1';

      $this->client = \OpenAI::factory()
        ->withHttpClient($this->httpClient)
        ->withBaseUri($host)
        ->make();
    }
  }

  /**
   * {@inheritdoc}
   */
  public function chat(array|ChatInput $input, string $model_id, array $tags = []): ChatOutput {
    $this->loadClient();
    // Normalize the input if needed.
    $chat_input = $input;
    if ($input instanceof ChatInput) {
      $chat_input = [];
      foreach ($input->getMessages() as $message) {
        $chat_input[] = [
          'role' => $message->getRole(),
          'content' => $message->getMessage(),
        ];
      }
    }
    $payload = [
      'model' => $model_id,
      'messages' => $chat_input,
    ] + $this->configuration;
    $response = $this->client->chat()->create($payload);

    $message = new ChatMessage($response['choices'][0]['message']['role'], $response['choices'][0]['message']['content']);
    return new ChatOutput($message, $response, []);
  }

  /**
   * {@inheritdoc}
   */
  public function embeddings(string|EmbeddingsInput $input, string $model_id, array $tags = []): EmbeddingsOutput {
    $this->loadClient();
    // Normalize the input if needed.
    if ($input instanceof EmbeddingsInput) {
      $input = $input->getPrompt();
    }
    // Send the request.
    $payload = [
      'model' => $model_id,
      'input' => $input,
    ] + $this->configuration;
    $response = $this->client->embeddings()->create($payload)->toArray();

    return new EmbeddingsOutput($response['data'][0]['embedding'], $response, []);
  }

  /**
   * Gets the base host.
   *
   * @return string
   *   The base host.
   */
  protected function getBaseHost(): string {
    $host = rtrim($this->getConfig()->get('host_name'), '/');
    if ($this->getConfig()->get('port')) {
      $host .= ':' . $this->getConfig()->get('port');
    }
    return $host;
  }

}
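Tying it together, a chat call through the provider could look roughly like the sketch below. It assumes the AI module exposes its provider plugin manager as the 'ai.provider' service and that ChatInput accepts an array of ChatMessage objects; 'my-local-model' is a placeholder for an id returned by getConfiguredModels():

<?php

use Drupal\ai\OperationType\Chat\ChatInput;
use Drupal\ai\OperationType\Chat\ChatMessage;

// Instantiate the provider and send a single chat message (sketch).
/** @var \Drupal\provider_lmstudio\Plugin\AiProvider\LmStudioProvider $provider */
$provider = \Drupal::service('ai.provider')->createInstance('lmstudio');
$input = new ChatInput([
  new ChatMessage('user', 'Introduce yourself!'),
]);
$output = $provider->chat($input, 'my-local-model');
// The normalized output is the ChatMessage built in chat() above.
print $output->getNormalized()->getMessage();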
-provider_openai.settings:
+provider_ollama.settings:
   type: mapping
   label: 'Ollama Settings'
   mapping:
@@ -6,17 +6,12 @@ chat:
       - { role: "system", content: "You are a helpful assistant." }
       - { role: "user", content: "Introduce yourself!" }
     required: true
-  authentication:
-    description: 'OpenAI API Key.'
-    type: 'string'
-    default: ''
-    required: true
   configuration:
     max_tokens:
       label: 'Max Tokens'
       description: 'The maximum number of tokens that can be generated in the chat completion.'
       type: 'integer'
-      default: 4096
+      default: 1024
       required: false
     temperature:
       label: 'Temperature'
@@ -64,4 +59,15 @@ chat:
       type: 'boolean'
       default: false
       required: false
+embeddings:
+  input:
+    description: 'The text to generate an embedding for.'
+    type: 'string'
+    default: 'Once upon a time in London.'
+    required: true
+  configuration:
+    dimension:
+      label: 'Vector Dimension'
+      type: 'integer'
+      default: 1536
+      required: false
 provider_ollama.settings_menu:
-  title: "Ollama Authentication"
+  title: "Ollama Configuration"
   description: "Setup Ollama"
   route_name: provider_ollama.settings_form
   parent: ai.admin_providers
@@ -74,6 +74,9 @@ class OllamaProvider extends AiProviderClientBase implements
    */
   public function isUsable(string $operation_type = NULL): bool {
     // If its one of the bundles that Ollama supports its usable.
+    if (!$this->getBaseHost()) {
+      return FALSE;
+    }
     if ($operation_type) {
       return in_array($operation_type, $this->getSupportedOperationTypes());
     }