Skip to content

Commit

Permalink
feat(provider): llamacpp show cmd options #1249 (#1270)
Browse files Browse the repository at this point in the history
  • Loading branch information
mikbry authored Sep 20, 2024
1 parent 46edb2e commit 8d8a2ce
Show file tree
Hide file tree
Showing 4 changed files with 25 additions and 4 deletions.
12 changes: 11 additions & 1 deletion webapp/components/common/Parameter/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,16 @@ export default function Parameter({
{value as string}
</a>
)}
{disabled && type === 'text' && (
<p
className={cn(
textCss,
'w-full min-w-[220px] px-3 py-1 text-left text-muted-foreground',
)}
>
{value as string}
</p>
)}
{disabled && !onAction && (type === 'file' || type === 'path') && (
<div className="flex items-center gap-4">
<Button variant="link" className="text-muted-foreground" onClick={handleShowfile}>
Expand All @@ -164,7 +174,7 @@ export default function Parameter({
}}
/>
)}
{(type === 'text' ||
{((type === 'text' && !disabled) ||
type === 'number' ||
type === 'password' ||
(type === 'url' && !disabled)) && (
Expand Down
3 changes: 2 additions & 1 deletion webapp/features/Providers/opla/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ import { getLocalModels, getLocalModelsAsItems } from '@/utils/data/models';
import { getServerConfig, setActiveModel } from '@/utils/backend/commands';
import { LllamaCppParameterDefinitions } from '@/utils/providers/llama.cpp/constants';
import { useModelsStore, useServerStore } from '@/stores';
import { getCommandLineOptions } from '@/utils/providers/llama.cpp';

export default function Opla({
provider,
Expand Down Expand Up @@ -90,7 +91,7 @@ export default function Opla({
<Parameter
label={t('Inference engine')}
name="metadata.server.name"
value={deepGet(provider, 'metadata.server.name', '')}
value={`${deepGet(provider, 'metadata.server.name', 'llama.cpp')} ${getCommandLineOptions(modelPath, provider.metadata?.server.parameters || {})}`}
disabled
type="text"
onChange={onParameterChange}
Expand Down
2 changes: 1 addition & 1 deletion webapp/utils/providers/llama.cpp/constants.ts
Original file line number Diff line number Diff line change
Expand Up @@ -271,7 +271,7 @@ const LllamaCppParameterDefinitions: ParameterDefinition[] = Object.keys(LlamaCp
}),
);

const LlamaCppOptions = {
const LlamaCppOptions: Record<string, Array<string>> = {
model: ['-m', '--model'],
host: ['--host'],
port: ['--port'],
Expand Down
12 changes: 11 additions & 1 deletion webapp/utils/providers/llama.cpp/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ import { invokeTauri } from '@/utils/backend/tauri';
import { mapKeys } from '@/utils/data';
import { toSnakeCase } from '@/utils/string';
import { ServerParameters } from '@/types';
import { LlamaCppParameters, LlamaCppArgumentsSchema } from './constants';
import { LlamaCppParameters, LlamaCppArgumentsSchema, LlamaCppOptions } from './constants';

const parseLLamaCppServerParameters = (params: ServerParameters) =>
LlamaCppArgumentsSchema.parse(params);
Expand Down Expand Up @@ -51,9 +51,19 @@ const restartLLamaCppServer = async (
return startLLamaCppServer(model, parameters, 'start_opla_server');
};

/**
 * Build the llama.cpp server command-line string for display purposes.
 *
 * @param model - Path (or name) of the model file, emitted after the model flag.
 * @param _parameters - Raw server parameters; validated/normalized via
 *   `parseLLamaCppServerParameters` before rendering.
 * @returns A single space-separated string, e.g. `-m /path/model.gguf --port 8080 …`.
 */
const getCommandLineOptions = (model: string, _parameters: ServerParameters) => {
  // Parse a shallow copy so the caller's object is never mutated.
  const parameters = parseLLamaCppServerParameters({ ..._parameters });

  // Seed with the model flag, then append "<flag> <value>" for each parameter.
  return Object.keys(parameters).reduce((options: string, key: string) => {
    const flags = LlamaCppOptions[key];
    // Skip keys with no known CLI flag (would otherwise crash on flags[0]),
    // the 'model' key (already emitted in the seed), and undefined values
    // (would render as the literal string "undefined").
    if (!flags || key === 'model' || parameters[key] === undefined) {
      return options;
    }
    return `${options} ${flags[0]} ${parameters[key]}`;
  }, `${LlamaCppOptions.model[0]} ${model}`);
};

export {
parseLLamaCppServerParameters,
restartLLamaCppServer,
startLLamaCppServer,
stopLLamaCppServer,
getCommandLineOptions,
};

0 comments on commit 8d8a2ce

Please sign in to comment.