PiXLFAIL

Untitled

Nov 2nd, 2024
import type { ModelInfo, OllamaApiResponse, OllamaModel } from './types';

export const WORK_DIR_NAME = 'project';
export const WORK_DIR = `/home/${WORK_DIR_NAME}`;
export const MODIFICATIONS_TAG_NAME = 'bolt_file_modifications';
export const MODEL_REGEX = /^\[Model: (.*?)\]\n\n/;
export const DEFAULT_MODEL = 'claude-3-5-sonnet-20240620';
export const DEFAULT_PROVIDER = 'Anthropic';

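// Illustrative sketch (not part of the original paste): MODEL_REGEX appears to be meant
// for pulling a leading "[Model: <name>]" tag out of an incoming chat message, falling
// back to DEFAULT_MODEL when no tag is present. The helper below shows one way that
// could look; its name and return shape are assumptions for illustration only.
export function extractModelFromMessage(content: string): { model: string; content: string } {
  const match = content.match(MODEL_REGEX);

  if (match) {
    return { model: match[1], content: content.replace(MODEL_REGEX, '') };
  }

  return { model: DEFAULT_MODEL, content };
}
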
const staticModels: ModelInfo[] = [
  { name: 'claude-3-5-sonnet-20240620', label: 'Claude 3.5 Sonnet', provider: 'Anthropic' },
  { name: 'gpt-4o', label: 'GPT-4o', provider: 'OpenAI' },
  { name: 'anthropic/claude-3.5-sonnet', label: 'Anthropic: Claude 3.5 Sonnet (OpenRouter)', provider: 'OpenRouter' },
  { name: 'anthropic/claude-3-haiku', label: 'Anthropic: Claude 3 Haiku (OpenRouter)', provider: 'OpenRouter' },
  { name: 'deepseek/deepseek-coder', label: 'Deepseek-Coder V2 236B (OpenRouter)', provider: 'OpenRouter' },
  { name: 'google/gemini-flash-1.5', label: 'Google Gemini Flash 1.5 (OpenRouter)', provider: 'OpenRouter' },
  { name: 'google/gemini-pro-1.5', label: 'Google Gemini Pro 1.5 (OpenRouter)', provider: 'OpenRouter' },
  { name: 'mistralai/mistral-nemo', label: 'OpenRouter Mistral Nemo (OpenRouter)', provider: 'OpenRouter' },
  { name: 'qwen/qwen-110b-chat', label: 'OpenRouter Qwen 110b Chat (OpenRouter)', provider: 'OpenRouter' },
  { name: 'cohere/command', label: 'Cohere Command (OpenRouter)', provider: 'OpenRouter' },
  { name: 'gemini-1.5-flash-latest', label: 'Gemini 1.5 Flash', provider: 'Google' },
  { name: 'gemini-1.5-pro-latest', label: 'Gemini 1.5 Pro', provider: 'Google' },
  { name: 'llama-3.1-70b-versatile', label: 'Llama 3.1 70b (Groq)', provider: 'Groq' },
  { name: 'llama-3.1-8b-instant', label: 'Llama 3.1 8b (Groq)', provider: 'Groq' },
  { name: 'llama-3.2-11b-vision-preview', label: 'Llama 3.2 11b (Groq)', provider: 'Groq' },
  { name: 'llama-3.2-3b-preview', label: 'Llama 3.2 3b (Groq)', provider: 'Groq' },
  { name: 'llama-3.2-1b-preview', label: 'Llama 3.2 1b (Groq)', provider: 'Groq' },
  { name: 'claude-3-opus-20240229', label: 'Claude 3 Opus', provider: 'Anthropic' },
  { name: 'claude-3-sonnet-20240229', label: 'Claude 3 Sonnet', provider: 'Anthropic' },
  { name: 'claude-3-haiku-20240307', label: 'Claude 3 Haiku', provider: 'Anthropic' },
  { name: 'gpt-4o-mini', label: 'GPT-4o Mini', provider: 'OpenAI' },
  { name: 'gpt-4-turbo', label: 'GPT-4 Turbo', provider: 'OpenAI' },
  { name: 'gpt-4', label: 'GPT-4', provider: 'OpenAI' },
  { name: 'gpt-3.5-turbo', label: 'GPT-3.5 Turbo', provider: 'OpenAI' },
  { name: 'deepseek-coder', label: 'Deepseek-Coder', provider: 'Deepseek' },
  { name: 'deepseek-chat', label: 'Deepseek-Chat', provider: 'Deepseek' },
  { name: 'open-mistral-7b', label: 'Mistral 7B', provider: 'Mistral' },
  { name: 'open-mixtral-8x7b', label: 'Mistral 8x7B', provider: 'Mistral' },
  { name: 'open-mixtral-8x22b', label: 'Mistral 8x22B', provider: 'Mistral' },
  { name: 'open-codestral-mamba', label: 'Codestral Mamba', provider: 'Mistral' },
  { name: 'open-mistral-nemo', label: 'Mistral Nemo', provider: 'Mistral' },
  { name: 'ministral-8b-latest', label: 'Mistral 8B', provider: 'Mistral' },
  { name: 'mistral-small-latest', label: 'Mistral Small', provider: 'Mistral' },
  { name: 'codestral-latest', label: 'Codestral', provider: 'Mistral' },
  { name: 'mistral-large-latest', label: 'Mistral Large Latest', provider: 'Mistral' },
];

// Seeded with the static models; replaced with the full list once initializeModelList() runs.
export let MODEL_LIST: ModelInfo[] = [...staticModels];

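// Illustrative sketch (an assumption, not in the original paste): callers that only have a
// model name usually need to resolve it back to its ModelInfo entry, falling back to the
// default model when the name is unknown. The helper name is hypothetical.
export function getModelInfo(name: string): ModelInfo | undefined {
  return (
    MODEL_LIST.find((model) => model.name === name) ??
    MODEL_LIST.find((model) => model.name === DEFAULT_MODEL)
  );
}
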
// Asks a local Ollama server which models are installed and maps them into ModelInfo
// entries; returns an empty list if the server is unreachable.
async function getOllamaModels(): Promise<ModelInfo[]> {
  try {
    const base_url = import.meta.env.OLLAMA_API_BASE_URL || 'http://localhost:11434';
    const response = await fetch(`${base_url}/api/tags`);
    const data = (await response.json()) as OllamaApiResponse;

    return data.models.map((model: OllamaModel) => ({
      name: model.name,
      label: `${model.name} (${model.details.parameter_size})`,
      provider: 'Ollama',
    }));
  } catch (e) {
    return [];
  }
}

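/*
 * For reference, getOllamaModels() only depends on the fields below from Ollama's
 * GET /api/tags response; an abbreviated payload might look like this (the model
 * name and parameter size are made-up examples, not taken from a real server):
 *
 * {
 *   "models": [
 *     { "name": "llama3.1:8b", "details": { "parameter_size": "8.0B" } }
 *   ]
 * }
 */
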
// Fetches the model list from any OpenAI-compatible endpoint configured through
// OPENAI_LIKE_API_BASE_URL / OPENAI_LIKE_API_KEY; returns an empty list when the
// endpoint is not configured or the request fails.
async function getOpenAILikeModels(): Promise<ModelInfo[]> {
  try {
    const base_url = import.meta.env.OPENAI_LIKE_API_BASE_URL || '';

    if (!base_url) {
      return [];
    }

    const api_key = import.meta.env.OPENAI_LIKE_API_KEY ?? '';
    const response = await fetch(`${base_url}/models`, {
      headers: {
        Authorization: `Bearer ${api_key}`,
      },
    });
    const res = (await response.json()) as any;

    return res.data.map((model: any) => ({
      name: model.id,
      label: model.id,
      provider: 'OpenAILike',
    }));
  } catch (e) {
    return [];
  }
}
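
/*
 * Configuration sketch (the values are placeholders, not real endpoints or keys): because
 * the fetch above appends `/models` directly, the base URL should already include any API
 * version segment, e.g.
 *
 *   OPENAI_LIKE_API_BASE_URL=https://api.example.com/v1
 *   OPENAI_LIKE_API_KEY=sk-...
 *
 * Only the `id` field of each entry in the response's `data` array is used.
 */
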
// Rebuilds MODEL_LIST so that dynamically discovered models appear ahead of the static entries.
async function initializeModelList(): Promise<void> {
  const ollamaModels = await getOllamaModels();
  const openAiLikeModels = await getOpenAILikeModels();
  MODEL_LIST = [...ollamaModels, ...openAiLikeModels, ...staticModels];
}

// Kick off discovery at module load; both fetchers swallow their own errors, so this cannot reject.
initializeModelList();

export { getOllamaModels, getOpenAILikeModels, initializeModelList };

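/*
 * Usage sketch (assumed consumer code, not part of this file; the './constants' import
 * path is a guess at how this module is named): MODEL_LIST is populated asynchronously,
 * so code that needs the dynamic Ollama / OpenAI-compatible entries should await
 * initializeModelList() before reading it.
 *
 *   import { MODEL_LIST, initializeModelList } from './constants';
 *
 *   await initializeModelList();
 *   console.log(MODEL_LIST.map((model) => model.label));
 */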