File size: 2,136 Bytes
81ff144
 
 
 
 
 
 
 
 
 
0cb1aa7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
81ff144
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
/** Identifiers for the inference backends the app can route chat requests to. */
export type SupportedProvider = 'openai' | 'amd' | 'groq' | 'gemini' | 'local';

/**
 * Catalogue of selectable providers and the model ids each one exposes.
 *
 * Order matters in two ways: the first entry ('groq') is the app-wide default
 * provider, and each provider's first model is its fallback default model
 * (see getDefaultModel). Model ids are passed through verbatim to the
 * provider's API, so they must match the provider's published model names.
 */
export const providerOptions: Array<{
  id: SupportedProvider;
  label: string;
  models: string[];
}> = [
  {
    id: 'groq',
    label: 'Groq',
    models: [
      'llama-3.3-70b-versatile', 
      'llama-3.1-8b-instant', 
      'openai/gpt-oss-120b',
      'openai/gpt-oss-20b',
      'openai/gpt-oss-safeguard-20b',
      'meta-llama/llama-4-scout-17b-16e-instruct',
      'qwen/qwen3-32b',
      'groq/compound',
      'groq/compound-mini',
      'allam-2-7b',
      'meta-llama/llama-prompt-guard-2-22m',
      'meta-llama/llama-prompt-guard-2-86m',
      'canopylabs/orpheus-arabic-saudi',
      'canopylabs/orpheus-v1-english',
      'mixtral-8x7b-32768'
    ]
  },
  {
    id: 'openai',
    label: 'OpenAI',
    models: ['gpt-4o', 'gpt-4o-mini']
  },
  {
    id: 'gemini',
    label: 'Google Gemini',
    models: ['gemini-2.0-flash', 'gemini-1.5-pro']
  },
  {
    id: 'amd',
    label: 'AMD Inference',
    models: ['gpt-4o']
  },
  {
    id: 'local',
    label: 'Local (Ollama)',
    models: ['llama3.1:8b', 'mistral', 'qwen2.5']
  }
];

/**
 * localStorage keys under which the user's default provider/model are persisted.
 *
 * Declared `as const` so the values are readonly string-literal types and the
 * object cannot be accidentally reassigned or widened at use sites; the change
 * is backward compatible because literals remain assignable to `string`.
 */
export const providerStorageKeys = {
  provider: 'aubm.defaultProvider',
  model: 'aubm.defaultModel'
} as const;

export const getDefaultProvider = (): SupportedProvider => {
  const stored = localStorage.getItem(providerStorageKeys.provider);
  const validProviders: SupportedProvider[] = ['openai', 'amd', 'groq', 'gemini', 'local'];
  return (stored && validProviders.includes(stored as SupportedProvider)) ? (stored as SupportedProvider) : 'groq';
};

/**
 * Resolves the default model id for the given provider.
 *
 * Prefers the model persisted in localStorage when it belongs to the
 * provider's model list; otherwise falls back to the provider's first model
 * (or 'llama-3.3-70b-versatile' when the provider has no catalogue entry).
 */
export const getDefaultModel = (provider: SupportedProvider): string => {
  const stored = localStorage.getItem(providerStorageKeys.model);
  const entry = providerOptions.find((option) => option.id === provider);
  const models = entry?.models ?? ['llama-3.3-70b-versatile'];
  if (stored && models.includes(stored)) {
    return stored;
  }
  return models[0];
};

/**
 * Persists the user's chosen provider and model to localStorage so they are
 * restored as defaults on the next visit (see getDefaultProvider /
 * getDefaultModel).
 */
export const saveProviderDefaults = (provider: SupportedProvider, model: string) => {
  const writes: Array<[string, string]> = [
    [providerStorageKeys.provider, provider],
    [providerStorageKeys.model, model]
  ];
  for (const [key, value] of writes) {
    localStorage.setItem(key, value);
  }
};