mishig committed
Commit 4038e22 · 1 parent: d54176d

handle when /api/model err

src/lib/components/InferencePlayground/InferencePlaygroundGenerationConfig.svelte CHANGED
@@ -1,10 +1,7 @@
 <script lang="ts">
 	import type { Conversation } from "$lib/types";
 
-	import {
-		GENERATION_CONFIG_KEYS,
-		GENERATION_CONFIG_SETTINGS,
-	} from "./generationConfigSettings";
+	import { GENERATION_CONFIG_KEYS, GENERATION_CONFIG_SETTINGS } from "./generationConfigSettings";
 
 	export let conversation: Conversation;
 	export let classNames = "";
src/routes/+page.server.ts CHANGED
@@ -12,6 +12,10 @@ export const load: PageServerLoad = async ({ fetch }) => {
 			Authorization: `Bearer ${HF_TOKEN}`,
 		},
 	});
+	if (!res.ok) {
+		console.error(`Error fetching warm models`, res.status, res.statusText);
+		return { models: [] };
+	}
 	const compatibleModels: ModelEntry[] = await res.json();
 	compatibleModels.sort((a, b) => a.id.toLowerCase().localeCompare(b.id.toLowerCase()));
 
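
For context, here is a minimal sketch of the guarded fetch pattern this commit introduces in src/routes/+page.server.ts. The endpoint URL, the helper name loadWarmModels, and the ModelEntry shape are assumptions for illustration; only the if (!res.ok) guard, the error log, and the sort come from the hunk above.

// Minimal sketch of the guarded fetch pattern added by this commit.
// The URL, helper name, and ModelEntry shape are assumptions; only the
// !res.ok guard and the sort mirror the diff.
interface ModelEntry {
	id: string;
}

export async function loadWarmModels(hfToken: string): Promise<{ models: ModelEntry[] }> {
	// Hypothetical endpoint; the real load function builds its own /api/models request.
	const res = await fetch("https://huggingface.co/api/models", {
		headers: {
			Authorization: `Bearer ${hfToken}`,
		},
	});
	// New guard: a non-2xx response is logged and mapped to an empty model
	// list instead of letting res.json() fail on an error payload.
	if (!res.ok) {
		console.error(`Error fetching warm models`, res.status, res.statusText);
		return { models: [] };
	}
	const compatibleModels: ModelEntry[] = await res.json();
	compatibleModels.sort((a, b) => a.id.toLowerCase().localeCompare(b.id.toLowerCase()));
	return { models: compatibleModels };
}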