diff --git a/ui/components/ChatWindow.tsx b/ui/components/ChatWindow.tsx
index ea9a93d..5e6d382 100644
--- a/ui/components/ChatWindow.tsx
+++ b/ui/components/ChatWindow.tsx
@@ -38,53 +38,54 @@ const useSocket = (
           'embeddingModelProvider',
         );
 
+        const providers = await fetch(
+          `${process.env.NEXT_PUBLIC_API_URL}/models`,
+          {
+            headers: {
+              'Content-Type': 'application/json',
+            },
+          },
+        ).then((res) => res.json());
+
         if (
           !chatModel ||
           !chatModelProvider ||
           !embeddingModel ||
           !embeddingModelProvider
         ) {
-          const providers = await fetch(
-            `${process.env.NEXT_PUBLIC_API_URL}/models`,
-            {
-              headers: {
-                'Content-Type': 'application/json',
-              },
-            },
-          ).then(async (res) => await res.json());
+          if (!chatModel || !chatModelProvider) {
+            const chatModelProviders = providers.chatModelProviders;
 
-          const chatModelProviders = providers.chatModelProviders;
+            chatModelProvider = Object.keys(chatModelProviders)[0];
 
-          chatModelProvider = Object.keys(chatModelProviders)[0];
-
-          if (chatModelProvider === 'custom_openai') {
-            toast.error(
-              'Seems like you are using the custom OpenAI provider, please open the settings and configure the API key and base URL',
-            );
-            setError(true);
-            return;
-          } else {
-            chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
-
-            if (
-              !chatModelProviders ||
-              Object.keys(chatModelProviders).length === 0
-            )
-              return toast.error('No chat models available');
+            if (chatModelProvider === 'custom_openai') {
+              toast.error('Seems like you are using the custom OpenAI provider, please open the settings and configure the API key and base URL');
+              setError(true);
+              return;
+            } else {
+              if (
+                !chatModelProviders ||
+                Object.keys(chatModelProviders).length === 0
+              )
+                return toast.error('No chat models available');
+              chatModel = Object.keys(chatModelProviders[chatModelProvider])[0];
+            }
           }
 
-          const embeddingModelProviders = providers.embeddingModelProviders;
+          if (!embeddingModel || !embeddingModelProvider) {
+            const embeddingModelProviders = providers.embeddingModelProviders;
 
-          if (
-            !embeddingModelProviders ||
-            Object.keys(embeddingModelProviders).length === 0
-          )
-            return toast.error('No embedding models available');
+            if (
+              !embeddingModelProviders ||
+              Object.keys(embeddingModelProviders).length === 0
+            )
+              return toast.error('No embedding models available');
 
-          embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
-          embeddingModel = Object.keys(
-            embeddingModelProviders[embeddingModelProvider],
-          )[0];
+            embeddingModelProvider = Object.keys(embeddingModelProviders)[0];
+            embeddingModel = Object.keys(
+              embeddingModelProviders[embeddingModelProvider],
+            )[0];
+          }
 
           localStorage.setItem('chatModel', chatModel!);
           localStorage.setItem('chatModelProvider', chatModelProvider);
@@ -94,15 +95,6 @@ const useSocket = (
             embeddingModelProvider,
           );
         } else {
-          const providers = await fetch(
-            `${process.env.NEXT_PUBLIC_API_URL}/models`,
-            {
-              headers: {
-                'Content-Type': 'app  lication/json',
-              },
-            },
-          ).then(async (res) => await res.json());
-
           const chatModelProviders = providers.chatModelProviders;
           const embeddingModelProviders = providers.embeddingModelProviders;
 
@@ -171,8 +163,6 @@ const useSocket = (
 
         const timeoutId = setTimeout(() => {
           if (ws.readyState !== 1) {
-            ws.close();
-            setError(true);
             toast.error(
               'Failed to connect to the server. Please try again later.',
             );
@@ -182,7 +172,6 @@ const useSocket = (
         ws.onopen = () => {
           console.log('[DEBUG] open');
           clearTimeout(timeoutId);
-          setError(false);
           setIsWSReady(true);
         };
 
@@ -203,7 +192,7 @@ const useSocket = (
           if (data.type === 'error') {
             toast.error(data.data);
           }
-        });
+        });
 
         setWs(ws);
       };