rerun prettier

This commit is contained in:
Ian Arawjo 2024-03-14 22:18:46 -04:00
parent d9559dc955
commit cf23bf1150
5 changed files with 6 additions and 11 deletions

View File

@@ -508,11 +508,10 @@ const App = () => {
// Import data to the cache stored on the local filesystem (in backend)
const handleImportCache = useCallback(
(cache_data: Dict<Dict>) =>
(cache_data: Dict<Dict>) =>
importCache(cache_data)
.then(importGlobalStateFromCache)
.catch(handleError)
,
.catch(handleError),
[handleError, importGlobalStateFromCache],
);

View File

@@ -9,8 +9,6 @@ import React, {
useEffect,
useRef,
useMemo,
lazy,
Suspense,
} from "react";
import {
MultiSelect,

View File

@@ -10,7 +10,7 @@
* Descriptions of OpenAI model parameters copied from OpenAI's official chat completions documentation: https://platform.openai.com/docs/models/model-endpoint-compatibility
*/
import { LLM, LLMProvider, RATE_LIMITS, getProvider } from "./backend/models";
import { LLMProvider, RATE_LIMITS, getProvider } from "./backend/models";
import {
Dict,
JSONCompatible,

View File

@@ -143,9 +143,7 @@ export const ResponseBox: React.FC<ResponseBoxProps> = ({
{llmName !== undefined ? (
children
) : (
<div className="response-item-llm-name-wrapper">
{children}
</div>
<div className="response-item-llm-name-wrapper">{children}</div>
)}
</div>
);
@@ -161,7 +159,7 @@ export const genResponseTextsDisplay = (
customTextDisplay?: (txt: string) => React.ReactNode,
onlyShowScores?: boolean,
llmName?: string,
wideFormat?:boolean,
wideFormat?: boolean,
): React.ReactNode[] | React.ReactNode => {
if (!res_obj) return <></>;

View File

@@ -1944,7 +1944,7 @@ export function repairCachedResponses(
return data;
}
/**
/**
* Generates a function that can be called to debounce another function,
* inside a React component. Note that it requires passing (and capturing) a React ref using useRef.
* The ref is used so that when the function is called multiple times; it will 'debounce' --cancel any pending call.