Mirror of https://github.com/ianarawjo/ChainForge.git, synced 2025-03-15 00:36:29 +00:00
Commit: cf23bf1150 ("rerun prettier")
Parent: d9559dc955
@@ -508,11 +508,10 @@ const App = () => {
   // Import data to the cache stored on the local filesystem (in backend)
   const handleImportCache = useCallback(
     (cache_data: Dict<Dict>) =>
       importCache(cache_data)
         .then(importGlobalStateFromCache)
-        .catch(handleError)
-      ,
+        .catch(handleError),
     [handleError, importGlobalStateFromCache],
   );
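The hunk above is a formatting-only change, but the pattern it touches is worth spelling out: a promise chain wrapped in useCallback so the handler keeps a stable identity across re-renders. The following is a minimal, hypothetical sketch rather than ChainForge's actual code; importCache is declared as a stub and the hook name useImportCacheHandler is invented for illustration.

// Hypothetical, self-contained sketch of the useCallback pattern shown above.
// importCache is a stand-in declaration, not ChainForge's real backend call.
import { useCallback } from "react";

type Dict<T = unknown> = Record<string, T>;

declare function importCache(data: Dict<Dict>): Promise<void>; // assumed signature

function useImportCacheHandler(
  importGlobalStateFromCache: () => void,
  handleError: (err: Error) => void,
) {
  // Memoize the handler so its identity stays stable across re-renders;
  // it is rebuilt only when one of the listed dependencies changes.
  return useCallback(
    (cache_data: Dict<Dict>) =>
      importCache(cache_data)
        .then(importGlobalStateFromCache)
        .catch(handleError),
    [handleError, importGlobalStateFromCache],
  );
}

The dependency array mirrors the one in the diff: the callback is recreated only when handleError or importGlobalStateFromCache changes.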
@@ -9,8 +9,6 @@ import React, {
   useEffect,
   useRef,
   useMemo,
   lazy,
   Suspense,
 } from "react";
 import {
   MultiSelect,
@@ -10,7 +10,7 @@
  * Descriptions of OpenAI model parameters copied from OpenAI's official chat completions documentation: https://platform.openai.com/docs/models/model-endpoint-compatibility
  */
 
-import { LLM, LLMProvider, RATE_LIMITS, getProvider } from "./backend/models";
+import { LLMProvider, RATE_LIMITS, getProvider } from "./backend/models";
 import {
   Dict,
   JSONCompatible,
@@ -143,9 +143,7 @@ export const ResponseBox: React.FC<ResponseBoxProps> = ({
       {llmName !== undefined ? (
         children
       ) : (
-        <div className="response-item-llm-name-wrapper">
-          {children}
-        </div>
+        <div className="response-item-llm-name-wrapper">{children}</div>
       )}
     </div>
   );
@@ -161,7 +159,7 @@ export const genResponseTextsDisplay = (
   customTextDisplay?: (txt: string) => React.ReactNode,
   onlyShowScores?: boolean,
   llmName?: string,
-  wideFormat?:boolean,
+  wideFormat?: boolean,
 ): React.ReactNode[] | React.ReactNode => {
   if (!res_obj) return <></>;
@@ -1944,7 +1944,7 @@ export function repairCachedResponses(
   return data;
 }
 
 /**
  * Generates a function that can be called to debounce another function,
  * inside a React component. Note that it requires passing (and capturing) a React ref using useRef.
  * The ref is used so that when the function is called multiple times; it will 'debounce' --cancel any pending call.
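The JSDoc in the last hunk describes a debounce generator that captures a React ref created with useRef. As a rough sketch only (this is not the function ChainForge defines below that comment; the name makeDebounce and its signature are assumptions), the pattern looks roughly like this:

// Hypothetical sketch of a ref-based debounce generator for React components.
// The name makeDebounce and its signature are invented for illustration.
import type { MutableRefObject } from "react";

type TimeoutRef = MutableRefObject<ReturnType<typeof setTimeout> | null>;

function makeDebounce(timeoutRef: TimeoutRef) {
  // Each call clears any pending timeout held in the ref, so only the most
  // recent invocation within the delay window actually runs (the "debounce").
  return (fn: (...args: unknown[]) => void, delayMs: number, ...args: unknown[]) => {
    if (timeoutRef.current !== null) clearTimeout(timeoutRef.current);
    timeoutRef.current = setTimeout(() => fn(...args), delayMs);
  };
}

// Usage inside a component (saveFlowToBackend and flowData are placeholders):
//   const saveTimeout = useRef<ReturnType<typeof setTimeout> | null>(null);
//   const debounce = makeDebounce(saveTimeout);
//   debounce(saveFlowToBackend, 300, flowData);

Keeping the pending timeout in a ref lets repeated calls cancel each other without triggering re-renders, which is why the JSDoc insists on passing and capturing a ref.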