102 changes: 26 additions & 76 deletions libs/providers/langchain-google-common/src/chat_models.ts
@@ -65,6 +65,19 @@ import {
schemaToGeminiParameters,
} from "./utils/zod_to_gemini_parameters.js";

/**
* Helper to automatically infer Gemini property ordering from a Zod schema.
*/
function withPropertyOrdering<RunOutput extends Record<string, any>>(
schema: InteropZodType<RunOutput>
): string[] {
try {
// `shape` only exists on Zod object schemas, so cast before reading the key order.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
return Object.keys((schema as any).shape ?? {});
} catch {
return [];
}
}

export class ChatConnection<AuthOptions> extends AbstractGoogleLLMConnection<
BaseMessage[],
AuthOptions
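
A minimal standalone sketch, outside the diff, of what the withPropertyOrdering helper above infers for a plain Zod object schema (the schema and values here are illustrative assumptions):

import { z } from "zod";

// Illustrative schema: key insertion order is what drives the result.
const answerSchema = z.object({
  answer: z.string(),
  confidence: z.number(),
  sources: z.array(z.string()),
});

// Mirrors the helper above: the object's key order becomes the ordering.
const ordering = Object.keys(answerSchema.shape);
console.log(ordering); // ["answer", "confidence", "sources"]
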
@@ -176,7 +189,6 @@ export abstract class ChatGoogleBase<AuthOptions>
extends BaseChatModel<GoogleAIBaseLanguageModelCallOptions, AIMessageChunk>
implements ChatGoogleBaseInput<AuthOptions>
{
// Used for tracing, replace with the same name as your class
static lc_name() {
return "ChatGoogle";
}
@@ -189,52 +201,28 @@ export abstract class ChatGoogleBase<AuthOptions>

lc_serializable = true;

// Set based on modelName
model: string;

modelName = "gemini-pro";

temperature: number;

maxOutputTokens: number;

maxReasoningTokens: number;

topP: number;

topK: number;

seed: number;

presencePenalty: number;

frequencyPenalty: number;

stopSequences: string[] = [];

logprobs: boolean;

topLogprobs: number = 0;

safetySettings: GoogleAISafetySetting[] = [];

responseModalities?: GoogleAIModelModality[];

// May intentionally be undefined, meaning to compute this.
convertSystemMessageToHumanContent: boolean | undefined;

safetyHandler: GoogleAISafetyHandler;

speechConfig: GoogleSpeechConfig;

streamUsage = true;

streaming = false;

labels?: Record<string, string>;

protected connection: ChatConnection<AuthOptions>;

protected streamedConnection: ChatConnection<AuthOptions>;

constructor(fields?: ChatGoogleBaseInput<AuthOptions>) {
@@ -317,14 +305,10 @@ export abstract class ChatGoogleBase<AuthOptions>
return this.withConfig({ tools: convertToGeminiTools(tools), ...kwargs });
}

// Replace
_llmType() {
return "chat_integration";
}

/**
* Get the parameters used to invoke the model
*/
override invocationParams(options?: this["ParsedCallOptions"]) {
return copyAIModelParams(this, options);
}
@@ -344,9 +328,7 @@ export abstract class ChatGoogleBase<AuthOptions>
if (!finalChunk) {
throw new Error("No chunks were returned from the stream.");
}
return {
generations: [finalChunk],
};
return { generations: [finalChunk] };
}

const response = await this.connection.request(
@@ -368,28 +350,20 @@ export abstract class ChatGoogleBase<AuthOptions>
options: this["ParsedCallOptions"],
runManager?: CallbackManagerForLLMRun
): AsyncGenerator<ChatGenerationChunk> {
// Make the call as a streaming request
const parameters = this.invocationParams(options);
const response = await this.streamedConnection.request(
_messages,
parameters,
options,
runManager
);

// Get the streaming parser of the response
const stream = response.data as JsonStream;
let usageMetadata: UsageMetadata | undefined;
// Loop until the end of the stream
// During the loop, yield each time we get a chunk from the streaming parser
// that is either available or added to the queue
while (!stream.streamDone) {
const output = await stream.nextChunk();
await runManager?.handleCustomEvent(
`google-chunk-${this.constructor.name}`,
{
output,
}
{ output }
);
if (
output &&
@@ -428,49 +402,20 @@ export abstract class ChatGoogleBase<AuthOptions>
}
}

/** @ignore */
_combineLLMOutput() {
return [];
}

withStructuredOutput<
// eslint-disable-next-line @typescript-eslint/no-explicit-any
RunOutput extends Record<string, any> = Record<string, any>
>(
outputSchema:
| InteropZodType<RunOutput>
// eslint-disable-next-line @typescript-eslint/no-explicit-any
| Record<string, any>,
config?: StructuredOutputMethodOptions<false>
): Runnable<BaseLanguageModelInput, RunOutput>;

withStructuredOutput<
// eslint-disable-next-line @typescript-eslint/no-explicit-any
RunOutput extends Record<string, any> = Record<string, any>
>(
outputSchema:
| InteropZodType<RunOutput>
// eslint-disable-next-line @typescript-eslint/no-explicit-any
| Record<string, any>,
config?: StructuredOutputMethodOptions<true>
): Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }>;

withStructuredOutput<
// eslint-disable-next-line @typescript-eslint/no-explicit-any
RunOutput extends Record<string, any> = Record<string, any>
>(
outputSchema:
| InteropZodType<RunOutput>
// eslint-disable-next-line @typescript-eslint/no-explicit-any
| Record<string, any>,
config?: StructuredOutputMethodOptions<boolean>
):
| Runnable<BaseLanguageModelInput, RunOutput>
| Runnable<
BaseLanguageModelInput,
{ raw: BaseMessage; parsed: RunOutput }
> {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
| Runnable<BaseLanguageModelInput, { raw: BaseMessage; parsed: RunOutput }> {
const schema: InteropZodType<RunOutput> | Record<string, any> =
outputSchema;
const name = config?.name;
@@ -483,8 +428,15 @@ export abstract class ChatGoogleBase<AuthOptions>
let functionName = name ?? "extract";
let outputParser: BaseLLMOutputParser<RunOutput>;
let tools: GeminiTool[];

if (isInteropZodSchema(schema)) {
const jsonSchema = schemaToGeminiParameters(schema);

// 🧩 Add inferred property ordering for Gemini structured outputs
if (!jsonSchema.propertyOrdering) {
jsonSchema.propertyOrdering = withPropertyOrdering(schema);
}

Comment on lines +434 to +439
If I'm reading this right, you have to specify propertyOrdering as a property on the schema object to enable this?

z.object({
  propertyOrdering: z.something()
});

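As the diff reads, nothing named propertyOrdering has to be declared on the Zod schema itself: whenever the converted Gemini schema does not already carry an ordering, it is inferred from the object's key order. A hedged usage sketch, outside the diff, assuming one of the concrete subclasses such as ChatVertexAI from @langchain/google-vertexai and an illustrative model name:

import { z } from "zod";
import { ChatVertexAI } from "@langchain/google-vertexai";

// Model name is an assumption for illustration.
const model = new ChatVertexAI({ model: "gemini-2.0-flash" });

// No propertyOrdering key anywhere in the schema; key order is title -> year -> genres.
const structured = model.withStructuredOutput(
  z.object({
    title: z.string(),
    year: z.number(),
    genres: z.array(z.string()),
  }),
  { name: "movie" }
);

// With this change, the generated function declaration should carry
// propertyOrdering: ["title", "year", "genres"].
const result = await structured.invoke("Describe the movie Arrival.");
console.log(result);
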
tools = [
{
functionDeclarations: [
@@ -512,7 +464,6 @@ export abstract class ChatGoogleBase<AuthOptions>
geminiFunctionDefinition = schema as GeminiFunctionDeclaration;
functionName = schema.name;
} else {
// We are providing the schema for *just* the parameters, probably
const parameters: GeminiJsonSchema = removeAdditionalProperties(schema);
geminiFunctionDefinition = {
name: functionName,
@@ -530,6 +481,7 @@ export abstract class ChatGoogleBase<AuthOptions>
keyName: functionName,
});
}

const llm = this.bindTools(tools).withConfig({ tool_choice: functionName });

if (!includeRaw) {
@@ -539,7 +491,6 @@ export abstract class ChatGoogleBase<AuthOptions>
}

const parserAssign = RunnablePassthrough.assign({
// eslint-disable-next-line @typescript-eslint/no-explicit-any
parsed: (input: any, config) => outputParser.invoke(input.raw, config),
});
const parserNone = RunnablePassthrough.assign({
Expand All @@ -548,13 +499,12 @@ export abstract class ChatGoogleBase<AuthOptions>
const parsedWithFallback = parserAssign.withFallbacks({
fallbacks: [parserNone],
});

return RunnableSequence.from<
BaseLanguageModelInput,
{ raw: BaseMessage; parsed: RunOutput }
>([
{
raw: llm,
},
{ raw: llm },
parsedWithFallback,
]).withConfig({
runName: "StructuredOutputRunnable",