LLM function calling parameter separation. #1261

Merged (1 commit) on Sep 7, 2024

2 changes: 1 addition & 1 deletion benchmark/package.json
@@ -72,6 +72,6 @@
"suppress-warnings": "^1.0.2",
"tstl": "^3.0.0",
"uuid": "^9.0.1",
"typia": "../typia-6.10.0-dev.20240907.tgz"
"typia": "../typia-6.10.0-dev.20240908.tgz"
}
}
24 changes: 24 additions & 0 deletions debug/src/llm.ts
@@ -0,0 +1,24 @@
import { ILlmApplication, LlmTypeChecker } from "@samchon/openapi";
import typia, { tags } from "typia";

interface IBbsArticle extends IBbsArticle.ICreate {
id: string & tags.Format<"uuid">;
created_at: string & tags.Format<"date-time">;
}
namespace IBbsArticle {
export interface ICreate {
title: string;
body: string;
file: string & tags.Format<"uri"> & tags.ContentMediaType<"*/*">;
}
}

interface BbsArticleApplication {
create(input: IBbsArticle.ICreate): Promise<IBbsArticle>;
}

const app: ILlmApplication = typia.llm.application<BbsArticleApplication>({
separate: (schema) =>
LlmTypeChecker.isString(schema) && schema.contentMediaType !== undefined,
});
console.log(app);
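
With this predicate, the `file` property (typed with `tags.ContentMediaType`) is separated to the human side, while `title` and `body` stay on the LLM side. A rough sketch of the `functions[0].separated` value this script logs, mirroring the expectations of the new test file further below:

// Sketch only; the layout follows test_llm_application_separate.ts below.
separated: {
  llm: [
    {
      index: 0,
      schema: {
        type: "object",
        properties: {
          title: { type: "string" },
          body: { type: "string" },
        },
        nullable: false,
        required: ["title", "body"],
      },
    },
  ],
  human: [
    {
      index: 0,
      schema: {
        type: "object",
        properties: {
          file: { type: "string", format: "uri", contentMediaType: "*/*" },
        },
        nullable: false,
        required: ["file"],
      },
    },
  ],
}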
7 changes: 0 additions & 7 deletions debug/src/metadata.ts

This file was deleted.

2 changes: 1 addition & 1 deletion errors/package.json
@@ -32,6 +32,6 @@
"typescript": "^5.3.2"
},
"dependencies": {
"typia": "../typia-6.10.0-dev.20240907.tgz"
"typia": "../typia-6.10.0-dev.20240908.tgz"
}
}
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "typia",
"version": "6.10.0-dev.20240907",
"version": "6.10.0-dev.20240908",
"description": "Superfast runtime validators with only one line",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
4 changes: 2 additions & 2 deletions packages/typescript-json/package.json
@@ -1,6 +1,6 @@
{
"name": "typescript-json",
"version": "6.10.0-dev.20240906",
"version": "6.10.0-dev.20240908",
"description": "Superfast runtime validators with only one line",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
@@ -63,7 +63,7 @@
},
"homepage": "https://typia.io",
"dependencies": {
"typia": "6.10.0-dev.20240906"
"typia": "6.10.0-dev.20240908"
},
"peerDependencies": {
"typescript": ">=4.8.0 <5.6.0"
1 change: 1 addition & 0 deletions src/functional/Namespace/index.ts
@@ -14,6 +14,7 @@ export * as http from "./http";
export * as notations from "./notations";
export * as misc from "./misc";
export * as protobuf from "./protobuf";
export * as llm from "./llm";

export { is };

20 changes: 20 additions & 0 deletions src/functional/Namespace/llm.ts
@@ -0,0 +1,20 @@
import { ILlmApplication } from "@samchon/openapi";

import { LlmSchemaSeparator } from "@samchon/openapi/lib/utils/LlmSchemaSeparator";

export const application = () => ({
finalize: (
app: ILlmApplication,
options?: ILlmApplication.IOptions,
): void => {
app.options = {
separate: options?.separate ?? null,
};
if (app.options.separate === null) return;
for (const func of app.functions)
func.separated = LlmSchemaSeparator.parameters({
parameters: func.parameters,
predicator: app.options.separate,
});
},
});
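
This `finalize` helper is merged into `typia.llm.application` at runtime (see the `applicationPure` assignment in src/llm.ts below), and the transformed code calls it right after embedding the generated schema literal. A minimal sketch of that call, reusing the media-type predicate from this PR; the `declare` stands in for the literal that the compiler writes:

import { ILlmApplication, LlmTypeChecker } from "@samchon/openapi";

import { application } from "./llm";

// "app" stands in for the ILlmApplication literal written by LlmApplicationProgrammer.
declare const app: ILlmApplication;

application().finalize(app, {
  separate: (schema) =>
    LlmTypeChecker.isString(schema) && schema.contentMediaType !== undefined,
});
// finalize() stores the predicate in app.options.separate and, when it is not null,
// computes app.functions[i].separated through LlmSchemaSeparator.parameters().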
38 changes: 29 additions & 9 deletions src/llm.ts
@@ -1,5 +1,7 @@
import { ILlmApplication, ILlmSchema } from "@samchon/openapi";

import * as Namespace from "./functional/Namespace";

/**
* > You must configure the generic argument `App`.
*
@@ -14,25 +16,31 @@ import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
* proper function and fill its arguments from the conversation (maybe chatting text)
* with user (human). This is the concept of the LLM function calling.
*
* By the way, there can be some parameters (or their nested properties) that must be
* By the way, there can be some parameters (or their nested properties) which must be
* composed by human, not by LLM. File uploading feature or some sensitive information
* like a secret key (password) are examples. In that case, you can separate the
* function parameters to both LLM and human sides by configuring the
* {@link ILlmApplication.IOptions.separate} property.
* {@link ILlmApplication.IOptions.separate} property. The separated parameters are
* assigned to the {@link ILlmFunction.separated} property.
*
* Additionally, the actual function call execution is not by LLM, but by you.
* For reference, the actual function call execution is not by LLM, but by you.
* When the LLM selects the proper function and fills the arguments, you just call
* the function with the LLM-prepared arguments, and then inform the return value to
* the LLM by system prompt. The LLM will continue the next conversation based on
* the return value.
*
* Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
* so that the parameters are separated to human and LLM sides, you can merge these
* human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
* before the actual LLM function call execution.
*
* @template App Target class or interface type collecting the functions to call
* @param options Options for the LLM application construction
* @returns Application of LLM function calling schemas
* @reference https://platform.openai.com/docs/guides/function-calling
* @author Jeongho Nam - https://github.com/samchon
*/
export function application(options?: ILlmApplication.IOptions): never;
function application(options?: ILlmApplication.IOptions): never;

/**
* TypeScript functions to LLM function schemas.
@@ -46,35 +54,47 @@ export function application(options?: ILlmApplication.IOptions): never;
* proper function and fill its arguments from the conversation (maybe chatting text)
* with user (human). This is the concept of the LLM function calling.
*
* By the way, there can be some parameters (or their nested properties) that must be
* By the way, there can be some parameters (or their nested properties) which must be
* composed by human, not by LLM. File uploading feature or some sensitive information
* like a secret key (password) are examples. In that case, you can separate the
* function parameters to both LLM and human sides by configuring the
* {@link ILlmApplication.IOptions.separate} property.
* {@link ILlmApplication.IOptions.separate} property. The separated parameters are
* assigned to the {@link ILlmFunction.separated} property.
*
* Additionally, the actual function call execution is not by LLM, but by you.
* For reference, the actual function call execution is not by LLM, but by you.
* When the LLM selects the proper function and fills the arguments, you just call
* the function with the LLM-prepared arguments, and then inform the return value to
* the LLM by system prompt. The LLM will continue the next conversation based on
* the return value.
*
* Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
* so that the parameters are separated to human and LLM sides, you can merge these
* human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
* before the actual LLM function call execution.
*
* @template App Target class or interface type collecting the functions to call
* @param options Options for the LLM application construction
* @returns Application of LLM function calling schemas
* @reference https://platform.openai.com/docs/guides/function-calling
* @author Jeongho Nam - https://github.com/samchon
*/
export function application<App extends object>(
function application<App extends object>(
options?: ILlmApplication.IOptions,
): ILlmApplication;

/**
* @internal
*/
export function application(): never {
function application(): never {
halt("application");
}

const applicationPure = /** @__PURE__ */ Object.assign<typeof application, {}>(
application,
/** @__PURE__ */ Namespace.llm.application(),
);
export { applicationPure as application };

/**
* > You must configure the generic argument `T`.
*
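
As the updated documentation above explains, separated parameters must be recombined before you actually execute the function. A rough sketch of that flow, reusing the `BbsArticleApplication` types from debug/src/llm.ts above; the exact call shape of `HttpLlm.mergeParameters` is an assumption for illustration, not something this diff defines:

import { HttpLlm, ILlmApplication, LlmTypeChecker } from "@samchon/openapi";
import { ILlmFunction } from "@samchon/openapi/lib/structures/ILlmFunction";
import typia from "typia";

const app: ILlmApplication = typia.llm.application<BbsArticleApplication>({
  separate: (schema) =>
    LlmTypeChecker.isString(schema) && schema.contentMediaType !== undefined,
});
const func: ILlmFunction = app.functions[0]!;

// The LLM composes the llm-side arguments from conversation; the human supplies the rest.
const llmArguments: unknown[] = [{ title: "Hello", body: "Nice to meet you." }]; // hypothetical values
const humanArguments: unknown[] = [{ file: "https://example.com/attachment.png" }]; // hypothetical values

// Assumed merge signature: combine both sides back into one argument list.
const merged: unknown[] = HttpLlm.mergeParameters({
  function: func,
  llm: llmArguments,
  human: humanArguments,
});
// ...then call the real BbsArticleApplication.create() yourself with the merged
// arguments and report its return value back to the LLM via a system prompt.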
2 changes: 1 addition & 1 deletion src/transformers/CallExpressionTransformer.ts
@@ -356,7 +356,7 @@ const FUNCTORS: Record<string, Record<string, () => Task>> = {
createValidateQuery: () => CreateHttpValidateQueryTransformer.transform,
},
llm: {
application: () => (project) => () =>
application: () => (project) =>
LlmApplicationTransformer.transform(project),
schema: () => (project) => () => LlmSchemaTransformer.transform(project),
},
33 changes: 32 additions & 1 deletion src/transformers/features/llm/LlmApplicationTransformer.ts
@@ -1,9 +1,13 @@
import { ILlmApplication } from "@samchon/openapi";
import ts from "typescript";

import { ExpressionFactory } from "../../../factories/ExpressionFactory";
import { IdentifierFactory } from "../../../factories/IdentifierFactory";
import { LiteralFactory } from "../../../factories/LiteralFactory";
import { MetadataCollection } from "../../../factories/MetadataCollection";
import { MetadataFactory } from "../../../factories/MetadataFactory";
import { StatementFactory } from "../../../factories/StatementFactory";
import { TypeFactory } from "../../../factories/TypeFactory";

import { Metadata } from "../../../schemas/metadata/Metadata";

@@ -17,6 +21,7 @@ import { TransformerError } from "../../TransformerError";
export namespace LlmApplicationTransformer {
export const transform =
(project: IProject) =>
(modulo: ts.LeftHandSideExpression) =>
(expression: ts.CallExpression): ts.Expression => {
// GET GENERIC ARGUMENT
if (!expression.typeArguments?.length)
@@ -51,6 +56,32 @@ export namespace LlmApplicationTransformer {
const schema: ILlmApplication = LlmApplicationProgrammer.write(
result.data,
);
return LiteralFactory.generate(schema);

return ExpressionFactory.selfCall(
ts.factory.createBlock(
[
StatementFactory.constant("app", LiteralFactory.generate(schema)),
ts.factory.createExpressionStatement(
ts.factory.createCallExpression(
ts.factory.createAsExpression(
IdentifierFactory.access(modulo)("finalize"),
TypeFactory.keyword("any"),
),
undefined,
[
ts.factory.createIdentifier("app"),
...(expression.arguments?.[0]
? [expression.arguments[0]]
: []),
],
),
),
ts.factory.createReturnStatement(
ts.factory.createIdentifier("app"),
),
],
true,
),
);
};
}
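
In effect, the transformer no longer emits the bare schema literal: it wraps it in a self-invoked block so that the runtime `finalize` step (Namespace/llm.ts above) can attach the options and compute the separated parameters. Roughly, a call like `typia.llm.application<App>(options)` now compiles into something shaped like the following (a sketch of the emitted shape, not literal compiler output):

(() => {
  // Literal produced by LlmApplicationProgrammer.write() at compile time.
  const app = { /* ...ILlmApplication literal... */ };
  // "typia.llm.application" stands for whatever expression the original call used;
  // the second argument is forwarded only when the original call passed options.
  (typia.llm.application.finalize as any)(app, options);
  return app;
})();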
2 changes: 1 addition & 1 deletion test-esm/package.json
@@ -36,6 +36,6 @@
"typescript": "^5.4.5"
},
"dependencies": {
"typia": "../typia-6.10.0-dev.20240907.tgz"
"typia": "../typia-6.10.0-dev.20240908.tgz"
}
}
2 changes: 1 addition & 1 deletion test/package.json
@@ -52,6 +52,6 @@
"suppress-warnings": "^1.0.2",
"tstl": "^3.0.0",
"uuid": "^9.0.1",
"typia": "../typia-6.10.0-dev.20240907.tgz"
"typia": "../typia-6.10.0-dev.20240908.tgz"
}
}
64 changes: 64 additions & 0 deletions test/src/features/llm.application/test_llm_application_separate.ts
@@ -0,0 +1,64 @@
import { ILlmApplication, LlmTypeChecker } from "@samchon/openapi";
import { ILlmFunction } from "@samchon/openapi/lib/structures/ILlmFunction";
import typia, { tags } from "typia";

import { TestValidator } from "../../helpers/TestValidator";

export const test_llm_application_separate = (): void => {
const app: ILlmApplication = typia.llm.application<BbsArticleApplication>({
separate: (schema) =>
LlmTypeChecker.isString(schema) && schema.contentMediaType !== undefined,
});
const func: ILlmFunction = app.functions[0]!;
TestValidator.equals("separated.human")(func.separated?.human)([
{
index: 0,
schema: {
type: "object",
properties: {
file: {
type: "string",
format: "uri",
contentMediaType: "*/*",
},
},
nullable: false,
required: ["file"],
},
},
]);
TestValidator.equals("separated.llm")(func.separated?.llm)([
{
index: 0,
schema: {
type: "object",
properties: {
title: {
type: "string",
},
body: {
type: "string",
},
},
nullable: false,
required: ["title", "body"],
},
},
]);
};

interface IBbsArticle extends IBbsArticle.ICreate {
id: string & tags.Format<"uuid">;
created_at: string & tags.Format<"date-time">;
}
namespace IBbsArticle {
export interface ICreate {
title: string;
body: string;
file: string & tags.Format<"uri"> & tags.ContentMediaType<"*/*">;
}
}

interface BbsArticleApplication {
create(input: IBbsArticle.ICreate): Promise<IBbsArticle>;
}