From 789bbef52073f1524d080a3c22133d2884519894 Mon Sep 17 00:00:00 2001
From: API Engineering
Date: Wed, 17 Dec 2025 11:43:24 +0000
Subject: [PATCH] [bot] Updated client based on openapi-8e30ff8/clientgen

---
 changelist.md                                 |   3 +-
 ...v2_databases.DatabasesPostRequestBody.html |  56 +--
 ...bases_item_users.UsersPostRequestBody.html |  14 +-
 ...2_uptime_checks.ChecksPostRequestBody.html |  10 +-
 ..._checks_item.WithCheck_PutRequestBody.html |  10 +-
 ...item.WithVpc_peering_PatchRequestBody.html |   2 +-
 ...item.WithVpc_peering_PatchRequestBody.html |   2 +-
 src/dots/kiota-lock.json                      |   2 +-
 src/dots/models/index.ts                      | 337 +-----------------
 9 files changed, 50 insertions(+), 386 deletions(-)

diff --git a/changelist.md b/changelist.md
index 4cd6f3f16..a088fc9cc 100644
--- a/changelist.md
+++ b/changelist.md
@@ -1,6 +1,5 @@
 ## Changelist
-Current commit: digitalocean/openapi@578e1b5 (2025-12-16T18:30:04Z)
+Current commit: digitalocean/openapi@8e30ff8 (2025-12-17T10:34:53Z)
 Target commit: digitalocean/openapi@8e30ff8 (2025-12-17T10:34:53Z)
-* digitalocean/openapi#1122: update examples/curl for create KB and create data source - 2025-12-17T10:34:53Z []

diff --git a/docs/interfaces/v2_databases.DatabasesPostRequestBody.html b/docs/interfaces/v2_databases.DatabasesPostRequestBody.html
index da06fdac9..7354a5217 100644
--- a/docs/interfaces/v2_databases.DatabasesPostRequestBody.html
+++ b/docs/interfaces/v2_databases.DatabasesPostRequestBody.html
@@ -31,32 +31,32 @@

Properties

additionalData?: Record<string, unknown>

Gets the additional data for this object that did not belong to the properties.

The additional data for this object.

autoscale?: null | Database_autoscale_params

Autoscaling configuration for the database cluster. Currently only supports storage autoscaling. If null, autoscaling is not configured for the cluster.

-
backupRestore?: null | Database_backup

The backup_restore property

+
backupRestore?: null | Database_backup

The backup_restore property

connection?: null | Database_connection

The connection property

-
createdAt?: null | Date

A time value given in ISO8601 combined date and time format that represents when the database cluster was created.

-
dbNames?: null | string[]

An array of strings containing the names of databases created in the database cluster.

-
engine?: null | Database_cluster_engine

A slug representing the database engine used for the cluster. The possible values are: "pg" for PostgreSQL, "mysql" for MySQL, "redis" for Caching, "mongodb" for MongoDB, "kafka" for Kafka, "opensearch" for OpenSearch, and "valkey" for Valkey.

-
id?: null | string

A unique ID that can be used to identify and reference a database cluster.

-
maintenanceWindow?: null | Database_maintenance_window

The maintenance_window property

-
metricsEndpoints?: null | Database_service_endpoint[]

Public hostname and port of the cluster's metrics endpoint(s). Includes one record for the cluster's primary node and a second entry for the cluster's standby node(s).

-
name?: null | string

A unique, human-readable name referring to a database cluster.

-
numNodes?: null | number

The number of nodes in the database cluster.

-
privateConnection?: null | Database_connection

The private_connection property

-
privateNetworkUuid?: null | string

A string specifying the UUID of the VPC to which the database cluster will be assigned. If excluded when creating a new database cluster, the cluster will be assigned to your account's default VPC for the region.

Requires vpc:read scope.

-
projectId?: null | string

The ID of the project that the database cluster is assigned to. If excluded when creating a new database cluster, it will be assigned to your default project.

Requires project:update scope.

-
region?: null | string

The slug identifier for the region where the database cluster is located.

-
rules?: null | Firewall_rule[]

The rules property

-
schemaRegistryConnection?: null | Schema_registry_connection

The connection details for Schema Registry.

-
semanticVersion?: null | string

A string representing the semantic version of the database engine in use for the cluster.

-
size?: null | string

The slug identifier representing the size of the nodes in the database cluster.

-
standbyConnection?: null | Database_connection

The standby_connection property

-
standbyPrivateConnection?: null | Database_connection

The standby_private_connection property

-
status?: null | Database_cluster_status

A string representing the current status of the database cluster.

-
storageSizeMib?: null | number

Additional storage added to the cluster, in MiB. If null, no additional storage is added to the cluster, beyond what is provided as a base amount from the 'size' and any previously added additional storage.

-
tags?: null | string[]

An array of tags (as strings) to apply to the database cluster.

Requires tag:create scope.

-
uiConnection?: null | Opensearch_connection

The connection details for OpenSearch dashboard.

-
users?: null | Database_user[]

The users property

-
version?: null | string

A string representing the version of the database engine in use for the cluster.

-
versionEndOfAvailability?: null | string

A timestamp referring to the date when the particular version will no longer be available for creating new clusters. If null, the version does not have an end of availability timeline.

-
versionEndOfLife?: null | string

A timestamp referring to the date when the particular version will no longer be supported. If null, the version does not have an end of life timeline.

-
+
createdAt?: null | Date

A time value given in ISO8601 combined date and time format that represents when the database cluster was created.

+
dbNames?: null | string[]

An array of strings containing the names of databases created in the database cluster.

+
engine?: null | Database_cluster_engine

A slug representing the database engine used for the cluster. The possible values are: "pg" for PostgreSQL, "mysql" for MySQL, "redis" for Caching, "mongodb" for MongoDB, "kafka" for Kafka, "opensearch" for OpenSearch, and "valkey" for Valkey.

+
id?: null | string

A unique ID that can be used to identify and reference a database cluster.

+
maintenanceWindow?: null | Database_maintenance_window

The maintenance_window property

+
metricsEndpoints?: null | Database_service_endpoint[]

Public hostname and port of the cluster's metrics endpoint(s). Includes one record for the cluster's primary node and a second entry for the cluster's standby node(s).

+
name?: null | string

A unique, human-readable name referring to a database cluster.

+
numNodes?: null | number

The number of nodes in the database cluster.

+
privateConnection?: null | Database_connection

The private_connection property

+
privateNetworkUuid?: null | string

A string specifying the UUID of the VPC to which the database cluster will be assigned. If excluded when creating a new database cluster, the cluster will be assigned to your account's default VPC for the region.

Requires vpc:read scope.

+
projectId?: null | string

The ID of the project that the database cluster is assigned to. If excluded when creating a new database cluster, it will be assigned to your default project.

Requires project:update scope.

+
region?: null | string

The slug identifier for the region where the database cluster is located.

+
rules?: null | Firewall_rule[]

The rules property

+
schemaRegistryConnection?: null | Schema_registry_connection

The connection details for Schema Registry.

+
semanticVersion?: null | string

A string representing the semantic version of the database engine in use for the cluster.

+
size?: null | string

The slug identifier representing the size of the nodes in the database cluster.

+
standbyConnection?: null | Database_connection

The standby_connection property

+
standbyPrivateConnection?: null | Database_connection

The standby_private_connection property

+
status?: null | Database_cluster_status

A string representing the current status of the database cluster.

+
storageSizeMib?: null | number

Additional storage added to the cluster, in MiB. If null, no additional storage is added to the cluster, beyond what is provided as a base amount from the 'size' and any previously added additional storage.

+
tags?: null | string[]

An array of tags (as strings) to apply to the database cluster.

Requires tag:create scope.

+
uiConnection?: null | Opensearch_connection

The connection details for OpenSearch dashboard.

+
users?: null | Database_user[]

The users property

+
version?: null | string

A string representing the version of the database engine in use for the cluster.

+
versionEndOfAvailability?: null | string

A timestamp referring to the date when the particular version will no longer be available for creating new clusters. If null, the version does not have an end of availability timeline.

+
versionEndOfLife?: null | string

A timestamp referring to the date when the particular version will no longer be supported. If null, the version does not have an end of life timeline.

+
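For orientation, here is a minimal sketch of how a caller might populate this request body. The field names and types come from the documentation above; the import path, the `client` variable, the engine/region/size slugs, and the `v2.databases.post(...)` call chain are assumptions about the generated Kiota client layout, not something this patch confirms.

    // Hypothetical usage sketch for DatabasesPostRequestBody (fields per the docs above).
    // The import path and request-builder chain are assumptions, not taken from this patch.
    import type { DatabasesPostRequestBody } from "./src/dots/v2/databases"; // assumed path

    const createCluster: DatabasesPostRequestBody = {
        name: "backend-db",          // unique, human-readable cluster name
        engine: "pg" as any,         // Database_cluster_engine slug; exact enum literal is an assumption
        version: "16",
        region: "nyc3",
        size: "db-s-2vcpu-4gb",
        numNodes: 2,
        tags: ["production"],        // requires tag:create scope
    };

    // Assuming `client` is an already-configured client instance (not shown in this patch):
    // const cluster = await client.v2.databases.post(createCluster);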
diff --git a/docs/interfaces/v2_databases_item_users.UsersPostRequestBody.html b/docs/interfaces/v2_databases_item_users.UsersPostRequestBody.html
index 93e1bfc57..103220d0a 100644
--- a/docs/interfaces/v2_databases_item_users.UsersPostRequestBody.html
+++ b/docs/interfaces/v2_databases_item_users.UsersPostRequestBody.html
@@ -8,13 +8,13 @@ role? settings?

Properties

accessCert?: null | string

Access certificate for TLS client authentication. (Kafka only)

-
accessKey?: null | string

Access key for TLS client authentication. (Kafka only)

-
additionalData?: Record<string, unknown>

Gets the additional data for this object that did not belong to the properties.

+
accessKey?: null | string

Access key for TLS client authentication. (Kafka only)

+
additionalData?: Record<string, unknown>

Gets the additional data for this object that did not belong to the properties.

The additional data for this object.

mysqlSettings?: null | Mysql_settings

The mysql_settings property

-
name?: null | string

The name of a database user.

-
password?: null | string

A randomly generated password for the database user.
Requires database:view_credentials scope.

-
readonly?: null | boolean

(To be deprecated: use settings.mongo_user_settings.role instead for access controls to MongoDB databases). For MongoDB clusters, set to true to create a read-only user. This option is not currently supported for other database engines.

+
name?: null | string

The name of a database user.

+
password?: null | string

A randomly generated password for the database user.
Requires database:view_credentials scope.

+
readonly?: null | boolean

(To be deprecated: use settings.mongo_user_settings.role instead for access controls to MongoDB databases). For MongoDB clusters, set to true to create a read-only user. This option is not currently supported for other database engines.

role?: null | Database_user_role

A string representing the database user's role. The value will be either "primary" or "normal".

-
settings?: null | User_settings

The settings property

-
+
settings?: null | User_settings

The settings property

+
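A similarly hedged sketch for creating a database user with this body. In practice only the name is supplied by the caller; the import path and the cluster-scoped request-builder method in the comment are assumptions about the generated client, not verbatim from this patch.

    // Hypothetical sketch: create a database user. Import path and call chain are assumptions.
    import type { UsersPostRequestBody } from "./src/dots/v2/databases/item/users"; // assumed path

    const newUser: UsersPostRequestBody = {
        name: "app_rw",
        // readonly applies to MongoDB clusters only and is slated for deprecation in favour of
        // settings.mongo_user_settings.role, so it is left unset here.
    };

    // Assuming `client` is a configured client and "<cluster-uuid>" is a placeholder:
    // await client.v2.databases.byDatabase_cluster_uuid("<cluster-uuid>").users.post(newUser);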
diff --git a/docs/interfaces/v2_uptime_checks.ChecksPostRequestBody.html b/docs/interfaces/v2_uptime_checks.ChecksPostRequestBody.html
index 1a4932406..788760abe 100644
--- a/docs/interfaces/v2_uptime_checks.ChecksPostRequestBody.html
+++ b/docs/interfaces/v2_uptime_checks.ChecksPostRequestBody.html
@@ -7,8 +7,8 @@

Properties

additionalData?: Record<string, unknown>

Gets the additional data for this object that did not belong to the properties.

The additional data for this object.

enabled?: null | boolean

A boolean value indicating whether the check is enabled/disabled.

-
name?: null | string

A human-friendly display name.

-
regions?: null | Check_updatable_regions[]

An array containing the selected regions to perform healthchecks from.

-
target?: null | string

The endpoint to perform healthchecks on.

-
type?: null | Check_updatable_type

The type of health check to perform.

-
+
name?: null | string

A human-friendly display name.

+
regions?: null | Check_updatable_regions[]

An array containing the selected regions to perform healthchecks from.

+
target?: null | string

The endpoint to perform healthchecks on.

+
type?: null | Check_updatable_type

The type of health check to perform.

+
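A hedged sketch of creating an uptime check with this body. The region and type string literals, the import path, and the call chain are assumptions based on the documented field types, not values confirmed by this patch.

    // Hypothetical sketch: create an uptime check. Enum literals and call chain are assumptions.
    import type { ChecksPostRequestBody } from "./src/dots/v2/uptime/checks"; // assumed path

    const newCheck: ChecksPostRequestBody = {
        name: "landing-page",
        type: "https" as any,                   // Check_updatable_type value; exact literal is an assumption
        target: "https://www.example.com",
        regions: ["us_east", "eu_west"] as any, // Check_updatable_regions values; exact literals are assumptions
        enabled: true,
    };

    // Assuming a configured client:
    // await client.v2.uptime.checks.post(newCheck);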
diff --git a/docs/interfaces/v2_uptime_checks_item.WithCheck_PutRequestBody.html b/docs/interfaces/v2_uptime_checks_item.WithCheck_PutRequestBody.html
index 4ac83b676..292f51937 100644
--- a/docs/interfaces/v2_uptime_checks_item.WithCheck_PutRequestBody.html
+++ b/docs/interfaces/v2_uptime_checks_item.WithCheck_PutRequestBody.html
@@ -7,8 +7,8 @@

Properties

additionalData?: Record<string, unknown>

Gets the additional data for this object that did not belong to the properties.

The additional data for this object.

enabled?: null | boolean

A boolean value indicating whether the check is enabled/disabled.

-
name?: null | string

A human-friendly display name.

-
regions?: null | Check_updatable_regions[]

An array containing the selected regions to perform healthchecks from.

-
target?: null | string

The endpoint to perform healthchecks on.

-
type?: null | Check_updatable_type

The type of health check to perform.

-
+
name?: null | string

A human-friendly display name.

+
regions?: null | Check_updatable_regions[]

An array containing the selected regions to perform healthchecks from.

+
target?: null | string

The endpoint to perform healthchecks on.

+
type?: null | Check_updatable_type

The type of health check to perform.

+
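The PUT body has the same shape as the create body above; since PUT replaces the existing check, callers would normally send the full desired state rather than a partial update. A hedged sketch, with the same caveats about assumed import path, enum literals, and call chain:

    // Hypothetical sketch: replace an existing uptime check (PUT sends the full desired state).
    import type { WithCheck_PutRequestBody } from "./src/dots/v2/uptime/checks/item"; // assumed path

    const replaceCheck: WithCheck_PutRequestBody = {
        name: "landing-page",
        type: "https" as any,         // assumed enum literal
        target: "https://www.example.com/health",
        regions: ["us_east"] as any,  // assumed enum literal
        enabled: false,               // e.g. temporarily disable the check
    };

    // Assuming a configured client and a check ID placeholder:
    // await client.v2.uptime.checks.byCheck_id("<check-uuid>").put(replaceCheck);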
diff --git a/docs/interfaces/v2_vpc_peerings_item.WithVpc_peering_PatchRequestBody.html b/docs/interfaces/v2_vpc_peerings_item.WithVpc_peering_PatchRequestBody.html
index 206a6aca0..5f49aa5cd 100644
--- a/docs/interfaces/v2_vpc_peerings_item.WithVpc_peering_PatchRequestBody.html
+++ b/docs/interfaces/v2_vpc_peerings_item.WithVpc_peering_PatchRequestBody.html
@@ -3,4 +3,4 @@

Properties

additionalData?: Record<string, unknown>

Gets the additional data for this object that did not belong to the properties.

The additional data for this object.

name?: null | string

The name of the VPC peering. Must be unique within the team and may only contain alphanumeric characters and dashes.

-
+
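Only the peering name is mutable through this PATCH body. A minimal hedged sketch; the import path and the byVpc_peering_id accessor are assumptions about the generated request builders:

    // Hypothetical sketch: rename a VPC peering (the only field in this body besides additionalData).
    import type { WithVpc_peering_PatchRequestBody } from "./src/dots/v2/vpc_peerings/item"; // assumed path

    const renamePeering: WithVpc_peering_PatchRequestBody = {
        name: "prod-to-staging-peering", // alphanumeric characters and dashes only; unique within the team
    };

    // Assuming a configured client and a peering ID placeholder:
    // await client.v2.vpc_peerings.byVpc_peering_id("<peering-uuid>").patch(renamePeering);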
diff --git a/docs/interfaces/v2_vpcs_item_peerings_item.WithVpc_peering_PatchRequestBody.html b/docs/interfaces/v2_vpcs_item_peerings_item.WithVpc_peering_PatchRequestBody.html
index 7d958b80f..7bb16f405 100644
--- a/docs/interfaces/v2_vpcs_item_peerings_item.WithVpc_peering_PatchRequestBody.html
+++ b/docs/interfaces/v2_vpcs_item_peerings_item.WithVpc_peering_PatchRequestBody.html
@@ -3,4 +3,4 @@

Properties

additionalData?: Record<string, unknown>

Gets the additional data for this object that did not belong to the properties.

The additional data for this object.

name?: null | string

The name of the VPC peering. Must be unique within the team and may only contain alphanumeric characters and dashes.

-
+
diff --git a/src/dots/kiota-lock.json b/src/dots/kiota-lock.json index 5c0976a0f..236bf58d9 100644 --- a/src/dots/kiota-lock.json +++ b/src/dots/kiota-lock.json @@ -1,5 +1,5 @@ { - "descriptionHash": "96D2A795192959C984AF6B1BB1F20A71BD754D24BBC18B84AB6570BD050F1CE2FE24CEE384DD98C6140373D64EA4D163A51265807C2196BC90D17D9A20DACF44", + "descriptionHash": "45DD6689BB4C57C32DB58B5600B2544E285DFE1AC9F0D1552B69C263CD1E7DEE1ED324E8B9044284A3D62F36F07926684D76905DEAF3DD1196207CCC8AD8791A", "descriptionLocation": "../../DigitalOcean-public.v2.yaml", "lockFileVersion": "1.0.0", "kiotaVersion": "1.29.0", diff --git a/src/dots/models/index.ts b/src/dots/models/index.ts index e9873e857..32c8c201c 100644 --- a/src/dots/models/index.ts +++ b/src/dots/models/index.ts @@ -1488,25 +1488,6 @@ export interface ApiChatbot extends AdditionalDataHolder, Parsable { */ startingMessage?: string | null; } -export type ApiChunkingAlgorithm = (typeof ApiChunkingAlgorithmObject)[keyof typeof ApiChunkingAlgorithmObject]; -export interface ApiChunkingOptions extends AdditionalDataHolder, Parsable { - /** - * The child_chunk_size property - */ - childChunkSize?: number | null; - /** - * Common options - */ - maxChunkSize?: number | null; - /** - * Hierarchical options - */ - parentChunkSize?: number | null; - /** - * Semantic options - */ - semanticThreshold?: number | null; -} export type ApiCrawlingOption = (typeof ApiCrawlingOptionObject)[keyof typeof ApiCrawlingOptionObject]; export interface ApiCreateAgentAPIKeyInputPublic extends AdditionalDataHolder, Parsable { /** @@ -1634,10 +1615,6 @@ export interface ApiCreateDataSourceFileUploadPresignedUrlsOutput extends Additi * Creates an evaluation dataset for an agent */ export interface ApiCreateEvaluationDatasetInputPublic extends AdditionalDataHolder, Parsable { - /** - * The dataset_type property - */ - datasetType?: ApiEvaluationDatasetType | null; /** * File to upload as data source for knowledge base. */ @@ -1657,10 +1634,6 @@ export interface ApiCreateEvaluationDatasetOutput extends AdditionalDataHolder, evaluationDatasetUuid?: string | null; } export interface ApiCreateEvaluationTestCaseInputPublic extends AdditionalDataHolder, Parsable { - /** - * The agent_workspace_name property - */ - agentWorkspaceName?: string | null; /** * Dataset against which the test‑case is executed. 
*/ @@ -1700,14 +1673,6 @@ export interface ApiCreateKnowledgeBaseDataSourceInputPublic extends AdditionalD * AWS S3 Data Source */ awsDataSource?: ApiAWSDataSource | null; - /** - * The chunking_algorithm property - */ - chunkingAlgorithm?: ApiChunkingAlgorithm | null; - /** - * The chunking_options property - */ - chunkingOptions?: ApiChunkingOptions | null; /** * Knowledge base id */ @@ -2035,7 +2000,6 @@ export interface ApiEvaluationDataset extends AdditionalDataHolder, Parsable { */ rowCount?: number | null; } -export type ApiEvaluationDatasetType = (typeof ApiEvaluationDatasetTypeObject)[keyof typeof ApiEvaluationDatasetTypeObject]; export interface ApiEvaluationMetric extends AdditionalDataHolder, Parsable { /** * The category property @@ -2116,10 +2080,6 @@ export interface ApiEvaluationRun extends AdditionalDataHolder, Parsable { * Whether agent is deleted */ agentDeleted?: boolean | null; - /** - * The agent deployment name - */ - agentDeploymentName?: string | null; /** * Agent name */ @@ -2286,49 +2246,6 @@ export interface ApiEvaluationTestCaseMetricList extends AdditionalDataHolder, P */ metricUuids?: string[] | null; } -/** - * Represents a span within an evaluatioin trace (e.g., LLM call, tool call, etc.) - */ -export interface ApiEvaluationTraceSpan extends AdditionalDataHolder, Parsable { - /** - * When the span was created - */ - createdAt?: Date | null; - /** - * Input data for the span (flexible structure - can be messages array, string, etc.) - */ - input?: ApiEvaluationTraceSpan_input | null; - /** - * Name/identifier for the span - */ - name?: string | null; - /** - * Output data from the span (flexible structure - can be message, string, etc.) - */ - output?: ApiEvaluationTraceSpan_output | null; - /** - * Any retriever span chunks that were included as part of the span. - */ - retrieverChunks?: ApiPromptChunk[] | null; - /** - * The span-level metric results. - */ - spanLevelMetricResults?: ApiEvaluationMetricResult[] | null; - /** - * Types of spans in a trace - */ - type?: ApiTraceSpanType | null; -} -/** - * Input data for the span (flexible structure - can be messages array, string, etc.) - */ -export interface ApiEvaluationTraceSpan_input extends AdditionalDataHolder, Parsable { -} -/** - * Output data from the span (flexible structure - can be message, string, etc.) - */ -export interface ApiEvaluationTraceSpan_output extends AdditionalDataHolder, Parsable { -} /** * Detailed info about each presigned URL returned to the client. 
*/ @@ -2663,14 +2580,6 @@ export interface ApiKBDataSource extends AdditionalDataHolder, Parsable { * Deprecated, moved to data_source_details */ bucketRegion?: string | null; - /** - * The chunking_algorithm property - */ - chunkingAlgorithm?: ApiChunkingAlgorithm | null; - /** - * The chunking_options property - */ - chunkingOptions?: ApiChunkingOptions | null; /** * Dropbox Data Source */ @@ -2765,14 +2674,6 @@ export interface ApiKnowledgeBaseDataSource extends AdditionalDataHolder, Parsab * Name of storage bucket - Deprecated, moved to data_source_details */ bucketName?: string | null; - /** - * The chunking_algorithm property - */ - chunkingAlgorithm?: ApiChunkingAlgorithm | null; - /** - * The chunking_options property - */ - chunkingOptions?: ApiChunkingOptions | null; /** * Creation date / time */ @@ -3220,18 +3121,6 @@ export interface ApiModel extends AdditionalDataHolder, Parsable { * True if it is a foundational model provided by do */ isFoundational?: boolean | null; - /** - * Default chunking size limit to show in UI - */ - kbDefaultChunkSize?: number | null; - /** - * Maximum chunk size limit of model - */ - kbMaxChunkSize?: number | null; - /** - * Minimum chunking size token limits if model supports KNOWLEDGEBASE usecase - */ - kbMinChunkSize?: number | null; /** * Additional meta data */ @@ -3362,18 +3251,6 @@ export interface ApiModelPublic extends AdditionalDataHolder, Parsable { * True if it is a foundational model provided by do */ isFoundational?: boolean | null; - /** - * Default chunking size limit to show in UI - */ - kbDefaultChunkSize?: number | null; - /** - * Maximum chunk size limit of model - */ - kbMaxChunkSize?: number | null; - /** - * Minimum chunking size token limits if model supports KNOWLEDGEBASE usecase - */ - kbMinChunkSize?: number | null; /** * Display name of the model */ @@ -3508,10 +3385,6 @@ export interface ApiPresignedUrlFile extends AdditionalDataHolder, Parsable { fileSize?: string | null; } export interface ApiPrompt extends AdditionalDataHolder, Parsable { - /** - * The evaluated trace spans. - */ - evaluationTraceSpans?: ApiEvaluationTraceSpan[] | null; /** * The ground truth for the prompt. */ @@ -3544,10 +3417,6 @@ export interface ApiPrompt extends AdditionalDataHolder, Parsable { * The metric results for the prompt. */ promptLevelMetricResults?: ApiEvaluationMetricResult[] | null; - /** - * The trace id for the prompt. - */ - traceId?: string | null; } export interface ApiPromptChunk extends AdditionalDataHolder, Parsable { /** @@ -3630,11 +3499,7 @@ export interface ApiRollbackToAgentVersionOutput extends AdditionalDataHolder, P */ export interface ApiRunEvaluationTestCaseInputPublic extends AdditionalDataHolder, Parsable { /** - * Agent deployment names to run the test case against (ADK agent workspaces). - */ - agentDeploymentNames?: string[] | null; - /** - * Agent UUIDs to run the test case against (legacy agents). + * Agent UUIDs to run the test case against. 
*/ agentUuids?: string[] | null; /** @@ -3754,7 +3619,6 @@ export interface ApiStartKnowledgeBaseIndexingJobOutput extends AdditionalDataHo */ job?: ApiIndexingJob | null; } -export type ApiTraceSpanType = (typeof ApiTraceSpanTypeObject)[keyof typeof ApiTraceSpanTypeObject]; /** * Information about a newly unlinked agent */ @@ -7863,15 +7727,6 @@ export function createApiCancelKnowledgeBaseIndexingJobOutputFromDiscriminatorVa export function createApiChatbotFromDiscriminatorValue(parseNode: ParseNode | undefined) : ((instance?: Parsable) => Record void>) { return deserializeIntoApiChatbot; } -/** - * Creates a new instance of the appropriate class based on discriminator value - * @param parseNode The parse node to use to read the discriminator value and create the object - * @returns {ApiChunkingOptions} - */ -// @ts-ignore -export function createApiChunkingOptionsFromDiscriminatorValue(parseNode: ParseNode | undefined) : ((instance?: Parsable) => Record void>) { - return deserializeIntoApiChunkingOptions; -} /** * Creates a new instance of the appropriate class based on discriminator value * @param parseNode The parse node to use to read the discriminator value and create the object @@ -8268,33 +8123,6 @@ export function createApiEvaluationTestCaseFromDiscriminatorValue(parseNode: Par export function createApiEvaluationTestCaseMetricListFromDiscriminatorValue(parseNode: ParseNode | undefined) : ((instance?: Parsable) => Record void>) { return deserializeIntoApiEvaluationTestCaseMetricList; } -/** - * Creates a new instance of the appropriate class based on discriminator value - * @param parseNode The parse node to use to read the discriminator value and create the object - * @returns {ApiEvaluationTraceSpan_input} - */ -// @ts-ignore -export function createApiEvaluationTraceSpan_inputFromDiscriminatorValue(parseNode: ParseNode | undefined) : ((instance?: Parsable) => Record void>) { - return deserializeIntoApiEvaluationTraceSpan_input; -} -/** - * Creates a new instance of the appropriate class based on discriminator value - * @param parseNode The parse node to use to read the discriminator value and create the object - * @returns {ApiEvaluationTraceSpan_output} - */ -// @ts-ignore -export function createApiEvaluationTraceSpan_outputFromDiscriminatorValue(parseNode: ParseNode | undefined) : ((instance?: Parsable) => Record void>) { - return deserializeIntoApiEvaluationTraceSpan_output; -} -/** - * Creates a new instance of the appropriate class based on discriminator value - * @param parseNode The parse node to use to read the discriminator value and create the object - * @returns {ApiEvaluationTraceSpan} - */ -// @ts-ignore -export function createApiEvaluationTraceSpanFromDiscriminatorValue(parseNode: ParseNode | undefined) : ((instance?: Parsable) => Record void>) { - return deserializeIntoApiEvaluationTraceSpan; -} /** * Creates a new instance of the appropriate class based on discriminator value * @param parseNode The parse node to use to read the discriminator value and create the object @@ -15010,20 +14838,6 @@ export function deserializeIntoApiChatbot(apiChatbot: Partial | unde "starting_message": n => { apiChatbot.startingMessage = n.getStringValue(); }, } } -/** - * The deserialization information for the current model - * @param ApiChunkingOptions The instance to deserialize into. 
- * @returns {Record void>} - */ -// @ts-ignore -export function deserializeIntoApiChunkingOptions(apiChunkingOptions: Partial | undefined = {}) : Record void> { - return { - "child_chunk_size": n => { apiChunkingOptions.childChunkSize = n.getNumberValue(); }, - "max_chunk_size": n => { apiChunkingOptions.maxChunkSize = n.getNumberValue(); }, - "parent_chunk_size": n => { apiChunkingOptions.parentChunkSize = n.getNumberValue(); }, - "semantic_threshold": n => { apiChunkingOptions.semanticThreshold = n.getNumberValue(); }, - } -} /** * The deserialization information for the current model * @param ApiCreateAgentAPIKeyInputPublic The instance to deserialize into. @@ -15134,7 +14948,6 @@ export function deserializeIntoApiCreateDataSourceFileUploadPresignedUrlsOutput( // @ts-ignore export function deserializeIntoApiCreateEvaluationDatasetInputPublic(apiCreateEvaluationDatasetInputPublic: Partial | undefined = {}) : Record void> { return { - "dataset_type": n => { apiCreateEvaluationDatasetInputPublic.datasetType = n.getEnumValue(ApiEvaluationDatasetTypeObject) ?? ApiEvaluationDatasetTypeObject.EVALUATION_DATASET_TYPE_UNKNOWN; }, "file_upload_dataset": n => { apiCreateEvaluationDatasetInputPublic.fileUploadDataset = n.getObjectValue(createApiFileUploadDataSourceFromDiscriminatorValue); }, "name": n => { apiCreateEvaluationDatasetInputPublic.name = n.getStringValue(); }, } @@ -15158,7 +14971,6 @@ export function deserializeIntoApiCreateEvaluationDatasetOutput(apiCreateEvaluat // @ts-ignore export function deserializeIntoApiCreateEvaluationTestCaseInputPublic(apiCreateEvaluationTestCaseInputPublic: Partial | undefined = {}) : Record void> { return { - "agent_workspace_name": n => { apiCreateEvaluationTestCaseInputPublic.agentWorkspaceName = n.getStringValue(); }, "dataset_uuid": n => { apiCreateEvaluationTestCaseInputPublic.datasetUuid = n.getStringValue(); }, "description": n => { apiCreateEvaluationTestCaseInputPublic.description = n.getStringValue(); }, "metrics": n => { apiCreateEvaluationTestCaseInputPublic.metrics = n.getCollectionOfPrimitiveValues(); }, @@ -15187,8 +14999,6 @@ export function deserializeIntoApiCreateEvaluationTestCaseOutput(apiCreateEvalua export function deserializeIntoApiCreateKnowledgeBaseDataSourceInputPublic(apiCreateKnowledgeBaseDataSourceInputPublic: Partial | undefined = {}) : Record void> { return { "aws_data_source": n => { apiCreateKnowledgeBaseDataSourceInputPublic.awsDataSource = n.getObjectValue(createApiAWSDataSourceFromDiscriminatorValue); }, - "chunking_algorithm": n => { apiCreateKnowledgeBaseDataSourceInputPublic.chunkingAlgorithm = n.getEnumValue(ApiChunkingAlgorithmObject) ?? 
ApiChunkingAlgorithmObject.CHUNKING_ALGORITHM_UNKNOWN; }, - "chunking_options": n => { apiCreateKnowledgeBaseDataSourceInputPublic.chunkingOptions = n.getObjectValue(createApiChunkingOptionsFromDiscriminatorValue); }, "knowledge_base_uuid": n => { apiCreateKnowledgeBaseDataSourceInputPublic.knowledgeBaseUuid = n.getStringValue(); }, "spaces_data_source": n => { apiCreateKnowledgeBaseDataSourceInputPublic.spacesDataSource = n.getObjectValue(createApiSpacesDataSourceFromDiscriminatorValue); }, "web_crawler_data_source": n => { apiCreateKnowledgeBaseDataSourceInputPublic.webCrawlerDataSource = n.getObjectValue(createApiWebCrawlerDataSourceFromDiscriminatorValue); }, @@ -15553,7 +15363,6 @@ export function deserializeIntoApiEvaluationMetricResult(apiEvaluationMetricResu export function deserializeIntoApiEvaluationRun(apiEvaluationRun: Partial | undefined = {}) : Record void> { return { "agent_deleted": n => { apiEvaluationRun.agentDeleted = n.getBooleanValue(); }, - "agent_deployment_name": n => { apiEvaluationRun.agentDeploymentName = n.getStringValue(); }, "agent_name": n => { apiEvaluationRun.agentName = n.getStringValue(); }, "agent_uuid": n => { apiEvaluationRun.agentUuid = n.getStringValue(); }, "agent_version_hash": n => { apiEvaluationRun.agentVersionHash = n.getStringValue(); }, @@ -15616,43 +15425,6 @@ export function deserializeIntoApiEvaluationTestCaseMetricList(apiEvaluationTest "metric_uuids": n => { apiEvaluationTestCaseMetricList.metricUuids = n.getCollectionOfPrimitiveValues(); }, } } -/** - * The deserialization information for the current model - * @param ApiEvaluationTraceSpan The instance to deserialize into. - * @returns {Record void>} - */ -// @ts-ignore -export function deserializeIntoApiEvaluationTraceSpan(apiEvaluationTraceSpan: Partial | undefined = {}) : Record void> { - return { - "created_at": n => { apiEvaluationTraceSpan.createdAt = n.getDateValue(); }, - "input": n => { apiEvaluationTraceSpan.input = n.getObjectValue(createApiEvaluationTraceSpan_inputFromDiscriminatorValue); }, - "name": n => { apiEvaluationTraceSpan.name = n.getStringValue(); }, - "output": n => { apiEvaluationTraceSpan.output = n.getObjectValue(createApiEvaluationTraceSpan_outputFromDiscriminatorValue); }, - "retriever_chunks": n => { apiEvaluationTraceSpan.retrieverChunks = n.getCollectionOfObjectValues(createApiPromptChunkFromDiscriminatorValue); }, - "span_level_metric_results": n => { apiEvaluationTraceSpan.spanLevelMetricResults = n.getCollectionOfObjectValues(createApiEvaluationMetricResultFromDiscriminatorValue); }, - "type": n => { apiEvaluationTraceSpan.type = n.getEnumValue(ApiTraceSpanTypeObject) ?? ApiTraceSpanTypeObject.TRACE_SPAN_TYPE_UNKNOWN; }, - } -} -/** - * The deserialization information for the current model - * @param ApiEvaluationTraceSpan_input The instance to deserialize into. - * @returns {Record void>} - */ -// @ts-ignore -export function deserializeIntoApiEvaluationTraceSpan_input(apiEvaluationTraceSpan_input: Partial | undefined = {}) : Record void> { - return { - } -} -/** - * The deserialization information for the current model - * @param ApiEvaluationTraceSpan_output The instance to deserialize into. - * @returns {Record void>} - */ -// @ts-ignore -export function deserializeIntoApiEvaluationTraceSpan_output(apiEvaluationTraceSpan_output: Partial | undefined = {}) : Record void> { - return { - } -} /** * The deserialization information for the current model * @param ApiFilePresignedUrlResponse The instance to deserialize into. 
@@ -15934,8 +15706,6 @@ export function deserializeIntoApiKBDataSource(apiKBDataSource: Partial { apiKBDataSource.awsDataSource = n.getObjectValue(createApiAWSDataSourceFromDiscriminatorValue); }, "bucket_name": n => { apiKBDataSource.bucketName = n.getStringValue(); }, "bucket_region": n => { apiKBDataSource.bucketRegion = n.getStringValue(); }, - "chunking_algorithm": n => { apiKBDataSource.chunkingAlgorithm = n.getEnumValue(ApiChunkingAlgorithmObject) ?? ApiChunkingAlgorithmObject.CHUNKING_ALGORITHM_UNKNOWN; }, - "chunking_options": n => { apiKBDataSource.chunkingOptions = n.getObjectValue(createApiChunkingOptionsFromDiscriminatorValue); }, "dropbox_data_source": n => { apiKBDataSource.dropboxDataSource = n.getObjectValue(createApiDropboxDataSourceFromDiscriminatorValue); }, "file_upload_data_source": n => { apiKBDataSource.fileUploadDataSource = n.getObjectValue(createApiFileUploadDataSourceFromDiscriminatorValue); }, "google_drive_data_source": n => { apiKBDataSource.googleDriveDataSource = n.getObjectValue(createApiGoogleDriveDataSourceFromDiscriminatorValue); }, @@ -15977,8 +15747,6 @@ export function deserializeIntoApiKnowledgeBaseDataSource(apiKnowledgeBaseDataSo return { "aws_data_source": n => { apiKnowledgeBaseDataSource.awsDataSource = n.getObjectValue(createApiAWSDataSourceDisplayFromDiscriminatorValue); }, "bucket_name": n => { apiKnowledgeBaseDataSource.bucketName = n.getStringValue(); }, - "chunking_algorithm": n => { apiKnowledgeBaseDataSource.chunkingAlgorithm = n.getEnumValue(ApiChunkingAlgorithmObject) ?? ApiChunkingAlgorithmObject.CHUNKING_ALGORITHM_UNKNOWN; }, - "chunking_options": n => { apiKnowledgeBaseDataSource.chunkingOptions = n.getObjectValue(createApiChunkingOptionsFromDiscriminatorValue); }, "created_at": n => { apiKnowledgeBaseDataSource.createdAt = n.getDateValue(); }, "dropbox_data_source": n => { apiKnowledgeBaseDataSource.dropboxDataSource = n.getObjectValue(createApiDropboxDataSourceDisplayFromDiscriminatorValue); }, "file_upload_data_source": n => { apiKnowledgeBaseDataSource.fileUploadDataSource = n.getObjectValue(createApiFileUploadDataSourceFromDiscriminatorValue); }, @@ -16360,9 +16128,6 @@ export function deserializeIntoApiModel(apiModel: Partial | undefined "inference_name": n => { apiModel.inferenceName = n.getStringValue(); }, "inference_version": n => { apiModel.inferenceVersion = n.getStringValue(); }, "is_foundational": n => { apiModel.isFoundational = n.getBooleanValue(); }, - "kb_default_chunk_size": n => { apiModel.kbDefaultChunkSize = n.getNumberValue(); }, - "kb_max_chunk_size": n => { apiModel.kbMaxChunkSize = n.getNumberValue(); }, - "kb_min_chunk_size": n => { apiModel.kbMinChunkSize = n.getNumberValue(); }, "metadata": n => { apiModel.metadata = n.getObjectValue(createApiModel_metadataFromDiscriminatorValue); }, "name": n => { apiModel.name = n.getStringValue(); }, "parent_uuid": n => { apiModel.parentUuid = n.getStringValue(); }, @@ -16431,9 +16196,6 @@ export function deserializeIntoApiModelPublic(apiModelPublic: Partial { apiModelPublic.createdAt = n.getDateValue(); }, "id": n => { apiModelPublic.id = n.getStringValue(); }, "is_foundational": n => { apiModelPublic.isFoundational = n.getBooleanValue(); }, - "kb_default_chunk_size": n => { apiModelPublic.kbDefaultChunkSize = n.getNumberValue(); }, - "kb_max_chunk_size": n => { apiModelPublic.kbMaxChunkSize = n.getNumberValue(); }, - "kb_min_chunk_size": n => { apiModelPublic.kbMinChunkSize = n.getNumberValue(); }, "name": n => { apiModelPublic.name = n.getStringValue(); }, 
"parent_uuid": n => { apiModelPublic.parentUuid = n.getStringValue(); }, "updated_at": n => { apiModelPublic.updatedAt = n.getDateValue(); }, @@ -16530,7 +16292,6 @@ export function deserializeIntoApiPresignedUrlFile(apiPresignedUrlFile: Partial< // @ts-ignore export function deserializeIntoApiPrompt(apiPrompt: Partial | undefined = {}) : Record void> { return { - "evaluation_trace_spans": n => { apiPrompt.evaluationTraceSpans = n.getCollectionOfObjectValues(createApiEvaluationTraceSpanFromDiscriminatorValue); }, "ground_truth": n => { apiPrompt.groundTruth = n.getStringValue(); }, "input": n => { apiPrompt.input = n.getStringValue(); }, "input_tokens": n => { apiPrompt.inputTokens = n.getStringValue(); }, @@ -16539,7 +16300,6 @@ export function deserializeIntoApiPrompt(apiPrompt: Partial | undefin "prompt_chunks": n => { apiPrompt.promptChunks = n.getCollectionOfObjectValues(createApiPromptChunkFromDiscriminatorValue); }, "prompt_id": n => { apiPrompt.promptId = n.getNumberValue(); }, "prompt_level_metric_results": n => { apiPrompt.promptLevelMetricResults = n.getCollectionOfObjectValues(createApiEvaluationMetricResultFromDiscriminatorValue); }, - "trace_id": n => { apiPrompt.traceId = n.getStringValue(); }, } } /** @@ -16625,7 +16385,6 @@ export function deserializeIntoApiRollbackToAgentVersionOutput(apiRollbackToAgen // @ts-ignore export function deserializeIntoApiRunEvaluationTestCaseInputPublic(apiRunEvaluationTestCaseInputPublic: Partial | undefined = {}) : Record void> { return { - "agent_deployment_names": n => { apiRunEvaluationTestCaseInputPublic.agentDeploymentNames = n.getCollectionOfPrimitiveValues(); }, "agent_uuids": n => { apiRunEvaluationTestCaseInputPublic.agentUuids = n.getCollectionOfPrimitiveValues(); }, "run_name": n => { apiRunEvaluationTestCaseInputPublic.runName = n.getStringValue(); }, "test_case_uuid": n => { apiRunEvaluationTestCaseInputPublic.testCaseUuid = n.getStringValue(); }, @@ -29128,21 +28887,6 @@ export function serializeApiChatbot(writer: SerializationWriter, apiChatbot: Par writer.writeStringValue("starting_message", apiChatbot.startingMessage); writer.writeAdditionalData(apiChatbot.additionalData); } -/** - * Serializes information the current object - * @param ApiChunkingOptions The instance to serialize from. - * @param isSerializingDerivedType A boolean indicating whether the serialization is for a derived type. - * @param writer Serialization writer to use to serialize this model - */ -// @ts-ignore -export function serializeApiChunkingOptions(writer: SerializationWriter, apiChunkingOptions: Partial | undefined | null = {}, isSerializingDerivedType: boolean = false) : void { - if (!apiChunkingOptions || isSerializingDerivedType) { return; } - writer.writeNumberValue("child_chunk_size", apiChunkingOptions.childChunkSize); - writer.writeNumberValue("max_chunk_size", apiChunkingOptions.maxChunkSize); - writer.writeNumberValue("parent_chunk_size", apiChunkingOptions.parentChunkSize); - writer.writeNumberValue("semantic_threshold", apiChunkingOptions.semanticThreshold); - writer.writeAdditionalData(apiChunkingOptions.additionalData); -} /** * Serializes information the current object * @param ApiCreateAgentAPIKeyInputPublic The instance to serialize from. 
@@ -29262,7 +29006,6 @@ export function serializeApiCreateDataSourceFileUploadPresignedUrlsOutput(writer // @ts-ignore export function serializeApiCreateEvaluationDatasetInputPublic(writer: SerializationWriter, apiCreateEvaluationDatasetInputPublic: Partial | undefined | null = {}, isSerializingDerivedType: boolean = false) : void { if (!apiCreateEvaluationDatasetInputPublic || isSerializingDerivedType) { return; } - writer.writeEnumValue("dataset_type", apiCreateEvaluationDatasetInputPublic.datasetType ?? ApiEvaluationDatasetTypeObject.EVALUATION_DATASET_TYPE_UNKNOWN); writer.writeObjectValue("file_upload_dataset", apiCreateEvaluationDatasetInputPublic.fileUploadDataset, serializeApiFileUploadDataSource); writer.writeStringValue("name", apiCreateEvaluationDatasetInputPublic.name); writer.writeAdditionalData(apiCreateEvaluationDatasetInputPublic.additionalData); @@ -29288,7 +29031,6 @@ export function serializeApiCreateEvaluationDatasetOutput(writer: SerializationW // @ts-ignore export function serializeApiCreateEvaluationTestCaseInputPublic(writer: SerializationWriter, apiCreateEvaluationTestCaseInputPublic: Partial | undefined | null = {}, isSerializingDerivedType: boolean = false) : void { if (!apiCreateEvaluationTestCaseInputPublic || isSerializingDerivedType) { return; } - writer.writeStringValue("agent_workspace_name", apiCreateEvaluationTestCaseInputPublic.agentWorkspaceName); writer.writeStringValue("dataset_uuid", apiCreateEvaluationTestCaseInputPublic.datasetUuid); writer.writeStringValue("description", apiCreateEvaluationTestCaseInputPublic.description); writer.writeCollectionOfPrimitiveValues("metrics", apiCreateEvaluationTestCaseInputPublic.metrics); @@ -29319,8 +29061,6 @@ export function serializeApiCreateEvaluationTestCaseOutput(writer: Serialization export function serializeApiCreateKnowledgeBaseDataSourceInputPublic(writer: SerializationWriter, apiCreateKnowledgeBaseDataSourceInputPublic: Partial | undefined | null = {}, isSerializingDerivedType: boolean = false) : void { if (!apiCreateKnowledgeBaseDataSourceInputPublic || isSerializingDerivedType) { return; } writer.writeObjectValue("aws_data_source", apiCreateKnowledgeBaseDataSourceInputPublic.awsDataSource, serializeApiAWSDataSource); - writer.writeEnumValue("chunking_algorithm", apiCreateKnowledgeBaseDataSourceInputPublic.chunkingAlgorithm ?? 
ApiChunkingAlgorithmObject.CHUNKING_ALGORITHM_UNKNOWN); - writer.writeObjectValue("chunking_options", apiCreateKnowledgeBaseDataSourceInputPublic.chunkingOptions, serializeApiChunkingOptions); writer.writeStringValue("knowledge_base_uuid", apiCreateKnowledgeBaseDataSourceInputPublic.knowledgeBaseUuid); writer.writeObjectValue("spaces_data_source", apiCreateKnowledgeBaseDataSourceInputPublic.spacesDataSource, serializeApiSpacesDataSource); writer.writeObjectValue("web_crawler_data_source", apiCreateKnowledgeBaseDataSourceInputPublic.webCrawlerDataSource, serializeApiWebCrawlerDataSource); @@ -29714,7 +29454,6 @@ export function serializeApiEvaluationMetricResult(writer: SerializationWriter, export function serializeApiEvaluationRun(writer: SerializationWriter, apiEvaluationRun: Partial | undefined | null = {}, isSerializingDerivedType: boolean = false) : void { if (!apiEvaluationRun || isSerializingDerivedType) { return; } writer.writeBooleanValue("agent_deleted", apiEvaluationRun.agentDeleted); - writer.writeStringValue("agent_deployment_name", apiEvaluationRun.agentDeploymentName); writer.writeStringValue("agent_name", apiEvaluationRun.agentName); writer.writeStringValue("agent_uuid", apiEvaluationRun.agentUuid); writer.writeStringValue("agent_version_hash", apiEvaluationRun.agentVersionHash); @@ -29779,46 +29518,6 @@ export function serializeApiEvaluationTestCaseMetricList(writer: SerializationWr writer.writeCollectionOfPrimitiveValues("metric_uuids", apiEvaluationTestCaseMetricList.metricUuids); writer.writeAdditionalData(apiEvaluationTestCaseMetricList.additionalData); } -/** - * Serializes information the current object - * @param ApiEvaluationTraceSpan The instance to serialize from. - * @param isSerializingDerivedType A boolean indicating whether the serialization is for a derived type. - * @param writer Serialization writer to use to serialize this model - */ -// @ts-ignore -export function serializeApiEvaluationTraceSpan(writer: SerializationWriter, apiEvaluationTraceSpan: Partial | undefined | null = {}, isSerializingDerivedType: boolean = false) : void { - if (!apiEvaluationTraceSpan || isSerializingDerivedType) { return; } - writer.writeDateValue("created_at", apiEvaluationTraceSpan.createdAt); - writer.writeObjectValue("input", apiEvaluationTraceSpan.input, serializeApiEvaluationTraceSpan_input); - writer.writeStringValue("name", apiEvaluationTraceSpan.name); - writer.writeObjectValue("output", apiEvaluationTraceSpan.output, serializeApiEvaluationTraceSpan_output); - writer.writeCollectionOfObjectValues("retriever_chunks", apiEvaluationTraceSpan.retrieverChunks, serializeApiPromptChunk); - writer.writeCollectionOfObjectValues("span_level_metric_results", apiEvaluationTraceSpan.spanLevelMetricResults, serializeApiEvaluationMetricResult); - writer.writeEnumValue("type", apiEvaluationTraceSpan.type ?? ApiTraceSpanTypeObject.TRACE_SPAN_TYPE_UNKNOWN); - writer.writeAdditionalData(apiEvaluationTraceSpan.additionalData); -} -/** - * Serializes information the current object - * @param ApiEvaluationTraceSpan_input The instance to serialize from. - * @param isSerializingDerivedType A boolean indicating whether the serialization is for a derived type. 
- * @param writer Serialization writer to use to serialize this model - */ -// @ts-ignore -export function serializeApiEvaluationTraceSpan_input(writer: SerializationWriter, apiEvaluationTraceSpan_input: Partial | undefined | null = {}, isSerializingDerivedType: boolean = false) : void { - if (!apiEvaluationTraceSpan_input || isSerializingDerivedType) { return; } - writer.writeAdditionalData(apiEvaluationTraceSpan_input.additionalData); -} -/** - * Serializes information the current object - * @param ApiEvaluationTraceSpan_output The instance to serialize from. - * @param isSerializingDerivedType A boolean indicating whether the serialization is for a derived type. - * @param writer Serialization writer to use to serialize this model - */ -// @ts-ignore -export function serializeApiEvaluationTraceSpan_output(writer: SerializationWriter, apiEvaluationTraceSpan_output: Partial | undefined | null = {}, isSerializingDerivedType: boolean = false) : void { - if (!apiEvaluationTraceSpan_output || isSerializingDerivedType) { return; } - writer.writeAdditionalData(apiEvaluationTraceSpan_output.additionalData); -} /** * Serializes information the current object * @param ApiFilePresignedUrlResponse The instance to serialize from. @@ -30122,8 +29821,6 @@ export function serializeApiKBDataSource(writer: SerializationWriter, apiKBDataS writer.writeObjectValue("aws_data_source", apiKBDataSource.awsDataSource, serializeApiAWSDataSource); writer.writeStringValue("bucket_name", apiKBDataSource.bucketName); writer.writeStringValue("bucket_region", apiKBDataSource.bucketRegion); - writer.writeEnumValue("chunking_algorithm", apiKBDataSource.chunkingAlgorithm ?? ApiChunkingAlgorithmObject.CHUNKING_ALGORITHM_UNKNOWN); - writer.writeObjectValue("chunking_options", apiKBDataSource.chunkingOptions, serializeApiChunkingOptions); writer.writeObjectValue("dropbox_data_source", apiKBDataSource.dropboxDataSource, serializeApiDropboxDataSource); writer.writeObjectValue("file_upload_data_source", apiKBDataSource.fileUploadDataSource, serializeApiFileUploadDataSource); writer.writeObjectValue("google_drive_data_source", apiKBDataSource.googleDriveDataSource, serializeApiGoogleDriveDataSource); @@ -30167,8 +29864,6 @@ export function serializeApiKnowledgeBaseDataSource(writer: SerializationWriter, if (!apiKnowledgeBaseDataSource || isSerializingDerivedType) { return; } writer.writeObjectValue("aws_data_source", apiKnowledgeBaseDataSource.awsDataSource, serializeApiAWSDataSourceDisplay); writer.writeStringValue("bucket_name", apiKnowledgeBaseDataSource.bucketName); - writer.writeEnumValue("chunking_algorithm", apiKnowledgeBaseDataSource.chunkingAlgorithm ?? 
ApiChunkingAlgorithmObject.CHUNKING_ALGORITHM_UNKNOWN); - writer.writeObjectValue("chunking_options", apiKnowledgeBaseDataSource.chunkingOptions, serializeApiChunkingOptions); writer.writeDateValue("created_at", apiKnowledgeBaseDataSource.createdAt); writer.writeObjectValue("dropbox_data_source", apiKnowledgeBaseDataSource.dropboxDataSource, serializeApiDropboxDataSourceDisplay); writer.writeObjectValue("file_upload_data_source", apiKnowledgeBaseDataSource.fileUploadDataSource, serializeApiFileUploadDataSource); @@ -30580,9 +30275,6 @@ export function serializeApiModel(writer: SerializationWriter, apiModel: Partial writer.writeStringValue("inference_name", apiModel.inferenceName); writer.writeStringValue("inference_version", apiModel.inferenceVersion); writer.writeBooleanValue("is_foundational", apiModel.isFoundational); - writer.writeNumberValue("kb_default_chunk_size", apiModel.kbDefaultChunkSize); - writer.writeNumberValue("kb_max_chunk_size", apiModel.kbMaxChunkSize); - writer.writeNumberValue("kb_min_chunk_size", apiModel.kbMinChunkSize); writer.writeObjectValue("metadata", apiModel.metadata, serializeApiModel_metadata); writer.writeStringValue("name", apiModel.name); writer.writeStringValue("parent_uuid", apiModel.parentUuid); @@ -30656,9 +30348,6 @@ export function serializeApiModelPublic(writer: SerializationWriter, apiModelPub writer.writeDateValue("created_at", apiModelPublic.createdAt); writer.writeStringValue("id", apiModelPublic.id); writer.writeBooleanValue("is_foundational", apiModelPublic.isFoundational); - writer.writeNumberValue("kb_default_chunk_size", apiModelPublic.kbDefaultChunkSize); - writer.writeNumberValue("kb_max_chunk_size", apiModelPublic.kbMaxChunkSize); - writer.writeNumberValue("kb_min_chunk_size", apiModelPublic.kbMinChunkSize); writer.writeStringValue("name", apiModelPublic.name); writer.writeStringValue("parent_uuid", apiModelPublic.parentUuid); writer.writeDateValue("updated_at", apiModelPublic.updatedAt); @@ -30762,7 +30451,6 @@ export function serializeApiPresignedUrlFile(writer: SerializationWriter, apiPre // @ts-ignore export function serializeApiPrompt(writer: SerializationWriter, apiPrompt: Partial | undefined | null = {}, isSerializingDerivedType: boolean = false) : void { if (!apiPrompt || isSerializingDerivedType) { return; } - writer.writeCollectionOfObjectValues("evaluation_trace_spans", apiPrompt.evaluationTraceSpans, serializeApiEvaluationTraceSpan); writer.writeStringValue("ground_truth", apiPrompt.groundTruth); writer.writeStringValue("input", apiPrompt.input); writer.writeStringValue("input_tokens", apiPrompt.inputTokens); @@ -30771,7 +30459,6 @@ export function serializeApiPrompt(writer: SerializationWriter, apiPrompt: Parti writer.writeCollectionOfObjectValues("prompt_chunks", apiPrompt.promptChunks, serializeApiPromptChunk); writer.writeNumberValue("prompt_id", apiPrompt.promptId); writer.writeCollectionOfObjectValues("prompt_level_metric_results", apiPrompt.promptLevelMetricResults, serializeApiEvaluationMetricResult); - writer.writeStringValue("trace_id", apiPrompt.traceId); writer.writeAdditionalData(apiPrompt.additionalData); } /** @@ -30864,7 +30551,6 @@ export function serializeApiRollbackToAgentVersionOutput(writer: SerializationWr // @ts-ignore export function serializeApiRunEvaluationTestCaseInputPublic(writer: SerializationWriter, apiRunEvaluationTestCaseInputPublic: Partial | undefined | null = {}, isSerializingDerivedType: boolean = false) : void { if (!apiRunEvaluationTestCaseInputPublic || isSerializingDerivedType) { 
return; } - writer.writeCollectionOfPrimitiveValues("agent_deployment_names", apiRunEvaluationTestCaseInputPublic.agentDeploymentNames); writer.writeCollectionOfPrimitiveValues("agent_uuids", apiRunEvaluationTestCaseInputPublic.agentUuids); writer.writeStringValue("run_name", apiRunEvaluationTestCaseInputPublic.runName); writer.writeStringValue("test_case_uuid", apiRunEvaluationTestCaseInputPublic.testCaseUuid); @@ -39745,13 +39431,6 @@ export const ApiBatchJobPhaseObject = { BATCH_JOB_PHASE_ERROR: "BATCH_JOB_PHASE_ERROR", BATCH_JOB_PHASE_CANCELLED: "BATCH_JOB_PHASE_CANCELLED", } as const; -export const ApiChunkingAlgorithmObject = { - CHUNKING_ALGORITHM_UNKNOWN: "CHUNKING_ALGORITHM_UNKNOWN", - CHUNKING_ALGORITHM_SECTION_BASED: "CHUNKING_ALGORITHM_SECTION_BASED", - CHUNKING_ALGORITHM_HIERARCHICAL: "CHUNKING_ALGORITHM_HIERARCHICAL", - CHUNKING_ALGORITHM_SEMANTIC: "CHUNKING_ALGORITHM_SEMANTIC", - CHUNKING_ALGORITHM_FIXED_LENGTH: "CHUNKING_ALGORITHM_FIXED_LENGTH", -} as const; /** * Options for specifying how URLs found on pages should be handled. - UNKNOWN: Default unknown value - SCOPED: Only include the base URL. - PATH: Crawl the base URL and linked pages within the URL path. - DOMAIN: Crawl the base URL and linked pages within the same domain. - SUBDOMAINS: Crawl the base URL and linked pages for any subdomain. - SITEMAP: Crawl URLs discovered in the sitemap. */ @@ -39785,11 +39464,6 @@ export const ApiDeploymentVisibilityObject = { VISIBILITY_PUBLIC: "VISIBILITY_PUBLIC", VISIBILITY_PRIVATE: "VISIBILITY_PRIVATE", } as const; -export const ApiEvaluationDatasetTypeObject = { - EVALUATION_DATASET_TYPE_UNKNOWN: "EVALUATION_DATASET_TYPE_UNKNOWN", - EVALUATION_DATASET_TYPE_ADK: "EVALUATION_DATASET_TYPE_ADK", - EVALUATION_DATASET_TYPE_NON_ADK: "EVALUATION_DATASET_TYPE_NON_ADK", -} as const; export const ApiEvaluationMetricCategoryObject = { METRIC_CATEGORY_UNSPECIFIED: "METRIC_CATEGORY_UNSPECIFIED", METRIC_CATEGORY_CORRECTNESS: "METRIC_CATEGORY_CORRECTNESS", @@ -39875,15 +39549,6 @@ export const ApiRetrievalMethodObject = { RETRIEVAL_METHOD_SUB_QUERIES: "RETRIEVAL_METHOD_SUB_QUERIES", RETRIEVAL_METHOD_NONE: "RETRIEVAL_METHOD_NONE", } as const; -/** - * Types of spans in a trace - */ -export const ApiTraceSpanTypeObject = { - TRACE_SPAN_TYPE_UNKNOWN: "TRACE_SPAN_TYPE_UNKNOWN", - TRACE_SPAN_TYPE_LLM: "TRACE_SPAN_TYPE_LLM", - TRACE_SPAN_TYPE_RETRIEVER: "TRACE_SPAN_TYPE_RETRIEVER", - TRACE_SPAN_TYPE_TOOL: "TRACE_SPAN_TYPE_TOOL", -} as const; export const App_alert_phaseObject = { UNKNOWN: "UNKNOWN", PENDING: "PENDING",