[sync] [T2288] feat(v2): add field command explain endpoints (#1383) (#2722)

Synced from teableio/teable-ee@e5e3103

Co-authored-by: nichenqin <nichenqin@hotmail.com>
This commit is contained in:
Bieber 2026-03-10 12:41:15 +08:00 committed by GitHub
parent 33a06d1ac6
commit 2ccdb73ca3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
27 changed files with 2292 additions and 10 deletions

View File

@ -1,6 +1,11 @@
// Schema (DDL) exports - re-export selectively to avoid conflicts
export type { IV2PostgresDdlAdapterConfig } from './schema';
export { v2PostgresDdlAdapterConfigSchema } from './schema';
export {
FieldValueChangeCollectorVisitor,
TableAddFieldCollectorVisitor,
TableSchemaUpdateVisitor,
} from './schema';
export * from './schema/rules';
export * from './schema/repositories';
export * from './schema/naming';

View File

@ -6,6 +6,7 @@ export * from './ComputedUpdatePlanner';
export * from './ComputedUpdateRun';
export * from './FieldDependencyGraph';
export * from './UpdateFromSelectBuilder';
export * from './isPersistedAsGeneratedColumn';
export * from './outbox/ComputedUpdateOutbox';
export * from './outbox/ComputedUpdateOutboxPayload';
export * from './outbox/ComputedUpdateSeedPayload';

View File

@ -3,6 +3,9 @@ export * from './di/register';
export * from './di/tokens';
export * from './repositories/PostgresTableSchemaRepository';
export * from './rules';
export { FieldValueChangeCollectorVisitor } from './visitors/FieldValueChangeCollectorVisitor';
export { TableAddFieldCollectorVisitor } from './visitors/TableAddFieldCollectorVisitor';
export { TableSchemaUpdateVisitor } from './visitors/TableSchemaUpdateVisitor';
// Re-export visitor types except TableSchemaStatementBuilder (already exported from rules)
export type { ICreateTableBuilderRef } from './visitors/PostgresTableSchemaFieldCreateVisitor';
export { PostgresTableSchemaFieldCreateVisitor } from './visitors/PostgresTableSchemaFieldCreateVisitor';

View File

@ -1,4 +1,5 @@
export * from './DependencyChangeDetectorVisitor';
export * from './FieldValueChangeCollectorVisitor';
export * from './FieldValueDuplicateVisitor';
export * from './LinkFieldValueDuplicateVisitor';
export * from './PostgresTableSchemaFieldColumn';

View File

@ -0,0 +1,146 @@
import { inject, injectable } from '@teable/v2-di';
import { err, ok, safeTry } from 'neverthrow';
import type { Result } from 'neverthrow';
import {
CreateFieldCommand,
CreateFieldHandler,
FieldCreationSideEffectService,
FieldUndoRedoSnapshotService,
ForeignTableLoaderService,
type DomainError,
type IExecutionContext,
type ITableRepository,
TableByIdSpec,
v2CoreTokens,
} from '@teable/v2-core';
import {
v2RecordRepositoryPostgresTokens,
type ComputedUpdatePlanner,
} from '@teable/v2-adapter-table-repository-postgres';
import { formulaSqlPgTokens, type IPgTypeValidationStrategy } from '@teable/v2-formula-sql-pg';
import type { Kysely } from 'kysely';
import type { V1TeableDatabase } from '@teable/v2-postgres-schema';
import type { ICommandAnalyzer } from './ICommandAnalyzer';
import {
buildFieldSqlExplains,
createFieldExplainDryRunEnvironment,
createNoopUndoRedoService,
} from './FieldCommandAnalyzeHelpers';
import type { CommandExplainInfo, ExplainOptions, ExplainResult } from '../types';
import { DEFAULT_EXPLAIN_OPTIONS } from '../types';
import { v2CommandExplainTokens } from '../di/tokens';
import { SqlExplainRunner } from '../utils/SqlExplainRunner';
import { ComplexityCalculator } from '../utils/ComplexityCalculator';
/**
 * Dry-run analyzer for {@link CreateFieldCommand}: runs the real
 * CreateFieldHandler against an overlay/capture environment so no schema
 * change is persisted, then reports the captured SQL (optionally EXPLAINed)
 * and a complexity score.
 */
@injectable()
export class CreateFieldAnalyzer implements ICommandAnalyzer<CreateFieldCommand> {
  constructor(
    @inject(v2RecordRepositoryPostgresTokens.db)
    private readonly db: Kysely<V1TeableDatabase>,
    @inject(v2CoreTokens.tableRepository)
    private readonly tableRepository: ITableRepository,
    @inject(v2CoreTokens.foreignTableLoaderService)
    private readonly foreignTableLoaderService: ForeignTableLoaderService,
    @inject(v2CoreTokens.fieldUndoRedoSnapshotService)
    private readonly fieldUndoRedoSnapshotService: FieldUndoRedoSnapshotService,
    @inject(v2RecordRepositoryPostgresTokens.computedUpdatePlanner)
    private readonly computedUpdatePlanner: ComputedUpdatePlanner,
    @inject(v2CommandExplainTokens.sqlExplainRunner)
    private readonly sqlExplainRunner: SqlExplainRunner,
    @inject(v2CommandExplainTokens.complexityCalculator)
    private readonly complexityCalculator: ComplexityCalculator,
    @inject(formulaSqlPgTokens.typeValidationStrategy)
    private readonly typeValidationStrategy: IPgTypeValidationStrategy
  ) {}

  /**
   * Explains `command` without side effects.
   *
   * @param context - execution context forwarded to repositories and the handler
   * @param command - the CreateField command to analyze
   * @param options - caller options, merged over DEFAULT_EXPLAIN_OPTIONS
   * @param startTime - epoch ms used to compute `timing.totalMs`
   * @returns the explain result, or the first DomainError produced by the
   *   table lookup or the handler
   */
  async analyze(
    context: IExecutionContext,
    command: CreateFieldCommand,
    options: ExplainOptions,
    startTime: number
  ): Promise<Result<ExplainResult, DomainError>> {
    // safeTry runs a generator function, so `this` is captured up front.
    const analyzer = this;
    const mergedOptions = { ...DEFAULT_EXPLAIN_OPTIONS, ...options };
    return safeTry<ExplainResult, DomainError>(async function* () {
      // Snapshot the table BEFORE the dry run so the new field can be
      // identified by diffing field ids afterwards.
      const beforeTableSpec = TableByIdSpec.create(command.tableId);
      const beforeTableResult = await analyzer.tableRepository.findOne(context, beforeTableSpec);
      if (beforeTableResult.isErr()) {
        return err(beforeTableResult.error);
      }
      const beforeTable = beforeTableResult.value;
      // Overlay repository + SQL-capturing schema repository: the handler
      // runs for real, but every write stays in memory / is only recorded.
      const dryRun = createFieldExplainDryRunEnvironment({
        db: analyzer.db,
        tableRepository: analyzer.tableRepository,
        computedUpdatePlanner: analyzer.computedUpdatePlanner,
        typeValidationStrategy: analyzer.typeValidationStrategy,
      });
      const handler = new CreateFieldHandler(
        dryRun.tableUpdateFlow,
        new FieldCreationSideEffectService(dryRun.tableUpdateFlow),
        analyzer.foreignTableLoaderService,
        // Noop undo/redo: nothing should be recorded during a dry run. The
        // `as never` cast bridges the handler's concrete service type.
        createNoopUndoRedoService() as never,
        analyzer.fieldUndoRedoSnapshotService
      );
      const commandResult = await handler.handle(context, command);
      if (commandResult.isErr()) {
        return err(commandResult.error);
      }
      const afterTable = commandResult.value.table;
      // Primary detection: the field id that exists after but not before.
      // Fallback: match by the explicit id supplied on the command, if any.
      const beforeFieldIds = new Set(beforeTable.getFields().map((field) => field.id().toString()));
      const createdField =
        afterTable.getFields().find((field) => !beforeFieldIds.has(field.id().toString())) ??
        afterTable
          .getFields()
          .find((field) => command.field.id != null && field.id().toString() === command.field.id);
      const commandInfo: CommandExplainInfo = {
        type: 'CreateField',
        tableId: afterTable.id().toString(),
        tableName: afterTable.name().toString(),
        recordIds: [],
        changedFieldIds: createdField ? [createdField.id().toString()] : undefined,
        changedFieldNames: createdField ? [createdField.name().toString()] : undefined,
        changedFieldTypes: createdField ? [createdField.type().toString()] : undefined,
        changeType: 'insert',
      };
      // EXPLAIN the captured statements only when the caller asked for SQL.
      const sqlExplainStartTime = Date.now();
      const sqlExplains = mergedOptions.includeSql
        ? await buildFieldSqlExplains(
            analyzer.sqlExplainRunner,
            analyzer.db,
            dryRun.captureTableSchemaRepository.getStatements(),
            mergedOptions.analyze
          )
        : [];
      const sqlExplainMs = Date.now() - sqlExplainStartTime;
      const complexity = analyzer.complexityCalculator.calculate({
        commandInfo,
        computedImpact: null,
        sqlExplains,
      });
      return ok({
        command: commandInfo,
        computedImpact: null,
        computedLocks: null,
        linkLocks: null,
        sqlExplains,
        complexity,
        timing: {
          totalMs: Date.now() - startTime,
          // Dependency-graph and planning phases are not measured for field
          // commands; only SQL-explain time is tracked.
          dependencyGraphMs: 0,
          planningMs: 0,
          sqlExplainMs,
        },
      });
    });
  }
}

View File

@ -0,0 +1,147 @@
import { inject, injectable } from '@teable/v2-di';
import { err, ok, safeTry } from 'neverthrow';
import type { Result } from 'neverthrow';
import {
DeleteFieldCommand,
DeleteFieldHandler,
FieldDeletionSideEffectService,
FieldUndoRedoSnapshotService,
ForeignTableLoaderService,
TableByIdSpec,
type DomainError,
type IExecutionContext,
type ITableRepository,
v2CoreTokens,
} from '@teable/v2-core';
import {
v2RecordRepositoryPostgresTokens,
type ComputedUpdatePlanner,
} from '@teable/v2-adapter-table-repository-postgres';
import { formulaSqlPgTokens, type IPgTypeValidationStrategy } from '@teable/v2-formula-sql-pg';
import type { Kysely } from 'kysely';
import type { V1TeableDatabase } from '@teable/v2-postgres-schema';
import type { ICommandAnalyzer } from './ICommandAnalyzer';
import {
buildFieldSqlExplains,
createFieldExplainDryRunEnvironment,
createNoopUndoRedoService,
} from './FieldCommandAnalyzeHelpers';
import type { CommandExplainInfo, ExplainOptions, ExplainResult } from '../types';
import { DEFAULT_EXPLAIN_OPTIONS } from '../types';
import { v2CommandExplainTokens } from '../di/tokens';
import { SqlExplainRunner } from '../utils/SqlExplainRunner';
import { ComplexityCalculator } from '../utils/ComplexityCalculator';
/**
 * Dry-run analyzer for {@link DeleteFieldCommand}: runs the real
 * DeleteFieldHandler against an overlay/capture environment so no schema
 * change is persisted, then reports the captured SQL (optionally EXPLAINed)
 * and a complexity score.
 */
@injectable()
export class DeleteFieldAnalyzer implements ICommandAnalyzer<DeleteFieldCommand> {
  constructor(
    @inject(v2RecordRepositoryPostgresTokens.db)
    private readonly db: Kysely<V1TeableDatabase>,
    @inject(v2CoreTokens.tableRepository)
    private readonly tableRepository: ITableRepository,
    @inject(v2CoreTokens.foreignTableLoaderService)
    private readonly foreignTableLoaderService: ForeignTableLoaderService,
    @inject(v2CoreTokens.fieldUndoRedoSnapshotService)
    private readonly fieldUndoRedoSnapshotService: FieldUndoRedoSnapshotService,
    @inject(v2RecordRepositoryPostgresTokens.computedUpdatePlanner)
    private readonly computedUpdatePlanner: ComputedUpdatePlanner,
    @inject(v2CommandExplainTokens.sqlExplainRunner)
    private readonly sqlExplainRunner: SqlExplainRunner,
    @inject(v2CommandExplainTokens.complexityCalculator)
    private readonly complexityCalculator: ComplexityCalculator,
    @inject(formulaSqlPgTokens.typeValidationStrategy)
    private readonly typeValidationStrategy: IPgTypeValidationStrategy
  ) {}

  /**
   * Explains `command` without side effects.
   *
   * @param context - execution context forwarded to repositories and the handler
   * @param command - the DeleteField command to analyze
   * @param options - caller options, merged over DEFAULT_EXPLAIN_OPTIONS
   * @param startTime - epoch ms used to compute `timing.totalMs`
   * @returns the explain result, or the first DomainError produced by the
   *   table/field lookup or the handler
   */
  async analyze(
    context: IExecutionContext,
    command: DeleteFieldCommand,
    options: ExplainOptions,
    startTime: number
  ): Promise<Result<ExplainResult, DomainError>> {
    // safeTry runs a generator function, so `this` is captured up front.
    const analyzer = this;
    const mergedOptions = { ...DEFAULT_EXPLAIN_OPTIONS, ...options };
    return safeTry<ExplainResult, DomainError>(async function* () {
      const beforeTableSpec = TableByIdSpec.create(command.tableId);
      const beforeTableResult = await analyzer.tableRepository.findOne(context, beforeTableSpec);
      if (beforeTableResult.isErr()) {
        return err(beforeTableResult.error);
      }
      const beforeTable = beforeTableResult.value;
      // The field must be resolved BEFORE the dry run — it no longer exists
      // on the table once the handler has applied the deletion.
      const deletedFieldResult = beforeTable.getField((field) =>
        field.id().equals(command.fieldId)
      );
      if (deletedFieldResult.isErr()) {
        return err(deletedFieldResult.error);
      }
      const deletedField = deletedFieldResult.value;
      // Overlay repository + SQL-capturing schema repository: the handler
      // runs for real, but every write stays in memory / is only recorded.
      const dryRun = createFieldExplainDryRunEnvironment({
        db: analyzer.db,
        tableRepository: analyzer.tableRepository,
        computedUpdatePlanner: analyzer.computedUpdatePlanner,
        typeValidationStrategy: analyzer.typeValidationStrategy,
      });
      const handler = new DeleteFieldHandler(
        dryRun.overlayTableRepository,
        dryRun.tableUpdateFlow,
        new FieldDeletionSideEffectService(dryRun.tableUpdateFlow),
        analyzer.foreignTableLoaderService,
        // Noop undo/redo: nothing should be recorded during a dry run. The
        // `as never` cast bridges the handler's concrete service type.
        createNoopUndoRedoService() as never,
        analyzer.fieldUndoRedoSnapshotService
      );
      const commandResult = await handler.handle(context, command);
      if (commandResult.isErr()) {
        return err(commandResult.error);
      }
      const afterTable = commandResult.value.table;
      const commandInfo: CommandExplainInfo = {
        type: 'DeleteField',
        tableId: afterTable.id().toString(),
        tableName: afterTable.name().toString(),
        recordIds: [],
        changedFieldIds: [deletedField.id().toString()],
        changedFieldNames: [deletedField.name().toString()],
        changedFieldTypes: [deletedField.type().toString()],
        changeType: 'delete',
      };
      // EXPLAIN the captured statements only when the caller asked for SQL.
      const sqlExplainStartTime = Date.now();
      const sqlExplains = mergedOptions.includeSql
        ? await buildFieldSqlExplains(
            analyzer.sqlExplainRunner,
            analyzer.db,
            dryRun.captureTableSchemaRepository.getStatements(),
            mergedOptions.analyze
          )
        : [];
      const sqlExplainMs = Date.now() - sqlExplainStartTime;
      const complexity = analyzer.complexityCalculator.calculate({
        commandInfo,
        computedImpact: null,
        sqlExplains,
      });
      return ok({
        command: commandInfo,
        computedImpact: null,
        computedLocks: null,
        linkLocks: null,
        sqlExplains,
        complexity,
        timing: {
          totalMs: Date.now() - startTime,
          // Dependency-graph and planning phases are not measured for field
          // commands; only SQL-explain time is tracked.
          dependencyGraphMs: 0,
          planningMs: 0,
          sqlExplainMs,
        },
      });
    });
  }
}

View File

@ -0,0 +1,109 @@
import {
type DomainError,
TableUpdateFlow,
type IExecutionContext,
type ITableRepository,
} from '@teable/v2-core';
import type { IPgTypeValidationStrategy } from '@teable/v2-formula-sql-pg';
import type { V1TeableDatabase } from '@teable/v2-postgres-schema';
import type { Kysely } from 'kysely';
import { ok } from 'neverthrow';
import type { Result } from 'neverthrow';
import type { SqlExplainInfo } from '../types';
import {
CaptureTableSchemaRepository,
NoopEventBus,
NoopUnitOfWork,
OverlayTableRepository,
} from '../utils/FieldCommandExplainHarness';
import { SqlExplainRunner } from '../utils/SqlExplainRunner';
import type { ComputedUpdatePlanner } from '@teable/v2-adapter-table-repository-postgres';
/**
 * Wiring produced for a field-command dry run: an in-memory overlay over the
 * real table repository, a schema repository that records SQL instead of
 * executing it, and a TableUpdateFlow built from both.
 */
export type FieldExplainDryRunEnvironment = {
  overlayTableRepository: OverlayTableRepository;
  captureTableSchemaRepository: CaptureTableSchemaRepository;
  tableUpdateFlow: TableUpdateFlow;
};
export const createFieldExplainDryRunEnvironment = (input: {
db: Kysely<V1TeableDatabase>;
tableRepository: ITableRepository;
computedUpdatePlanner: ComputedUpdatePlanner;
typeValidationStrategy: IPgTypeValidationStrategy;
}): FieldExplainDryRunEnvironment => {
const overlayTableRepository = new OverlayTableRepository(input.tableRepository);
const captureTableSchemaRepository = new CaptureTableSchemaRepository({
db: input.db,
tableRepository: overlayTableRepository,
computedUpdatePlanner: input.computedUpdatePlanner,
typeValidationStrategy: input.typeValidationStrategy,
});
const tableUpdateFlow = new TableUpdateFlow(
overlayTableRepository,
captureTableSchemaRepository,
new NoopEventBus(),
new NoopUnitOfWork()
);
return {
overlayTableRepository,
captureTableSchemaRepository,
tableUpdateFlow,
};
};
/** Shape of the undo/redo stub returned by {@link createNoopUndoRedoService}. */
type NoopUndoRedoService = {
  recordEntry: (
    context: IExecutionContext,
    tableId: { toString(): string },
    entry: unknown
  ) => Promise<Result<void, DomainError>>;
};

/**
 * Returns an undo/redo service stub whose `recordEntry` ignores its arguments
 * and always succeeds — used so dry-run handlers record no undo history.
 */
export const createNoopUndoRedoService = (): NoopUndoRedoService => ({
  recordEntry: async () => ok(undefined),
});
/**
 * Runs the captured dry-run statements through the explain runner and shapes
 * the outcome into one SqlExplainInfo per statement, order preserved.
 *
 * @param runner - explain runner that executes EXPLAIN inside one transaction
 * @param db - database handle passed through to the runner
 * @param statements - statements captured during the dry run
 * @param analyze - whether EXPLAIN ANALYZE output was requested
 * @returns explain info per statement; when the whole transaction fails the
 *   same error message is attached to every entry
 */
export const buildFieldSqlExplains = async (
  runner: SqlExplainRunner,
  db: Kysely<V1TeableDatabase>,
  statements: ReadonlyArray<{
    description: string;
    sql: string;
    parameters: ReadonlyArray<unknown>;
    explainable: boolean;
    execute: boolean;
    initialError?: string;
  }>,
  analyze: boolean
): Promise<ReadonlyArray<SqlExplainInfo>> => {
  if (!statements.length) {
    return [];
  }
  const explained = await runner.explainSequentialInTransaction(db, statements, analyze);
  if (explained.isErr()) {
    // Transaction-level failure: no per-statement results exist, so report
    // the single error against every statement.
    const message = explained.error.message;
    return statements.map(({ description, sql, parameters }) => ({
      stepDescription: description,
      sql,
      parameters,
      explainAnalyze: null,
      explainOnly: null,
      explainError: message,
    }));
  }
  const rows = explained.value;
  return statements.map(({ description, sql, parameters, initialError }, index) => {
    const row = rows[index];
    return {
      stepDescription: description,
      sql,
      parameters,
      explainAnalyze: row?.explainAnalyze ?? null,
      explainOnly: row?.explainOnly ?? null,
      // Prefer the runner's error; fall back to any error noted at capture time.
      explainError: row?.error ?? initialError ?? null,
    };
  });
};

View File

@ -0,0 +1,169 @@
import { inject, injectable } from '@teable/v2-di';
import { err, ok, safeTry } from 'neverthrow';
import type { Result } from 'neverthrow';
import {
FieldCrossTableUpdateSideEffectService,
FieldUndoRedoSnapshotService,
FieldUpdateSideEffectService,
ForeignTableLoaderService,
LinkFieldUpdateSideEffectService,
TableByIdSpec,
UpdateFieldCommand,
UpdateFieldHandler,
type DomainError,
type IExecutionContext,
type ITableRecordQueryRepository,
type ITableRepository,
v2CoreTokens,
} from '@teable/v2-core';
import {
v2RecordRepositoryPostgresTokens,
type ComputedUpdatePlanner,
} from '@teable/v2-adapter-table-repository-postgres';
import { formulaSqlPgTokens, type IPgTypeValidationStrategy } from '@teable/v2-formula-sql-pg';
import type { Kysely } from 'kysely';
import type { V1TeableDatabase } from '@teable/v2-postgres-schema';
import type { ICommandAnalyzer } from './ICommandAnalyzer';
import {
buildFieldSqlExplains,
createFieldExplainDryRunEnvironment,
createNoopUndoRedoService,
} from './FieldCommandAnalyzeHelpers';
import type { CommandExplainInfo, ExplainOptions, ExplainResult } from '../types';
import { DEFAULT_EXPLAIN_OPTIONS } from '../types';
import { v2CommandExplainTokens } from '../di/tokens';
import { SqlExplainRunner } from '../utils/SqlExplainRunner';
import { ComplexityCalculator } from '../utils/ComplexityCalculator';
import { NoopEventBus } from '../utils/FieldCommandExplainHarness';
/**
 * Dry-run analyzer for {@link UpdateFieldCommand}: runs the real
 * UpdateFieldHandler (with its full side-effect service graph) against an
 * overlay/capture environment so no schema change is persisted, then reports
 * the captured SQL (optionally EXPLAINed) and a complexity score.
 */
@injectable()
export class UpdateFieldAnalyzer implements ICommandAnalyzer<UpdateFieldCommand> {
  constructor(
    @inject(v2RecordRepositoryPostgresTokens.db)
    private readonly db: Kysely<V1TeableDatabase>,
    @inject(v2CoreTokens.tableRepository)
    private readonly tableRepository: ITableRepository,
    @inject(v2CoreTokens.tableRecordQueryRepository)
    private readonly tableRecordQueryRepository: ITableRecordQueryRepository,
    @inject(v2CoreTokens.foreignTableLoaderService)
    private readonly foreignTableLoaderService: ForeignTableLoaderService,
    @inject(v2CoreTokens.fieldUndoRedoSnapshotService)
    private readonly fieldUndoRedoSnapshotService: FieldUndoRedoSnapshotService,
    @inject(v2RecordRepositoryPostgresTokens.computedUpdatePlanner)
    private readonly computedUpdatePlanner: ComputedUpdatePlanner,
    @inject(v2CommandExplainTokens.sqlExplainRunner)
    private readonly sqlExplainRunner: SqlExplainRunner,
    @inject(v2CommandExplainTokens.complexityCalculator)
    private readonly complexityCalculator: ComplexityCalculator,
    @inject(formulaSqlPgTokens.typeValidationStrategy)
    private readonly typeValidationStrategy: IPgTypeValidationStrategy
  ) {}

  /**
   * Explains `command` without side effects.
   *
   * @param context - execution context forwarded to repositories and the handler
   * @param command - the UpdateField command to analyze
   * @param options - caller options, merged over DEFAULT_EXPLAIN_OPTIONS
   * @param startTime - epoch ms used to compute `timing.totalMs`
   * @returns the explain result, or the first DomainError produced by the
   *   table/field lookup or the handler
   */
  async analyze(
    context: IExecutionContext,
    command: UpdateFieldCommand,
    options: ExplainOptions,
    startTime: number
  ): Promise<Result<ExplainResult, DomainError>> {
    // safeTry runs a generator function, so `this` is captured up front.
    const analyzer = this;
    const mergedOptions = { ...DEFAULT_EXPLAIN_OPTIONS, ...options };
    return safeTry<ExplainResult, DomainError>(async function* () {
      const beforeTableSpec = TableByIdSpec.create(command.tableId);
      const beforeTableResult = await analyzer.tableRepository.findOne(context, beforeTableSpec);
      if (beforeTableResult.isErr()) {
        return err(beforeTableResult.error);
      }
      const beforeTable = beforeTableResult.value;
      // Resolve the field up front: it validates the command target and
      // serves as a fallback for reporting if the field cannot be found on
      // the updated table afterwards.
      const previousFieldResult = beforeTable.getField((field) =>
        field.id().equals(command.fieldId)
      );
      if (previousFieldResult.isErr()) {
        return err(previousFieldResult.error);
      }
      // Overlay repository + SQL-capturing schema repository: the handler
      // runs for real, but every write stays in memory / is only recorded.
      const dryRun = createFieldExplainDryRunEnvironment({
        db: analyzer.db,
        tableRepository: analyzer.tableRepository,
        computedUpdatePlanner: analyzer.computedUpdatePlanner,
        typeValidationStrategy: analyzer.typeValidationStrategy,
      });
      const tableUpdateFlow = dryRun.tableUpdateFlow;
      // Full side-effect graph (link + cross-table updates), all routed
      // through the dry-run flow/repository so effects are captured only.
      const fieldUpdateSideEffectService = new FieldUpdateSideEffectService(
        tableUpdateFlow,
        dryRun.overlayTableRepository,
        new LinkFieldUpdateSideEffectService(tableUpdateFlow),
        new FieldCrossTableUpdateSideEffectService(dryRun.overlayTableRepository, tableUpdateFlow)
      );
      const handler = new UpdateFieldHandler(
        dryRun.overlayTableRepository,
        tableUpdateFlow,
        fieldUpdateSideEffectService,
        analyzer.foreignTableLoaderService,
        analyzer.tableRecordQueryRepository,
        // Noop event bus and undo/redo: nothing is published or recorded in a
        // dry run. The `as never` casts bridge the handler's concrete types.
        new NoopEventBus() as never,
        createNoopUndoRedoService() as never,
        analyzer.fieldUndoRedoSnapshotService
      );
      const commandResult = await handler.handle(context, command);
      if (commandResult.isErr()) {
        return err(commandResult.error);
      }
      const afterTable = commandResult.value.table;
      // Report the post-update field when available; otherwise fall back to
      // the pre-update snapshot resolved above.
      const effectiveFieldResult = afterTable.getField((field) =>
        field.id().equals(command.fieldId)
      );
      const effectiveField = effectiveFieldResult.isOk()
        ? effectiveFieldResult.value
        : previousFieldResult.value;
      const commandInfo: CommandExplainInfo = {
        type: 'UpdateField',
        tableId: afterTable.id().toString(),
        tableName: afterTable.name().toString(),
        recordIds: [],
        changedFieldIds: [command.fieldId.toString()],
        changedFieldNames: [effectiveField.name().toString()],
        changedFieldTypes: [effectiveField.type().toString()],
        changeType: 'update',
      };
      // EXPLAIN the captured statements only when the caller asked for SQL.
      const sqlExplainStartTime = Date.now();
      const sqlExplains = mergedOptions.includeSql
        ? await buildFieldSqlExplains(
            analyzer.sqlExplainRunner,
            analyzer.db,
            dryRun.captureTableSchemaRepository.getStatements(),
            mergedOptions.analyze
          )
        : [];
      const sqlExplainMs = Date.now() - sqlExplainStartTime;
      const complexity = analyzer.complexityCalculator.calculate({
        commandInfo,
        computedImpact: null,
        sqlExplains,
      });
      return ok({
        command: commandInfo,
        computedImpact: null,
        computedLocks: null,
        linkLocks: null,
        sqlExplains,
        complexity,
        timing: {
          totalMs: Date.now() - startTime,
          // Dependency-graph and planning phases are not measured for field
          // commands; only SQL-explain time is tracked.
          dependencyGraphMs: 0,
          planningMs: 0,
          sqlExplainMs,
        },
      });
    });
  }
}

View File

@ -1,5 +1,8 @@
export * from './ICommandAnalyzer';
export * from './CreateFieldAnalyzer';
export * from './UpdateRecordAnalyzer';
export * from './CreateRecordAnalyzer';
export * from './UpdateFieldAnalyzer';
export * from './DeleteFieldAnalyzer';
export * from './DeleteRecordsAnalyzer';
export * from './PasteCommandAnalyzer';

View File

@ -5,8 +5,11 @@ import { v2CommandExplainTokens } from './tokens';
import { ExplainService } from '../service/ExplainService';
import { SqlExplainRunner } from '../utils/SqlExplainRunner';
import { ComplexityCalculator } from '../utils/ComplexityCalculator';
import { CreateFieldAnalyzer } from '../analyzers/CreateFieldAnalyzer';
import { UpdateRecordAnalyzer } from '../analyzers/UpdateRecordAnalyzer';
import { CreateRecordAnalyzer } from '../analyzers/CreateRecordAnalyzer';
import { UpdateFieldAnalyzer } from '../analyzers/UpdateFieldAnalyzer';
import { DeleteFieldAnalyzer } from '../analyzers/DeleteFieldAnalyzer';
import { DeleteRecordsAnalyzer } from '../analyzers/DeleteRecordsAnalyzer';
import { PasteCommandAnalyzer } from '../analyzers/PasteCommandAnalyzer';
@ -23,6 +26,15 @@ export const registerCommandExplainModule = (container: DependencyContainer): vo
});
// Register analyzers
container.register(v2CommandExplainTokens.createFieldAnalyzer, CreateFieldAnalyzer, {
lifecycle: Lifecycle.Singleton,
});
container.register(v2CommandExplainTokens.updateFieldAnalyzer, UpdateFieldAnalyzer, {
lifecycle: Lifecycle.Singleton,
});
container.register(v2CommandExplainTokens.deleteFieldAnalyzer, DeleteFieldAnalyzer, {
lifecycle: Lifecycle.Singleton,
});
container.register(v2CommandExplainTokens.updateRecordAnalyzer, UpdateRecordAnalyzer, {
lifecycle: Lifecycle.Singleton,
});

View File

@ -5,6 +5,9 @@ export const v2CommandExplainTokens = {
explainService: Symbol('v2.commandExplain.explainService'),
sqlExplainRunner: Symbol('v2.commandExplain.sqlExplainRunner'),
complexityCalculator: Symbol('v2.commandExplain.complexityCalculator'),
createFieldAnalyzer: Symbol('v2.commandExplain.createFieldAnalyzer'),
updateFieldAnalyzer: Symbol('v2.commandExplain.updateFieldAnalyzer'),
deleteFieldAnalyzer: Symbol('v2.commandExplain.deleteFieldAnalyzer'),
updateRecordAnalyzer: Symbol('v2.commandExplain.updateRecordAnalyzer'),
createRecordAnalyzer: Symbol('v2.commandExplain.createRecordAnalyzer'),
deleteRecordsAnalyzer: Symbol('v2.commandExplain.deleteRecordsAnalyzer'),

View File

@ -5,17 +5,23 @@ import {
type DomainError,
type IExecutionContext,
domainError,
DeleteFieldCommand,
UpdateRecordCommand,
CreateRecordCommand,
CreateFieldCommand,
DeleteRecordsCommand,
PasteCommand,
UpdateFieldCommand,
} from '@teable/v2-core';
import type { ExplainResult, ExplainOptions } from '../types';
import { DEFAULT_EXPLAIN_OPTIONS } from '../types';
import { v2CommandExplainTokens } from '../di/tokens';
import type { CreateFieldAnalyzer } from '../analyzers/CreateFieldAnalyzer';
import type { UpdateRecordAnalyzer } from '../analyzers/UpdateRecordAnalyzer';
import type { CreateRecordAnalyzer } from '../analyzers/CreateRecordAnalyzer';
import type { UpdateFieldAnalyzer } from '../analyzers/UpdateFieldAnalyzer';
import type { DeleteFieldAnalyzer } from '../analyzers/DeleteFieldAnalyzer';
import type { DeleteRecordsAnalyzer } from '../analyzers/DeleteRecordsAnalyzer';
import type { PasteCommandAnalyzer } from '../analyzers/PasteCommandAnalyzer';
@ -37,6 +43,12 @@ export interface IExplainService {
@injectable()
export class ExplainService implements IExplainService {
constructor(
@inject(v2CommandExplainTokens.createFieldAnalyzer)
private readonly createFieldAnalyzer: CreateFieldAnalyzer,
@inject(v2CommandExplainTokens.updateFieldAnalyzer)
private readonly updateFieldAnalyzer: UpdateFieldAnalyzer,
@inject(v2CommandExplainTokens.deleteFieldAnalyzer)
private readonly deleteFieldAnalyzer: DeleteFieldAnalyzer,
@inject(v2CommandExplainTokens.updateRecordAnalyzer)
private readonly updateRecordAnalyzer: UpdateRecordAnalyzer,
@inject(v2CommandExplainTokens.createRecordAnalyzer)
@ -64,6 +76,18 @@ export class ExplainService implements IExplainService {
const mergedOptions = { ...DEFAULT_EXPLAIN_OPTIONS, ...options };
// Route to appropriate analyzer based on command type
if (command instanceof CreateFieldCommand) {
return this.createFieldAnalyzer.analyze(context, command, mergedOptions, startTime);
}
if (command instanceof UpdateFieldCommand) {
return this.updateFieldAnalyzer.analyze(context, command, mergedOptions, startTime);
}
if (command instanceof DeleteFieldCommand) {
return this.deleteFieldAnalyzer.analyze(context, command, mergedOptions, startTime);
}
if (command instanceof UpdateRecordCommand) {
return this.updateRecordAnalyzer.analyze(context, command, mergedOptions, startTime);
}

View File

@ -6,7 +6,14 @@
* Information about the command being explained.
*/
export type CommandExplainInfo = {
readonly type: 'CreateRecord' | 'UpdateRecord' | 'DeleteRecords' | 'Paste';
readonly type:
| 'CreateField'
| 'UpdateField'
| 'DeleteField'
| 'CreateRecord'
| 'UpdateRecord'
| 'DeleteRecords'
| 'Paste';
readonly tableId: string;
readonly tableName: string;
readonly recordIds: ReadonlyArray<string>;

View File

@ -0,0 +1,602 @@
import {
type DomainError,
Field,
FieldType,
type FieldId,
type IDomainEvent,
type IEventBus,
type IExecutionContext,
type IFindOptions,
type ISpecification,
type ITableRepository,
type ITableSchemaRepository,
type ITableSpecVisitor,
type IUnitOfWork,
Table,
TableByIdSpec,
TableId,
type TableSortKey,
type TableUpdatePersistResult,
type UnitOfWorkOperation,
DbFieldName,
} from '@teable/v2-core';
import {
ComputedTableRecordQueryBuilder,
ComputedUpdatePlanner,
FieldValueChangeCollectorVisitor,
TableAddFieldCollectorVisitor,
TableSchemaUpdateVisitor,
UpdateFromSelectBuilder,
isPersistedAsGeneratedColumn,
type DynamicDB,
} from '@teable/v2-adapter-table-repository-postgres';
import type { IPgTypeValidationStrategy } from '@teable/v2-formula-sql-pg';
import type { V1TeableDatabase } from '@teable/v2-postgres-schema';
import type { Kysely } from 'kysely';
import { err, ok, safeTry } from 'neverthrow';
import type { Result } from 'neverthrow';
/**
 * One SQL statement recorded during a field-command dry run.
 * `explainable` marks statements that EXPLAIN can plan (see
 * isExplainableSqlStatement). `execute` presumably marks statements that must
 * actually run inside the explain transaction — confirm against
 * SqlExplainRunner. `initialError` carries an error noted at capture time and
 * is surfaced later in the explain output.
 */
export type CapturedFieldExplainStatement = {
  readonly description: string;
  readonly sql: string;
  readonly parameters: ReadonlyArray<unknown>;
  readonly explainable: boolean;
  readonly execute: boolean;
  readonly initialError?: string;
};
/**
 * Returns true when the SQL text is a statement PostgreSQL's EXPLAIN can plan
 * (SELECT / INSERT / UPDATE / DELETE / MERGE / WITH). DDL such as ALTER or
 * CREATE is not explainable.
 *
 * Uses a word-boundary match rather than `startsWith('select ')` so the
 * keyword may be followed by any whitespace (newline, tab) or a token such as
 * `(` or `*`, not only a single space — e.g. multi-line "SELECT\n ..." is
 * correctly classified. Words that merely start with a keyword ("selection")
 * still do not match.
 */
export const isExplainableSqlStatement = (sqlText: string): boolean =>
  /^(select|insert|update|delete|merge|with)\b/i.test(sqlText.trimStart());
/** Event bus stub that accepts and discards every event; always succeeds. */
export class NoopEventBus implements IEventBus {
  publish(
    _context: IExecutionContext,
    _event: IDomainEvent
  ): Promise<Result<void, DomainError>> {
    return Promise.resolve(ok(undefined));
  }

  publishMany(
    _context: IExecutionContext,
    _events: ReadonlyArray<IDomainEvent>
  ): Promise<Result<void, DomainError>> {
    return Promise.resolve(ok(undefined));
  }
}
/** Unit-of-work stub: runs the work directly, with no transaction wrapping. */
export class NoopUnitOfWork implements IUnitOfWork {
  async withTransaction<T>(
    context: IExecutionContext,
    work: UnitOfWorkOperation<T>
  ): Promise<Result<T, DomainError>> {
    const outcome = await work(context);
    return outcome;
  }
}
/**
 * Table repository that layers in-memory writes over a delegate repository.
 * Inserts/updates go into an overlay map, deletes into a tombstone set;
 * reads consult the overlay first and fall back to the delegate, so a dry run
 * observes its own uncommitted changes without ever writing through.
 */
export class OverlayTableRepository implements ITableRepository {
  // Latest in-memory version of each table, keyed by table id.
  private readonly overlayByTableId = new Map<string, Table>();
  // Tombstones: tables deleted during the dry run.
  private readonly deletedTableIds = new Set<string>();
  constructor(private readonly delegate: ITableRepository) {}

  /** Stores the table in the overlay only; the delegate is never written. */
  async insert(context: IExecutionContext, table: Table): Promise<Result<Table, DomainError>> {
    // Re-inserting resurrects a previously deleted table.
    this.deletedTableIds.delete(table.id().toString());
    this.overlayByTableId.set(table.id().toString(), table);
    return ok(table);
  }

  /** Bulk variant of insert; same overlay-only semantics. */
  async insertMany(
    context: IExecutionContext,
    tables: ReadonlyArray<Table>
  ): Promise<Result<ReadonlyArray<Table>, DomainError>> {
    for (const table of tables) {
      this.deletedTableIds.delete(table.id().toString());
      this.overlayByTableId.set(table.id().toString(), table);
    }
    return ok(tables);
  }

  /**
   * Finds one table: overlay entries are checked first (skipping tombstones);
   * otherwise the delegate result is returned, substituted by its overlay
   * version if one exists.
   */
  async findOne(
    context: IExecutionContext,
    spec: ISpecification<Table, ITableSpecVisitor>
  ): Promise<Result<Table, DomainError>> {
    for (const table of this.overlayByTableId.values()) {
      if (!this.deletedTableIds.has(table.id().toString()) && spec.isSatisfiedBy(table)) {
        return ok(table);
      }
    }
    const result = await this.delegate.findOne(context, spec);
    if (result.isErr()) {
      return err(result.error);
    }
    // The delegate match may have a newer in-memory version — prefer it.
    const overlay = this.overlayByTableId.get(result.value.id().toString());
    if (overlay && !this.deletedTableIds.has(overlay.id().toString())) {
      return ok(overlay);
    }
    return result;
  }

  /**
   * Finds all matching tables: delegate results are merged with overlay
   * versions (overlay wins per id), tombstoned tables are dropped, and the
   * spec is re-checked against the effective (possibly overlaid) table.
   */
  async find(
    context: IExecutionContext,
    spec: ISpecification<Table, ITableSpecVisitor>,
    options?: IFindOptions<TableSortKey>
  ): Promise<Result<ReadonlyArray<Table>, DomainError>> {
    const result = await this.delegate.find(context, spec, options);
    if (result.isErr()) {
      return err(result.error);
    }
    const mergedByTableId = new Map<string, Table>();
    for (const table of result.value) {
      const overlay = this.overlayByTableId.get(table.id().toString());
      const effective = overlay ?? table;
      if (this.deletedTableIds.has(effective.id().toString())) {
        continue;
      }
      if (spec.isSatisfiedBy(effective)) {
        mergedByTableId.set(effective.id().toString(), effective);
      }
    }
    // Overlay-only tables (inserted during the dry run) that match the spec
    // are included as well.
    for (const table of this.overlayByTableId.values()) {
      if (this.deletedTableIds.has(table.id().toString())) {
        continue;
      }
      if (spec.isSatisfiedBy(table)) {
        mergedByTableId.set(table.id().toString(), table);
      }
    }
    return ok([...mergedByTableId.values()]);
  }

  /** Records the updated table in the overlay; the mutate spec is not applied here. */
  async updateOne(
    _context: IExecutionContext,
    table: Table,
    _mutateSpec: ISpecification<Table, ITableSpecVisitor>
  ): Promise<Result<TableUpdatePersistResult | void, DomainError>> {
    this.deletedTableIds.delete(table.id().toString());
    this.overlayByTableId.set(table.id().toString(), table);
    return ok(undefined);
  }

  /** Removes any overlay entry and tombstones the table id. */
  async delete(context: IExecutionContext, table: Table): Promise<Result<void, DomainError>> {
    this.overlayByTableId.delete(table.id().toString());
    this.deletedTableIds.add(table.id().toString());
    return ok(undefined);
  }
}
type CaptureSchemaRepositoryOptions = {
db: Kysely<V1TeableDatabase>;
tableRepository: ITableRepository;
computedUpdatePlanner: ComputedUpdatePlanner;
typeValidationStrategy: IPgTypeValidationStrategy;
};
export class CaptureTableSchemaRepository implements ITableSchemaRepository {
private readonly statements: CapturedFieldExplainStatement[] = [];
constructor(private readonly options: CaptureSchemaRepositoryOptions) {}
/** Returns the SQL statements captured so far, in capture order. */
getStatements(): ReadonlyArray<CapturedFieldExplainStatement> {
  return this.statements;
}

/** No-op: table creation is not captured by this explain repository. */
async insert(_context: IExecutionContext, _table: Table): Promise<Result<void, DomainError>> {
  return ok(undefined);
}

/** No-op: bulk table creation is not captured by this explain repository. */
async insertMany(
  _context: IExecutionContext,
  _tables: ReadonlyArray<Table>
): Promise<Result<void, DomainError>> {
  return ok(undefined);
}
/**
 * Dry-run counterpart of a schema update: walks the mutate spec with the same
 * visitors the real repository uses, but records the resulting SQL via
 * captureCompiledStatement / capture* helpers instead of executing it.
 */
async update(
  context: IExecutionContext,
  table: Table,
  mutateSpec: ISpecification<Table, ITableSpecVisitor>
): Promise<Result<void, DomainError>> {
  // safeTry runs a generator function, so `this` is captured up front.
  const repository = this;
  return safeTry<void, DomainError>(async function* () {
    yield* ensureDbFieldNames(table.getFields());
    // Resolve the physical schema + table name the statements target.
    const dbTableNameResult = table
      .dbTableName()
      .andThen((name) => name.split({ defaultSchema: null }));
    if (dbTableNameResult.isErr()) {
      return err(dbTableNameResult.error);
    }
    const { schema, tableName } = dbTableNameResult.value;
    // 1) DDL/schema statements produced by the spec.
    const visitor = new TableSchemaUpdateVisitor({
      db: repository.options.db,
      schema,
      tableName,
      tableId: table.id().toString(),
      table,
    });
    yield* mutateSpec.accept(visitor);
    const schemaStatements = yield* visitor.where();
    for (let i = 0; i < schemaStatements.length; i++) {
      repository.captureCompiledStatement(
        `Schema step ${i + 1}: table ${table.name().toString()}`,
        schemaStatements[i]!.compile(repository.options.db)
      );
    }
    // 2) Backfill statements for fields the spec adds.
    const addFieldCollector = new TableAddFieldCollectorVisitor();
    yield* mutateSpec.accept(addFieldCollector);
    yield* await repository.captureFieldBackfillStatements(
      context,
      table,
      addFieldCollector.fields(),
      {
        descriptionPrefix: 'Field backfill',
        // Two-way one-many handling is only needed when a link field is added.
        includeOneManyTwoWay: addFieldCollector
          .fields()
          .some((field: Field) => field.type().equals(FieldType.link())),
      }
    );
    // 3) Computed-cascade statements for fields whose values change.
    const valueChangeVisitor = new FieldValueChangeCollectorVisitor();
    yield* mutateSpec.accept(valueChangeVisitor);
    yield* await repository.captureCascadeStatements(context, table, {
      selfBackfillFieldIds: valueChangeVisitor.selfBackfillFields(),
      valueChangedFieldIds: valueChangeVisitor.valueChangedFields(),
      deferredBackfillFieldIds: valueChangeVisitor.deferredBackfillFields(),
      hasDbStorageTypeChange: valueChangeVisitor.hasDbStorageTypeChange(),
    });
    return ok(undefined);
  });
}
async refreshInMemoryTableAfterUpdate(
_context: IExecutionContext,
table: Table,
_mutateSpec: ISpecification<Table, ITableSpecVisitor>
): Promise<Result<Table, DomainError>> {
return ok(table);
}
async replayDeferredBackfillAfterUpdate(
context: IExecutionContext,
table: Table,
mutateSpec: ISpecification<Table, ITableSpecVisitor>
): Promise<Result<void, DomainError>> {
const repository = this;
return safeTry<void, DomainError>(async function* () {
const valueChangeVisitor = new FieldValueChangeCollectorVisitor();
yield* mutateSpec.accept(valueChangeVisitor);
const deferredFieldIds = valueChangeVisitor.deferredBackfillFields();
if (deferredFieldIds.length === 0) {
return ok(undefined);
}
yield* await repository.captureCascadePlanStatements(context, table, {
fieldIds: deferredFieldIds,
skipDistinctFilter: valueChangeVisitor.hasDbStorageTypeChange(),
descriptionPrefix: 'Deferred computed cascade',
});
return ok(undefined);
});
}
async delete(_context: IExecutionContext, _table: Table): Promise<Result<void, DomainError>> {
return ok(undefined);
}
private captureCompiledStatement(
description: string,
compiled: { sql: string; parameters: ReadonlyArray<unknown> }
) {
this.statements.push({
description,
sql: compiled.sql,
parameters: compiled.parameters,
explainable: isExplainableSqlStatement(compiled.sql),
execute: true,
});
}
private captureBuildError(description: string, error: unknown) {
this.statements.push({
description,
sql: `-- ${describeError(error)}`,
parameters: [],
explainable: false,
execute: false,
initialError: describeError(error),
});
}
private async captureFieldBackfillStatements(
context: IExecutionContext,
table: Table,
fields: ReadonlyArray<Field>,
options: {
descriptionPrefix: string;
includeOneManyTwoWay?: boolean;
skipDistinctFilter?: boolean;
}
): Promise<Result<void, DomainError>> {
const backfillFields: Field[] = [];
for (const field of fields) {
if (!needsBackfill(field, options.includeOneManyTwoWay)) {
continue;
}
const persistedAsGeneratedResult = isPersistedAsGeneratedColumn(field);
if (persistedAsGeneratedResult.isErr()) {
return err(persistedAsGeneratedResult.error);
}
if (persistedAsGeneratedResult.value) {
continue;
}
backfillFields.push(field);
}
if (backfillFields.length === 0) {
return ok(undefined);
}
const fieldIds = backfillFields.map((field) => field.id());
const fieldLabels = backfillFields.map(
(field) => `${field.name().toString()} (${field.type().toString()})`
);
const builder = new ComputedTableRecordQueryBuilder(
this.options.db as unknown as Kysely<DynamicDB>,
{
typeValidationStrategy: this.options.typeValidationStrategy,
forceLookupArrayOutput: true,
}
)
.from(table)
.select(fieldIds);
const prepareResult = await builder.prepare({
context,
tableRepository: this.options.tableRepository,
});
if (prepareResult.isErr()) {
this.captureBuildError(
`${options.descriptionPrefix}: table ${table.name().toString()} [${fieldLabels.join(', ')}]`,
prepareResult.error
);
return ok(undefined);
}
const selectQueryResult = builder.build();
if (selectQueryResult.isErr()) {
this.captureBuildError(
`${options.descriptionPrefix}: table ${table.name().toString()} [${fieldLabels.join(', ')}]`,
selectQueryResult.error
);
return ok(undefined);
}
const updateBuilder = new UpdateFromSelectBuilder(
this.options.db as unknown as Kysely<DynamicDB>
);
const compiledResult = updateBuilder.build({
table,
fieldIds,
selectQuery: selectQueryResult.value,
skipDistinctFilter: options.skipDistinctFilter,
});
if (compiledResult.isErr()) {
this.captureBuildError(
`${options.descriptionPrefix}: table ${table.name().toString()} [${fieldLabels.join(', ')}]`,
compiledResult.error
);
return ok(undefined);
}
this.captureCompiledStatement(
`${options.descriptionPrefix}: table ${table.name().toString()}, fields [${fieldLabels.join(', ')}]`,
compiledResult.value
);
return ok(undefined);
}
private async captureCascadeStatements(
context: IExecutionContext,
table: Table,
options: {
selfBackfillFieldIds: ReadonlyArray<FieldId>;
valueChangedFieldIds: ReadonlyArray<FieldId>;
deferredBackfillFieldIds: ReadonlyArray<FieldId>;
hasDbStorageTypeChange: boolean;
}
): Promise<Result<void, DomainError>> {
const deferredFieldIdSet = new Set(
options.deferredBackfillFieldIds.map((fieldId) => fieldId.toString())
);
const eligibleSelfBackfillFieldIds = options.selfBackfillFieldIds.filter(
(fieldId) => !deferredFieldIdSet.has(fieldId.toString())
);
const eligibleValueChangedFieldIds = options.valueChangedFieldIds.filter(
(fieldId) => !deferredFieldIdSet.has(fieldId.toString())
);
if (eligibleSelfBackfillFieldIds.length > 0) {
const fields = resolveFieldsById(table, eligibleSelfBackfillFieldIds);
const backfillResult = await this.captureFieldBackfillStatements(context, table, fields, {
descriptionPrefix: 'Computed self-backfill',
includeOneManyTwoWay: true,
skipDistinctFilter: options.hasDbStorageTypeChange,
});
if (backfillResult.isErr()) {
return err(backfillResult.error);
}
}
const changedFieldIds = dedupFieldIds([
...eligibleSelfBackfillFieldIds,
...eligibleValueChangedFieldIds,
]);
if (changedFieldIds.length === 0) {
return ok(undefined);
}
return this.captureCascadePlanStatements(context, table, {
fieldIds: changedFieldIds,
skipDistinctFilter: options.hasDbStorageTypeChange,
descriptionPrefix: 'Computed cascade',
});
}
private async captureCascadePlanStatements(
context: IExecutionContext,
table: Table,
options: {
fieldIds: ReadonlyArray<FieldId>;
skipDistinctFilter: boolean;
descriptionPrefix: string;
}
): Promise<Result<void, DomainError>> {
if (options.fieldIds.length === 0) {
return ok(undefined);
}
const planResult = await this.options.computedUpdatePlanner.plan(
{
table,
changedFieldIds: options.fieldIds,
changedRecordIds: [],
changeType: 'update',
cyclePolicy: 'skip',
},
context
);
if (planResult.isErr()) {
return err(planResult.error);
}
const sortedSteps = [...planResult.value.steps].sort((left, right) => left.level - right.level);
for (const step of sortedSteps) {
const targetTable = step.tableId.equals(table.id())
? table
: await this.loadTableById(context, step.tableId.toString());
if (!targetTable) {
continue;
}
const fields = resolveFieldsById(targetTable, step.fieldIds);
const result = await this.captureFieldBackfillStatements(context, targetTable, fields, {
descriptionPrefix: `${options.descriptionPrefix} level ${step.level}`,
includeOneManyTwoWay: true,
skipDistinctFilter: options.skipDistinctFilter,
});
if (result.isErr()) {
return err(result.error);
}
}
return ok(undefined);
}
private async loadTableById(
context: IExecutionContext,
tableId: string
): Promise<Table | undefined> {
const tableIdResult = TableId.create(tableId);
if (tableIdResult.isErr()) {
return undefined;
}
const tableResult = await this.options.tableRepository.findOne(
context,
TableByIdSpec.create(tableIdResult.value)
);
if (tableResult.isErr()) {
return undefined;
}
return tableResult.value;
}
}
/** Returns the table's Field objects whose ids appear in `fieldIds`, preserving table order. */
const resolveFieldsById = (table: Table, fieldIds: ReadonlyArray<FieldId>): Field[] => {
  const wanted = new Set<string>();
  for (const fieldId of fieldIds) {
    wanted.add(fieldId.toString());
  }
  const matches: Field[] = [];
  for (const field of table.getFields()) {
    if (wanted.has(field.id().toString())) {
      matches.push(field);
    }
  }
  return matches;
};
/**
 * De-duplicates field ids by their string value. First-seen key order is
 * kept (Map insertion order) while the LAST instance for a key wins.
 */
const dedupFieldIds = (fieldIds: ReadonlyArray<FieldId>): FieldId[] => {
  const byKey = new Map<string, FieldId>();
  fieldIds.forEach((fieldId) => byKey.set(fieldId.toString(), fieldId));
  return Array.from(byKey.values());
};
/**
 * Decides whether a field requires a data backfill after a schema change.
 * Computed fields always do. Link fields do as well, except a two-way
 * oneMany link, which is only backfilled when the caller opts in via
 * `includeOneManyTwoWay`. All other fields need none.
 */
const needsBackfill = (field: Field, includeOneManyTwoWay = false): boolean => {
  const computedSpec = Field.specs().isComputed().build();
  if (computedSpec.isOk() && computedSpec.value.isSatisfiedBy(field)) {
    return true;
  }
  if (!field.type().equals(FieldType.link())) {
    return false;
  }
  // Narrow to the link-specific accessors; only link fields carry them.
  const linkField = field as Field & {
    relationship: () => { toString(): string };
    isOneWay: () => boolean;
  };
  const isTwoWayOneMany = linkField.relationship().toString() === 'oneMany' && !linkField.isOneWay();
  return isTwoWayOneMany ? includeOneManyTwoWay : true;
};
/**
 * Ensures every field carries a db column name, falling back to the field id
 * for any field that lacks one. Stops at the first rehydration/assignment error.
 */
const ensureDbFieldNames = (fields: ReadonlyArray<Field>): Result<void, DomainError> => {
  for (const field of fields) {
    if (field.dbFieldName().isOk()) {
      continue;
    }
    // Use the field id itself as the physical column name.
    const fallback = DbFieldName.rehydrate(field.id().toString());
    if (fallback.isErr()) {
      return err(fallback.error);
    }
    const applied = field.setDbFieldName(fallback.value);
    if (applied.isErr()) {
      return err(applied.error);
    }
  }
  return ok(undefined);
};
/**
 * Produces a short human-readable description of an arbitrary thrown value:
 * Error -> message (or name when the message is empty), string -> itself,
 * anything else -> JSON, with String() as the last resort (covers values
 * JSON.stringify cannot serialize or that make it throw).
 */
const describeError = (error: unknown): string => {
  if (error instanceof Error) {
    return error.message || error.name;
  }
  if (typeof error === 'string') {
    return error;
  }
  try {
    const serialized = JSON.stringify(error);
    return serialized ?? String(error);
  } catch {
    return String(error);
  }
};

View File

@ -28,12 +28,34 @@ class BatchRollbackSignal extends Error {
}
}
/**
* Error class used to signal intentional rollback after sequential EXPLAIN execution.
*/
class SequentialRollbackSignal extends Error {
  // Carries the per-statement results out of the transaction callback; the
  // caller's catch unwraps them after the deliberate rollback.
  constructor(readonly results: SequentialExplainStatementResult[]) {
    super('Intentional rollback after sequential EXPLAIN');
    this.name = 'SequentialRollbackSignal';
  }
}
/** A single captured SQL statement submitted for batch EXPLAIN. */
export type BatchExplainStatement = {
  sql: string;
  parameters: ReadonlyArray<unknown>;
  // Human-readable label identifying the step in explain output.
  description: string;
};
/** Statement for sequential EXPLAIN; the optional flags come from the capture phase. */
export type SequentialExplainStatement = BatchExplainStatement & {
  // False when PostgreSQL EXPLAIN does not support this statement type.
  explainable?: boolean;
  // False when the capture phase decided the statement must not be executed.
  execute?: boolean;
  // Build-time failure recorded during capture; reported instead of running the statement.
  initialError?: string;
};
/** Per-statement outcome of explainSequentialInTransaction. */
export type SequentialExplainStatementResult = {
  // Populated in analyze mode when EXPLAIN ANALYZE succeeded.
  explainAnalyze: ExplainAnalyzeOutput | null;
  // Populated in plan-only mode, or as the analyze-mode fallback.
  explainOnly: ExplainOutput | null;
  // Non-null when the step could not be (fully) explained or executed.
  error: string | null;
};
/**
* Setup statement to run before EXPLAIN statements.
* Used to create temporary tables needed by the SQL being explained.
@ -238,6 +260,176 @@ export class SqlExplainRunner {
}
}
/**
* Run a mixed DDL/DML statement sequence in one transaction and roll it back at the end.
*
* Non-explainable statements are executed normally so later explainable statements can observe
* the expected schema/data state. In plan-only mode, explainable statements are executed after
* EXPLAIN so the sequence can continue against the mutated in-transaction state.
*/
async explainSequentialInTransaction(
  db: Kysely<V1TeableDatabase>,
  statements: ReadonlyArray<SequentialExplainStatement>,
  analyze: boolean
): Promise<Result<SequentialExplainStatementResult[], DomainError>> {
  if (statements.length === 0) {
    return ok([]);
  }
  try {
    await db.transaction().execute(async (trx) => {
      const results: SequentialExplainStatementResult[] = [];
      for (let i = 0; i < statements.length; i++) {
        const statement = statements[i]!;
        // One savepoint per statement so a failing step can be undone
        // without aborting the surrounding transaction.
        const savepointName = `seq_stmt_${i}`;
        await sql`SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
        try {
          // The statement never compiled during capture: report and skip.
          if (statement.initialError) {
            results.push({
              explainAnalyze: null,
              explainOnly: null,
              error: statement.initialError,
            });
            await sql`ROLLBACK TO SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
            await sql`RELEASE SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
            continue;
          }
          // Capture flagged this statement as do-not-run.
          if (statement.execute === false) {
            results.push({
              explainAnalyze: null,
              explainOnly: null,
              error: 'Statement capture skipped execution',
            });
            await sql`ROLLBACK TO SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
            await sql`RELEASE SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
            continue;
          }
          // Not EXPLAINable (e.g. DDL): execute it anyway so later
          // statements observe the mutated schema, but report no plan.
          if (statement.explainable === false) {
            await this.executeSql(trx, statement.sql, statement.parameters);
            results.push({
              explainAnalyze: null,
              explainOnly: null,
              error: 'PostgreSQL EXPLAIN does not support this statement type',
            });
            await sql`RELEASE SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
            continue;
          }
          if (analyze) {
            try {
              const analyzeRows = await this.executeExplainQuery(
                trx,
                `EXPLAIN (ANALYZE, BUFFERS, FORMAT JSON) ${statement.sql}`,
                statement.parameters
              );
              results.push({
                explainAnalyze: this.parseExplainAnalyzeJson(analyzeRows),
                explainOnly: null,
                error: null,
              });
            } catch (analyzeError) {
              // ANALYZE actually ran the statement and may have left partial
              // effects: undo them, re-create the savepoint, then fall back
              // to a plan-only EXPLAIN.
              await sql`ROLLBACK TO SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
              await sql`SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
              try {
                const explainRows = await this.executeExplainQuery(
                  trx,
                  `EXPLAIN (FORMAT JSON) ${statement.sql}`,
                  statement.parameters
                );
                results.push({
                  explainAnalyze: null,
                  explainOnly: this.parseExplainOnlyJson(explainRows),
                  error: `EXPLAIN ANALYZE failed: ${analyzeError instanceof Error ? analyzeError.message : String(analyzeError)}`,
                });
              } catch (explainError) {
                results.push({
                  explainAnalyze: null,
                  explainOnly: null,
                  error: `EXPLAIN failed: ${explainError instanceof Error ? explainError.message : String(explainError)}`,
                });
                await sql`ROLLBACK TO SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
              }
            }
            await sql`RELEASE SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
            continue;
          }
          // Plan-only mode: EXPLAIN first, then execute the real statement so
          // subsequent steps see its in-transaction effects.
          let explainOnly: ExplainOutput | null = null;
          let explainError: string | null = null;
          try {
            const explainRows = await this.executeExplainQuery(
              trx,
              `EXPLAIN (FORMAT JSON) ${statement.sql}`,
              statement.parameters
            );
            explainOnly = this.parseExplainOnlyJson(explainRows);
          } catch (error) {
            explainError = `EXPLAIN failed: ${error instanceof Error ? error.message : String(error)}`;
          }
          try {
            await this.executeSql(trx, statement.sql, statement.parameters);
          } catch (executeError) {
            // Keep any earlier EXPLAIN error and append the execution failure.
            const executeErrorMessage = `Statement execution failed: ${executeError instanceof Error ? executeError.message : String(executeError)}`;
            explainError = explainError
              ? `${explainError}; ${executeErrorMessage}`
              : executeErrorMessage;
            await sql`ROLLBACK TO SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
          }
          results.push({
            explainAnalyze: null,
            explainOnly,
            error: explainError,
          });
          await sql`RELEASE SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
        } catch (statementError) {
          // Unexpected failure inside the step: try to restore the savepoint
          // so the remaining statements can still run.
          try {
            await sql`ROLLBACK TO SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
            await sql`RELEASE SAVEPOINT ${sql.raw(savepointName)}`.execute(trx);
          } catch (rollbackError) {
            console.warn('Failed to rollback sequential explain step', {
              statement: statement.description,
              statementError,
              rollbackError,
            });
          }
          results.push({
            explainAnalyze: null,
            explainOnly: null,
            error: `Sequential explain failed: ${statementError instanceof Error ? statementError.message : String(statementError)}`,
          });
        }
      }
      // Abort the transaction on purpose; the signal smuggles the results out.
      throw new SequentialRollbackSignal(results);
    });
    // Unreachable when the callback behaves: the signal must have been thrown.
    return err(
      domainError.invariant({
        message: 'Transaction should have rolled back',
      })
    );
  } catch (error) {
    if (error instanceof SequentialRollbackSignal) {
      return ok(error.results);
    }
    return err(
      domainError.infrastructure({
        message: `Sequential EXPLAIN failed: ${error instanceof Error ? error.message : String(error)}`,
      })
    );
  }
}
private async runExplainAnalyzeInTransaction(
db: Kysely<V1TeableDatabase>,
sqlStatement: string,
@ -355,6 +547,37 @@ export class SqlExplainRunner {
}
}
/**
 * Compiles `explainSql` as a raw statement, substitutes the original
 * statement's bind parameters, and returns PostgreSQL's `QUERY PLAN` rows.
 */
private async executeExplainQuery(
  db: Kysely<V1TeableDatabase>,
  explainSql: string,
  parameters: ReadonlyArray<unknown>
): Promise<Array<{ 'QUERY PLAN': string | object }>> {
  // sql.raw keeps the EXPLAIN text verbatim; parameters are spliced in afterwards
  // because the raw template itself carries none.
  const compiled = sql`${sql.raw(explainSql)}`.compile(db);
  const result = await db.executeQuery<{ 'QUERY PLAN': string | object }>({
    ...compiled,
    parameters: [...parameters],
  });
  return result.rows;
}
/** Executes `statementSql` with the given bind parameters; any result rows are discarded. */
private async executeSql(
  db: Kysely<V1TeableDatabase>,
  statementSql: string,
  parameters: ReadonlyArray<unknown>
): Promise<void> {
  // Compile the raw SQL, then override its (empty) parameter list with the caller's.
  const compiled = sql`${sql.raw(statementSql)}`.compile(db);
  await db.executeQuery({
    ...compiled,
    parameters: [...parameters],
  });
}
private parseExplainAnalyzeJson(
rows: Array<{ 'QUERY PLAN': string | object }>
): ExplainAnalyzeOutput {

View File

@ -2,14 +2,51 @@ import type { IExplainService } from '@teable/v2-command-explain';
import type { IExplainEndpointResult, IExplainResultDto } from '@teable/v2-contract-http';
import { mapDomainErrorToHttpError, mapDomainErrorToHttpStatus } from '@teable/v2-contract-http';
import {
CreateFieldCommand,
CreateRecordCommand,
UpdateRecordCommand,
DeleteFieldCommand,
DeleteRecordsCommand,
RecordId,
TableId,
UpdateFieldCommand,
UpdateRecordCommand,
} from '@teable/v2-core';
import type { IExecutionContext } from '@teable/v2-core';
export interface IExplainCreateFieldInput {
baseId: string;
tableId: string;
field: Record<string, unknown>;
order?: {
viewId: string;
orderIndex: number;
};
analyze?: boolean;
includeSql?: boolean;
includeGraph?: boolean;
includeLocks?: boolean;
}
export interface IExplainUpdateFieldInput {
tableId: string;
fieldId: string;
field: Record<string, unknown>;
analyze?: boolean;
includeSql?: boolean;
includeGraph?: boolean;
includeLocks?: boolean;
}
export interface IExplainDeleteFieldInput {
baseId: string;
tableId: string;
fieldId: string;
analyze?: boolean;
includeSql?: boolean;
includeGraph?: boolean;
includeLocks?: boolean;
}
export interface IExplainCreateRecordInput {
tableId: string;
fields: Record<string, unknown>;
@ -38,6 +75,120 @@ export interface IExplainDeleteRecordsInput {
includeLocks?: boolean;
}
/**
 * HTTP endpoint body for explaining a CreateField command: builds the
 * command from the request input, delegates to the explain service with the
 * request's flags (defaults: no analyze, include SQL, no graph, include
 * locks), and maps domain errors to HTTP status/body.
 */
export const executeExplainCreateFieldEndpoint = async (
  context: IExecutionContext,
  input: IExplainCreateFieldInput,
  explainService: IExplainService
): Promise<IExplainEndpointResult> => {
  const commandResult = CreateFieldCommand.create(input);
  if (commandResult.isErr()) {
    return {
      status: mapDomainErrorToHttpStatus(commandResult.error),
      body: { ok: false, error: mapDomainErrorToHttpError(commandResult.error) },
    };
  }
  const explainResult = await explainService.explain(context, commandResult.value, {
    analyze: input.analyze ?? false,
    includeSql: input.includeSql ?? true,
    includeGraph: input.includeGraph ?? false,
    includeLocks: input.includeLocks ?? true,
  });
  if (explainResult.isErr()) {
    return {
      status: mapDomainErrorToHttpStatus(explainResult.error),
      body: { ok: false, error: mapDomainErrorToHttpError(explainResult.error) },
    };
  }
  return {
    status: 200,
    body: {
      ok: true,
      data: explainResult.value as IExplainResultDto,
    },
  };
};
/**
 * HTTP endpoint body for explaining an UpdateField command: builds the
 * command from the request input, delegates to the explain service, and maps
 * domain errors to HTTP status/body.
 */
export const executeExplainUpdateFieldEndpoint = async (
  context: IExecutionContext,
  input: IExplainUpdateFieldInput,
  explainService: IExplainService
): Promise<IExplainEndpointResult> => {
  const commandResult = UpdateFieldCommand.create(input);
  if (commandResult.isErr()) {
    const error = commandResult.error;
    return {
      status: mapDomainErrorToHttpStatus(error),
      body: { ok: false, error: mapDomainErrorToHttpError(error) },
    };
  }
  // Flag defaults mirror the other explain endpoints: plan-only, SQL and locks included.
  const result = await explainService.explain(context, commandResult.value, {
    analyze: input.analyze ?? false,
    includeSql: input.includeSql ?? true,
    includeGraph: input.includeGraph ?? false,
    includeLocks: input.includeLocks ?? true,
  });
  if (result.isErr()) {
    const error = result.error;
    return {
      status: mapDomainErrorToHttpStatus(error),
      body: { ok: false, error: mapDomainErrorToHttpError(error) },
    };
  }
  return {
    status: 200,
    body: {
      ok: true,
      data: result.value as IExplainResultDto,
    },
  };
};
/**
 * HTTP endpoint body for explaining a DeleteField command: builds the
 * command from the request input, delegates to the explain service, and maps
 * domain errors to HTTP status/body.
 */
export const executeExplainDeleteFieldEndpoint = async (
  context: IExecutionContext,
  input: IExplainDeleteFieldInput,
  explainService: IExplainService
): Promise<IExplainEndpointResult> => {
  const commandResult = DeleteFieldCommand.create(input);
  if (commandResult.isErr()) {
    const error = commandResult.error;
    return {
      status: mapDomainErrorToHttpStatus(error),
      body: { ok: false, error: mapDomainErrorToHttpError(error) },
    };
  }
  // Flag defaults mirror the other explain endpoints: plan-only, SQL and locks included.
  const result = await explainService.explain(context, commandResult.value, {
    analyze: input.analyze ?? false,
    includeSql: input.includeSql ?? true,
    includeGraph: input.includeGraph ?? false,
    includeLocks: input.includeLocks ?? true,
  });
  if (result.isErr()) {
    const error = result.error;
    return {
      status: mapDomainErrorToHttpStatus(error),
      body: { ok: false, error: mapDomainErrorToHttpError(error) },
    };
  }
  return {
    status: 200,
    body: {
      ok: true,
      data: result.value as IExplainResultDto,
    },
  };
};
export const executeExplainCreateRecordEndpoint = async (
context: IExecutionContext,
input: IExplainCreateRecordInput,

View File

@ -24,8 +24,11 @@ import { executeDeleteFieldEndpoint } from './handlers/tables/deleteField';
import { executeDeleteRecordsEndpoint } from './handlers/tables/deleteRecords';
import { executeDeleteTableEndpoint } from './handlers/tables/deleteTable';
import {
executeExplainCreateFieldEndpoint,
executeExplainCreateRecordEndpoint,
executeExplainDeleteFieldEndpoint,
executeExplainDeleteRecordsEndpoint,
executeExplainUpdateFieldEndpoint,
executeExplainUpdateRecordEndpoint,
} from './handlers/tables/explainCommand';
import { executeGetRecordByIdEndpoint } from './handlers/tables/getRecordById';
@ -864,6 +867,96 @@ export const createV2OrpcRouter = (options: IV2OrpcRouterOptions = {}) => {
throwDomainError('INTERNAL_SERVER_ERROR', result.body.error);
});
// oRPC handler for tables.explainCreateField: resolves the explain service
// from the DI container and adapts the endpoint's status codes to ORPCError.
const tablesExplainCreateField = os.tables.explainCreateField.handler(async ({ input }) => {
  const container = await resolveContainer();
  let executionContext: IExecutionContext;
  try {
    executionContext = await createExecutionContext();
  } catch {
    throw new ORPCError('INTERNAL_SERVER_ERROR', {
      message: executionContextErrorMessage,
    });
  }
  const explainService = container.resolve<IExplainService>(
    v2CommandExplainTokens.explainService
  );
  const result = await executeExplainCreateFieldEndpoint(executionContext, input, explainService);
  if (result.status === 200) return result.body;
  if (result.status === 400) {
    throwDomainError('BAD_REQUEST', result.body.error);
  }
  if (result.status === 404) {
    throwDomainError('NOT_FOUND', result.body.error);
  }
  // Any other status is surfaced as a 500.
  throwDomainError('INTERNAL_SERVER_ERROR', result.body.error);
});
// oRPC handler for tables.explainUpdateField; same shape as the create-field handler.
const tablesExplainUpdateField = os.tables.explainUpdateField.handler(async ({ input }) => {
  const container = await resolveContainer();
  let executionContext: IExecutionContext;
  try {
    executionContext = await createExecutionContext();
  } catch {
    throw new ORPCError('INTERNAL_SERVER_ERROR', {
      message: executionContextErrorMessage,
    });
  }
  const explainService = container.resolve<IExplainService>(
    v2CommandExplainTokens.explainService
  );
  const result = await executeExplainUpdateFieldEndpoint(executionContext, input, explainService);
  if (result.status === 200) return result.body;
  if (result.status === 400) {
    throwDomainError('BAD_REQUEST', result.body.error);
  }
  if (result.status === 404) {
    throwDomainError('NOT_FOUND', result.body.error);
  }
  throwDomainError('INTERNAL_SERVER_ERROR', result.body.error);
});
// oRPC handler for tables.explainDeleteField; same shape as the create-field handler.
const tablesExplainDeleteField = os.tables.explainDeleteField.handler(async ({ input }) => {
  const container = await resolveContainer();
  let executionContext: IExecutionContext;
  try {
    executionContext = await createExecutionContext();
  } catch {
    throw new ORPCError('INTERNAL_SERVER_ERROR', {
      message: executionContextErrorMessage,
    });
  }
  const explainService = container.resolve<IExplainService>(
    v2CommandExplainTokens.explainService
  );
  const result = await executeExplainDeleteFieldEndpoint(executionContext, input, explainService);
  if (result.status === 200) return result.body;
  if (result.status === 400) {
    throwDomainError('BAD_REQUEST', result.body.error);
  }
  if (result.status === 404) {
    throwDomainError('NOT_FOUND', result.body.error);
  }
  throwDomainError('INTERNAL_SERVER_ERROR', result.body.error);
});
const tablesExplainUpdateRecord = os.tables.explainUpdateRecord.handler(async ({ input }) => {
const container = await resolveContainer();
@ -942,6 +1035,8 @@ export const createV2OrpcRouter = (options: IV2OrpcRouterOptions = {}) => {
createTables: tablesCreateTables,
createField: tablesCreateField,
updateField: tablesUpdateField,
explainCreateField: tablesExplainCreateField,
explainUpdateField: tablesExplainUpdateField,
createRecord: tablesCreateRecord,
submitRecord: tablesSubmitRecord,
createRecords: tablesCreateRecords,
@ -954,6 +1049,7 @@ export const createV2OrpcRouter = (options: IV2OrpcRouterOptions = {}) => {
deleteByRange: tablesDeleteByRange,
deleteRecords: tablesDeleteRecords,
deleteField: tablesDeleteField,
explainDeleteField: tablesExplainDeleteField,
delete: tablesDelete,
getById: tablesGetById,
getRecord: tablesGetRecord,

View File

@ -44,9 +44,12 @@ import { deleteTableErrorResponseSchema, deleteTableOkResponseSchema } from './t
import { duplicateFieldOkResponseSchema } from './table/duplicateField';
import { duplicateRecordOkResponseSchema } from './table/duplicateRecord';
import {
explainCreateFieldInputSchema,
explainCreateRecordInputSchema,
explainDeleteFieldInputSchema,
explainDeleteRecordsInputSchema,
explainOkResponseSchema,
explainUpdateFieldInputSchema,
explainUpdateRecordInputSchema,
} from './table/explainCommand';
import { getRecordByIdOkResponseSchema } from './table/getRecordById';
@ -73,8 +76,11 @@ const TABLES_CREATE_RECORDS_PATH = '/tables/createRecords';
const TABLES_DELETE_RECORDS_PATH = '/tables/deleteRecords';
const TABLES_DELETE_FIELD_PATH = '/tables/deleteField';
const TABLES_DELETE_PATH = '/tables/delete';
const TABLES_EXPLAIN_CREATE_FIELD_PATH = '/tables/explainCreateField';
const TABLES_EXPLAIN_CREATE_RECORD_PATH = '/tables/explainCreateRecord';
const TABLES_EXPLAIN_UPDATE_FIELD_PATH = '/tables/explainUpdateField';
const TABLES_EXPLAIN_UPDATE_RECORD_PATH = '/tables/explainUpdateRecord';
const TABLES_EXPLAIN_DELETE_FIELD_PATH = '/tables/explainDeleteField';
const TABLES_EXPLAIN_DELETE_RECORDS_PATH = '/tables/explainDeleteRecords';
const TABLES_GET_PATH = '/tables/get';
const TABLES_GET_RECORD_PATH = '/tables/getRecord';
@ -146,6 +152,16 @@ export const v2Contract: AnyContractRouter = {
})
.input(createFieldInputSchema)
.output(createFieldOkResponseSchema),
explainCreateField: oc
.route({
method: 'POST',
path: TABLES_EXPLAIN_CREATE_FIELD_PATH,
successStatus: 200,
summary: 'Explain create field',
tags: ['tables'],
})
.input(explainCreateFieldInputSchema)
.output(explainOkResponseSchema),
updateField: oc
.route({
method: 'POST',
@ -156,6 +172,16 @@ export const v2Contract: AnyContractRouter = {
})
.input(updateFieldInputSchema)
.output(updateFieldOkResponseSchema),
explainUpdateField: oc
.route({
method: 'POST',
path: TABLES_EXPLAIN_UPDATE_FIELD_PATH,
successStatus: 200,
summary: 'Explain update field',
tags: ['tables'],
})
.input(explainUpdateFieldInputSchema)
.output(explainOkResponseSchema),
createRecord: oc
.route({
method: 'POST',
@ -206,6 +232,16 @@ export const v2Contract: AnyContractRouter = {
})
.input(deleteFieldInputSchema)
.output(deleteFieldOkResponseSchema),
explainDeleteField: oc
.route({
method: 'POST',
path: TABLES_EXPLAIN_DELETE_FIELD_PATH,
successStatus: 200,
summary: 'Explain delete field',
tags: ['tables'],
})
.input(explainDeleteFieldInputSchema)
.output(explainOkResponseSchema),
delete: oc
.route({
method: 'DELETE',

View File

@ -1,4 +1,9 @@
import { z } from 'zod';
import {
createFieldInputSchema,
deleteFieldInputSchema,
updateFieldInputSchema,
} from '@teable/v2-core';
import {
apiOkResponseDtoSchema,
@ -10,6 +15,27 @@ import {
} from '../shared/http';
// Input schemas for explain endpoints
export const explainCreateFieldInputSchema = createFieldInputSchema.extend({
analyze: z.boolean().optional().default(false),
includeSql: z.boolean().optional().default(true),
includeGraph: z.boolean().optional().default(false),
includeLocks: z.boolean().optional().default(true),
});
export const explainUpdateFieldInputSchema = updateFieldInputSchema.extend({
analyze: z.boolean().optional().default(false),
includeSql: z.boolean().optional().default(true),
includeGraph: z.boolean().optional().default(false),
includeLocks: z.boolean().optional().default(true),
});
export const explainDeleteFieldInputSchema = deleteFieldInputSchema.extend({
analyze: z.boolean().optional().default(false),
includeSql: z.boolean().optional().default(true),
includeGraph: z.boolean().optional().default(false),
includeLocks: z.boolean().optional().default(true),
});
export const explainCreateRecordInputSchema = z.object({
tableId: z.string(),
fields: z.record(z.string(), z.unknown()),
@ -38,6 +64,9 @@ export const explainDeleteRecordsInputSchema = z.object({
includeLocks: z.boolean().optional().default(true),
});
export type IExplainCreateFieldInput = z.infer<typeof explainCreateFieldInputSchema>;
export type IExplainUpdateFieldInput = z.infer<typeof explainUpdateFieldInputSchema>;
export type IExplainDeleteFieldInput = z.infer<typeof explainDeleteFieldInputSchema>;
export type IExplainCreateRecordInput = z.infer<typeof explainCreateRecordInputSchema>;
export type IExplainUpdateRecordInput = z.infer<typeof explainUpdateRecordInputSchema>;
export type IExplainDeleteRecordsInput = z.infer<typeof explainDeleteRecordsInputSchema>;
@ -311,7 +340,15 @@ const complexityAssessmentSchema = z.object({
});
const commandExplainInfoSchema = z.object({
type: z.enum(['CreateRecord', 'UpdateRecord', 'DeleteRecords', 'Paste']),
type: z.enum([
'CreateField',
'UpdateField',
'DeleteField',
'CreateRecord',
'UpdateRecord',
'DeleteRecords',
'Paste',
]),
tableId: z.string(),
tableName: z.string(),
recordIds: z.array(z.string()),

View File

@ -0,0 +1,78 @@
import { Command, Options } from '@effect/cli';
import { Effect, Option } from 'effect';
import type { ICreateFieldCommandInput } from '@teable/v2-core';
import { ValidationError } from '../../errors/CliError';
import { CommandExplain } from '../../services/CommandExplain';
import { Output } from '../../services/Output';
import { analyzeOption, baseIdOption, connectionOption, tableIdOption } from '../shared';
// --field: raw JSON payload describing the field (parsed by parseJson below).
const fieldOption = Options.text('field').pipe(
  Options.withDescription('JSON field payload matching CreateFieldCommand input')
);
// --order: optional JSON view-placement payload for the new field.
const orderOption = Options.text('order').pipe(
  Options.withDescription('Optional JSON order payload: {"viewId":"...","orderIndex":0}'),
  Options.optional
);
/** Parses a JSON CLI flag value, failing with a ValidationError that names the flag. */
const parseJson = <T>(json: string, field: string): Effect.Effect<T, ValidationError> =>
  Effect.try({
    try: () => JSON.parse(json) as T,
    catch: () => new ValidationError({ message: `Invalid JSON in --${field}`, field }),
  });
/** Like parseJson, but an absent optional flag yields undefined instead of failing. */
const parseOptionalJson = <T>(
  json: Option.Option<string>,
  field: string
): Effect.Effect<T | undefined, ValidationError> => {
  const raw = Option.getOrUndefined(json);
  // `!raw` also treats an empty string as "not provided".
  if (!raw) return Effect.succeed(undefined);
  return parseJson<T>(raw, field);
};
/**
 * `explain create-field` handler: parses the --field/--order JSON payloads,
 * asks CommandExplain for the CreateField plan, and prints the result — or
 * the failure — through the Output service.
 */
const handler = (args: {
  readonly connection: Option.Option<string>;
  readonly baseId: string;
  readonly tableId: string;
  readonly field: string;
  readonly order: Option.Option<string>;
  readonly analyze: boolean;
}) =>
  Effect.gen(function* () {
    const commandExplain = yield* CommandExplain;
    const output = yield* Output;
    const field = yield* parseJson<ICreateFieldCommandInput['field']>(args.field, 'field');
    const order = yield* parseOptionalJson<ICreateFieldCommandInput['order']>(args.order, 'order');
    const input = {
      baseId: args.baseId,
      tableId: args.tableId,
      field,
      order,
      analyze: args.analyze,
    };
    const result = yield* commandExplain.explainCreateField(input).pipe(
      Effect.catchAll((error) =>
        Effect.gen(function* () {
          // Print structured error output before re-failing the effect.
          yield* output.error('explain.create-field', input, error);
          return yield* Effect.fail(error);
        })
      )
    );
    yield* output.success('explain.create-field', input, result);
  });
/** `explain create-field` subcommand: wires the CLI options to the handler. */
export const explainCreateField = Command.make(
  'create-field',
  {
    connection: connectionOption,
    baseId: baseIdOption,
    tableId: tableIdOption,
    field: fieldOption,
    order: orderOption,
    analyze: analyzeOption,
  },
  handler
).pipe(Command.withDescription('Explain CreateField command execution plan'));

View File

@ -0,0 +1,53 @@
import { Command } from '@effect/cli';
import { Effect, Option } from 'effect';
import { CommandExplain } from '../../services/CommandExplain';
import { Output } from '../../services/Output';
import {
analyzeOption,
baseIdOptionalOption,
connectionOption,
fieldIdOption,
tableIdOption,
} from '../shared';
/**
 * CLI handler for `explain delete-field`.
 *
 * --base-id is optional here: when omitted, the CommandExplain service
 * resolves it by loading the table. Failures are reported via Output and
 * re-raised so the CLI exits non-zero.
 */
const handler = (args: {
  readonly connection: Option.Option<string>;
  readonly baseId: Option.Option<string>;
  readonly tableId: string;
  readonly fieldId: string;
  readonly analyze: boolean;
}) =>
  Effect.gen(function* () {
    const commandExplain = yield* CommandExplain;
    const output = yield* Output;
    const input = {
      baseId: Option.getOrUndefined(args.baseId),
      tableId: args.tableId,
      fieldId: args.fieldId,
      analyze: args.analyze,
    };
    // tapError reports the failure and re-propagates it unchanged — the
    // idiomatic replacement for the original catchAll + Effect.fail round-trip.
    const result = yield* commandExplain
      .explainDeleteField(input)
      .pipe(Effect.tapError((error) => output.error('explain.delete-field', input, error)));
    yield* output.success('explain.delete-field', input, result);
  });
// CLI surface for `explain delete-field`; note baseId uses the optional variant.
const deleteFieldOptions = {
  connection: connectionOption,
  baseId: baseIdOptionalOption,
  tableId: tableIdOption,
  fieldId: fieldIdOption,
  analyze: analyzeOption,
};

export const explainDeleteField = Command.make('delete-field', deleteFieldOptions, handler).pipe(
  Command.withDescription('Explain DeleteField command execution plan')
);

View File

@ -1,4 +1,7 @@
// Record-level explain commands.
export { explainCreate } from './create';
export { explainUpdate } from './update';
export { explainDelete } from './delete';
// Field-level explain commands.
export { explainCreateField } from './create-field';
export { explainUpdateField } from './update-field';
export { explainDeleteField } from './delete-field';
// Clipboard paste explain command.
export { explainPaste } from './paste';

View File

@ -0,0 +1,60 @@
import { Command, Options } from '@effect/cli';
import { Effect, Option } from 'effect';
import type { IFieldUpdateInput } from '@teable/v2-core';
import { ValidationError } from '../../errors/CliError';
import { CommandExplain } from '../../services/CommandExplain';
import { Output } from '../../services/Output';
import { analyzeOption, connectionOption, fieldIdOption, tableIdOption } from '../shared';
// --field (required): raw JSON string with the partial field update to apply.
const FIELD_HELP = 'JSON field update payload matching UpdateFieldCommand input';
const fieldOption = Options.text('field').pipe(Options.withDescription(FIELD_HELP));
/**
 * Parse a raw CLI option value as JSON, failing with a ValidationError that
 * names the offending --<option>.
 * NOTE(review): duplicated in create-field.ts — worth hoisting into ../shared.
 */
const parseJson = <T>(raw: string, optionName: string): Effect.Effect<T, ValidationError> => {
  const toError = () =>
    new ValidationError({ message: `Invalid JSON in --${optionName}`, field: optionName });
  return Effect.try({ try: (): T => JSON.parse(raw), catch: toError });
};
/**
 * CLI handler for `explain update-field`.
 *
 * Parses the JSON --field payload into an IFieldUpdateInput, then asks the
 * CommandExplain service for the UpdateField execution plan. Failures are
 * reported via Output and re-raised so the CLI exits non-zero.
 */
const handler = (args: {
  readonly connection: Option.Option<string>;
  readonly tableId: string;
  readonly fieldId: string;
  readonly field: string;
  readonly analyze: boolean;
}) =>
  Effect.gen(function* () {
    const commandExplain = yield* CommandExplain;
    const output = yield* Output;
    const field = yield* parseJson<IFieldUpdateInput>(args.field, 'field');
    const input = {
      tableId: args.tableId,
      fieldId: args.fieldId,
      field,
      analyze: args.analyze,
    };
    // tapError reports the failure and re-propagates it unchanged — the
    // idiomatic replacement for the original catchAll + Effect.fail round-trip.
    const result = yield* commandExplain
      .explainUpdateField(input)
      .pipe(Effect.tapError((error) => output.error('explain.update-field', input, error)));
    yield* output.success('explain.update-field', input, result);
  });
// CLI surface for `explain update-field`.
const updateFieldOptions = {
  connection: connectionOption,
  tableId: tableIdOption,
  fieldId: fieldIdOption,
  field: fieldOption,
  analyze: analyzeOption,
};

export const explainUpdateField = Command.make('update-field', updateFieldOptions, handler).pipe(
  Command.withDescription('Explain UpdateField command execution plan')
);

View File

@ -1,6 +1,14 @@
import { Command } from '@effect/cli';
import { dottea } from './dottea';
import { explainCreate, explainUpdate, explainDelete, explainPaste } from './explain';
import {
explainCreate,
explainUpdate,
explainDelete,
explainCreateField,
explainUpdateField,
explainDeleteField,
explainPaste,
} from './explain';
import { mockGenerate } from './mock';
import { recordsList, recordsGet, recordsCreate, recordsUpdate, recordsDelete } from './records';
import { relations } from './relations';
@ -18,7 +26,15 @@ import {
// explain subcommand group
// NOTE(review): this span renders a diff without +/- markers — the single-line
// withSubcommands call below is the pre-change version and the multi-line list
// (adding the three field-level explain commands) is its replacement; only one
// of the two calls exists in the real file.
export const explain = Command.make('explain').pipe(
Command.withDescription('Explain command execution plans'),
Command.withSubcommands([explainCreate, explainUpdate, explainDelete, explainPaste])
Command.withSubcommands([
explainCreate,
explainUpdate,
explainDelete,
explainCreateField,
explainUpdateField,
explainDeleteField,
explainPaste,
])
);
// mock subcommand group

View File

@ -6,31 +6,43 @@ import {
} from '@teable/v2-command-explain';
import {
CreateRecordCommand,
CreateFieldCommand,
UpdateRecordCommand,
UpdateFieldCommand,
DeleteRecordsCommand,
DeleteFieldCommand,
PasteCommand,
ActorId,
TableByIdSpec,
TableId,
v2CoreTokens,
type ITableRepository,
} from '@teable/v2-core';
import { registerV2DebugData } from '@teable/v2-debug-data';
import { Effect, Layer } from 'effect';
import { CliError } from '../errors/CliError';
import {
CommandExplain,
type ExplainCreateFieldInput,
type ExplainCreateInput,
type ExplainUpdateInput,
type ExplainDeleteFieldInput,
type ExplainDeleteInput,
type ExplainPasteInput,
type ExplainUpdateFieldInput,
type ExplainUpdateInput,
} from '../services/CommandExplain';
import { Database } from '../services/Database';
// NOTE(review): this span renders a diff without +/- markers. The old effectful
// createContext was refactored into a synchronous createContextUnsafe (throwing
// instead of returning Effect.fail) so it can also be called from inside
// tryPromise blocks; createContext is re-derived from it via Effect.sync below.
// CAUTION: Effect.sync turns the thrown CliError into a *defect* (die), whereas
// the old version failed with a typed error — confirm no caller relies on
// catching it via the error channel.
const createContext = () => {
const createContextUnsafe = () => {
// Build the debug execution context with a fixed CLI actor id.
const actorIdResult = ActorId.create('cli-debug');
if (actorIdResult.isErr()) {
return Effect.fail(CliError.fromUnknown(actorIdResult.error));
throw CliError.fromUnknown(actorIdResult.error);
}
return Effect.succeed({ actorId: actorIdResult.value });
return { actorId: actorIdResult.value };
};
const createContext = () => Effect.sync(createContextUnsafe);
export const CommandExplainLive = Layer.effect(
CommandExplain,
Effect.gen(function* () {
@ -42,8 +54,116 @@ export const CommandExplainLive = Layer.effect(
const explainService = container.resolve(
v2CommandExplainTokens.explainService
) as IExplainService;
const tableRepository = container.resolve(v2CoreTokens.tableRepository) as ITableRepository;
const resolveBaseId = (tableIdRaw: string): Effect.Effect<string, CliError> =>
Effect.tryPromise({
try: async () => {
const tableId = TableId.create(tableIdRaw);
if (tableId.isErr()) throw tableId.error;
const context = createContextUnsafe();
const table = await tableRepository.findOne(context, TableByIdSpec.create(tableId.value));
if (table.isErr()) throw table.error;
if (!table.value) throw new Error(`Table "${tableIdRaw}" not found`);
return table.value.baseId().toString();
},
catch: (e) => CliError.fromUnknown(e),
});
return {
// Build a CreateFieldCommand from CLI input and run it through the explain
// service (SQL and locks captured, dependency graph omitted).
explainCreateField: (
  input: ExplainCreateFieldInput
): Effect.Effect<ExplainResult, CliError> =>
  Effect.gen(function* () {
    const context = yield* createContext();
    const command = CreateFieldCommand.create({
      baseId: input.baseId,
      tableId: input.tableId,
      field: input.field,
      order: input.order,
    });
    if (command.isErr()) {
      return yield* Effect.fail(CliError.fromUnknown(command.error));
    }
    // Result-style errors are thrown inside the promise and converted to
    // CliError by the tryPromise catch.
    const runExplain = async (): Promise<ExplainResult> => {
      const explained = await explainService.explain(context, command.value, {
        analyze: input.analyze,
        includeSql: true,
        includeGraph: false,
        includeLocks: true,
      });
      if (explained.isErr()) throw explained.error;
      return explained.value;
    };
    return yield* Effect.tryPromise({
      try: runExplain,
      catch: (cause) => CliError.fromUnknown(cause),
    });
  }),
// Build an UpdateFieldCommand from CLI input and run it through the explain
// service (SQL and locks captured, dependency graph omitted).
explainUpdateField: (
  input: ExplainUpdateFieldInput
): Effect.Effect<ExplainResult, CliError> =>
  Effect.gen(function* () {
    const context = yield* createContext();
    const command = UpdateFieldCommand.create({
      tableId: input.tableId,
      fieldId: input.fieldId,
      field: input.field,
    });
    if (command.isErr()) {
      return yield* Effect.fail(CliError.fromUnknown(command.error));
    }
    // Result-style errors are thrown inside the promise and converted to
    // CliError by the tryPromise catch.
    const runExplain = async (): Promise<ExplainResult> => {
      const explained = await explainService.explain(context, command.value, {
        analyze: input.analyze,
        includeSql: true,
        includeGraph: false,
        includeLocks: true,
      });
      if (explained.isErr()) throw explained.error;
      return explained.value;
    };
    return yield* Effect.tryPromise({
      try: runExplain,
      catch: (cause) => CliError.fromUnknown(cause),
    });
  }),
// Build a DeleteFieldCommand and explain it. Unlike create/update, the command
// needs a baseId; when the caller omitted it we resolve it from the table.
explainDeleteField: (
  input: ExplainDeleteFieldInput
): Effect.Effect<ExplainResult, CliError> =>
  Effect.gen(function* () {
    const context = yield* createContext();
    const baseId = input.baseId ?? (yield* resolveBaseId(input.tableId));
    const command = DeleteFieldCommand.create({
      baseId,
      tableId: input.tableId,
      fieldId: input.fieldId,
    });
    if (command.isErr()) {
      return yield* Effect.fail(CliError.fromUnknown(command.error));
    }
    // Result-style errors are thrown inside the promise and converted to
    // CliError by the tryPromise catch.
    const runExplain = async (): Promise<ExplainResult> => {
      const explained = await explainService.explain(context, command.value, {
        analyze: input.analyze,
        includeSql: true,
        includeGraph: false,
        includeLocks: true,
      });
      if (explained.isErr()) throw explained.error;
      return explained.value;
    };
    return yield* Effect.tryPromise({
      try: runExplain,
      catch: (cause) => CliError.fromUnknown(cause),
    });
  }),
explainCreate: (input: ExplainCreateInput): Effect.Effect<ExplainResult, CliError> =>
Effect.gen(function* () {
const context = yield* createContext();

View File

@ -1,9 +1,32 @@
import type { ExplainResult } from '@teable/v2-command-explain';
import type { IPasteCommandInput } from '@teable/v2-core';
import type {
ICreateFieldCommandInput,
IDeleteFieldCommandInput,
IFieldUpdateInput,
IPasteCommandInput,
} from '@teable/v2-core';
import type { Effect } from 'effect';
import { Context } from 'effect';
import type { CliError } from '../errors';
/** Input for the CLI `explain create-field` command: full CreateFieldCommand input plus analyze. */
export interface ExplainCreateFieldInput extends ICreateFieldCommandInput {
// Forwarded to the explain service's `analyze` option — presumably
// EXPLAIN ANALYZE vs plan-only; confirm against IExplainService.
readonly analyze: boolean;
}
/** Input for `explain update-field`; unlike delete, no baseId is needed to build the command. */
export interface ExplainUpdateFieldInput {
readonly tableId: string;
readonly fieldId: string;
// Partial field payload applied by UpdateFieldCommand.
readonly field: IFieldUpdateInput;
// Forwarded to the explain service's `analyze` option.
readonly analyze: boolean;
}
/**
 * Input for `explain delete-field`. baseId is optional: when omitted, the
 * service resolves it by loading the table (resolveBaseId in CommandExplainLive).
 */
export interface ExplainDeleteFieldInput {
readonly baseId?: IDeleteFieldCommandInput['baseId'];
readonly tableId: IDeleteFieldCommandInput['tableId'];
readonly fieldId: IDeleteFieldCommandInput['fieldId'];
readonly analyze: boolean;
}
export interface ExplainCreateInput {
readonly tableId: string;
readonly fields: Record<string, unknown>;
@ -30,6 +53,15 @@ export interface ExplainPasteInput extends IPasteCommandInput {
export class CommandExplain extends Context.Tag('CommandExplain')<
CommandExplain,
{
readonly explainCreateField: (
input: ExplainCreateFieldInput
) => Effect.Effect<ExplainResult, CliError>;
readonly explainUpdateField: (
input: ExplainUpdateFieldInput
) => Effect.Effect<ExplainResult, CliError>;
readonly explainDeleteField: (
input: ExplainDeleteFieldInput
) => Effect.Effect<ExplainResult, CliError>;
readonly explainCreate: (input: ExplainCreateInput) => Effect.Effect<ExplainResult, CliError>;
readonly explainUpdate: (input: ExplainUpdateInput) => Effect.Effect<ExplainResult, CliError>;
readonly explainDelete: (input: ExplainDeleteInput) => Effect.Effect<ExplainResult, CliError>;

View File

@ -0,0 +1,145 @@
import { afterAll, beforeAll, describe, expect, it } from 'vitest';
import { explainOkResponseSchema } from '@teable/v2-contract-http';
import { getSharedTestContext, type SharedTestContext } from './shared/globalTestContext';
// E2E coverage for the three field-level explain endpoints
// (explainCreateField / explainUpdateField / explainDeleteField).
describe('v2 field explain endpoints (e2e)', () => {
let ctx: SharedTestContext;
let tableId = '';
let primaryFieldId = '';
let formulaFieldId = '';
// POST a payload to an explain endpoint and return the parsed `data` envelope.
// Non-200 statuses and contract-schema mismatches fail the calling test with context.
const postExplain = async (path: string, payload: Record<string, unknown>) => {
const response = await fetch(`${ctx.baseUrl}${path}`, {
method: 'POST',
headers: { 'content-type': 'application/json' },
body: JSON.stringify(payload),
});
if (response.status !== 200) {
throw new Error(`Explain request failed (${response.status}): ${await response.text()}`);
}
const raw = await response.json();
// Validate the body against the shared HTTP contract before using it.
const parsed = explainOkResponseSchema.safeParse(raw);
expect(parsed.success).toBe(true);
if (!parsed.success || !parsed.data.ok) {
throw new Error('Failed to parse explain response');
}
return parsed.data.data;
};
// Fixture: one table with a primary text field, one record, and a formula
// field referencing the primary — enough to exercise all three explains.
beforeAll(async () => {
ctx = await getSharedTestContext();
const table = await ctx.createTable({
baseId: ctx.baseId,
name: 'Field Explain',
fields: [{ type: 'singleLineText', name: 'Name', isPrimary: true }],
views: [{ type: 'grid' }],
});
tableId = table.id;
primaryFieldId = table.fields.find((field) => field.name === 'Name')?.id ?? '';
if (!primaryFieldId) {
throw new Error('Missing primary field id');
}
await ctx.createRecord(tableId, {
[primaryFieldId]: 'Alpha',
});
const updatedTable = await ctx.createField({
baseId: ctx.baseId,
tableId,
field: {
type: 'formula',
name: 'Computed',
options: {
expression: `{${primaryFieldId}}`,
},
},
});
formulaFieldId = updatedTable.fields.find((field) => field.name === 'Computed')?.id ?? '';
if (!formulaFieldId) {
throw new Error('Missing formula field id');
}
});
// Cleanup: drop the fixture table (tableId stays '' if beforeAll failed early).
afterAll(async () => {
if (tableId) {
await ctx.deleteTable(tableId);
}
});
it('explains create field with schema and backfill SQL', async () => {
const result = await postExplain('/tables/explainCreateField', {
baseId: ctx.baseId,
tableId,
field: {
type: 'formula',
name: 'Preview Formula',
options: {
expression: `UPPER({${primaryFieldId}})`,
},
},
analyze: false,
includeSql: true,
includeGraph: false,
includeLocks: false,
});
expect(result.command.type).toBe('CreateField');
expect(result.command.changedFieldNames?.[0]).toBe('Preview Formula');
// Creating a field must surface DDL that adds the column.
expect(result.sqlExplains.some((step) => step.sql.toLowerCase().includes('alter table'))).toBe(
true
);
// explainOnly is expected non-null on at least one step — presumably marking
// plan-only (non-executed) steps when analyze=false; confirm against the
// ExplainResult schema.
expect(result.sqlExplains.some((step) => step.explainOnly != null)).toBe(true);
});
it('explains update field with captured SQL', async () => {
const result = await postExplain('/tables/explainUpdateField', {
tableId,
fieldId: formulaFieldId,
field: {
options: {
expression: `LOWER({${primaryFieldId}})`,
},
},
analyze: false,
includeSql: true,
includeGraph: false,
includeLocks: false,
});
expect(result.command.type).toBe('UpdateField');
expect(result.command.changedFieldIds).toEqual([formulaFieldId]);
expect(result.sqlExplains.length).toBeGreaterThan(0);
// Recomputing a formula should produce DML: a plain UPDATE or a CTE-driven write.
expect(
result.sqlExplains.some((step) => {
const normalized = step.sql.toLowerCase();
return normalized.startsWith('update ') || normalized.startsWith('with ');
})
).toBe(true);
});
it('explains delete field with drop-column SQL', async () => {
const result = await postExplain('/tables/explainDeleteField', {
baseId: ctx.baseId,
tableId,
fieldId: formulaFieldId,
analyze: false,
includeSql: true,
includeGraph: false,
includeLocks: false,
});
expect(result.command.type).toBe('DeleteField');
expect(result.command.changedFieldIds).toEqual([formulaFieldId]);
// Deleting a field must surface the DROP COLUMN DDL.
expect(result.sqlExplains.some((step) => step.sql.toLowerCase().includes('drop column'))).toBe(
true
);
});
});