Revert "Feat/parent child retrieval" (#12095)

Author: -LAN-
Date: 2024-12-25 20:55:44 +08:00
Committed by: GitHub
Parent: 9231fdbf4c
Commit: db2aa83a7c
216 changed files with 3116 additions and 9066 deletions


@@ -1,223 +0,0 @@
import groupBy from 'lodash-es/groupBy'
import type { MutationOptions } from '@tanstack/react-query'
import { useMutation } from '@tanstack/react-query'
import { createDocument, createFirstDocument, fetchDefaultProcessRule, fetchFileIndexingEstimate } from '../datasets'
import { type IndexingType } from '@/app/components/datasets/create/step-two'
import type { ChunkingMode, CrawlOptions, CrawlResultItem, CreateDocumentReq, CustomFile, DataSourceType, FileIndexingEstimateResponse, IndexingEstimateParams, NotionInfo, ProcessRule, ProcessRuleResponse, createDocumentResponse } from '@/models/datasets'
import type { DataSourceProvider, NotionPage } from '@/models/common'
export const getNotionInfo = (
notionPages: NotionPage[],
) => {
const workspacesMap = groupBy(notionPages, 'workspace_id')
const workspaces = Object.keys(workspacesMap).map((workspaceId) => {
return {
workspaceId,
pages: workspacesMap[workspaceId],
}
})
return workspaces.map((workspace) => {
return {
workspace_id: workspace.workspaceId,
pages: workspace.pages.map((page) => {
const { page_id, page_name, page_icon, type } = page
return {
page_id,
page_name,
page_icon,
type,
}
}),
}
}) as NotionInfo[]
}
export const getWebsiteInfo = (
opts: {
websiteCrawlProvider: DataSourceProvider
websiteCrawlJobId: string
websitePages: CrawlResultItem[]
crawlOptions?: CrawlOptions
},
) => {
const { websiteCrawlProvider, websiteCrawlJobId, websitePages, crawlOptions } = opts
return {
provider: websiteCrawlProvider,
job_id: websiteCrawlJobId,
urls: websitePages.map(page => page.source_url),
only_main_content: crawlOptions?.only_main_content,
}
}
type GetFileIndexingEstimateParamsOptionBase = {
docForm: ChunkingMode
docLanguage: string
indexingTechnique: IndexingType
processRule: ProcessRule
dataset_id: string
}
type GetFileIndexingEstimateParamsOptionFile = GetFileIndexingEstimateParamsOptionBase & {
dataSourceType: DataSourceType.FILE
files: CustomFile[]
}
const getFileIndexingEstimateParamsForFile = ({
docForm,
docLanguage,
dataSourceType,
files,
indexingTechnique,
processRule,
dataset_id,
}: GetFileIndexingEstimateParamsOptionFile): IndexingEstimateParams => {
return {
info_list: {
data_source_type: dataSourceType,
file_info_list: {
file_ids: files.map(file => file.id) as string[],
},
},
indexing_technique: indexingTechnique,
process_rule: processRule,
doc_form: docForm,
doc_language: docLanguage,
dataset_id,
}
}
export const useFetchFileIndexingEstimateForFile = (
options: GetFileIndexingEstimateParamsOptionFile,
mutationOptions: MutationOptions<FileIndexingEstimateResponse> = {},
) => {
return useMutation({
mutationFn: async () => {
return fetchFileIndexingEstimate(getFileIndexingEstimateParamsForFile(options))
},
...mutationOptions,
})
}
type GetFileIndexingEstimateParamsOptionNotion = GetFileIndexingEstimateParamsOptionBase & {
dataSourceType: DataSourceType.NOTION
notionPages: NotionPage[]
}
const getFileIndexingEstimateParamsForNotion = ({
docForm,
docLanguage,
dataSourceType,
notionPages,
indexingTechnique,
processRule,
dataset_id,
}: GetFileIndexingEstimateParamsOptionNotion): IndexingEstimateParams => {
return {
info_list: {
data_source_type: dataSourceType,
notion_info_list: getNotionInfo(notionPages),
},
indexing_technique: indexingTechnique,
process_rule: processRule,
doc_form: docForm,
doc_language: docLanguage,
dataset_id,
}
}
export const useFetchFileIndexingEstimateForNotion = (
options: GetFileIndexingEstimateParamsOptionNotion,
mutationOptions: MutationOptions<FileIndexingEstimateResponse> = {},
) => {
return useMutation({
mutationFn: async () => {
return fetchFileIndexingEstimate(getFileIndexingEstimateParamsForNotion(options))
},
...mutationOptions,
})
}
type GetFileIndexingEstimateParamsOptionWeb = GetFileIndexingEstimateParamsOptionBase & {
dataSourceType: DataSourceType.WEB
websitePages: CrawlResultItem[]
crawlOptions?: CrawlOptions
websiteCrawlProvider: DataSourceProvider
websiteCrawlJobId: string
}
const getFileIndexingEstimateParamsForWeb = ({
docForm,
docLanguage,
dataSourceType,
websitePages,
crawlOptions,
websiteCrawlProvider,
websiteCrawlJobId,
indexingTechnique,
processRule,
dataset_id,
}: GetFileIndexingEstimateParamsOptionWeb): IndexingEstimateParams => {
return {
info_list: {
data_source_type: dataSourceType,
website_info_list: getWebsiteInfo({
websiteCrawlProvider,
websiteCrawlJobId,
websitePages,
crawlOptions,
}),
},
indexing_technique: indexingTechnique,
process_rule: processRule,
doc_form: docForm,
doc_language: docLanguage,
dataset_id,
}
}
export const useFetchFileIndexingEstimateForWeb = (
options: GetFileIndexingEstimateParamsOptionWeb,
mutationOptions: MutationOptions<FileIndexingEstimateResponse> = {},
) => {
return useMutation({
mutationFn: async () => {
return fetchFileIndexingEstimate(getFileIndexingEstimateParamsForWeb(options))
},
...mutationOptions,
})
}
export const useCreateFirstDocument = (
mutationOptions: MutationOptions<createDocumentResponse, Error, CreateDocumentReq> = {},
) => {
return useMutation({
mutationFn: async (createDocumentReq: CreateDocumentReq,
) => {
return createFirstDocument({ body: createDocumentReq })
},
...mutationOptions,
})
}
export const useCreateDocument = (
datasetId: string,
mutationOptions: MutationOptions<createDocumentResponse, Error, CreateDocumentReq> = {},
) => {
return useMutation({
mutationFn: async (req: CreateDocumentReq) => {
return createDocument({ datasetId, body: req })
},
...mutationOptions,
})
}
export const useFetchDefaultProcessRule = (
mutationOptions: MutationOptions<ProcessRuleResponse, Error, string> = {},
) => {
return useMutation({
mutationFn: async (url: string) => {
return fetchDefaultProcessRule({ url })
},
...mutationOptions,
})
}
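
For orientation, the sketch below shows one way the creation hooks above could be composed in a consumer. The wrapper name, the import path, and the process-rule URL are illustrative assumptions and are not part of the reverted code.

import type { CreateDocumentReq } from '@/models/datasets'
// Hypothetical import path for the reverted hook module shown above.
import { useCreateDocument, useFetchDefaultProcessRule } from './use-create-dataset'

// Sketch: fetch the default process rule first, then submit the document request.
export const useCreateDocumentFlow = (datasetId: string) => {
  const createDocument = useCreateDocument(datasetId)
  const defaultProcessRule = useFetchDefaultProcessRule()

  return async (req: CreateDocumentReq) => {
    // The rule URL below is an assumed placeholder, not a confirmed API route.
    await defaultProcessRule.mutateAsync('/datasets/process-rule')
    return createDocument.mutateAsync(req)
  }
}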


@@ -1,124 +0,0 @@
import {
useMutation,
useQuery,
} from '@tanstack/react-query'
import { del, get, patch } from '../base'
import { useInvalid } from '../use-base'
import type { MetadataType } from '../datasets'
import type { DocumentDetailResponse, SimpleDocumentDetail, UpdateDocumentBatchParams } from '@/models/datasets'
import { DocumentActionType } from '@/models/datasets'
import type { CommonResponse } from '@/models/common'
const NAME_SPACE = 'knowledge/document'
const useDocumentListKey = [NAME_SPACE, 'documentList']
export const useDocumentList = (payload: {
datasetId: string
query: {
keyword: string
page: number
limit: number
}
}) => {
const { query, datasetId } = payload
return useQuery<{ data: SimpleDocumentDetail[] }>({
queryKey: [...useDocumentListKey, datasetId, query],
queryFn: () => get<{ data: SimpleDocumentDetail[] }>(`/datasets/${datasetId}/documents`, {
params: query,
}),
})
}
const useAutoDisabledDocumentKey = [NAME_SPACE, 'autoDisabledDocument']
export const useAutoDisabledDocuments = (datasetId: string) => {
return useQuery({
queryKey: [...useAutoDisabledDocumentKey, datasetId],
queryFn: () => get<{ document_ids: string[] }>(`/datasets/${datasetId}/auto-disable-logs`),
})
}
export const useInvalidDisabledDocument = () => {
return useInvalid(useAutoDisabledDocumentKey)
}
const toBatchDocumentsIdParams = (documentIds: string[] | string) => {
const ids = Array.isArray(documentIds) ? documentIds : [documentIds]
return ids.map(id => `document_id=${id}`).join('&')
}
export const useDocumentBatchAction = (action: DocumentActionType) => {
return useMutation({
mutationFn: ({ datasetId, documentIds, documentId }: UpdateDocumentBatchParams) => {
return patch<CommonResponse>(`/datasets/${datasetId}/documents/status/${action}/batch?${toBatchDocumentsIdParams(documentId || documentIds!)}`)
},
})
}
export const useDocumentEnable = () => {
return useDocumentBatchAction(DocumentActionType.enable)
}
export const useDocumentDisable = () => {
return useDocumentBatchAction(DocumentActionType.disable)
}
export const useDocumentArchive = () => {
return useDocumentBatchAction(DocumentActionType.archive)
}
export const useDocumentUnArchive = () => {
return useDocumentBatchAction(DocumentActionType.unArchive)
}
export const useDocumentDelete = () => {
return useMutation({
mutationFn: ({ datasetId, documentIds, documentId }: UpdateDocumentBatchParams) => {
return del<CommonResponse>(`/datasets/${datasetId}/documents?${toBatchDocumentsIdParams(documentId || documentIds!)}`)
},
})
}
export const useSyncDocument = () => {
return useMutation({
mutationFn: ({ datasetId, documentId }: UpdateDocumentBatchParams) => {
return get<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/notion/sync`)
},
})
}
export const useSyncWebsite = () => {
return useMutation({
mutationFn: ({ datasetId, documentId }: UpdateDocumentBatchParams) => {
return get<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/website-sync`)
},
})
}
const useDocumentDetailKey = [NAME_SPACE, 'documentDetail']
export const useDocumentDetail = (payload: {
datasetId: string
documentId: string
params: { metadata: MetadataType }
}) => {
const { datasetId, documentId, params } = payload
return useQuery<DocumentDetailResponse>({
queryKey: [...useDocumentDetailKey, 'withoutMetaData', datasetId, documentId],
queryFn: () => get<DocumentDetailResponse>(`/datasets/${datasetId}/documents/${documentId}`, { params }),
})
}
export const useDocumentMetadata = (payload: {
datasetId: string
documentId: string
params: { metadata: MetadataType }
}) => {
const { datasetId, documentId, params } = payload
return useQuery<DocumentDetailResponse>({
queryKey: [...useDocumentDetailKey, 'withMetaData', datasetId, documentId],
queryFn: () => get<DocumentDetailResponse>(`/datasets/${datasetId}/documents/${documentId}`, { params }),
})
}
export const useInvalidDocumentDetailKey = () => {
return useInvalid(useDocumentDetailKey)
}
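
As a hedged usage sketch, the batch-action hooks above pair naturally with the invalidation helper. The import path and wrapper name are assumptions, and the sketch presumes useInvalid returns a callable that invalidates its query key, as its usage here suggests.

// Hypothetical import path for the reverted document hooks above.
import { useDocumentArchive, useInvalidDisabledDocument } from './use-document'

// Sketch: archive a batch of documents, then refresh the cached auto-disabled list.
export const useArchiveDocuments = () => {
  const archive = useDocumentArchive()
  const invalidateAutoDisabled = useInvalidDisabledDocument()

  return async (datasetId: string, documentIds: string[]) => {
    await archive.mutateAsync({ datasetId, documentIds })
    // Assumption: useInvalid returns a function that invalidates the given query key.
    invalidateAutoDisabled()
  }
}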


@@ -1,169 +0,0 @@
import { useMutation, useQuery } from '@tanstack/react-query'
import { del, get, patch, post } from '../base'
import type { CommonResponse } from '@/models/common'
import type {
BatchImportResponse,
ChildChunkDetail,
ChildSegmentsResponse,
ChunkingMode,
SegmentDetailModel,
SegmentUpdater,
SegmentsResponse,
} from '@/models/datasets'
const NAME_SPACE = 'segment'
export const useSegmentListKey = [NAME_SPACE, 'chunkList']
export const useSegmentList = (
payload: {
datasetId: string
documentId: string
params: {
page: number
limit: number
keyword: string
enabled: boolean | 'all'
}
},
disable?: boolean,
) => {
const { datasetId, documentId, params } = payload
const { page, limit, keyword, enabled } = params
return useQuery<SegmentsResponse>({
queryKey: [...useSegmentListKey, datasetId, documentId, page, limit, keyword, enabled],
queryFn: () => {
return get<SegmentsResponse>(`/datasets/${datasetId}/documents/${documentId}/segments`, { params })
},
enabled: !disable,
})
}
export const useUpdateSegment = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'update'],
mutationFn: (payload: { datasetId: string; documentId: string; segmentId: string; body: SegmentUpdater }) => {
const { datasetId, documentId, segmentId, body } = payload
return patch<{ data: SegmentDetailModel; doc_form: ChunkingMode }>(`/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}`, { body })
},
})
}
export const useAddSegment = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'add'],
mutationFn: (payload: { datasetId: string; documentId: string; body: SegmentUpdater }) => {
const { datasetId, documentId, body } = payload
return post<{ data: SegmentDetailModel; doc_form: ChunkingMode }>(`/datasets/${datasetId}/documents/${documentId}/segment`, { body })
},
})
}
export const useEnableSegment = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'enable'],
mutationFn: (payload: { datasetId: string; documentId: string; segmentIds: string[] }) => {
const { datasetId, documentId, segmentIds } = payload
const query = segmentIds.map(id => `segment_id=${id}`).join('&')
return patch<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/segment/enable?${query}`)
},
})
}
export const useDisableSegment = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'disable'],
mutationFn: (payload: { datasetId: string; documentId: string; segmentIds: string[] }) => {
const { datasetId, documentId, segmentIds } = payload
const query = segmentIds.map(id => `segment_id=${id}`).join('&')
return patch<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/segment/disable?${query}`)
},
})
}
export const useDeleteSegment = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'delete'],
mutationFn: (payload: { datasetId: string; documentId: string; segmentIds: string[] }) => {
const { datasetId, documentId, segmentIds } = payload
const query = segmentIds.map(id => `segment_id=${id}`).join('&')
return del<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/segments?${query}`)
},
})
}
export const useChildSegmentListKey = [NAME_SPACE, 'childChunkList']
export const useChildSegmentList = (
payload: {
datasetId: string
documentId: string
segmentId: string
params: {
page: number
limit: number
keyword: string
}
},
disable?: boolean,
) => {
const { datasetId, documentId, segmentId, params } = payload
const { page, limit, keyword } = params
return useQuery({
queryKey: [...useChildSegmentListKey, datasetId, documentId, segmentId, page, limit, keyword],
queryFn: () => {
return get<ChildSegmentsResponse>(`/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks`, { params })
},
enabled: !disable,
})
}
export const useDeleteChildSegment = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'childChunk', 'delete'],
mutationFn: (payload: { datasetId: string; documentId: string; segmentId: string; childChunkId: string }) => {
const { datasetId, documentId, segmentId, childChunkId } = payload
return del<CommonResponse>(`/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks/${childChunkId}`)
},
})
}
export const useAddChildSegment = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'childChunk', 'add'],
mutationFn: (payload: { datasetId: string; documentId: string; segmentId: string; body: { content: string } }) => {
const { datasetId, documentId, segmentId, body } = payload
return post<{ data: ChildChunkDetail }>(`/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks`, { body })
},
})
}
export const useUpdateChildSegment = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'childChunk', 'update'],
mutationFn: (payload: { datasetId: string; documentId: string; segmentId: string; childChunkId: string; body: { content: string } }) => {
const { datasetId, documentId, segmentId, childChunkId, body } = payload
return patch<{ data: ChildChunkDetail }>(`/datasets/${datasetId}/documents/${documentId}/segments/${segmentId}/child_chunks/${childChunkId}`, { body })
},
})
}
export const useSegmentBatchImport = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'batchImport'],
mutationFn: (payload: { url: string; body: FormData }) => {
const { url, body } = payload
return post<BatchImportResponse>(url, { body }, { bodyStringify: false, deleteContentType: true })
},
})
}
export const useCheckSegmentBatchImportProgress = () => {
return useMutation({
mutationKey: [NAME_SPACE, 'batchImport', 'checkProgress'],
mutationFn: (payload: { jobID: string }) => {
const { jobID } = payload
return get<BatchImportResponse>(`/datasets/batch_import_status/${jobID}`)
},
})
}
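
Finally, a minimal sketch of driving the batch-import pair above end to end: upload a segments file, then poll until the job settles. The import path, the polling interval, and the BatchImportResponse field names (job_id, job_status) are assumptions for illustration.

// Hypothetical import path for the reverted segment hooks above.
import { useSegmentBatchImport, useCheckSegmentBatchImportProgress } from './use-segment'

// Sketch: upload the file, then poll the batch-import job every 2 seconds.
export const useImportSegmentsAndWait = () => {
  const batchImport = useSegmentBatchImport()
  const checkProgress = useCheckSegmentBatchImportProgress()

  return async (url: string, file: File) => {
    const body = new FormData()
    body.append('file', file)
    // Assumption: the response exposes job_id and job_status fields.
    const { job_id } = await batchImport.mutateAsync({ url, body })

    for (;;) {
      const res = await checkProgress.mutateAsync({ jobID: job_id })
      if (res.job_status !== 'waiting' && res.job_status !== 'processing')
        return res
      await new Promise(resolve => setTimeout(resolve, 2000))
    }
  }
}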