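"""Celery task that keeps a Notion-imported document in sync with its source page.

The task reloads the page's last-edited timestamp from Notion and, if it differs
from the stored value, clears the document's existing segments and vector index
entries and re-runs the indexing pipeline.
"""
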
import logging
import time

import click
from celery import shared_task

from core.indexing_runner import DocumentIsPausedError, IndexingRunner
from core.rag.extractor.notion_extractor import NotionExtractor
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from libs.datetime_utils import naive_utc_now
from models.dataset import Dataset, Document, DocumentSegment
from models.source import DataSourceOauthBinding

logger = logging.getLogger(__name__)


@shared_task(queue="dataset")
def document_indexing_sync_task(dataset_id: str, document_id: str):
    """
    Asynchronously re-sync a document with its external data source and re-run
    indexing if the source page has changed.

    :param dataset_id: ID of the dataset the document belongs to
    :param document_id: ID of the document to sync

    Usage: document_indexing_sync_task.delay(dataset_id, document_id)
    """
    logger.info(click.style(f"Start sync document: {document_id}", fg="green"))
    start_at = time.perf_counter()

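    # look up the document scoped to the given dataset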
    document = db.session.query(Document).where(Document.id == document_id, Document.dataset_id == dataset_id).first()

    if not document:
        logger.info(click.style(f"Document not found: {document_id}", fg="red"))
        db.session.close()
        return

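    # only documents imported from Notion are handled by this sync task; other
    # data source types are left untouched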
    data_source_info = document.data_source_info_dict
    if document.data_source_type == "notion_import":
        if (
            not data_source_info
            or "notion_page_id" not in data_source_info
            or "notion_workspace_id" not in data_source_info
        ):
            raise ValueError("no notion page found")
        workspace_id = data_source_info["notion_workspace_id"]
        page_id = data_source_info["notion_page_id"]
        page_type = data_source_info["type"]
        page_edited_time = data_source_info["last_edited_time"]
        data_source_binding = (
            db.session.query(DataSourceOauthBinding)
            .where(
                db.and_(
                    DataSourceOauthBinding.tenant_id == document.tenant_id,
                    DataSourceOauthBinding.provider == "notion",
                    DataSourceOauthBinding.disabled == False,
                    DataSourceOauthBinding.source_info["workspace_id"] == f'"{workspace_id}"',
                )
            )
            .first()
        )
        if not data_source_binding:
            raise ValueError("Data source binding not found.")

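        # fetch the page's current metadata from Notion using the workspace's stored OAuth access token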
        loader = NotionExtractor(
            notion_workspace_id=workspace_id,
            notion_obj_id=page_id,
            notion_page_type=page_type,
            notion_access_token=data_source_binding.access_token,
            tenant_id=document.tenant_id,
        )

        last_edited_time = loader.get_notion_last_edited_time()

        # re-index only if the page has been edited since the last sync
        if last_edited_time != page_edited_time:
            document.indexing_status = "parsing"
            document.processing_started_at = naive_utc_now()
            db.session.commit()

            # delete all existing document segments and their index entries
            try:
                dataset = db.session.query(Dataset).where(Dataset.id == dataset_id).first()
                if not dataset:
                    raise Exception("Dataset not found")
                index_type = document.doc_form
                index_processor = IndexProcessorFactory(index_type).init_index_processor()

                segments = db.session.query(DocumentSegment).where(DocumentSegment.document_id == document_id).all()
                index_node_ids = [segment.index_node_id for segment in segments]

                # delete from vector index
                index_processor.clean(dataset, index_node_ids, with_keywords=True, delete_child_chunks=True)

                for segment in segments:
                    db.session.delete(segment)

                end_at = time.perf_counter()
                logger.info(
                    click.style(
                        "Cleaned document when document update data source or process rule: {} latency: {}".format(
                            document_id, end_at - start_at
                        ),
                        fg="green",
                    )
                )
            except Exception:
                logger.exception("Cleaned document when document update data source or process rule failed")

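            # re-run the indexing pipeline for the refreshed document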
            try:
                indexing_runner = IndexingRunner()
                indexing_runner.run([document])
                end_at = time.perf_counter()
                logger.info(click.style(f"update document: {document.id} latency: {end_at - start_at}", fg="green"))
            except DocumentIsPausedError as ex:
                logger.info(click.style(str(ex), fg="yellow"))
            except Exception:
                logger.exception("document_indexing_sync_task failed, document_id: %s", document_id)
            finally:
                db.session.close()
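
# Example of enqueuing this task (a minimal sketch, assuming a Celery worker is
# consuming the "dataset" queue and that this module lives under `tasks/` as in the
# Dify repo layout; the IDs below are illustrative placeholders):
#
#     from tasks.document_indexing_sync_task import document_indexing_sync_task
#
#     document_indexing_sync_task.delay(
#         "9f8b6c2e-0000-0000-0000-000000000000",  # dataset_id
#         "3a1d4e5f-0000-0000-0000-000000000000",  # document_id
#     )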