# clean_notion_document_task.py
  1. import logging
  2. import time
  3. import click
  4. from celery import shared_task
  5. from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
  6. from extensions.ext_database import db
  7. from models.dataset import Dataset, Document, DocumentSegment
  8. @shared_task(queue='dataset')
  9. def clean_notion_document_task(document_ids: list[str], dataset_id: str):
  10. """
  11. Clean document when document deleted.
  12. :param document_ids: document ids
  13. :param dataset_id: dataset id
  14. Usage: clean_notion_document_task.delay(document_ids, dataset_id)
  15. """
  16. logging.info(click.style('Start clean document when import form notion document deleted: {}'.format(dataset_id), fg='green'))
  17. start_at = time.perf_counter()
  18. try:
  19. dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
  20. if not dataset:
  21. raise Exception('Document has no dataset')
  22. index_type = dataset.doc_form
  23. index_processor = IndexProcessorFactory(index_type).init_index_processor()
  24. for document_id in document_ids:
  25. document = db.session.query(Document).filter(
  26. Document.id == document_id
  27. ).first()
  28. db.session.delete(document)
  29. segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
  30. index_node_ids = [segment.index_node_id for segment in segments]
  31. index_processor.clean(dataset, index_node_ids)
  32. for segment in segments:
  33. db.session.delete(segment)
  34. db.session.commit()
  35. end_at = time.perf_counter()
  36. logging.info(
  37. click.style('Clean document when import form notion document deleted end :: {} latency: {}'.format(
  38. dataset_id, end_at - start_at),
  39. fg='green'))
  40. except Exception:
  41. logging.exception("Cleaned document when import form notion document deleted failed")