clean_document_task.py

import logging
import time

import click
from celery import shared_task

from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from models.dataset import Dataset, DocumentSegment


@shared_task(queue='dataset')
def clean_document_task(document_id: str, dataset_id: str, doc_form: str):
    """
    Clean up a document's segments and index data when the document is deleted.
    :param document_id: document id
    :param dataset_id: dataset id
    :param doc_form: doc_form

    Usage: clean_document_task.delay(document_id, dataset_id, doc_form)
    """
    logging.info(click.style('Start clean document when document deleted: {}'.format(document_id), fg='green'))
    start_at = time.perf_counter()

    try:
        dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()

        if not dataset:
            raise Exception('Document has no dataset')

        segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
        # check whether the document still has segments to clean up
        if segments:
            index_node_ids = [segment.index_node_id for segment in segments]
            # remove the segments' index nodes from the dataset's index
            index_processor = IndexProcessorFactory(doc_form).init_index_processor()
            index_processor.clean(dataset, index_node_ids)

            # delete the segment rows themselves
            for segment in segments:
                db.session.delete(segment)

            db.session.commit()

        end_at = time.perf_counter()
        logging.info(
            click.style('Cleaned document when document deleted: {} latency: {}'.format(document_id, end_at - start_at),
                        fg='green'))
    except Exception:
        logging.exception("Failed to clean document when document deleted")
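
# Usage sketch for the caller side, following the docstring's `.delay(...)` example.
# It assumes a `document` ORM object exposing `id`, `dataset_id`, and `doc_form`
# (illustrative attribute names only): the caller removes the document row first,
# then enqueues this task so segments and index nodes are cleaned asynchronously
# on the 'dataset' Celery queue.
#
#     db.session.delete(document)
#     db.session.commit()
#     clean_document_task.delay(document.id, document.dataset_id, document.doc_form)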