clean_document_task.py 1.7 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152
import logging
import time

import click
from celery import shared_task

from core.index.index import IndexBuilder
from extensions.ext_database import db
from models.dataset import Dataset, DocumentSegment
  8. @shared_task(queue='dataset')
  9. def clean_document_task(document_id: str, dataset_id: str):
  10. """
  11. Clean document when document deleted.
  12. :param document_id: document id
  13. :param dataset_id: dataset id
  14. Usage: clean_document_task.delay(document_id, dataset_id)
  15. """
  16. logging.info(click.style('Start clean document when document deleted: {}'.format(document_id), fg='green'))
  17. start_at = time.perf_counter()
  18. try:
  19. dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
  20. if not dataset:
  21. raise Exception('Document has no dataset')
  22. vector_index = IndexBuilder.get_index(dataset, 'high_quality')
  23. kw_index = IndexBuilder.get_index(dataset, 'economy')
  24. segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
  25. index_node_ids = [segment.index_node_id for segment in segments]
  26. # delete from vector index
  27. if vector_index:
  28. vector_index.delete_by_document_id(document_id)
  29. # delete from keyword index
  30. if index_node_ids:
  31. kw_index.delete_by_ids(index_node_ids)
  32. for segment in segments:
  33. db.session.delete(segment)
  34. db.session.commit()
  35. end_at = time.perf_counter()
  36. logging.info(
  37. click.style('Cleaned document when document deleted: {} latency: {}'.format(document_id, end_at - start_at), fg='green'))
  38. except Exception:
  39. logging.exception("Cleaned document when document deleted failed")