# enable_segment_to_index_task.py
  1. import datetime
  2. import logging
  3. import time
  4. import click
  5. from celery import shared_task
  6. from langchain.schema import Document
  7. from werkzeug.exceptions import NotFound
  8. from core.index.index import IndexBuilder
  9. from extensions.ext_database import db
  10. from extensions.ext_redis import redis_client
  11. from models.dataset import DocumentSegment
  12. @shared_task(queue='dataset')
  13. def enable_segment_to_index_task(segment_id: str):
  14. """
  15. Async enable segment to index
  16. :param segment_id:
  17. Usage: enable_segment_to_index_task.delay(segment_id)
  18. """
  19. logging.info(click.style('Start enable segment to index: {}'.format(segment_id), fg='green'))
  20. start_at = time.perf_counter()
  21. segment = db.session.query(DocumentSegment).filter(DocumentSegment.id == segment_id).first()
  22. if not segment:
  23. raise NotFound('Segment not found')
  24. if segment.status != 'completed':
  25. return
  26. indexing_cache_key = 'segment_{}_indexing'.format(segment.id)
  27. try:
  28. document = Document(
  29. page_content=segment.content,
  30. metadata={
  31. "doc_id": segment.index_node_id,
  32. "doc_hash": segment.index_node_hash,
  33. "document_id": segment.document_id,
  34. "dataset_id": segment.dataset_id,
  35. }
  36. )
  37. dataset = segment.dataset
  38. if not dataset:
  39. logging.info(click.style('Segment {} has no dataset, pass.'.format(segment.id), fg='cyan'))
  40. return
  41. dataset_document = segment.document
  42. if not dataset_document:
  43. logging.info(click.style('Segment {} has no document, pass.'.format(segment.id), fg='cyan'))
  44. return
  45. if not dataset_document.enabled or dataset_document.archived or dataset_document.indexing_status != 'completed':
  46. logging.info(click.style('Segment {} document status is invalid, pass.'.format(segment.id), fg='cyan'))
  47. return
  48. # save vector index
  49. index = IndexBuilder.get_index(dataset, 'high_quality')
  50. if index:
  51. index.add_texts([document], duplicate_check=True)
  52. # save keyword index
  53. index = IndexBuilder.get_index(dataset, 'economy')
  54. if index:
  55. index.add_texts([document])
  56. end_at = time.perf_counter()
  57. logging.info(click.style('Segment enabled to index: {} latency: {}'.format(segment.id, end_at - start_at), fg='green'))
  58. except Exception as e:
  59. logging.exception("enable segment to index failed")
  60. segment.enabled = False
  61. segment.disabled_at = datetime.datetime.utcnow()
  62. segment.status = 'error'
  63. segment.error = str(e)
  64. db.session.commit()
  65. finally:
  66. redis_client.delete(indexing_cache_key)