batch_import_annotations_task.py

import logging
import time

import click
from celery import shared_task
from werkzeug.exceptions import NotFound

from core.rag.datasource.vdb.vector_factory import Vector
from core.rag.models.document import Document
from extensions.ext_database import db
from extensions.ext_redis import redis_client
from models.dataset import Dataset
from models.model import App, AppAnnotationSetting, MessageAnnotation
from services.dataset_service import DatasetCollectionBindingService


@shared_task(queue="dataset")
def batch_import_annotations_task(job_id: str, content_list: list[dict], app_id: str, tenant_id: str, user_id: str):
    """
    Batch import annotations and add them to the app's annotation index.

    :param job_id: job id used to track import status in Redis
    :param content_list: list of dicts, each with "question" and "answer" keys
    :param app_id: app id
    :param tenant_id: tenant id
    :param user_id: id of the account performing the import
    """
    logging.info(click.style("Start batch import annotation: {}".format(job_id), fg="green"))
    start_at = time.perf_counter()
    indexing_cache_key = "app_annotation_batch_import_{}".format(str(job_id))

    # get app info
    app = db.session.query(App).filter(App.id == app_id, App.tenant_id == tenant_id, App.status == "normal").first()

    if app:
        try:
            documents = []
            # persist each question/answer pair as a MessageAnnotation and
            # build a vector-store Document keyed by the annotation id
            for content in content_list:
                annotation = MessageAnnotation(
                    app_id=app.id, content=content["answer"], question=content["question"], account_id=user_id
                )
                db.session.add(annotation)
                db.session.flush()

                document = Document(
                    page_content=content["question"],
                    metadata={"annotation_id": annotation.id, "app_id": app_id, "doc_id": annotation.id},
                )
                documents.append(document)

            # if annotation reply is enabled, batch add the annotations' index
            app_annotation_setting = (
                db.session.query(AppAnnotationSetting).filter(AppAnnotationSetting.app_id == app_id).first()
            )

            if app_annotation_setting:
                dataset_collection_binding = (
                    DatasetCollectionBindingService.get_dataset_collection_binding_by_id_and_type(
                        app_annotation_setting.collection_binding_id, "annotation"
                    )
                )
                if not dataset_collection_binding:
                    raise NotFound("App annotation setting not found")

                dataset = Dataset(
                    id=app_id,
                    tenant_id=tenant_id,
                    indexing_technique="high_quality",
                    embedding_model_provider=dataset_collection_binding.provider_name,
                    embedding_model=dataset_collection_binding.model_name,
                    collection_binding_id=dataset_collection_binding.id,
                )

                # embed the questions and write them to the annotation collection
                vector = Vector(dataset, attributes=["doc_id", "annotation_id", "app_id"])
                vector.create(documents, duplicate_check=True)

            db.session.commit()
            redis_client.setex(indexing_cache_key, 600, "completed")

            end_at = time.perf_counter()
            logging.info(
                click.style(
                    "Build index successful for batch import annotation: {} latency: {}".format(
                        job_id, end_at - start_at
                    ),
                    fg="green",
                )
            )
        except Exception as e:
            db.session.rollback()
            redis_client.setex(indexing_cache_key, 600, "error")
            indexing_error_msg_key = "app_annotation_batch_import_error_msg_{}".format(str(job_id))
            redis_client.setex(indexing_error_msg_key, 600, str(e))
            logging.exception("Build index for batch import annotations failed")
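

# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal sketch of how a caller might enqueue this Celery task and poll the
# Redis status key it writes. The helper names below (start_batch_import,
# check_batch_import) and the task's import path are assumptions; only
# batch_import_annotations_task and the "app_annotation_batch_import_{job_id}"
# key come from the task code above.
import uuid

from extensions.ext_redis import redis_client  # same Redis client the task writes to
from tasks.annotation.batch_import_annotations_task import batch_import_annotations_task  # assumed module path


def start_batch_import(app_id: str, tenant_id: str, user_id: str, rows: list[dict]) -> str:
    """Enqueue the import job and return its job_id for later status polling."""
    job_id = str(uuid.uuid4())
    # each row is expected to carry "question" and "answer" keys, matching the task body
    batch_import_annotations_task.delay(job_id, rows, app_id, tenant_id, user_id)
    return job_id


def check_batch_import(job_id: str) -> str:
    """Return 'completed', 'error', or 'processing' based on the key set by the task."""
    status = redis_client.get("app_annotation_batch_import_{}".format(job_id))
    return status.decode() if status else "processing"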