hit_testing_service.py

import logging
import time
from typing import List

import numpy as np
from llama_index.data_structs.node_v2 import NodeWithScore
from llama_index.indices.query.schema import QueryBundle
from llama_index.indices.vector_store import GPTVectorStoreIndexQuery
from sklearn.manifold import TSNE

from core.docstore.empty_docstore import EmptyDocumentStore
from core.index.vector_index import VectorIndex
from extensions.ext_database import db
from models.account import Account
from models.dataset import Dataset, DocumentSegment, DatasetQuery
from services.errors.index import IndexNotInitializedError


class HitTestingService:
    @classmethod
    def retrieve(cls, dataset: Dataset, query: str, account: Account, limit: int = 10) -> dict:
        # Run the query against the dataset's vector index and return the top-k
        # matching segments, plus a 2D t-SNE projection of the query and result
        # embeddings for visualization.
        index = VectorIndex(dataset=dataset).query_index

        if not index:
            raise IndexNotInitializedError()

        index_query = GPTVectorStoreIndexQuery(
            index_struct=index.index_struct,
            service_context=index.service_context,
            vector_store=index.query_context.get('vector_store'),
            docstore=EmptyDocumentStore(),
            response_synthesizer=None,
            similarity_top_k=limit
        )

        query_bundle = QueryBundle(
            query_str=query,
            custom_embedding_strs=[query],
        )

        # Embed the query once up front so the same embedding is used for both
        # retrieval and the t-SNE projection below.
        query_bundle.embedding = index.service_context.embed_model.get_agg_embedding_from_queries(
            query_bundle.embedding_strs
        )

        start = time.perf_counter()
        nodes = index_query.retrieve(query_bundle=query_bundle)
        end = time.perf_counter()
        logging.debug(f"Hit testing retrieve in {end - start:0.4f} seconds")

        # Record the hit-testing query against the dataset for query history.
        dataset_query = DatasetQuery(
            dataset_id=dataset.id,
            content=query,
            source='hit_testing',
            created_by_role='account',
            created_by=account.id
        )

        db.session.add(dataset_query)
        db.session.commit()

        return cls.compact_retrieve_response(dataset, query_bundle, nodes)
    @classmethod
    def compact_retrieve_response(cls, dataset: Dataset, query_bundle: QueryBundle, nodes: List[NodeWithScore]):
        # Project the query embedding plus every retrieved node embedding into
        # 2D with t-SNE; the first projected point belongs to the query itself.
        embeddings = [
            query_bundle.embedding
        ]

        for node in nodes:
            embeddings.append(node.node.embedding)

        tsne_position_data = cls.get_tsne_positions_from_embeddings(embeddings)

        query_position = tsne_position_data.pop(0)

        i = 0
        records = []
        for node in nodes:
            index_node_id = node.node.doc_id

            # Only include segments that still exist, are enabled, and are fully indexed.
            segment = db.session.query(DocumentSegment).filter(
                DocumentSegment.dataset_id == dataset.id,
                DocumentSegment.enabled == True,
                DocumentSegment.status == 'completed',
                DocumentSegment.index_node_id == index_node_id
            ).first()

            if not segment:
                i += 1
                continue

            record = {
                "segment": segment,
                "score": node.score,
                "tsne_position": tsne_position_data[i]
            }

            records.append(record)

            i += 1

        return {
            "query": {
                "content": query_bundle.query_str,
                "tsne_position": query_position,
            },
            "records": records
        }
    @classmethod
    def get_tsne_positions_from_embeddings(cls, embeddings: list):
        embedding_length = len(embeddings)
        if embedding_length <= 1:
            return [{'x': 0, 'y': 0}]

        concatenate_data = np.array(embeddings).reshape(embedding_length, -1)
        # concatenate_data = np.concatenate(embeddings)

        # Perplexity must be strictly less than the number of samples,
        # otherwise scikit-learn's TSNE raises a ValueError.
        perplexity = embedding_length / 2 + 1
        if perplexity >= embedding_length:
            perplexity = max(embedding_length - 1, 1)

        tsne = TSNE(n_components=2, perplexity=perplexity, early_exaggeration=12.0)
        data_tsne = tsne.fit_transform(concatenate_data)

        tsne_position_data = []
        for i in range(len(data_tsne)):
            tsne_position_data.append({'x': float(data_tsne[i][0]), 'y': float(data_tsne[i][1])})

        return tsne_position_data
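

# Illustrative usage sketch (not part of the service itself): roughly how a
# caller such as an API controller might invoke hit testing. It assumes an
# active Flask application context and already-loaded `dataset` and
# `current_account` ORM objects; those variable names are hypothetical.
#
#   result = HitTestingService.retrieve(
#       dataset=dataset,
#       query="How do I reset my password?",
#       account=current_account,
#       limit=10,
#   )
#   # result["query"]["tsne_position"] -> {'x': ..., 'y': ...}
#   # result["records"] -> [{"segment": ..., "score": ..., "tsne_position": {...}}, ...]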