datasets.py 15 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394
  1. # -*- coding:utf-8 -*-
  2. from flask import request
  3. from flask_login import login_required, current_user
  4. from flask_restful import Resource, reqparse, fields, marshal, marshal_with
  5. from werkzeug.exceptions import NotFound, Forbidden
  6. import services
  7. from controllers.console import api
  8. from controllers.console.app.error import ProviderNotInitializeError
  9. from controllers.console.datasets.error import DatasetNameDuplicateError
  10. from controllers.console.setup import setup_required
  11. from controllers.console.wraps import account_initialization_required
  12. from core.indexing_runner import IndexingRunner
  13. from core.model_providers.error import LLMBadRequestError, ProviderTokenNotInitError
  14. from core.model_providers.model_factory import ModelFactory
  15. from core.model_providers.models.entity.model_params import ModelType
  16. from libs.helper import TimestampField
  17. from extensions.ext_database import db
  18. from models.dataset import DocumentSegment, Document
  19. from models.model import UploadFile
  20. from services.dataset_service import DatasetService, DocumentService
  21. from services.provider_service import ProviderService
  22. dataset_detail_fields = {
  23. 'id': fields.String,
  24. 'name': fields.String,
  25. 'description': fields.String,
  26. 'provider': fields.String,
  27. 'permission': fields.String,
  28. 'data_source_type': fields.String,
  29. 'indexing_technique': fields.String,
  30. 'app_count': fields.Integer,
  31. 'document_count': fields.Integer,
  32. 'word_count': fields.Integer,
  33. 'created_by': fields.String,
  34. 'created_at': TimestampField,
  35. 'updated_by': fields.String,
  36. 'updated_at': TimestampField,
  37. 'embedding_model': fields.String,
  38. 'embedding_model_provider': fields.String,
  39. 'embedding_available': fields.Boolean
  40. }
  41. dataset_query_detail_fields = {
  42. "id": fields.String,
  43. "content": fields.String,
  44. "source": fields.String,
  45. "source_app_id": fields.String,
  46. "created_by_role": fields.String,
  47. "created_by": fields.String,
  48. "created_at": TimestampField
  49. }
  50. def _validate_name(name):
  51. if not name or len(name) < 1 or len(name) > 40:
  52. raise ValueError('Name must be between 1 to 40 characters.')
  53. return name
  54. def _validate_description_length(description):
  55. if len(description) > 400:
  56. raise ValueError('Description cannot exceed 400 characters.')
  57. return description
  58. class DatasetListApi(Resource):
  59. @setup_required
  60. @login_required
  61. @account_initialization_required
  62. def get(self):
  63. page = request.args.get('page', default=1, type=int)
  64. limit = request.args.get('limit', default=20, type=int)
  65. ids = request.args.getlist('ids')
  66. provider = request.args.get('provider', default="vendor")
  67. if ids:
  68. datasets, total = DatasetService.get_datasets_by_ids(ids, current_user.current_tenant_id)
  69. else:
  70. datasets, total = DatasetService.get_datasets(page, limit, provider,
  71. current_user.current_tenant_id, current_user)
  72. # check embedding setting
  73. provider_service = ProviderService()
  74. valid_model_list = provider_service.get_valid_model_list(current_user.current_tenant_id, ModelType.EMBEDDINGS.value)
  75. # if len(valid_model_list) == 0:
  76. # raise ProviderNotInitializeError(
  77. # f"No Embedding Model available. Please configure a valid provider "
  78. # f"in the Settings -> Model Provider.")
  79. model_names = [item['model_name'] for item in valid_model_list]
  80. data = marshal(datasets, dataset_detail_fields)
  81. for item in data:
  82. if item['embedding_model'] in model_names:
  83. item['embedding_available'] = True
  84. else:
  85. item['embedding_available'] = False
  86. response = {
  87. 'data': data,
  88. 'has_more': len(datasets) == limit,
  89. 'limit': limit,
  90. 'total': total,
  91. 'page': page
  92. }
  93. return response, 200
  94. @setup_required
  95. @login_required
  96. @account_initialization_required
  97. def post(self):
  98. parser = reqparse.RequestParser()
  99. parser.add_argument('name', nullable=False, required=True,
  100. help='type is required. Name must be between 1 to 40 characters.',
  101. type=_validate_name)
  102. parser.add_argument('indexing_technique', type=str, location='json',
  103. choices=('high_quality', 'economy'),
  104. help='Invalid indexing technique.')
  105. args = parser.parse_args()
  106. # The role of the current user in the ta table must be admin or owner
  107. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  108. raise Forbidden()
  109. try:
  110. ModelFactory.get_embedding_model(
  111. tenant_id=current_user.current_tenant_id
  112. )
  113. except LLMBadRequestError:
  114. raise ProviderNotInitializeError(
  115. f"No Embedding Model available. Please configure a valid provider "
  116. f"in the Settings -> Model Provider.")
  117. try:
  118. dataset = DatasetService.create_empty_dataset(
  119. tenant_id=current_user.current_tenant_id,
  120. name=args['name'],
  121. indexing_technique=args['indexing_technique'],
  122. account=current_user
  123. )
  124. except services.errors.dataset.DatasetNameDuplicateError:
  125. raise DatasetNameDuplicateError()
  126. return marshal(dataset, dataset_detail_fields), 201
  127. class DatasetApi(Resource):
  128. @setup_required
  129. @login_required
  130. @account_initialization_required
  131. def get(self, dataset_id):
  132. dataset_id_str = str(dataset_id)
  133. dataset = DatasetService.get_dataset(dataset_id_str)
  134. if dataset is None:
  135. raise NotFound("Dataset not found.")
  136. try:
  137. DatasetService.check_dataset_permission(
  138. dataset, current_user)
  139. except services.errors.account.NoPermissionError as e:
  140. raise Forbidden(str(e))
  141. return marshal(dataset, dataset_detail_fields), 200
  142. @setup_required
  143. @login_required
  144. @account_initialization_required
  145. def patch(self, dataset_id):
  146. dataset_id_str = str(dataset_id)
  147. parser = reqparse.RequestParser()
  148. parser.add_argument('name', nullable=False,
  149. help='type is required. Name must be between 1 to 40 characters.',
  150. type=_validate_name)
  151. parser.add_argument('description',
  152. location='json', store_missing=False,
  153. type=_validate_description_length)
  154. parser.add_argument('indexing_technique', type=str, location='json',
  155. choices=('high_quality', 'economy'),
  156. help='Invalid indexing technique.')
  157. parser.add_argument('permission', type=str, location='json', choices=(
  158. 'only_me', 'all_team_members'), help='Invalid permission.')
  159. args = parser.parse_args()
  160. # The role of the current user in the ta table must be admin or owner
  161. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  162. raise Forbidden()
  163. dataset = DatasetService.update_dataset(
  164. dataset_id_str, args, current_user)
  165. if dataset is None:
  166. raise NotFound("Dataset not found.")
  167. return marshal(dataset, dataset_detail_fields), 200
  168. @setup_required
  169. @login_required
  170. @account_initialization_required
  171. def delete(self, dataset_id):
  172. dataset_id_str = str(dataset_id)
  173. # The role of the current user in the ta table must be admin or owner
  174. if current_user.current_tenant.current_role not in ['admin', 'owner']:
  175. raise Forbidden()
  176. if DatasetService.delete_dataset(dataset_id_str, current_user):
  177. return {'result': 'success'}, 204
  178. else:
  179. raise NotFound("Dataset not found.")
  180. class DatasetQueryApi(Resource):
  181. @setup_required
  182. @login_required
  183. @account_initialization_required
  184. def get(self, dataset_id):
  185. dataset_id_str = str(dataset_id)
  186. dataset = DatasetService.get_dataset(dataset_id_str)
  187. if dataset is None:
  188. raise NotFound("Dataset not found.")
  189. try:
  190. DatasetService.check_dataset_permission(dataset, current_user)
  191. except services.errors.account.NoPermissionError as e:
  192. raise Forbidden(str(e))
  193. page = request.args.get('page', default=1, type=int)
  194. limit = request.args.get('limit', default=20, type=int)
  195. dataset_queries, total = DatasetService.get_dataset_queries(
  196. dataset_id=dataset.id,
  197. page=page,
  198. per_page=limit
  199. )
  200. response = {
  201. 'data': marshal(dataset_queries, dataset_query_detail_fields),
  202. 'has_more': len(dataset_queries) == limit,
  203. 'limit': limit,
  204. 'total': total,
  205. 'page': page
  206. }
  207. return response, 200
  208. class DatasetIndexingEstimateApi(Resource):
  209. @setup_required
  210. @login_required
  211. @account_initialization_required
  212. def post(self):
  213. parser = reqparse.RequestParser()
  214. parser.add_argument('info_list', type=dict, required=True, nullable=True, location='json')
  215. parser.add_argument('process_rule', type=dict, required=True, nullable=True, location='json')
  216. parser.add_argument('doc_form', type=str, default='text_model', required=False, nullable=False, location='json')
  217. parser.add_argument('dataset_id', type=str, required=False, nullable=False, location='json')
  218. parser.add_argument('doc_language', type=str, default='English', required=False, nullable=False, location='json')
  219. args = parser.parse_args()
  220. # validate args
  221. DocumentService.estimate_args_validate(args)
  222. if args['info_list']['data_source_type'] == 'upload_file':
  223. file_ids = args['info_list']['file_info_list']['file_ids']
  224. file_details = db.session.query(UploadFile).filter(
  225. UploadFile.tenant_id == current_user.current_tenant_id,
  226. UploadFile.id.in_(file_ids)
  227. ).all()
  228. if file_details is None:
  229. raise NotFound("File not found.")
  230. indexing_runner = IndexingRunner()
  231. try:
  232. response = indexing_runner.file_indexing_estimate(current_user.current_tenant_id, file_details,
  233. args['process_rule'], args['doc_form'],
  234. args['doc_language'], args['dataset_id'])
  235. except LLMBadRequestError:
  236. raise ProviderNotInitializeError(
  237. f"No Embedding Model available. Please configure a valid provider "
  238. f"in the Settings -> Model Provider.")
  239. except ProviderTokenNotInitError as ex:
  240. raise ProviderNotInitializeError(ex.description)
  241. elif args['info_list']['data_source_type'] == 'notion_import':
  242. indexing_runner = IndexingRunner()
  243. try:
  244. response = indexing_runner.notion_indexing_estimate(current_user.current_tenant_id,
  245. args['info_list']['notion_info_list'],
  246. args['process_rule'], args['doc_form'],
  247. args['doc_language'], args['dataset_id'])
  248. except LLMBadRequestError:
  249. raise ProviderNotInitializeError(
  250. f"No Embedding Model available. Please configure a valid provider "
  251. f"in the Settings -> Model Provider.")
  252. except ProviderTokenNotInitError as ex:
  253. raise ProviderNotInitializeError(ex.description)
  254. else:
  255. raise ValueError('Data source type not support')
  256. return response, 200
  257. class DatasetRelatedAppListApi(Resource):
  258. app_detail_kernel_fields = {
  259. 'id': fields.String,
  260. 'name': fields.String,
  261. 'mode': fields.String,
  262. 'icon': fields.String,
  263. 'icon_background': fields.String,
  264. }
  265. related_app_list = {
  266. 'data': fields.List(fields.Nested(app_detail_kernel_fields)),
  267. 'total': fields.Integer,
  268. }
  269. @setup_required
  270. @login_required
  271. @account_initialization_required
  272. @marshal_with(related_app_list)
  273. def get(self, dataset_id):
  274. dataset_id_str = str(dataset_id)
  275. dataset = DatasetService.get_dataset(dataset_id_str)
  276. if dataset is None:
  277. raise NotFound("Dataset not found.")
  278. try:
  279. DatasetService.check_dataset_permission(dataset, current_user)
  280. except services.errors.account.NoPermissionError as e:
  281. raise Forbidden(str(e))
  282. app_dataset_joins = DatasetService.get_related_apps(dataset.id)
  283. related_apps = []
  284. for app_dataset_join in app_dataset_joins:
  285. app_model = app_dataset_join.app
  286. if app_model:
  287. related_apps.append(app_model)
  288. return {
  289. 'data': related_apps,
  290. 'total': len(related_apps)
  291. }, 200
  292. class DatasetIndexingStatusApi(Resource):
  293. document_status_fields = {
  294. 'id': fields.String,
  295. 'indexing_status': fields.String,
  296. 'processing_started_at': TimestampField,
  297. 'parsing_completed_at': TimestampField,
  298. 'cleaning_completed_at': TimestampField,
  299. 'splitting_completed_at': TimestampField,
  300. 'completed_at': TimestampField,
  301. 'paused_at': TimestampField,
  302. 'error': fields.String,
  303. 'stopped_at': TimestampField,
  304. 'completed_segments': fields.Integer,
  305. 'total_segments': fields.Integer,
  306. }
  307. document_status_fields_list = {
  308. 'data': fields.List(fields.Nested(document_status_fields))
  309. }
  310. @setup_required
  311. @login_required
  312. @account_initialization_required
  313. def get(self, dataset_id):
  314. dataset_id = str(dataset_id)
  315. documents = db.session.query(Document).filter(
  316. Document.dataset_id == dataset_id,
  317. Document.tenant_id == current_user.current_tenant_id
  318. ).all()
  319. documents_status = []
  320. for document in documents:
  321. completed_segments = DocumentSegment.query.filter(DocumentSegment.completed_at.isnot(None),
  322. DocumentSegment.document_id == str(document.id),
  323. DocumentSegment.status != 're_segment').count()
  324. total_segments = DocumentSegment.query.filter(DocumentSegment.document_id == str(document.id),
  325. DocumentSegment.status != 're_segment').count()
  326. document.completed_segments = completed_segments
  327. document.total_segments = total_segments
  328. documents_status.append(marshal(document, self.document_status_fields))
  329. data = {
  330. 'data': documents_status
  331. }
  332. return data
# Route registrations for the dataset console API.
api.add_resource(DatasetListApi, '/datasets')
api.add_resource(DatasetApi, '/datasets/<uuid:dataset_id>')
api.add_resource(DatasetQueryApi, '/datasets/<uuid:dataset_id>/queries')
api.add_resource(DatasetIndexingEstimateApi, '/datasets/indexing-estimate')
api.add_resource(DatasetRelatedAppListApi, '/datasets/<uuid:dataset_id>/related-apps')
api.add_resource(DatasetIndexingStatusApi, '/datasets/<uuid:dataset_id>/indexing-status')