import datetime
import logging
import time

import click
from celery import shared_task
from werkzeug.exceptions import NotFound

from core.indexing_runner import DocumentIsPausedException, IndexingRunner
from core.rag.index_processor.index_processor_factory import IndexProcessorFactory
from extensions.ext_database import db
from models.dataset import Dataset, Document, DocumentSegment


@shared_task(queue='dataset')
def document_indexing_update_task(dataset_id: str, document_id: str):
    """
    Async update document
    :param dataset_id:
    :param document_id:

    Usage: document_indexing_update_task.delay(dataset_id, document_id)
    """
    logging.info(click.style('Start update document: {}'.format(document_id), fg='green'))
    start_at = time.perf_counter()

    document = db.session.query(Document).filter(
        Document.id == document_id,
        Document.dataset_id == dataset_id
    ).first()

    if not document:
        raise NotFound('Document not found')

    # Mark the document as being re-processed before cleaning and re-indexing.
    document.indexing_status = 'parsing'
    document.processing_started_at = datetime.datetime.utcnow()
    db.session.commit()

    # Remove the document's existing segments and their index entries so the
    # updated data source / process rule can be indexed from scratch.
    try:
        dataset = db.session.query(Dataset).filter(Dataset.id == dataset_id).first()
        if not dataset:
            raise Exception('Dataset not found')

        index_type = document.doc_form
        index_processor = IndexProcessorFactory(index_type).init_index_processor()

        segments = db.session.query(DocumentSegment).filter(DocumentSegment.document_id == document_id).all()
        index_node_ids = [segment.index_node_id for segment in segments]

        # Delete the document's nodes from the index, then drop the segment rows.
        index_processor.clean(dataset, index_node_ids)

        for segment in segments:
            db.session.delete(segment)
        db.session.commit()

        end_at = time.perf_counter()
        logging.info(
            click.style(
                'Cleaned document when document update data source or process rule: {} latency: {}'.format(
                    document_id, end_at - start_at),
                fg='green'))
    except Exception:
        logging.exception("Cleaned document when document update data source or process rule failed")

    # Re-run the indexing pipeline for the updated document.
    try:
        indexing_runner = IndexingRunner()
        indexing_runner.run([document])
        end_at = time.perf_counter()
        logging.info(click.style('update document: {} latency: {}'.format(document.id, end_at - start_at), fg='green'))
    except DocumentIsPausedException as ex:
        logging.info(click.style(str(ex), fg='yellow'))
    except Exception:
        # Other indexing errors are intentionally swallowed here.
        pass