document_service.py

import threading

# json and pika are only needed by the commented-out queue consumer below
# import json
# import pika

from django.conf import settings  # assumed source of the `settings` object used below
from backend.models import DocumentKbm
from backend.Service.KbmService import KbmService
from base import logger
from .. import *  # assumed to provide the success()/fail() response helpers
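
# DocumentKbm.run status codes, collected from the inline comments in this module:
#   1 - processing
#   3 - finished
#   4 - failed
#   5 - queued, waiting to be processed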

class DocumentProcessing:

    @staticmethod
    def process_document(document_id, start_page, end_page, max_tokens):
        try:
            document = DocumentKbm.objects.get(id=document_id)
            document.run = 1  # status: processing
            document.save()

            logger.info('KbmService.async_analysis starting')
            KbmService.async_analysis(document_id, start_page, end_page, max_tokens)
            logger.info('KbmService.async_analysis finished')

            document.run = 3  # status: finished
            document.save()
        except Exception as e:
            logger.error(f"Error while processing document {document_id}: {str(e)}")
            document = DocumentKbm.objects.get(id=document_id)
            document.run = 4  # status: failed
            document.save()
            raise
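
    # Hypothetical direct invocation (values are illustrative, mirroring the
    # defaults accepted by analysis() below):
    #   DocumentProcessing.process_document(document_id, 1, -1, 2048)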
    # New RabbitMQ-queue based entry point
    @staticmethod
    def analysis(request):
        document_id = request.POST.get("document_id")
        start_page = int(request.POST.get('start_page', 1))
        end_page = int(request.POST.get('end_page', -1))
        max_tokens = int(request.POST.get('max_tokens', 2048))
        if max_tokens == 0:
            max_tokens = 2048

        logger.info(f"Start processing document ID: {document_id}")

        document = None
        try:
            document = DocumentKbm.objects.get(id=document_id)
            if int(document.run) in [1, 5]:  # 1: processing, 5: queued
                logger.info(f"Document {document_id} is already queued or in progress")
                return success("The document is already being processed or queued")

            # Build the queue message
            message = {
                'document_id': document_id,
                'start_page': start_page,
                'end_page': end_page,
                'max_tokens': max_tokens
            }

            # Publish the message to the queue
            if KbmService.send_to_rabbitmq(settings.RABBITMQ_QUEUE_NAME, message):
                document.run = 5  # status: queued, waiting to be processed
                document.save()
                logger.info(f"Document {document_id} marked as queued")
                return success("Document added to the processing queue")
            else:
                document.run = 4  # status: failed
                document.save()
                return fail("Failed to add the document to the processing queue")
        except DocumentKbm.DoesNotExist:
            # The lookup failed, so there is no document row to update
            logger.error(f"Document {document_id} does not exist")
            return fail("Document does not exist")
        except Exception as e:
            logger.error(f"Error while processing document {document_id}: {str(e)}")
            if document is not None:
                document.run = 4  # status: failed
                document.save()
            return fail("Error while processing the document")
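
    # A minimal sketch of how analysis() might be exposed as a Django view;
    # the URL path and module layout are assumptions, not part of this file:
    #   # urls.py
    #   from django.urls import path
    #   from backend.Service.document_service import DocumentProcessing
    #
    #   urlpatterns = [
    #       path('kbm/analysis/', DocumentProcessing.analysis),
    #   ]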
    # Cap concurrent queue-callback executions at 4 (matches prefetch_count below)
    semaphore = threading.Semaphore(4)
    # @staticmethod
    # def process_queue():
    #     logger.info("Start watching the RabbitMQ queue")
    #     connection = pika.BlockingConnection(pika.ConnectionParameters(
    #         host=settings.RABBITMQ_HOST,
    #         port=settings.RABBITMQ_PORT,
    #         credentials=pika.PlainCredentials(
    #             settings.RABBITMQ_USER,
    #             settings.RABBITMQ_PASSWORD
    #         )
    #     ))
    #     channel = connection.channel()
    #     channel.queue_declare(queue=settings.RABBITMQ_QUEUE_NAME, durable=True)
    #
    #     def callback(ch, method, properties, body):
    #         with DocumentProcessing.semaphore:
    #             try:
    #                 job = json.loads(body)
    #                 document_id = job['document_id']
    #                 start_page = job['start_page']
    #                 end_page = job['end_page']
    #                 max_tokens = job['max_tokens']
    #
    #                 logger.info(f"Start parsing document {document_id}")
    #                 KbmService.async_analysis(document_id, start_page, end_page, max_tokens)
    #
    #                 # Success: acknowledge the message
    #                 ch.basic_ack(delivery_tag=method.delivery_tag)
    #             except Exception as e:
    #                 logger.error(f"Error while handling a queue message: {str(e)}")
    #                 # Failure: reject the message and requeue it
    #                 ch.basic_nack(delivery_tag=method.delivery_tag, requeue=True)
    #
    #     # Prefetch count of 4 matches the maximum concurrency above
    #     channel.basic_qos(prefetch_count=4)
    #     channel.basic_consume(queue=settings.RABBITMQ_QUEUE_NAME, on_message_callback=callback)
    #
    #     logger.info('Waiting for queue messages. Press CTRL+C to exit')
    #     channel.start_consuming()
    # Shared state for a stoppable queue consumer
    connection = None
    channel = None
    should_stop = False
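
    # A minimal sketch (not part of the original file) of how the
    # connection/channel/should_stop attributes above could drive a stoppable
    # consumer. The method names and the use of pika's inactivity_timeout-based
    # consume loop are assumptions:
    #
    # @classmethod
    # def start_consumer(cls):
    #     cls.connection = pika.BlockingConnection(pika.ConnectionParameters(
    #         host=settings.RABBITMQ_HOST,
    #         port=settings.RABBITMQ_PORT,
    #         credentials=pika.PlainCredentials(
    #             settings.RABBITMQ_USER, settings.RABBITMQ_PASSWORD)))
    #     cls.channel = cls.connection.channel()
    #     cls.channel.queue_declare(queue=settings.RABBITMQ_QUEUE_NAME, durable=True)
    #     cls.channel.basic_qos(prefetch_count=4)
    #     # inactivity_timeout lets the loop re-check should_stop between messages
    #     for method, properties, body in cls.channel.consume(
    #             settings.RABBITMQ_QUEUE_NAME, inactivity_timeout=1):
    #         if cls.should_stop:
    #             break
    #         if method is None:
    #             continue  # timed out with no message; poll the stop flag again
    #         job = json.loads(body)
    #         try:
    #             cls.process_document(job['document_id'], job['start_page'],
    #                                  job['end_page'], job['max_tokens'])
    #             cls.channel.basic_ack(delivery_tag=method.delivery_tag)
    #         except Exception:
    #             cls.channel.basic_nack(delivery_tag=method.delivery_tag, requeue=True)
    #     cls.channel.cancel()
    #     cls.connection.close()
    #
    # @classmethod
    # def stop_consumer(cls):
    #     cls.should_stop = True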