#! /usr/bin/python
import tornado.web
import tornado.ioloop
import tornado.httpserver
import tornado.options
import os
import datetime
import openai
import asyncio
import json
from tornado.web import RequestHandler
from tornado.options import define, options
from tornado.websocket import WebSocketHandler

import openai_async
import config

openai.api_key = config.agentServer['agentApiKey']


def clearContent(content):
    """Replace every configured filter keyword with 'SmartChat'."""
    for ck in config.agentFilterMoel['filterKeys']:
        content = content.replace(ck, 'SmartChat')
    return content


async def talkGPTWithStream(self, message):
    """Stream a chat completion back to the WebSocket client chunk by chunk."""
    print("{} says {}".format(self.request.remote_ip, message))
    try:
        _msg = json.loads(message)
        try:
            if 'model' in _msg:
                _gptModel = _msg['model']
                _real_msg = _msg['messages']
                if _gptModel == '4':
                    _gptModel = 'gpt-4-0314'
                elif _gptModel == '3':
                    _gptModel = 'gpt-3.5-turbo'
                else:
                    # unknown model flag: fall back to the default chat model
                    _gptModel = 'gpt-3.5-turbo'
            else:
                _gptModel = 'gpt-3.5-turbo'
                _real_msg = _msg
        except Exception:
            _gptModel = 'gpt-3.5-turbo'
            _real_msg = _msg

        async for chunk in await openai.ChatCompletion.acreate(
            model=_gptModel,
            messages=_real_msg,
            temperature=0.8,
            max_tokens=4000,
            stream=True,  # stream the response as incremental delta chunks
        ):
            delta = chunk["choices"][0].get("delta", {})
            content = delta.get("content")
            role_content = delta.get("role")

            if config.agentFilterMoel['isOpen']:
                # Keyword filtering is on: buffer text until a segment-ending
                # character arrives, then filter the whole segment and send it.
                if content is not None:
                    self.my_replay_content = self.my_replay_content + content
                    # send only when this chunk ends a segment
                    self.currentIsSend = content in config.agentFilterMoel['oneMessageSplit']
                    if self.currentIsSend:
                        # filter keywords before replying
                        cContent = clearContent(self.my_replay_content)
                        self.my_replay_content = ''
                        await self.write_message(cContent)
                elif role_content is None:
                    # end of stream: flush any still-buffered text, then signal completion
                    if self.my_replay_content:
                        await self.write_message(clearContent(self.my_replay_content))
                        self.my_replay_content = ''
                    await self.write_message("___talk_end___")
            else:
                if content is not None:
                    await self.write_message(content)
                elif role_content is None:
                    # neither content nor a role delta: the stream is finished
                    await self.write_message("___talk_end___")
    except Exception as es:
        print('onMessage error', es)
        self.close()


async def talkGPT(self, message):
    """Send one non-streaming chat completion back to the WebSocket client."""
    print("{} says {}".format(self.request.remote_ip, message))
    try:
        _msg = json.loads(message)
        try:
            if 'model' in _msg:
                _gptModel = _msg['model']
                _real_msg = _msg['messages']
                if _gptModel == '4':
                    _gptModel = 'gpt-4-0314'
                else:
                    _gptModel = 'gpt-3.5-turbo-0125'
            else:
                _gptModel = 'gpt-3.5-turbo-0125'
                _real_msg = _msg
        except Exception:
            _gptModel = 'gpt-3.5-turbo-0125'
            _real_msg = _msg

        # gpt-4-0314 / gpt-3.5-turbo-0125 are chat models, so they must go
        # through the chat endpoint rather than the legacy Completion endpoint.
        response = await openai.ChatCompletion.acreate(
            model=_gptModel,
            messages=_real_msg,
            temperature=0,
            max_tokens=4000,
            top_p=1,
            frequency_penalty=0.5,
            presence_penalty=0,
        )
        response_text = response['choices'][0]['message']['content']
        await self.write_message(response_text + "___talk_end___")
    except Exception as es:
        print('onMessage error', es)
        self.close()
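

# ---------------------------------------------------------------------------
# Minimal usage sketch (an assumption, not part of the original handlers).
# talkGPTWithStream / talkGPT are written as unbound coroutines that expect a
# tornado WebSocketHandler as `self`: they read self.request.remote_ip, call
# self.write_message() / self.close(), and the streaming path buffers text in
# self.my_replay_content / self.currentIsSend.  The handler class name, the
# /chat route, and the port option below are hypothetical and only show how
# the coroutines could be wired into a server.
# ---------------------------------------------------------------------------
class ChatWebSocket(WebSocketHandler):
    def open(self):
        # per-connection buffer used by talkGPTWithStream's keyword filter
        self.my_replay_content = ''
        self.currentIsSend = False

    def check_origin(self, origin):
        # allow cross-origin WebSocket connections (tighten for production)
        return True

    async def on_message(self, message):
        # stream tokens back to the client as they arrive
        await talkGPTWithStream(self, message)


def main():
    define("port", default=8888, type=int, help="WebSocket listen port")
    tornado.options.parse_command_line()
    app = tornado.web.Application([(r"/chat", ChatWebSocket)])
    app.listen(options.port)
    tornado.ioloop.IOLoop.current().start()


if __name__ == "__main__":
    main()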