pip install llm-client
import os
import asyncio
from aiohttp import ClientSession
from llm_client import OpenAIClient, LLMAPIClientConfig

async def main():
    async with ClientSession() as session:
        # Build an OpenAI-backed client from the shared aiohttp session
        llm_client = OpenAIClient(
            LLMAPIClientConfig(os.environ["API_KEY"], session, default_model="text-davinci-003"))
        text = "This is indeed a test"
        print("generated text:", await llm_client.text_completion(text))

asyncio.run(main())
import openai

# Legacy (pre-1.0) openai SDK interface
openai.api_key = "sk-xxxx"

response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "You are a chatbot"},
        {"role": "user", "content": "Why should a DevOps engineer learn Kubernetes?"},
    ]
)

# Concatenate the text of every returned choice
result = ''
for choice in response.choices:
    result += choice.message.content
print(result)
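Note that openai.ChatCompletion.create only exists in openai versions before 1.0. With the 1.x SDK the same request goes through a client object; a rough sketch of the equivalent call (not part of the original snippet):

from openai import OpenAI

client = OpenAI(api_key="sk-xxxx")  # or rely on the OPENAI_API_KEY environment variable
response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "You are a chatbot"},
        {"role": "user", "content": "Why should a DevOps engineer learn Kubernetes?"},
    ],
)
print(response.choices[0].message.content)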