function-call

์˜คํ•ด๊ฐ€ ๋งŽ์Œ..

openai์—์„œ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ›๋Š”๊ฒƒ์ด ์•„๋‹˜

๊ธฐ์กด ์งˆ๋ฌธ : "{country}์˜ ์ˆ˜๋„๊ฐ€ ์–ด๋””์•ผ?" ๋ผ๊ณ  ๋ฌผ์œผ๋ฉด ์„œ์šธ์ด๋ผ๋Š” ๋‹ต๋ณ€์ด ์˜ค๋Š”๋ฐ

function calling์„ ํ•˜๋ฉด "๋‚˜๋Š” ํ•œ๊ตญ์˜ ์ˆ˜๋„๊ฐ€ ์–ด๋””์ธ์ง€ ์•Œ๊ณ ์‹ถ์–ด?" ๋ผ๊ณ  ๋ฌผ์œผ๋ฉด

get_city(country) ๋ผ๋Š” ํ•จ์ˆ˜๊ฐ€ ์ž‡๋‹ค๊ณ ํ•˜๋ฉด "ํ•จ์ˆ˜ get_city๋ฅผ ์‹คํ–‰ํ•˜๊ณ  argument ๋กœ ํ•œ๊ตญ์„ ๋„ฃ์–ด๋ผ." ๋ผ๊ณ  ์•Œ๋ ค์ฃผ๋Š”๊ฒƒ. ์‹ค์ œ๋กœ ํ•จ์ˆ˜๋ฅผ ์‹คํ–‰ํ•˜๋Š”๊ฒƒ๋„ ์•„๋‹˜.

๊ทธ๋Ÿฌ๋‹ˆ ์‹ค์ œ๋กœ openai์—์„œ ๊ฒฐ๊ณผ๋ฅผ ๋ฐ›๋Š”๊ฒƒ์ด ์•„๋‹˜

๊ฒฐ๊ณผ๋Š” get_city์—์„œ ์ฒ˜๋ฆฌํ•ด์•ผํ•จ.

# Load the OpenAI API key from .env BEFORE creating the model instance.
from dotenv import load_dotenv

load_dotenv()

from langchain.prompts import PromptTemplate
from langchain_openai import ChatOpenAI

# Baseline (no function calling): just ask the model for the capital.
llm = ChatOpenAI(temperature=0.1)

template = "{country}์˜ ์ˆ˜๋„๋Š” ๋ญ์•ผ?"


# Build the prompt template and pipe it into the model (LCEL syntax).
prompt = PromptTemplate.from_template(template=template)
prompt  # notebook cell: display the template
chain = prompt | llm

chain.invoke({"country": "ํ•œ๊ตญ"})

add function

def get_city(country):
    """Print *country* and return its capital city.

    The LLM only tells us WHICH function to call and with WHAT
    argument; producing the actual answer is this function's job
    (as the notes say: "๊ฒฐ๊ณผ๋Š” get_city์—์„œ ์ฒ˜๋ฆฌํ•ด์•ผํ•จ").

    Args:
        country: Country name (e.g. "ํ•œ๊ตญ").

    Returns:
        The capital city as a string, or None for countries not in
        the demo lookup table.
    """
    # Minimal demo lookup table — extend as needed.
    capitals = {
        "ํ•œ๊ตญ": "์„œ์šธ",
        "์ผ๋ณธ": "๋„์ฟ„",
        "๋ฏธ๊ตญ": "์›Œ์‹ฑํ„ด D.C.",
    }
    print(country)  # keep the original echo behavior
    return capitals.get(country)

add schema

# Function definition passed to the OpenAI function-calling API.
# FIX: "required" is part of the JSON Schema and must live INSIDE the
# "parameters" object — the original placed it at the top level of the
# function definition, where the API silently ignores it.
schema = {
    "name": "get_city",
    "description": "๋‚˜๋ผ์˜ ์ˆ˜๋„๋ฅผ ๊ฐ€์ ธ์˜ต๋‹ˆ๋‹ค.",
    "parameters": {
        "type": "object",
        "properties": {
            "country": {"type": "string", "description": "๋‚˜๋ผ ์ด๋ฆ„"},
        },
        "required": ["country"],
    },
}
# Function-calling setup: bind the schema to the model and force it to
# pick get_city.  The model does NOT execute the function — it only
# replies with the function name plus JSON-encoded arguments.
llm = ChatOpenAI(temperature=0.1).bind(
    function_call={"name": "get_city"},  # force this function to be chosen
    functions=[schema],
)

template = "{country}์˜ ์ˆ˜๋„๋Š” ๋ญ์•ผ?"


# Same prompt/chain construction as the baseline.
prompt = PromptTemplate.from_template(template=template)
prompt  # notebook cell: display the template
chain = prompt | llm
chain.invoke({"country": "ํ•œ๊ตญ"})
# expected output: AIMessage(content='', additional_kwargs={'function_call': {'arguments': '{"country":"ํ•œ๊ตญ"}', 'name': 'get_city'}})
# Run the chain and unpack the model's function-call request.
response = chain.invoke({"country": "ํ•œ๊ตญ"})
function_call = response.additional_kwargs["function_call"]
function_call  # notebook cell: inspect the whole request
function_call["arguments"]
function_call["name"]

import json

# The arguments arrive as a JSON string — decode before use.
decoded = json.loads(function_call["arguments"])
country = decoded["country"]
country

# Dispatch: only run the function the model actually asked for.
if function_call["name"] == "get_city":
    get_city(country)

Last updated