1. Introduction
This is the programming installment of the series on having an LLM plus APIs do patent searches. The previous post only covered the concept, so this time I turn it into an actual program.
I stumbled right away, though, and struggled for about two days, so I am writing this up to lay that struggle to rest.
2. Copying the Sample Code, Only to Hit an Error
Function Calling is the feature that lets an LLM call the API or run the function you intend during generation. OpenAI (GPT) and LangChain (though that one seems to be more of a wrapper) are the well-known options, but as a die-hard Google Cloud fan I wanted to complete Gemini + Function Calling entirely within Vertex AI!
Just as I was thinking that, I found a good sample, so I borrowed it, edited it a little, and tried it out. On Google Colab it should run right away with just the standard authentication, which makes life easy. ↓
#@title Run this once at the start
from google.colab import auth
auth.authenticate_user()

import vertexai
from vertexai.generative_models import (
    Content,
    FunctionDeclaration,
    GenerativeModel,
    Part,
    Tool,
)


def generate_function_call(prompt: str, project_id: str, location: str) -> tuple:
    # Initialize Vertex AI
    vertexai.init(project=project_id, location=location)

    # Initialize Gemini model
    model = GenerativeModel("gemini-1.0-pro")

    # Specify a function declaration and parameters for an API request
    get_similar_patents_func = FunctionDeclaration(
        name="search similar patents",
        description="search similar patents,number,title,claim1 relates to given search_text",
        parameters={
            "type": "object",
            "properties": {
                "search_text": {
                    "type": "string",
                    "description": "Description of the search words,sentences you wish to examine",
                }
            },
        },
    )

    get_similar_patents_tool = Tool(
        function_declarations=[get_similar_patents_func],
    )

    # Define the user's prompt in a Content object that we can reuse in model calls
    user_prompt_content = Content(
        role="user",
        parts=[
            Part.from_text(prompt),
        ],
    )
    print(user_prompt_content)

    # Send the prompt and instruct the model to generate content using the Tool that you just created
    response = model.generate_content(
        user_prompt_content,
        generation_config={"temperature": 0},
        tools=[get_similar_patents_tool],
    )
    response_function_call_content = response.candidates[0].content
    print(response_function_call_content)
    # print(response.candidates[0].to_dict()["content"])

    # Check the function name that the model responded with, and make an API call to an external system
    if (
        response.candidates[0].content.parts[0].function_call.name
        == "search similar patents"
    ):
        # Extract the arguments to use in your API call
        tex = response.candidates[0].content.parts[0].function_call.args["search_text"]
        print(f"{tex}であります")

        # Here you could call a real patent-search API with your preferred method, for example:
        # api_response = requests.post(patent_search_api_url, data={"search_text": tex})

        # In this example, we'll use synthetic data to simulate a response payload from an external API
        api_response = """{ "search_text": "アノードとカソードの間を樹脂で充填して絶縁したダイオード", "patents":{'US98765432','EP0099233'} }"""

        # Return the API response to Gemini so it can generate a model response or request another function call
        response = model.generate_content(
            [
                user_prompt_content,  # User prompt
                response_function_call_content,  # Function call response
                Content(
                    parts=[
                        Part.from_function_response(
                            name="search similar patents",
                            response={
                                "content": api_response,  # Return the API response to Gemini
                            },
                        )
                    ],
                ),
            ],
            tools=[get_similar_patents_tool],
        )

    # Get the model summary response
    summary = response.candidates[0].content.parts[0].text

    return summary, response
This is just the sample with its location argument, and the function that uses it, swapped out for a search-text input.
But when I ran it, I got:
InvalidArgument: 400 Request contains an invalid argument
... so the request was not being executed correctly. I tried all sorts of things (since it smelled like the arguments were not being passed properly, I checked them, simplified the function itself, and so on), but the error just kept coming.
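One of those isolation attempts, for the record: strip everything down to a single generate_content call with only the tool attached. A minimal sketch, assuming vertexai.init has already been run with a valid project and location as in the listing above:

from vertexai.generative_models import FunctionDeclaration, GenerativeModel, Tool

# The declaration under test, copied from the code above
suspect_func = FunctionDeclaration(
    name="search similar patents",
    description="search similar patents,number,title,claim1 relates to given search_text",
    parameters={
        "type": "object",
        "properties": {"search_text": {"type": "string"}},
    },
)

try:
    GenerativeModel("gemini-1.0-pro").generate_content(
        "test prompt",
        tools=[Tool(function_declarations=[suspect_func])],
    )
except Exception as e:
    # Even reduced to this, it still raises the same 400 InvalidArgument
    print(type(e).__name__, e)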
That cost me about two days of pain before I finally solved it.
3. The Fix
After staring at the sample for a long time and a lot of trial and error, the cause turned out to be that the function name contained spaces! Changing it to snake_case, as flagged by the comment in the code below, made it run without a problem.
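In isolation, the change is just the function name (both lines are taken from the two listings):

name="search similar patents",   # before: contains spaces -> 400 InvalidArgument
name="get_similar_patents",      # after: snake_case, runs fine

The full corrected version: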
import vertexai
from vertexai.generative_models import (
    Content,
    FunctionDeclaration,
    GenerativeModel,
    Part,
    Tool,
)


def generate_function_call(prompt: str, project_id: str, location: str) -> tuple:
    # Initialize Vertex AI
    vertexai.init(project=project_id, location=location)

    # Initialize Gemini model
    model = GenerativeModel("gemini-1.0-pro")

    # Specify a function declaration and parameters for an API request
    get_similar_patents_func = FunctionDeclaration(
        # Putting spaces in this name triggers the parameter (InvalidArgument) error,
        # so it has to be snake_case.
        name="get_similar_patents",
        description="search similar patents,number,title,claim1 relates to given search_text",
        parameters={
            "type": "object",
            "properties": {
                "search_text": {
                    "type": "string",
                    "description": "Description of the search words,sentences you wish to examine",
                }
            },
        },
    )

    get_similar_patents_tool = Tool(
        function_declarations=[get_similar_patents_func],
    )

    # Define the user's prompt in a Content object that we can reuse in model calls
    user_prompt_content = Content(
        role="user",
        parts=[
            Part.from_text(prompt),
        ],
    )
    print(user_prompt_content)

    # Send the prompt and instruct the model to generate content using the Tool that you just created
    response = model.generate_content(
        user_prompt_content,
        generation_config={"temperature": 0},
        tools=[get_similar_patents_tool],
    )
    response_function_call_content = response.candidates[0].content
    print(response_function_call_content)
    # print(response.candidates[0].to_dict()["content"])

    # Check the function name that the model responded with, and make an API call to an external system
    if (
        response.candidates[0].content.parts[0].function_call.name
        == "get_similar_patents"
    ):
        # Extract the arguments to use in your API call
        tex = response.candidates[0].content.parts[0].function_call.args["search_text"]
        print(f"{tex}であります")

        # Here you could call a real patent-search API with your preferred method, for example:
        # api_response = requests.post(patent_search_api_url, data={"search_text": tex})

        # In this example, we'll use synthetic data to simulate a response payload from an external API
        api_response = """{ "search_text": "アノードとカソードの間を樹脂で充填して絶縁したダイオード", "patents":{'US98765432','EP0099233'} }"""

        # Return the API response to Gemini so it can generate a model response or request another function call
        response = model.generate_content(
            [
                user_prompt_content,  # User prompt
                response_function_call_content,  # Function call response
                Content(
                    parts=[
                        Part.from_function_response(
                            name="get_similar_patents",
                            response={
                                "content": api_response,  # Return the API response to Gemini
                            },
                        )
                    ],
                ),
            ],
            tools=[get_similar_patents_tool],
        )

    # Get the model summary response
    summary = response.candidates[0].content.parts[0].text

    return summary, response
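Calling the function looks roughly like this. The project ID and region are placeholders, and since the article does not show the actual prompt, the diode description from the synthetic payload is reused here as a hypothetical one:

summary, response = generate_function_call(
    prompt="アノードとカソードの間を樹脂で充填して絶縁したダイオードに類似する特許を探して",  # hypothetical prompt
    project_id="your-project-id",  # placeholder
    location="us-central1",        # placeholder region
)
print(summary)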
Execution result:
('類似する特許として、特許番号US98765432、EP0099233が挙げられます。',
candidates {
content {
role: "model"
parts {
text: "\351\241\236\344\274\274\343\201\231\343\202\213\347\211\271\350\250\261\343\201\250\343\201\227\343\201\246\343\200\201\347\211\271\350\250\261\347\225\252\345\217\267US98765432\343\200\201EP0099233\343\201\214\346\214\231\343\201\222\343\202\211\343\202\214\343\201\276\343\201\231\343\200\202"
}
}
finish_reason: STOP
safety_ratings {
category: HARM_CATEGORY_HATE_SPEECH
probability: NEGLIGIBLE
probability_score: 0.10034840553998947
severity: HARM_SEVERITY_NEGLIGIBLE
severity_score: 0.11899801343679428
}
safety_ratings {
category: HARM_CATEGORY_DANGEROUS_CONTENT
probability: NEGLIGIBLE
probability_score: 0.1630452573299408
severity: HARM_SEVERITY_NEGLIGIBLE
severity_score: 0.13017480075359344
}
safety_ratings {
category: HARM_CATEGORY_HARASSMENT
probability: NEGLIGIBLE
probability_score: 0.1790994256734848
severity: HARM_SEVERITY_NEGLIGIBLE
severity_score: 0.12283853441476822
}
safety_ratings {
category: HARM_CATEGORY_SEXUALLY_EXPLICIT
probability: NEGLIGIBLE
probability_score: 0.25590112805366516
severity: HARM_SEVERITY_NEGLIGIBLE
severity_score: 0.03283695504069328
}
}
usage_metadata {
prompt_token_count: 151
candidates_token_count: 31
total_token_count: 182
})
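As the comment in the code notes, the synthetic api_response is where a real patent-search call will eventually go. A hypothetical sketch only; the endpoint and response shape are placeholders, not an actual service:

import json
import requests

def search_similar_patents(search_text: str) -> str:
    # Placeholder endpoint; swap in whichever patent-search API you actually use
    resp = requests.post(
        "https://example.com/patent-search",
        json={"search_text": search_text},
        timeout=30,
    )
    resp.raise_for_status()
    # The string returned here is what would be passed as the "content" value
    # to Part.from_function_response in the code above
    return json.dumps(resp.json(), ensure_ascii=False)

Whatever this returns would simply replace the api_response string inside the if block.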
4. Closing
At last we are at the starting line. (To be continued in the next post.)