pythonopenai-apilarge-language-modelpy-langchain

How to Convert openai functions to PromptTemplate in langchain when using local llms?


Is it possible to convert OpenAI functions to a PromptTemplate in LangChain when using local LLMs, and return the final output in a format similar to the OpenAI API function-calling format?

import langchain
functions = [
        {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "The city and state, e.g. San Francisco, CA",
                    },
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        }
    ]
llm = llm()

#Pass functions into prompt template in langchain 

#Format prompt template similar to openai manner




Solution

  • I used OpenWeatherMap to get the temperature information. My tool is a function called get_current_weather, which receives a city, a state, and a unit. The unit selects metric or imperial results.

    def get_current_weather(location, unit='celsius'):
        units = 'metric' if unit == 'celsius' else 'imperial'
        base_url = "http://api.openweathermap.org/data/2.5/weather"
        params = {
        "q": location,
        "units": units,
        "appid": weather_api_key  # Replace with your actual API key
        }
        query_string = urllib.parse.urlencode(params)
        url = f"{base_url}?{query_string}"
        print(url)
        response = requests.get(url)
        if response.status_code == 200:
            data = response.json()
            print(data)
            weather = {
                'location': data['name'],
                'temperature': data['main']['temp'],
                'description': data['weather'][0]['description'],
                'humidity': data['main']['humidity'],
                'pressure': data['main']['pressure']
            }
            return weather
        else:
            return {'error': 'City not found or API limit exceeded'}
        
    tools = [
            Tool(
                name="Get Weather",
                func=get_current_weather,
                description="Fetches the weather by location for a specific unit"
            ),
                       
    ]
    
    llm=ChatOpenAI(model="gpt-4o-mini",temperature=0, openai_api_key=key)
    agent = initialize_agent(tools, llm, agent="zero-shot-react-description", verbose=True)
    
    response = agent.run({"input": "slc, ut", "unit": "celsius"})
    print(response)