Minimize the learning curve to create reliable LLM applications

Mirascope is a Python toolkit that provides the right level of abstraction for building LLM agents your way. Modular. Extensible. Reliable. Open Source.

pip install mirascope

Building Blocks for LLM Applications

Examples Directory
import os

from mirascope import tags
from mirascope.openai import OpenAICall, OpenAICallParams

os.environ["OPENAI_API_KEY"] = "YOUR_API_KEY"


@tags(["version:0003"])
class Editor(OpenAICall):
    """Critiques a manga storyline in the voice of a top-class manga editor."""

    prompt_template = """
    SYSTEM:
    You are a top class manga editor.

    USER:
    I'm working on a new storyline. What do you think?
    {storyline}
    """

    # Template variable injected into the USER message above.
    storyline: str

    call_params = OpenAICallParams(model="gpt-4", temperature=0.4)


storyline = "..."
editor = Editor(storyline=storyline)

print(editor.messages())
# > [{'role': 'system', 'content': 'You are a top class manga editor.'}, {'role': 'user', 'content': "I'm working on a new storyline. What do you think?\n..."}]

critique = editor.call()
print(critique.content)
# > I think the beginning starts off great, but...

print(editor.dump() | critique.dump())
# { "tags": ["version:0003"], ... }
from openai.types.chat import ChatCompletionMessageParam

from mirascope.openai import OpenAICall


class Librarian(OpenAICall):
    """Chatbot librarian that threads prior turns back into each new call."""

    prompt_template = """
    SYSTEM: You are the world's greatest librarian.
    MESSAGES: {history}
    USER: {question}
    """

    question: str
    # Mutable default is safe here: pydantic deep-copies field defaults per instance.
    history: list[ChatCompletionMessageParam] = []


librarian = Librarian(question="", history=[])
while True:
    librarian.question = input("(User): ")
    response = librarian.call()
    # Record the latest exchange so the next call sees the full conversation.
    librarian.history += [
        {"role": "user", "content": librarian.question},
        {"role": "assistant", "content": response.content},
    ]
    print(f"(Assistant): {response.content}")

# > (User): What fantasy book should I read?
# > (Assistant): Have you read the Name of the Wind?
# > (User): I have! What do you like about it?
# > (Assistant): I love the intricate world-building...
from typing import Literal

from mirascope.openai import OpenAICall, OpenAICallParams


def get_current_weather(
    location: str, unit: Literal["celsius", "fahrenheit"] = "fahrenheit"
):
    """Get the current weather in a given location."""
    if "tokyo" in location.lower():
        print(f"It is 10 degrees {unit} in Tokyo, Japan")
    elif "san francisco" in location.lower():
        print(f"It is 72 degrees {unit} in San Francisco, CA")
    elif "paris" in location.lower():
        # Fixed typo: original printed "degress".
        print(f"It is 22 degrees {unit} in Paris, France")
    else:
        # Fixed: original string lacked the f-prefix, so the literal text
        # "{location}" was printed instead of the argument.
        print(f"I'm not sure what the weather is like in {location}")


class Forecast(OpenAICall):
    """Asks for Tokyo's weather with `get_current_weather` exposed as a tool."""

    prompt_template = "What's the weather in Tokyo?"

    call_params = OpenAICallParams(model="gpt-4", tools=[get_current_weather])


tool = Forecast().call().tool
if tool:
    # Invoke the function the model chose, with the arguments it generated.
    tool.fn(**tool.args)
    # > It is 10 degrees fahrenheit in Tokyo, Japan
import os
from functools import cached_property

from mirascope.openai import OpenAICall, OpenAICallParams

os.environ["OPENAI_API_KEY"] = "YOUR_API_KEY"


class ChefSelector(OpenAICall):
    """Picks a chef renowned for the given cuisine."""

    prompt_template = "Name a chef who is really good at cooking {food_type} food"

    food_type: str

    call_params = OpenAICallParams(model="gpt-3.5-turbo-0125")


class RecipeRecommender(ChefSelector):
    """Recommends a recipe in the voice of the chef chosen by `ChefSelector`."""

    prompt_template = """
    SYSTEM:
    Imagine that you are chef {chef}.
    Your task is to recommend recipes that you, {chef}, would be excited to serve.
    USER:
    Recommend a {food_type} recipe using {ingredient}.
    """

    ingredient: str

    call_params = OpenAICallParams(model="gpt-4")

    @cached_property  # !!! so multiple access doesn't make multiple calls
    def chef(self) -> str:
        """Uses `ChefSelector` to select the chef based on the food type."""
        return ChefSelector(food_type=self.food_type).call().content


response = RecipeRecommender(food_type="japanese", ingredient="apples").call()
print(response.content)
# > Certainly! Here's a recipe for a delicious and refreshing Japanese Apple Salad: ...
from typing import Literal, Type

from mirascope.openai import OpenAIExtractor
from pydantic import BaseModel


class TaskDetails(BaseModel):
    """Structured fields extracted from a free-form task description."""

    description: str
    due_date: str
    priority: Literal["low", "normal", "high"]


class TaskExtractor(OpenAIExtractor[TaskDetails]):
    """Extracts a `TaskDetails` instance from the `{task}` text."""

    extract_schema: Type[TaskDetails] = TaskDetails
    prompt_template = """
    Extract the task details from the following task:
    {task}
    """

    task: str


task = "Submit quarterly report by next Friday. Task is high priority."
task_details = TaskExtractor(task=task).extract()
assert isinstance(task_details, TaskDetails)
# Fixed: original printed the class `TaskDetails` rather than the extracted
# instance, which is what the expected output below shows.
print(task_details)
# > description='Submit quarterly report' due_date='next Friday' priority='high'
import os
from typing import Type

from fastapi import FastAPI
from mirascope.openai import OpenAIExtractor
from pydantic import BaseModel

os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY"

app = FastAPI()


class Book(BaseModel):
    """Response model for a recommended book."""

    title: str
    author: str


class BookRecommender(OpenAIExtractor[Book]):
    """Extracts a `Book` recommendation for the requested `{genre}`."""

    extract_schema: Type[Book] = Book
    prompt_template = "Please recommend a {genre} book."

    genre: str


@app.post("/")
def root(book_recommender: BookRecommender) -> Book:
    """Generates a book based on provided `genre`."""
    # BookRecommender doubles as the FastAPI request body (it is a pydantic model).
    return book_recommender.extract()
import os

from mirascope import tags
from mirascope.openai import OpenAICall, OpenAICallParams

os.environ["OPENAI_API_KEY"] = "YOUR_API_KEY"


@tags(["version:0003"])
class Editor(OpenAICall):
    """Critiques a manga storyline in the voice of a top-class manga editor."""

    prompt_template = """
    SYSTEM:
    You are a top class manga editor.

    USER:
    I'm working on a new storyline. What do you think?
    {storyline}
    """

    # Template variable injected into the USER message above.
    storyline: str

    call_params = OpenAICallParams(model="gpt-4", temperature=0.4)


storyline = "..."
editor = Editor(storyline=storyline)

print(editor.messages())
# > [{'role': 'system', 'content': 'You are a top class manga editor.'}, {'role': 'user', 'content': "I'm working on a new storyline. What do you think?\n..."}]

critique = editor.call()
print(critique.content)
# > I think the beginning starts off great, but...

print(editor.dump() | critique.dump())
# { "tags": ["version:0003"], ... }
from openai.types.chat import ChatCompletionMessageParam

from mirascope.openai import OpenAICall


class Librarian(OpenAICall):
    """Chatbot librarian that threads prior turns back into each new call."""

    prompt_template = """
    SYSTEM: You are the world's greatest librarian.
    MESSAGES: {history}
    USER: {question}
    """

    question: str
    # Mutable default is safe here: pydantic deep-copies field defaults per instance.
    history: list[ChatCompletionMessageParam] = []


librarian = Librarian(question="", history=[])
while True:
    librarian.question = input("(User): ")
    response = librarian.call()
    # Record the latest exchange so the next call sees the full conversation.
    librarian.history += [
        {"role": "user", "content": librarian.question},
        {"role": "assistant", "content": response.content},
    ]
    print(f"(Assistant): {response.content}")

# > (User): What fantasy book should I read?
# > (Assistant): Have you read the Name of the Wind?
# > (User): I have! What do you like about it?
# > (Assistant): I love the intricate world-building...
from typing import Literal

from mirascope.openai import OpenAICall, OpenAICallParams


def get_current_weather(
    location: str, unit: Literal["celsius", "fahrenheit"] = "fahrenheit"
):
    """Get the current weather in a given location."""
    if "tokyo" in location.lower():
        print(f"It is 10 degrees {unit} in Tokyo, Japan")
    elif "san francisco" in location.lower():
        print(f"It is 72 degrees {unit} in San Francisco, CA")
    elif "paris" in location.lower():
        # Fixed typo: original printed "degress".
        print(f"It is 22 degrees {unit} in Paris, France")
    else:
        # Fixed: original string lacked the f-prefix, so the literal text
        # "{location}" was printed instead of the argument.
        print(f"I'm not sure what the weather is like in {location}")


class Forecast(OpenAICall):
    """Asks for Tokyo's weather with `get_current_weather` exposed as a tool."""

    prompt_template = "What's the weather in Tokyo?"

    call_params = OpenAICallParams(model="gpt-4", tools=[get_current_weather])


tool = Forecast().call().tool
if tool:
    # Invoke the function the model chose, with the arguments it generated.
    tool.fn(**tool.args)
    # > It is 10 degrees fahrenheit in Tokyo, Japan
import os
from functools import cached_property

from mirascope.openai import OpenAICall, OpenAICallParams

os.environ["OPENAI_API_KEY"] = "YOUR_API_KEY"


class ChefSelector(OpenAICall):
    """Picks a chef renowned for the given cuisine."""

    prompt_template = "Name a chef who is really good at cooking {food_type} food"

    food_type: str

    call_params = OpenAICallParams(model="gpt-3.5-turbo-0125")


class RecipeRecommender(ChefSelector):
    """Recommends a recipe in the voice of the chef chosen by `ChefSelector`."""

    prompt_template = """
    SYSTEM:
    Imagine that you are chef {chef}.
    Your task is to recommend recipes that you, {chef}, would be excited to serve.
    USER:
    Recommend a {food_type} recipe using {ingredient}.
    """

    ingredient: str

    call_params = OpenAICallParams(model="gpt-4")

    @cached_property  # !!! so multiple access doesn't make multiple calls
    def chef(self) -> str:
        """Uses `ChefSelector` to select the chef based on the food type."""
        return ChefSelector(food_type=self.food_type).call().content


response = RecipeRecommender(food_type="japanese", ingredient="apples").call()
print(response.content)
# > Certainly! Here's a recipe for a delicious and refreshing Japanese Apple Salad: ...
from typing import Literal, Type

from mirascope.openai import OpenAIExtractor
from pydantic import BaseModel


class TaskDetails(BaseModel):
    """Structured fields extracted from a free-form task description."""

    description: str
    due_date: str
    priority: Literal["low", "normal", "high"]


class TaskExtractor(OpenAIExtractor[TaskDetails]):
    """Extracts a `TaskDetails` instance from the `{task}` text."""

    extract_schema: Type[TaskDetails] = TaskDetails
    prompt_template = """
    Extract the task details from the following task:
    {task}
    """

    task: str


task = "Submit quarterly report by next Friday. Task is high priority."
task_details = TaskExtractor(task=task).extract()
assert isinstance(task_details, TaskDetails)
# Fixed: original printed the class `TaskDetails` rather than the extracted
# instance, which is what the expected output below shows.
print(task_details)
# > description='Submit quarterly report' due_date='next Friday' priority='high'
import os
from typing import Type

from fastapi import FastAPI
from mirascope.openai import OpenAIExtractor
from pydantic import BaseModel

os.environ["OPENAI_API_KEY"] = "YOUR_OPENAI_API_KEY"

app = FastAPI()


class Book(BaseModel):
    """Response model for a recommended book."""

    title: str
    author: str


class BookRecommender(OpenAIExtractor[Book]):
    """Extracts a `Book` recommendation for the requested `{genre}`."""

    extract_schema: Type[Book] = Book
    prompt_template = "Please recommend a {genre} book."

    genre: str


@app.post("/")
def root(book_recommender: BookRecommender) -> Book:
    """Generates a book based on provided `genre`."""
    # BookRecommender doubles as the FastAPI request body (it is a pydantic model).
    return book_recommender.extract()

Start Building With Mirascope

No matter the model you're using or features you need to build, Mirascope can simplify the process. You'll feel like you're just writing the Python you already know.