Merge pull request #117 from Chainlit/damien/eng-1767-prompts-should-contain-a-field-that-links-to-the-literal-ai

feat(prompt): add url to get_prompt
clementsirieix authored Sep 12, 2024
2 parents 0638e56 + 257a7f6 commit 644a041
Showing 4 changed files with 10 additions and 17 deletions.
19 changes: 2 additions & 17 deletions examples/prompt.py
@@ -1,26 +1,11 @@
 from dotenv import load_dotenv
-from openai import OpenAI

 from literalai import LiteralClient

 load_dotenv()

-openai = OpenAI()
-
 client = LiteralClient()

-client.instrument_openai()
-
-prompt = client.api.create_prompt(
-    name="hello",
-    template_messages=[{"role": "user", "content": "Hello, how are you {{name}}?"}],
-)
-messages = prompt.format_messages(name="Alice")
-
-
-res = openai.chat.completions.create(
-    model="gpt-3.5-turbo",
-    messages=messages,
-)
+prompt = client.api.get_prompt(name="Default", version=0)

-print(res)
+print(prompt)
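
For context, a minimal sketch of how the slimmed-down example can surface the new field; get_prompt and the Prompt attributes come from this diff, while the printed wording is illustrative only.

# Sketch only: fetch a prompt and show the new `url` field added by this PR.
# Assumes LITERAL_API_KEY is configured and a "Default" prompt exists.
from dotenv import load_dotenv

from literalai import LiteralClient

load_dotenv()

client = LiteralClient()

prompt = client.api.get_prompt(name="Default", version=0)

# `url` links back to this prompt version in Literal AI.
print(f"Fetched {prompt.name} v{prompt.version}: {prompt.url}")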
1 change: 1 addition & 0 deletions literalai/api/gql.py
@@ -1034,6 +1034,7 @@
variables
variablesDefaultValues
version
url
lineage {
name
}
4 changes: 4 additions & 0 deletions literalai/prompt_engineering/prompt.py
@@ -49,6 +49,7 @@ class PromptDict(TypedDict, total=False):
type: "GenerationType"
name: str
version: int
url: str
versionDesc: Optional[str]
templateMessages: List["GenerationMessage"]
tools: Optional[List[Dict]]
@@ -67,6 +68,7 @@ class Prompt(Utils):
type: "GenerationType"
name: str
version: int
url: str
version_desc: Optional[str]
template_messages: List["GenerationMessage"]
tools: Optional[List[Dict]]
@@ -84,6 +86,7 @@ def to_dict(self) -> PromptDict:
"type": self.type,
"name": self.name,
"version": self.version,
"url": self.url,
"versionDesc": self.version_desc,
"templateMessages": self.template_messages, # Assuming this is a list of dicts or similar serializable objects
"tools": self.tools,
@@ -104,6 +107,7 @@ def from_dict(cls, api: "LiteralAPI", prompt_dict: PromptDict) -> "Prompt":
id=prompt_dict.get("id", ""),
name=prompt_dict.get("lineage", {}).get("name", ""),
version=prompt_dict.get("version", 0),
url=prompt_dict.get("url", ""),
created_at=prompt_dict.get("createdAt", ""),
updated_at=prompt_dict.get("updatedAt", ""),
type=prompt_dict.get("type", GenerationType.CHAT),
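
Taken together with the query change above, the new attribute flows from the API payload into Prompt.from_dict and back out through to_dict. A minimal sketch of that round trip, assuming a configured client and an existing "Default" prompt:

from literalai import LiteralClient

client = LiteralClient()

prompt = client.api.get_prompt(name="Default", version=0)

# to_dict() now mirrors the new attribute, so the link survives serialization.
serialized = prompt.to_dict()
assert serialized["url"] == prompt.url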
3 changes: 3 additions & 0 deletions tests/e2e/test_e2e.py
@@ -594,10 +594,13 @@ async def test_dataset_sync(
@pytest.mark.timeout(5)
async def test_prompt(self, async_client: AsyncLiteralClient):
prompt = await async_client.api.get_prompt(name="Default", version=0)
project = await async_client.api.get_my_project_id()

assert prompt is not None
assert prompt.name == "Default"
assert prompt.version == 0
assert prompt.provider == "openai"
assert prompt.url.endswith(f"projects/{project}/playground?name=Default&version=0")

prompt = await async_client.api.get_prompt(id=prompt.id, version=0)
assert prompt is not None
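
Outside the async test harness, the same check could look as follows; this is a sketch that assumes the synchronous client exposes the same get_prompt and get_my_project_id methods used in the test above.

# Sketch under the assumption that the sync API mirrors the async one above.
from literalai import LiteralClient

client = LiteralClient()

prompt = client.api.get_prompt(name="Default", version=0)
project = client.api.get_my_project_id()

# The new url field points at the prompt's playground page for this project.
assert prompt.url.endswith(f"projects/{project}/playground?name=Default&version=0")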
