"""
Run this script to see how the BAML client can be used in Python.

python -m example_baml_app
"""

import asyncio
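# baml_client is the Python module generated by BAML from this project's .baml files.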
from baml_client import baml as b
from datetime import datetime
from typing import List
from typing_extensions import TypedDict

async def extract_resume(resume: str) -> None:
    """
    Extracts structured data from the resume and prints it.
    """
    print("Parsing resume...")
    print(resume[:100] + "..." if len(resume) > 100 else resume)
    parsed_resume = await b.ExtractResume(resume)
    print(parsed_resume.model_dump_json(indent=2))

    await asyncio.sleep(1)
    print("\n\nNow extracting using streaming")
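    # Stream the same extraction: print each partial result that is already
    # parseable, then wait for the final, complete response.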
    async with b.ExtractResume.stream(resume) as stream:
        async for x in stream.parsed_stream:
            if x.is_parseable:
                print(f"streaming: {x.parsed.model_dump_json()}")
        response = await stream.get_final_response()
        if response.has_value:
            print(f"\n final: {response.value.model_dump_json(indent=2)}")
        else:
            print("No final response")


class ChatMessage(TypedDict):
    sender: str
    message: str


async def classify_chat(messages: List[ChatMessage]) -> None:
    """
    Classifies the chat and prints the classification.
    """
    print("Classifying chat...")
    chat = "\n".join(map(lambda m: f'{m["sender"]}: {m["message"]}', messages))
    print(chat[:100] + "..." if len(chat) > 100 else chat)

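    # ClassifyMessage is another generated BAML function; the chat transcript
    # and today's date are passed in as plain strings.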
    classification = await b.ClassifyMessage(
        message=chat, message_date=datetime.now().strftime("%Y-%m-%d")
    )
    print("Got categories: ", classification)


async def main():
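    # Sample resume text used for the extraction demo.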
    resume = """
    John Doe
    1234 Elm Street
    Springfield, IL 62701
    (123) 456-7890

    Objective: To obtain a position as a software engineer.

    Education:
    Bachelor of Science in Computer Science
    University of Illinois at Urbana-Champaign
    May 2020 - May 2024

    Experience:
    Software Engineer Intern
    Google
    May 2022 - August 2022
    - Worked on the Google Search team
    - Developed new features for the search engine
    - Wrote code in Python and C++

    Software Engineer Intern
    Facebook
    May 2021 - August 2021
    - Worked on the Facebook Messenger team
    - Developed new features for the messenger app
    - Wrote code in Python and Java
    """
    await extract_resume(resume)

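    # A short support conversation used for the classification demo.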
    messages = [
        {"sender": "Alice", "message": "I'm having issues with my computer."},
        {
            "sender": "Assistant",
            "message": "I'm sorry to hear that. What seems to be the problem?",
        },
        {
            "sender": "Alice",
            "message": "It's running really slow. I need to return it. Can I get a refund?",
        },
    ]
    await classify_chat(messages)


if __name__ == "__main__":
    asyncio.run(main())