prompts.py

import json
from typing import Any, Optional

from shared.api.models import (
    WrappedBooleanResponse,
    WrappedGenericMessageResponse,
    WrappedPromptResponse,
    WrappedPromptsResponse,
)


class PromptsSDK:
    def __init__(self, client):
        self.client = client

    async def create(
        self, name: str, template: str, input_types: dict
    ) -> WrappedGenericMessageResponse:
        """Create a new prompt.

        Args:
            name (str): The name of the prompt
            template (str): The template string for the prompt
            input_types (dict): A dictionary mapping input names to their types

        Returns:
            WrappedGenericMessageResponse: Confirmation that the prompt was created
        """
        data: dict[str, Any] = {
            "name": name,
            "template": template,
            "input_types": input_types,
        }
        response_dict = await self.client._make_request(
            "POST",
            "prompts",
            json=data,
            version="v3",
        )
        return WrappedGenericMessageResponse(**response_dict)

    async def list(self) -> WrappedPromptsResponse:
        """List all available prompts.

        Returns:
            WrappedPromptsResponse: The list of all available prompts
        """
        response_dict = await self.client._make_request(
            "GET",
            "prompts",
            version="v3",
        )
        return WrappedPromptsResponse(**response_dict)

    async def retrieve(
        self,
        name: str,
        inputs: Optional[dict] = None,
        prompt_override: Optional[str] = None,
    ) -> WrappedPromptResponse:
        """Get a specific prompt by name, optionally with inputs and override.

        Args:
            name (str): The name of the prompt to retrieve
            inputs (Optional[dict]): Inputs to apply to the prompt template (sent JSON-encoded)
            prompt_override (Optional[str]): An override for the prompt template

        Returns:
            WrappedPromptResponse: The requested prompt with applied inputs and/or override
        """
        params = {}
        if inputs:
            params["inputs"] = json.dumps(inputs)
        if prompt_override:
            params["prompt_override"] = prompt_override
        response_dict = await self.client._make_request(
            "POST",
            f"prompts/{name}",
            params=params,
            version="v3",
        )
        return WrappedPromptResponse(**response_dict)

    async def update(
        self,
        name: str,
        template: Optional[str] = None,
        input_types: Optional[dict] = None,
    ) -> WrappedGenericMessageResponse:
        """Update an existing prompt's template and/or input types.

        Args:
            name (str): The name of the prompt to update
            template (Optional[str]): The updated template string for the prompt
            input_types (Optional[dict]): The updated dictionary mapping input names to their types

        Returns:
            WrappedGenericMessageResponse: Confirmation that the prompt was updated
        """
        data: dict = {}
        if template:
            data["template"] = template
        if input_types:
            data["input_types"] = json.dumps(input_types)
        response_dict = await self.client._make_request(
            "PUT",
            f"prompts/{name}",
            json=data,
            version="v3",
        )
        return WrappedGenericMessageResponse(**response_dict)

    async def delete(self, name: str) -> WrappedBooleanResponse:
        """Delete a prompt by name.

        Args:
            name (str): The name of the prompt to delete

        Returns:
            WrappedBooleanResponse: Whether the deletion was successful
        """
        response_dict = await self.client._make_request(
            "DELETE",
            f"prompts/{name}",
            version="v3",
        )
        return WrappedBooleanResponse(**response_dict)
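

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the SDK surface above).
# PromptsSDK only needs a client object exposing an async
# `_make_request(method, endpoint, *, json=None, params=None, version=...)`
# coroutine that returns a response dict, as the calls above show. The `demo`
# coroutine and its `client` argument are hypothetical placeholders; wire in
# whatever async client implements that contract and run it with, e.g.,
# `asyncio.run(demo(client))`.
# ---------------------------------------------------------------------------
async def demo(client) -> None:
    prompts = PromptsSDK(client)

    # Register a template with one string input.
    await prompts.create(
        name="greeting",
        template="Hello, {name}!",
        input_types={"name": "str"},
    )

    # Fetch the stored prompt with concrete inputs applied.
    rendered = await prompts.retrieve("greeting", inputs={"name": "Ada"})
    print(rendered)

    # Adjust the template, then clean up.
    await prompts.update("greeting", template="Hi, {name}!")
    await prompts.delete("greeting")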