import json
from typing import Any, Optional

from shared.api.models.base import (
    WrappedBooleanResponse,
    WrappedGenericMessageResponse,
)
from shared.api.models.management.responses import (
    WrappedPromptResponse,
    WrappedPromptsResponse,
)


class PromptsSDK:
    """Async SDK wrapper for the v3 prompt management endpoints."""

    def __init__(self, client):
        self.client = client

    async def create(
        self, name: str, template: str, input_types: dict
    ) -> WrappedGenericMessageResponse:
        """
        Create a new prompt.

        Args:
            name (str): The name of the prompt
            template (str): The template string for the prompt
            input_types (dict): A dictionary mapping input names to their types

        Returns:
            WrappedGenericMessageResponse: Created prompt information
        """
        data: dict[str, Any] = {
            "name": name,
            "template": template,
            "input_types": input_types,
        }
        return await self.client._make_request(
            "POST",
            "prompts",
            json=data,
            version="v3",
        )
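
    # Usage sketch (illustrative, not part of this module): assuming `client`
    # is an initialized async client that exposes a PromptsSDK instance as
    # `client.prompts`; the prompt name, template, and input types below are
    # hypothetical.
    #
    #   result = await client.prompts.create(
    #       name="greeting",
    #       template="Hello, {name}! How can I help you with {topic}?",
    #       input_types={"name": "str", "topic": "str"},
    #   )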

    async def list(self) -> WrappedPromptsResponse:
        """
        List all available prompts.

        Returns:
            WrappedPromptsResponse: List of all available prompts
        """
        return await self.client._make_request(
            "GET",
            "prompts",
            version="v3",
        )
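
    # Usage sketch (illustrative): listing prompts; iterating over a `results`
    # attribute is an assumption about the shape of WrappedPromptsResponse.
    #
    #   prompts = await client.prompts.list()
    #   for prompt in prompts.results:
    #       print(prompt.name)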

    async def retrieve(
        self,
        name: str,
        inputs: Optional[dict] = None,
        prompt_override: Optional[str] = None,
    ) -> WrappedPromptResponse:
        """
        Get a specific prompt by name, optionally applying inputs and a
        template override.

        Args:
            name (str): The name of the prompt to retrieve
            inputs (Optional[dict]): Inputs to substitute into the prompt
                template (JSON-encoded before sending)
            prompt_override (Optional[str]): An override for the prompt template

        Returns:
            WrappedPromptResponse: The requested prompt with applied inputs
                and/or override
        """
        params = {}
        if inputs:
            params["inputs"] = json.dumps(inputs)
        if prompt_override:
            params["prompt_override"] = prompt_override
        return await self.client._make_request(
            "POST",
            f"prompts/{name}",
            params=params,
            version="v3",
        )
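
    # Usage sketch (illustrative): retrieving a prompt with substituted inputs.
    # The dict passed as `inputs` is JSON-encoded by retrieve() before the
    # request is sent; the values shown are hypothetical.
    #
    #   prompt = await client.prompts.retrieve(
    #       name="greeting",
    #       inputs={"name": "Ada", "topic": "prompts"},
    #   )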

    async def update(
        self,
        name: str,
        template: Optional[str] = None,
        input_types: Optional[dict] = None,
    ) -> WrappedGenericMessageResponse:
        """
        Update an existing prompt's template and/or input types.

        Args:
            name (str): The name of the prompt to update
            template (Optional[str]): The updated template string for the prompt
            input_types (Optional[dict]): The updated dictionary mapping input
                names to their types

        Returns:
            WrappedGenericMessageResponse: The updated prompt details
        """
        data: dict = {}
        if template:
            data["template"] = template
        if input_types:
            data["input_types"] = json.dumps(input_types)
        return await self.client._make_request(
            "PUT",
            f"prompts/{name}",
            json=data,
            version="v3",
        )
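
    # Usage sketch (illustrative): a partial update. Arguments left as None
    # are omitted from the PUT payload, so only the template changes here.
    #
    #   await client.prompts.update(
    #       name="greeting",
    #       template="Hi, {name}!",
    #   )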

    async def delete(self, name: str) -> WrappedBooleanResponse:
        """
        Delete a prompt by name.

        Args:
            name (str): The name of the prompt to delete

        Returns:
            WrappedBooleanResponse: Whether the deletion was successful
        """
        return await self.client._make_request(
            "DELETE",
            f"prompts/{name}",
            version="v3",
        )
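

# Usage sketch (illustrative): deleting a prompt. Reading a `results`
# attribute off the wrapped boolean response is an assumption based on the
# other wrapped response types.
#
#   success = await client.prompts.delete(name="greeting")
#   print(success.results)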