Last active
July 30, 2025 14:20
-
-
Save labeveryday/05d835046fe2aa889fbf56f1ef9d7b90 to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # Not just for conversations although they are supported | |
import json

import boto3
from botocore.exceptions import ClientError
| message_list = [] | |
| initial_message = { | |
| "role": "user", | |
| "content": [ | |
| { "text": "How are you today?" } | |
| ], | |
| } | |
| message_list.append(initial_message) | |
| response = bedrock_client.converse( | |
| modelId="anthropic.claude-3-sonnet-20240229-v1:0", | |
| messages=message_list, | |
| inferenceConfig={ | |
| "maxTokens": 512, | |
| "temperature": 0 | |
| }, | |
| ) | |
| response_message = response['output']['message'] | |
| print(json.dumps(response_message, indent=4)) | |
| def get_completion(prompt, system_prompt=None): | |
| # Define the inference configuration | |
| inference_config = { | |
| "temperature": 0.0, # Set the temperature for generating diverse responses | |
| "maxTokens": 200 # Set the maximum number of tokens to generate | |
| } | |
| # Define additional model fields | |
| additional_model_fields = { | |
| "top_p": 1, # Set the top_p value for nucleus sampling | |
| } | |
| # Create the converse method parameters | |
| converse_api_params = { | |
| "modelId": modelId, # Specify the model ID to use | |
| "messages": [{"role": "user", "content": [{"text": prompt}]}], # Provide the user's prompt | |
| "inferenceConfig": inference_config, # Pass the inference configuration | |
| "additionalModelRequestFields": additional_model_fields # Pass additional model fields | |
| } | |
| # Check if system_text is provided | |
| if system_prompt: | |
| # If system_text is provided, add the system parameter to the converse_params dictionary | |
| converse_api_params["system"] = [{"text": system_prompt}] | |
| # Send a request to the Bedrock client to generate a response | |
| try: | |
| response = bedrock_client.converse(**converse_api_params) | |
| # Extract the generated text content from the response | |
| text_content = response['output']['message']['content'][0]['text'] | |
| # Return the generated text content | |
| return text_content | |
| except ClientError as err: | |
| message = err.response['Error']['Message'] | |
| print(f"A client error occured: {message}") | |
| # Prompt | |
| prompt = "Hello, Claude!" | |
| # Get Claude's response | |
| print(get_completion(prompt)) |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment