Use this file to discover all available pages before exploring further.
Portkey lets you run Bedrock batch jobs without any manual S3 wrangling—simply upload an OpenAI-format .jsonl file and Portkey converts it on-the-fly to the Bedrock format. Supported batch endpoints:
Chat Completions (/v1/chat/completions)
Embeddings (/v1/embeddings)
This is the most efficient way to:
Test your data with different foundation models
Perform A/B testing with different foundation models
Perform batch inference with different foundation models
Bedrock credentials — either a Portkey Provider from Model Catalog or explicit AWS keys (aws_access_key_id, aws_secret_access_key, aws_region, optional aws_session_token).
S3 bucket with read/write access for inputs and outputs.
IAM roles (see Permissions & IAM below).
Optional: a Portkey File (input_file_id) — required only when you set completion_window:"immediate" (Portkey-Batch mode).
from portkey_ai import Portkey

# Initialize the Portkey client with explicit AWS credentials plus the
# S3/Bedrock settings the batch job needs.
portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    provider="bedrock",
    aws_access_key_id="YOUR_AWS_ACCESS_KEY_ID",
    aws_secret_access_key="YOUR_AWS_SECRET_ACCESS_KEY",
    aws_region="YOUR_AWS_REGION",
    aws_s3_bucket="YOUR_AWS_S3_BUCKET",
    aws_s3_object_key="YOUR_AWS_S3_OBJECT_KEY",
    aws_bedrock_model="YOUR_AWS_BEDROCK_MODEL",
)

# Start the batch job.
start_batch_response = portkey.batches.create(
    input_file_id="file_id",                 # file id of the input file
    endpoint="endpoint",                     # ex: /v1/chat/completions
    completion_window="completion_window",   # ex: 24h
    metadata={},                             # metadata for the batch
    # IAM role Bedrock assumes to run the batch job
    role_arn="arn:aws:iam::12312:role/BedrockBatchRole",
    # model to use for the batch
    model="anthropic.claude-3-5-sonnet-20240620-v1:0",
    # Optional: override where results are written; by default the output
    # location matches the input file's location.
    output_data_config={
        "s3OutputDataConfig": {
            "s3Uri": "s3://generations-raw/",
            # Optional KMS key used to encrypt the output data
            "s3EncryptionKeyId": "arn:aws:kms:us-west-2:517194595696:key/89b483cb-130d-497b-aa37-7db177e7cd32",
        }
    },
    job_name="anthropi-requests-test",       # optional
)

print(start_batch_response)
from portkey_ai import Portkey

# Initialize the Portkey client with explicit AWS credentials.
portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    provider="bedrock",
    aws_access_key_id="YOUR_AWS_ACCESS_KEY_ID",
    aws_secret_access_key="YOUR_AWS_SECRET_ACCESS_KEY",
    aws_region="YOUR_AWS_REGION",
)

# Fetch all batch jobs for this provider.
batches = portkey.batches.list()
print(batches)
from portkey_ai import Portkey

# Initialize the Portkey client using a Model Catalog provider slug.
portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    provider="@PROVIDER",
)

# Look up a single batch job by its id.
batch = portkey.batches.retrieve(batch_id="batch_id")
print(batch)
from portkey_ai import Portkey

# Initialize the Portkey client using a Model Catalog provider slug.
portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    provider="@PROVIDER",
)

# Fetch all batch jobs for this provider.
batches = portkey.batches.list()
print(batches)
from portkey_ai import Portkey

# Initialize the Portkey client using a Model Catalog provider slug.
portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    provider="@PROVIDER",
)

# Request cancellation of an in-progress batch job by its id.
cancel_batch_response = portkey.batches.cancel(batch_id="batch_id")
print(cancel_batch_response)