Upload files to S3 for Bedrock batch inference
To perform batch inference with Bedrock, you first need to upload your input files to S3. This step can be cumbersome and duplicative because you have to transform your data into each model's specific format.
With Portkey, you can upload the file in OpenAI format and Portkey will transform it into the format required by Bedrock on the fly!
This is the most efficient way to:
- Test your data with different foundation models
- Perform A/B testing with different foundation models
- Perform batch inference with different foundation models
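For reference, the file you upload should be in the OpenAI batch format: a JSONL file with one request object per line. Below is a minimal sketch of building such a file (the file name, custom_id, model placeholder, and message content are illustrative; Portkey translates each request into Bedrock's format when the batch runs):

import json

# Each line of the batch input file is one OpenAI-style chat completions request
requests = [
    {
        "custom_id": "request-1",  # your own identifier, echoed back in the batch results
        "method": "POST",
        "url": "/v1/chat/completions",
        "body": {
            "model": "YOUR_AWS_BEDROCK_MODEL",  # placeholder; matches the Bedrock model you configure
            "messages": [{"role": "user", "content": "Summarize this document in one paragraph."}],
        },
    },
]

# Write the requests out as JSONL, ready to upload with purpose="batch"
with open("batch_input.jsonl", "w") as f:
    for request in requests:
        f.write(json.dumps(request) + "\n")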
Uploading Files
from portkey_ai import Portkey

# Initialize the Portkey client
portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    virtual_key="VIRTUAL_KEY",  # Add your provider's virtual key
    provider="bedrock",
    aws_region="YOUR_AWS_REGION",
    aws_s3_bucket="YOUR_AWS_S3_BUCKET",
    aws_s3_object_key="YOUR_AWS_S3_OBJECT_KEY",
    aws_bedrock_model="YOUR_AWS_BEDROCK_MODEL",
    amz_server_side_encryption="ENCRYPTION_TYPE",  # [optional] defaults to aws:kms
    amz_server_side_encryption_aws_kms_key_id="KMS_KEY_ID"  # [optional] only needed if you want a specific KMS key to encrypt the file at rest
)

upload_file_response = portkey.files.create(
    purpose="batch",
    file=open("batch_input.jsonl", "rb")  # your batch input file in OpenAI (JSONL) format
)

print(upload_file_response)
import fs from 'fs';
import { Portkey } from 'portkey-ai';

// Initialize the Portkey client
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VIRTUAL_KEY", // Add your provider's virtual key
    provider: "bedrock",
    awsRegion: "YOUR_AWS_REGION",
    awsS3Bucket: "YOUR_AWS_S3_BUCKET",
    awsS3ObjectKey: "YOUR_AWS_S3_OBJECT_KEY",
    awsBedrockModel: "YOUR_AWS_BEDROCK_MODEL",
    amzServerSideEncryption: "ENCRYPTION_TYPE", // [optional] defaults to aws:kms
    amzServerSideEncryptionAwsKmsKeyId: "KMS_KEY_ID" // [optional] only needed if you want a specific KMS key to encrypt the file at rest
});

const uploadFile = async () => {
    const file = await portkey.files.create({
        purpose: "batch",
        file: fs.createReadStream("batch_input.jsonl") // your batch input file in OpenAI (JSONL) format
    });
    console.log(file);
}

await uploadFile();
# You can also pass a virtual key (x-portkey-virtual-key) instead of the AWS credentials below
curl --location 'https://api.portkey.ai/v1/files' \
--header 'x-portkey-api-key: <portkey_api_key>' \
--header 'x-portkey-provider: bedrock' \
--header 'x-portkey-aws-access-key-id: {YOUR_AWS_ACCESS_KEY_ID}' \
--header 'x-portkey-aws-secret-access-key: {YOUR_AWS_SECRET_ACCESS_KEY}' \
--header 'x-portkey-aws-region: {YOUR_AWS_REGION}' \
--header 'x-portkey-aws-s3-bucket: {YOUR_AWS_S3_BUCKET}' \
--header 'x-portkey-aws-s3-object-key: {YOUR_AWS_S3_OBJECT_KEY}' \
--header 'x-portkey-aws-bedrock-model: {YOUR_AWS_BEDROCK_MODEL}' \
--header 'x-portkey-amz-server-side-encryption: {ENCRYPTION_TYPE}' \
--header 'x-portkey-amz-server-side-encryption-aws-kms-key-id: {KMS_KEY_ID}' \
--form 'file=@"{YOUR_FILE_PATH}"' \
--form 'purpose="batch"'
import fs from 'fs';
import OpenAI from 'openai'; // We're using the v4 SDK
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai'

const openai = new OpenAI({
    apiKey: 'OPENAI_API_KEY', // defaults to process.env["OPENAI_API_KEY"]
    baseURL: PORTKEY_GATEWAY_URL,
    defaultHeaders: createHeaders({
        provider: "bedrock",
        apiKey: "PORTKEY_API_KEY", // defaults to process.env["PORTKEY_API_KEY"]
        virtualKey: "VIRTUAL_KEY", // Add your provider's virtual key
        awsRegion: "YOUR_AWS_REGION",
        awsS3Bucket: "YOUR_AWS_S3_BUCKET",
        awsS3ObjectKey: "YOUR_AWS_S3_OBJECT_KEY",
        awsBedrockModel: "YOUR_AWS_BEDROCK_MODEL",
        amzServerSideEncryption: "ENCRYPTION_TYPE", // [optional] defaults to aws:kms
        amzServerSideEncryptionAwsKmsKeyId: "KMS_KEY_ID" // [optional] only needed if you want a specific KMS key to encrypt the file at rest
    })
});

const uploadFile = async () => {
    const file = await openai.files.create({
        purpose: "batch",
        file: fs.createReadStream("batch_input.jsonl") // your batch input file in OpenAI (JSONL) format
    });
    console.log(file);
}

await uploadFile();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders

openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        provider="bedrock",
        api_key="PORTKEY_API_KEY",
        virtual_key="VIRTUAL_KEY",  # Add your provider's virtual key
        aws_region="YOUR_AWS_REGION",
        aws_s3_bucket="YOUR_AWS_S3_BUCKET",
        aws_s3_object_key="YOUR_AWS_S3_OBJECT_KEY",
        aws_bedrock_model="YOUR_AWS_BEDROCK_MODEL",
        amz_server_side_encryption="ENCRYPTION_TYPE",  # [optional] defaults to aws:kms
        amz_server_side_encryption_aws_kms_key_id="KMS_KEY_ID"  # [optional] only needed if you want a specific KMS key to encrypt the file at rest
    )
)

upload_file_response = openai.files.create(
    purpose="batch",
    file=open("batch_input.jsonl", "rb")  # your batch input file in OpenAI (JSONL) format
)

print(upload_file_response)
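Once the upload succeeds, the response includes a file id that you reference when starting the batch job. Here is a minimal sketch of that next step, using the portkey client from the Python example above and assuming the Portkey SDK mirrors OpenAI's batches.create signature (the endpoint and completion window values are illustrative):

# Sketch: kick off a batch job over the uploaded file
# (assumes portkey.batches.create mirrors OpenAI's Batches API)
file_id = upload_file_response.id  # id returned by the upload call above

batch = portkey.batches.create(
    input_file_id=file_id,
    endpoint="/v1/chat/completions",
    completion_window="24h",
)

print(batch)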
Get File
from portkey_ai import Portkey

# Initialize the Portkey client
portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    virtual_key="VIRTUAL_KEY",  # Add your provider's virtual key
    aws_region="YOUR_AWS_REGION",
)

file = portkey.files.retrieve(file_id="file_id")

print(file)
import { Portkey } from 'portkey-ai';

// Initialize the Portkey client
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VIRTUAL_KEY", // Add your provider's virtual key
    awsRegion: "YOUR_AWS_REGION",
});

const getFile = async () => {
    const file = await portkey.files.retrieve("file_id");
    console.log(file);
}

await getFile();
curl --location 'https://api.portkey.ai/v1/files/<file_id>' \
--header 'x-portkey-api-key: <portkey_api_key>' \
--header 'x-portkey-virtual-key: <virtual_key>' \
--header 'x-portkey-aws-region: <aws_region>'
import OpenAI from 'openai'; // We're using the v4 SDK
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai'

const openai = new OpenAI({
    apiKey: 'OPENAI_API_KEY', // defaults to process.env["OPENAI_API_KEY"]
    baseURL: PORTKEY_GATEWAY_URL,
    defaultHeaders: createHeaders({
        provider: "bedrock",
        apiKey: "PORTKEY_API_KEY", // defaults to process.env["PORTKEY_API_KEY"]
        virtualKey: "VIRTUAL_KEY", // Add your provider's virtual key
        awsRegion: "YOUR_AWS_REGION",
    })
});

const getFile = async () => {
    const file = await openai.files.retrieve("file_id");
    console.log(file);
}

await getFile();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders

openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        provider="bedrock",
        api_key="PORTKEY_API_KEY",
        virtual_key="VIRTUAL_KEY",  # Add your provider's virtual key
        aws_region="YOUR_AWS_REGION",
    )
)

file = openai.files.retrieve(file_id="file_id")

print(file)
Get File Content
from portkey_ai import Portkey

# Initialize the Portkey client
portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    virtual_key="VIRTUAL_KEY",  # Add your provider's virtual key
    aws_region="YOUR_AWS_REGION",
)

file_content = portkey.files.content(file_id="file_id")

print(file_content)
import { Portkey } from 'portkey-ai';

// Initialize the Portkey client
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VIRTUAL_KEY", // Add your provider's virtual key
    awsRegion: "YOUR_AWS_REGION",
});

const getFileContent = async () => {
    const fileContent = await portkey.files.content("file_id");
    console.log(fileContent);
}

await getFileContent();
curl --location 'https://api.portkey.ai/v1/files/<file_id>/content' \
--header 'x-portkey-api-key: <portkey_api_key>' \
--header 'x-portkey-virtual-key: <virtual_key>' \
--header 'x-portkey-aws-region: <aws_region>'
import OpenAI from 'openai'; // We're using the v4 SDK
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai'

const openai = new OpenAI({
    apiKey: 'OPENAI_API_KEY', // defaults to process.env["OPENAI_API_KEY"]
    baseURL: PORTKEY_GATEWAY_URL,
    defaultHeaders: createHeaders({
        provider: "bedrock",
        apiKey: "PORTKEY_API_KEY", // defaults to process.env["PORTKEY_API_KEY"]
        virtualKey: "VIRTUAL_KEY", // Add your provider's virtual key
        awsRegion: "YOUR_AWS_REGION",
    })
});

const getFileContent = async () => {
    const fileContent = await openai.files.content("file_id");
    console.log(fileContent);
}

await getFileContent();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders

openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        provider="bedrock",
        api_key="PORTKEY_API_KEY",
        virtual_key="VIRTUAL_KEY",  # Add your provider's virtual key
        aws_region="YOUR_AWS_REGION",
    )
)

file_content = openai.files.content(file_id="file_id")

print(file_content)
The following endpoints are NOT supported for Bedrock, for security reasons:
- GET /v1/files
- DELETE /v1/files/{file_id}