Ecosystem
LLMs
- Overview
- OpenAI
- Anthropic
- Google Gemini
- Google Vertex AI
- Azure OpenAI
- Bedrock
- AWS SageMaker
- Ollama
- More
- Bring Your Own LLM
Agents
Fine-tune your models with Bedrock
Upload a file
Please refer to the Bedrock file upload guide for more details.
Create a fine-tuning job
from portkey_ai import Portkey
# Initialize the Portkey client with your Portkey API key and the
# virtual key that holds your Bedrock (AWS) credentials.
portkey = Portkey(
api_key="PORTKEY_API_KEY", # Replace with your Portkey API key
virtual_key="VIRTUAL_KEY" # Add your provider's virtual key
)
# Create a Bedrock fine-tuning job through the Portkey SDK.
fine_tune_job = portkey.fine_tuning.jobs.create(
training_file="file_id", # encoded S3 file URI of the training data
model="model_id", # Bedrock modelId to fine-tune (the fine-tuning ID, not the inference ID)
hyperparameters={
"n_epochs": 1
},
role_arn="role_arn", # service role ARN the Bedrock job assumes while running
job_name="job_name", # optional; a random name is generated if not provided
validation_file="file_id", # optional; must be an encoded S3 file URI
suffix="finetuned_model_name", # suffix used in the resulting fine-tuned model's name
model_type="text" # optional: "chat" or "text"
)
print(fine_tune_job)
from portkey_ai import Portkey
# Initialize the Portkey client with your Portkey API key and the
# virtual key that holds your Bedrock (AWS) credentials.
portkey = Portkey(
api_key="PORTKEY_API_KEY", # Replace with your Portkey API key
virtual_key="VIRTUAL_KEY" # Add your provider's virtual key
)
# Create a Bedrock fine-tuning job through the Portkey SDK.
fine_tune_job = portkey.fine_tuning.jobs.create(
training_file="file_id", # encoded S3 file URI of the training data
model="model_id", # Bedrock modelId to fine-tune (the fine-tuning ID, not the inference ID)
hyperparameters={
"n_epochs": 1
},
role_arn="role_arn", # service role ARN the Bedrock job assumes while running
job_name="job_name", # optional; a random name is generated if not provided
validation_file="file_id", # optional; must be an encoded S3 file URI
suffix="finetuned_model_name", # suffix used in the resulting fine-tuned model's name
model_type="text" # optional: "chat" or "text"
)
print(fine_tune_job)
import { Portkey } from "portkey-ai";

// Initialize the Portkey client. Portkey is a class exported by the SDK,
// so it must be constructed with `new` and a single options object.
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VIRTUAL_KEY"  // Add your provider's virtual key
});

(async () => {
    // Create a Bedrock fine-tuning job through the Portkey SDK.
    // `create` takes a single object argument.
    const fine_tune_job = await portkey.fineTuning.jobs.create({
        training_file: "file_id", // encoded S3 file URI of the training data
        model: "model_id", // Bedrock modelId to fine-tune (the fine-tuning ID, not the inference ID)
        hyperparameters: {
            "n_epochs": 1
        },
        role_arn: "role_arn", // service role ARN the Bedrock job assumes while running
        job_name: "job_name", // optional; a random name is generated if not provided
        validation_file: "file_id", // optional; must be an encoded S3 file URI
        suffix: "finetuned_model_name", // suffix used in the resulting fine-tuned model's name
        model_type: "text" // optional: "chat" or "text"
    });
    console.log(fine_tune_job);
})();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders
# Route the OpenAI SDK through the Portkey gateway by overriding the
# base URL and injecting Portkey auth headers.
openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        virtual_key="VIRTUAL_KEY",
        api_key="PORTKEY_API_KEY"
    )
)
# Create a Bedrock fine-tuning job using the OpenAI-compatible endpoint.
fine_tune_job = openai.fine_tuning.jobs.create(
training_file="file_id", # encoded S3 file URI of the training data
model="model_id", # Bedrock modelId for fine-tuning
hyperparameters={
"n_epochs": 1
},
role_arn="role_arn", # service role ARN the Bedrock job assumes while running
job_name="job_name", # optional; a random name is generated if not provided
validation_file="file_id", # optional; must be an encoded S3 file URI
suffix="finetuned_model_name", # suffix used in the resulting fine-tuned model's name
model_type="text" # optional: "chat" or "text"
)
print(fine_tune_job)
import OpenAI from 'openai'; // We're using the v4 SDK
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai'
// Route the OpenAI SDK through the Portkey gateway by overriding the
// base URL and injecting Portkey auth headers.
const openai = new OpenAI({
apiKey: 'OPENAI_API_KEY', // defaults to process.env["OPENAI_API_KEY"],
baseURL: PORTKEY_GATEWAY_URL,
defaultHeaders: createHeaders({
virtualKey: "VIRTUAL_KEY",
apiKey: "PORTKEY_API_KEY" // defaults to process.env["PORTKEY_API_KEY"]
})
});
(async () => {
// Create a Bedrock fine-tuning job using the OpenAI-compatible endpoint.
const fine_tune_job = await openai.fineTuning.jobs.create({
training_file: "file_id", // encoded S3 file URI of the training data
model: "model_id", // ex: `modelId` from bedrock for fine-tuning
hyperparameters: {
"n_epochs": 1
},
role_arn: "role_arn", // service role ARN the Bedrock job assumes while running
job_name: "job_name", // optional; a random name is generated if not provided
validation_file: "file_id", // optional; must be an encoded S3 file URI
suffix: "finetuned_model_name", // suffix used in the resulting fine-tuned model's name
model_type: "text" // optional: "chat" or "text"
});
console.log(fine_tune_job)
})();
# Create a Bedrock fine-tuning job through the Portkey REST API.
# NOTE: "model_type" may be "chat" or "text". JSON does not allow comments,
# so the option is documented here instead of inside the payload.
curl \
--request POST \
--header 'Content-Type: application/json' \
--header 'x-portkey-api-key: <api_key>' \
--header 'x-portkey-virtual-key: <virtual_key>' \
--header 'x-portkey-aws-s3-bucket: <s3_bucket>' \
--data '{
"model": "<model_id>",
"model_type": "text",
"suffix": "<finetune_model_name>",
"training_file": "<s3_path.jsonl>",
"role_arn": "<role_arn>",
"job_name": "<job_name>",
"hyperparameters": {
"n_epochs": 1
}
}' \
'https://api.portkey.ai/v1/fine_tuning/jobs'
Notes:
- Bedrock's fine-tuning dataset format is slightly different from OpenAI's fine-tuning dataset format. The `model_type` field is required for the dataset transformation; currently the gateway performs the following transformations: `chat` -> `text-to-text` and `chat` -> `chat`.
- The `model` param should be the ModelID that is required for fine-tuning, not the one used for inference. The ModelID is different for inference and for fine-tuning.
- The list of supported fine-tune models and their IDs is available in the Bedrock documentation.
List Fine-tuning Jobs
from portkey_ai import Portkey
# Initialize the Portkey client with your Portkey API key and the
# virtual key that holds your Bedrock (AWS) credentials.
portkey = Portkey(
api_key="PORTKEY_API_KEY", # Replace with your Portkey API key
virtual_key="VIRTUAL_KEY" # Add your provider's virtual key
)
# List all fine-tuning jobs
jobs = portkey.fine_tuning.jobs.list(
limit=10 # Optional: number of jobs to retrieve (default: 20)
)
print(jobs)
from portkey_ai import Portkey
# Initialize the Portkey client with your Portkey API key and the
# virtual key that holds your Bedrock (AWS) credentials.
portkey = Portkey(
api_key="PORTKEY_API_KEY", # Replace with your Portkey API key
virtual_key="VIRTUAL_KEY" # Add your provider's virtual key
)
# List all fine-tuning jobs
jobs = portkey.fine_tuning.jobs.list(
limit=10 # Optional: number of jobs to retrieve (default: 20)
)
print(jobs)
import { Portkey } from "portkey-ai";

// Initialize the Portkey client. Portkey is a class exported by the SDK,
// so it must be constructed with `new`.
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VIRTUAL_KEY"  // Add your provider's virtual key
});

(async () => {
    // List all fine-tuning jobs
    const jobs = await portkey.fineTuning.jobs.list({
        limit: 10 // Optional: number of jobs to retrieve (default: 20)
    });
    console.log(jobs);
})();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders
# Route the OpenAI SDK through the Portkey gateway by overriding the
# base URL and injecting Portkey auth headers.
openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        virtual_key="VIRTUAL_KEY",
        api_key="PORTKEY_API_KEY"
    )
)
# List all fine-tuning jobs
jobs = openai.fine_tuning.jobs.list(
limit=10 # Optional: number of jobs to retrieve (default: 20)
)
print(jobs)
import OpenAI from 'openai';
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai';
// Route the OpenAI SDK through the Portkey gateway by overriding the
// base URL and injecting Portkey auth headers.
const openai = new OpenAI({
apiKey: 'OPENAI_API_KEY',
baseURL: PORTKEY_GATEWAY_URL,
defaultHeaders: createHeaders({
virtualKey: "VIRTUAL_KEY",
apiKey: "PORTKEY_API_KEY"
})
});
(async () => {
// List all fine-tuning jobs
const jobs = await openai.fineTuning.jobs.list({
limit: 10 // Optional: number of jobs to retrieve (default: 20)
});
console.log(jobs);
})();
# List fine-tuning jobs (GET). `limit` caps the number of jobs returned.
curl \
--header 'Content-Type: application/json' \
--header 'x-portkey-api-key: <api_key>' \
--header 'x-portkey-virtual-key: <virtual_key>' \
'https://api.portkey.ai/v1/fine_tuning/jobs?limit=10'
Retrieve Fine-tuning Job
from portkey_ai import Portkey
# Initialize the Portkey client with your Portkey API key and the
# virtual key that holds your Bedrock (AWS) credentials.
portkey = Portkey(
api_key="PORTKEY_API_KEY", # Replace with your Portkey API key
virtual_key="VIRTUAL_KEY" # Add your provider's virtual key
)
# Retrieve a specific fine-tuning job
job = portkey.fine_tuning.jobs.retrieve(
job_id="job_id" # The ID of the fine-tuning job to retrieve
)
print(job)
from portkey_ai import Portkey
# Initialize the Portkey client with your Portkey API key and the
# virtual key that holds your Bedrock (AWS) credentials.
portkey = Portkey(
api_key="PORTKEY_API_KEY", # Replace with your Portkey API key
virtual_key="VIRTUAL_KEY" # Add your provider's virtual key
)
# Retrieve a specific fine-tuning job
job = portkey.fine_tuning.jobs.retrieve(
job_id="job_id" # The ID of the fine-tuning job to retrieve
)
print(job)
import { Portkey } from "portkey-ai";

// Initialize the Portkey client. Portkey is a class exported by the SDK,
// so it must be constructed with `new`.
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VIRTUAL_KEY"  // Add your provider's virtual key
});

(async () => {
    // Retrieve a specific fine-tuning job
    const job = await portkey.fineTuning.jobs.retrieve({
        job_id: "job_id" // The ID of the fine-tuning job to retrieve
    });
    console.log(job);
})();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders
# Route the OpenAI SDK through the Portkey gateway by overriding the
# base URL and injecting Portkey auth headers.
openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        virtual_key="VIRTUAL_KEY",
        api_key="PORTKEY_API_KEY"
    )
)
# Retrieve a specific fine-tuning job
job = openai.fine_tuning.jobs.retrieve(
fine_tuning_job_id="job_id" # The ID of the fine-tuning job to retrieve
)
print(job)
import OpenAI from 'openai';
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai';
// Route the OpenAI SDK through the Portkey gateway by overriding the
// base URL and injecting Portkey auth headers.
const openai = new OpenAI({
apiKey: 'OPENAI_API_KEY',
baseURL: PORTKEY_GATEWAY_URL,
defaultHeaders: createHeaders({
virtualKey: "VIRTUAL_KEY",
apiKey: "PORTKEY_API_KEY"
})
});
(async () => {
// Retrieve a specific fine-tuning job.
// NOTE: the OpenAI SDK takes the job ID as a positional string argument.
const job = await openai.fineTuning.jobs.retrieve(
"job_id" // The ID of the fine-tuning job to retrieve
);
console.log(job);
})();
# Retrieve a specific fine-tuning job by its ID (GET).
curl \
--header 'Content-Type: application/json' \
--header 'x-portkey-api-key: <api_key>' \
--header 'x-portkey-virtual-key: <virtual_key>' \
'https://api.portkey.ai/v1/fine_tuning/jobs/<job_id>'
Cancel Fine-tuning Job
from portkey_ai import Portkey
# Initialize the Portkey client with your Portkey API key and the
# virtual key that holds your Bedrock (AWS) credentials.
portkey = Portkey(
api_key="PORTKEY_API_KEY", # Replace with your Portkey API key
virtual_key="VIRTUAL_KEY" # Add your provider's virtual key
)
# Cancel a fine-tuning job
cancelled_job = portkey.fine_tuning.jobs.cancel(
job_id="job_id" # The ID of the fine-tuning job to cancel
)
print(cancelled_job)
from portkey_ai import Portkey
# Initialize the Portkey client with your Portkey API key and the
# virtual key that holds your Bedrock (AWS) credentials.
portkey = Portkey(
api_key="PORTKEY_API_KEY", # Replace with your Portkey API key
virtual_key="VIRTUAL_KEY" # Add your provider's virtual key
)
# Cancel a fine-tuning job
cancelled_job = portkey.fine_tuning.jobs.cancel(
job_id="job_id" # The ID of the fine-tuning job to cancel
)
print(cancelled_job)
import { Portkey } from "portkey-ai";

// Initialize the Portkey client. Portkey is a class exported by the SDK,
// so it must be constructed with `new`.
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VIRTUAL_KEY"  // Add your provider's virtual key
});

(async () => {
    // Cancel a fine-tuning job
    const cancelledJob = await portkey.fineTuning.jobs.cancel({
        job_id: "job_id" // The ID of the fine-tuning job to cancel
    });
    console.log(cancelledJob);
})();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders
# Route the OpenAI SDK through the Portkey gateway by overriding the
# base URL and injecting Portkey auth headers.
openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        virtual_key="VIRTUAL_KEY",
        api_key="PORTKEY_API_KEY"
    )
)
# Cancel a fine-tuning job
cancelled_job = openai.fine_tuning.jobs.cancel(
fine_tuning_job_id="job_id" # The ID of the fine-tuning job to cancel
)
print(cancelled_job)
import OpenAI from 'openai';
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai';
// Route the OpenAI SDK through the Portkey gateway by overriding the
// base URL and injecting Portkey auth headers.
const openai = new OpenAI({
apiKey: 'OPENAI_API_KEY',
baseURL: PORTKEY_GATEWAY_URL,
defaultHeaders: createHeaders({
virtualKey: "VIRTUAL_KEY",
apiKey: "PORTKEY_API_KEY"
})
});
(async () => {
// Cancel a fine-tuning job.
// NOTE: the OpenAI SDK takes the job ID as a positional string argument.
const cancelledJob = await openai.fineTuning.jobs.cancel(
"job_id" // The ID of the fine-tuning job to cancel
);
console.log(cancelledJob);
})();
# Cancel a fine-tuning job by its ID (POST to the /cancel sub-resource).
curl \
--request POST \
--header 'Content-Type: application/json' \
--header 'x-portkey-api-key: <api_key>' \
--header 'x-portkey-virtual-key: <virtual_key>' \
'https://api.portkey.ai/v1/fine_tuning/jobs/<job_id>/cancel'
References
Was this page helpful?