Google Vertex AI
Fine-tune
Ecosystem
LLMs
- Overview
- OpenAI
- Anthropic
- Google Gemini
- Google Vertex AI
- Azure OpenAI
- Bedrock
- AWS SageMaker
- Ollama
- More
- Bring Your Own LLM
Agents
Fine-tune
Fine-tune your models with Vertex AI
Upload a file
from portkey_ai import Portkey

# Initialize the Portkey client with the GCS upload settings Vertex AI needs.
portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    virtual_key="VERTEX_VIRTUAL_KEY",  # Add your Vertex virtual key
    vertex_storage_bucket_name="your_bucket_name",  # Specify the GCS bucket name
    provider_file_name="your_file_name.jsonl",  # Specify the file name in GCS
    provider_model="gemini-1.5-flash-001"  # Specify the model to fine-tune
)

# Upload a file for fine-tuning; `with` guarantees the handle is closed.
with open("dataset.jsonl", "rb") as dataset:
    file = portkey.files.create(
        file=dataset,
        purpose="fine-tune"
    )
print(file)
from portkey_ai import Portkey

# Build a Portkey client configured for Vertex AI file uploads to GCS.
portkey = Portkey(
    api_key="PORTKEY_API_KEY",  # Replace with your Portkey API key
    virtual_key="VERTEX_VIRTUAL_KEY",  # Add your Vertex virtual key
    vertex_storage_bucket_name="your_bucket_name",  # Specify the GCS bucket name
    provider_file_name="your_file_name.jsonl",  # Specify the file name in GCS
    provider_model="gemini-1.5-flash-001"  # Specify the model to fine-tune
)

# Upload the training dataset; the context manager closes the file afterwards.
with open("dataset.jsonl", "rb") as dataset:
    file = portkey.files.create(
        file=dataset,
        purpose="fine-tune"
    )
print(file)
import { Portkey } from "portkey-ai";
import * as fs from 'fs';

// Initialize the Portkey client (Portkey is a class, so `new` is required).
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VERTEX_VIRTUAL_KEY", // Add your Vertex virtual key
    vertexStorageBucketName: "your_bucket_name", // Specify the GCS bucket name
    providerFileName: "your_file_name.jsonl", // Specify the file name in GCS
    providerModel: "gemini-1.5-flash-001" // Specify the model to fine-tune
});

(async () => {
    // Upload a file for fine-tuning
    const file = await portkey.files.create({
        file: fs.createReadStream("dataset.jsonl"),
        purpose: "fine-tune"
    });
    console.log(file);
})();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders

# OpenAI SDK client routed through the Portkey gateway with Vertex headers.
openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        virtual_key="VERTEX_VIRTUAL_KEY",
        api_key="PORTKEY_API_KEY",
        vertex_storage_bucket_name="your_bucket_name",
        provider_file_name="your_file_name.jsonl",
        provider_model="gemini-1.5-flash-001"
    )
)

# Upload a file for fine-tuning; `with` guarantees the handle is closed.
with open("dataset.jsonl", "rb") as dataset:
    file = openai.files.create(
        file=dataset,
        purpose="fine-tune"
    )
print(file)
import OpenAI from 'openai';
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai';
import * as fs from 'fs';

// OpenAI SDK client pointed at the Portkey gateway with Vertex AI headers.
const client = new OpenAI({
    apiKey: 'OPENAI_API_KEY',
    baseURL: PORTKEY_GATEWAY_URL,
    defaultHeaders: createHeaders({
        virtualKey: "VERTEX_VIRTUAL_KEY",
        apiKey: "PORTKEY_API_KEY",
        vertexStorageBucketName: "your_bucket_name",
        providerFileName: "your_file_name.jsonl",
        providerModel: "gemini-1.5-flash-001"
    })
});

// Stream the local dataset up for fine-tuning and print the file record.
async function main() {
    const uploaded = await client.files.create({
        file: fs.createReadStream("dataset.jsonl"),
        purpose: "fine-tune"
    });
    console.log(uploaded);
}

main();
# Upload a fine-tuning dataset; Portkey stores it in the named GCS bucket.
curl -X POST --header 'x-portkey-api-key: <portkey_api_key>' \
--header 'x-portkey-virtual-key: <vertex_virtual_key>' \
--header 'x-portkey-vertex-storage-bucket-name: <bucket_name>' \
--header 'x-portkey-provider-file-name: <file_name>.jsonl' \
--header 'x-portkey-provider-model: <model_name>' \
--form 'purpose="fine-tune"' \
--form '[email protected]' \
'https://api.portkey.ai/v1/files'
Create a fine-tuning job
from portkey_ai import Portkey

# Portkey client bound to the Vertex AI virtual key.
client = Portkey(
    api_key="PORTKEY_API_KEY",        # Replace with your Portkey API key
    virtual_key="VERTEX_VIRTUAL_KEY"  # Add your Vertex virtual key
)

# Start a fine-tuning job on the uploaded dataset.
job = client.fine_tuning.jobs.create(
    model="gemini-1.5-pro-002",     # Base model to fine-tune
    training_file="<file_id>",      # Encoded GCS path to the training file
    suffix="finetune_name",         # Suffix for the fine-tuned model's name
    hyperparameters={"n_epochs": 2},
)
print(job)
from portkey_ai import Portkey

# Client authenticated with Portkey and scoped to the Vertex virtual key.
pk = Portkey(
    api_key="PORTKEY_API_KEY",        # Replace with your Portkey API key
    virtual_key="VERTEX_VIRTUAL_KEY"  # Add your Vertex virtual key
)

# Launch the tuning run: base model + training file + naming suffix.
tuning_job = pk.fine_tuning.jobs.create(
    model="gemini-1.5-pro-002",
    training_file="<file_id>",      # Encoded GCS path to the training file
    suffix="finetune_name",         # Appended to the tuned model's name
    hyperparameters={"n_epochs": 2},
)
print(tuning_job)
import { Portkey } from "portkey-ai";

// Initialize the Portkey client (Portkey is a class, so `new` is required).
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VERTEX_VIRTUAL_KEY" // Add your Vertex virtual key
});

(async () => {
    // Create a fine-tuning job
    const fineTuneJob = await portkey.fineTuning.jobs.create({
        model: "gemini-1.5-pro-002", // Base model to fine-tune
        training_file: "<file_id>", // Encoded GCS path to the training file
        suffix: "finetune_name", // Custom suffix for the fine-tuned model name
        hyperparameters: {
            n_epochs: 2
        }
    });
    console.log(fineTuneJob);
})();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders

# Route the OpenAI SDK through Portkey's gateway to reach Vertex AI.
client = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        virtual_key="VERTEX_VIRTUAL_KEY",
        api_key="PORTKEY_API_KEY",
    ),
)

# Start a fine-tuning job against the uploaded dataset.
job = client.fine_tuning.jobs.create(
    model="gemini-1.5-pro-002",     # Base model to fine-tune
    training_file="<file_id>",      # Encoded GCS path to the training file
    suffix="finetune_name",         # Suffix for the fine-tuned model's name
    hyperparameters={"n_epochs": 2},
)
print(job)
import OpenAI from 'openai';
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai';

// OpenAI SDK client routed through the Portkey gateway.
const client = new OpenAI({
    apiKey: 'OPENAI_API_KEY',
    baseURL: PORTKEY_GATEWAY_URL,
    defaultHeaders: createHeaders({
        virtualKey: "VERTEX_VIRTUAL_KEY",
        apiKey: "PORTKEY_API_KEY"
    })
});

// Kick off a fine-tuning job on the uploaded training file.
async function main() {
    const job = await client.fineTuning.jobs.create({
        model: "gemini-1.5-pro-002",  // Base model to fine-tune
        training_file: "<file_id>",   // Encoded GCS path to the training file
        suffix: "finetune_name",      // Suffix for the tuned model's name
        hyperparameters: { n_epochs: 2 }
    });
    console.log(job);
}

main();
# Create a fine-tuning job; training_file is the full gs:// path to the dataset.
curl -X POST --header 'Content-Type: application/json' \
--header 'x-portkey-api-key: <portkey_api_key>' \
--header 'x-portkey-virtual-key: <vertex_virtual_key>' \
--data \
$'{"model": "<base_model>", "suffix": "<finetune_name>", "training_file": "gs://<bucket_name>/<file_name>.jsonl", "hyperparameters": {"n_epochs": 2}}\n' \
'https://api.portkey.ai/v1/fine_tuning/jobs'
List fine-tuning jobs
from portkey_ai import Portkey

# Portkey client bound to the Vertex AI virtual key.
client = Portkey(
    api_key="PORTKEY_API_KEY",        # Replace with your Portkey API key
    virtual_key="VERTEX_VIRTUAL_KEY"  # Add your Vertex virtual key
)

# Fetch fine-tuning jobs; `limit` caps the page size (default: 20).
jobs = client.fine_tuning.jobs.list(limit=10)
print(jobs)
from portkey_ai import Portkey

# Client authenticated with Portkey and scoped to the Vertex virtual key.
pk = Portkey(
    api_key="PORTKEY_API_KEY",        # Replace with your Portkey API key
    virtual_key="VERTEX_VIRTUAL_KEY"  # Add your Vertex virtual key
)

# Retrieve up to 10 fine-tuning jobs (the API defaults to 20).
job_page = pk.fine_tuning.jobs.list(limit=10)
print(job_page)
import { Portkey } from "portkey-ai";

// Initialize the Portkey client (Portkey is a class, so `new` is required).
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VERTEX_VIRTUAL_KEY" // Add your Vertex virtual key
});

(async () => {
    // List all fine-tuning jobs
    const jobs = await portkey.fineTuning.jobs.list({
        limit: 10 // Optional: Number of jobs to retrieve (default: 20)
    });
    console.log(jobs);
})();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders

# Route the OpenAI SDK through Portkey's gateway to reach Vertex AI.
client = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        virtual_key="VERTEX_VIRTUAL_KEY",
        api_key="PORTKEY_API_KEY",
    ),
)

# List fine-tuning jobs; `limit` caps the page size (default: 20).
jobs = client.fine_tuning.jobs.list(limit=10)
print(jobs)
import OpenAI from 'openai';
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai';

// OpenAI SDK client routed through the Portkey gateway.
const client = new OpenAI({
    apiKey: 'OPENAI_API_KEY',
    baseURL: PORTKEY_GATEWAY_URL,
    defaultHeaders: createHeaders({
        virtualKey: "VERTEX_VIRTUAL_KEY",
        apiKey: "PORTKEY_API_KEY"
    })
});

// List fine-tuning jobs; `limit` caps the page size (default: 20).
async function main() {
    const jobs = await client.fineTuning.jobs.list({ limit: 10 });
    console.log(jobs);
}

main();
# List all fine-tuning jobs visible to this Vertex virtual key.
curl -X GET --header 'x-portkey-api-key: <portkey_api_key>' \
--header 'x-portkey-virtual-key: <vertex_virtual_key>' \
'https://api.portkey.ai/v1/fine_tuning/jobs'
Get a fine-tuning job
from portkey_ai import Portkey

# Portkey client bound to the Vertex AI virtual key.
client = Portkey(
    api_key="PORTKEY_API_KEY",        # Replace with your Portkey API key
    virtual_key="VERTEX_VIRTUAL_KEY"  # Add your Vertex virtual key
)

# Look up a single fine-tuning job by its ID.
job = client.fine_tuning.jobs.retrieve("job_id")
print(job)
from portkey_ai import Portkey

# Client authenticated with Portkey and scoped to the Vertex virtual key.
pk = Portkey(
    api_key="PORTKEY_API_KEY",        # Replace with your Portkey API key
    virtual_key="VERTEX_VIRTUAL_KEY"  # Add your Vertex virtual key
)

# Fetch the details of one fine-tuning job by ID.
job_details = pk.fine_tuning.jobs.retrieve("job_id")
print(job_details)
import { Portkey } from "portkey-ai";

// Initialize the Portkey client (Portkey is a class, so `new` is required).
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VERTEX_VIRTUAL_KEY" // Add your Vertex virtual key
});

(async () => {
    // Retrieve a specific fine-tuning job
    const job = await portkey.fineTuning.jobs.retrieve(
        "job_id" // The ID of the fine-tuning job to retrieve
    );
    console.log(job);
})();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders

# OpenAI SDK client routed through the Portkey gateway to Vertex AI.
openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        virtual_key="VERTEX_VIRTUAL_KEY",
        api_key="PORTKEY_API_KEY"
    )
)

# Retrieve a specific fine-tuning job
job = openai.fine_tuning.jobs.retrieve(
    "job_id"  # The ID of the fine-tuning job to retrieve
)
print(job)
import OpenAI from 'openai';
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai';

// OpenAI SDK client routed through the Portkey gateway.
const client = new OpenAI({
    apiKey: 'OPENAI_API_KEY',
    baseURL: PORTKEY_GATEWAY_URL,
    defaultHeaders: createHeaders({
        virtualKey: "VERTEX_VIRTUAL_KEY",
        apiKey: "PORTKEY_API_KEY"
    })
});

// Fetch the details of one fine-tuning job by ID.
async function main() {
    const job = await client.fineTuning.jobs.retrieve("job_id");
    console.log(job);
}

main();
# Retrieve a single fine-tuning job by substituting its ID in the URL.
curl -X GET --header 'x-portkey-api-key: <portkey_api_key>' \
--header 'x-portkey-virtual-key: <vertex_virtual_key>' \
'https://api.portkey.ai/v1/fine_tuning/jobs/<job_id>'
Cancel a fine-tuning job
from portkey_ai import Portkey

# Portkey client bound to the Vertex AI virtual key.
client = Portkey(
    api_key="PORTKEY_API_KEY",        # Replace with your Portkey API key
    virtual_key="VERTEX_VIRTUAL_KEY"  # Add your Vertex virtual key
)

# Request cancellation of an in-progress fine-tuning job by ID.
cancelled_job = client.fine_tuning.jobs.cancel("job_id")
print(cancelled_job)
from portkey_ai import Portkey

# Client authenticated with Portkey and scoped to the Vertex virtual key.
pk = Portkey(
    api_key="PORTKEY_API_KEY",        # Replace with your Portkey API key
    virtual_key="VERTEX_VIRTUAL_KEY"  # Add your Vertex virtual key
)

# Cancel the fine-tuning job identified by this ID.
result = pk.fine_tuning.jobs.cancel("job_id")
print(result)
import { Portkey } from "portkey-ai";

// Initialize the Portkey client (Portkey is a class, so `new` is required).
const portkey = new Portkey({
    apiKey: "PORTKEY_API_KEY", // Replace with your Portkey API key
    virtualKey: "VERTEX_VIRTUAL_KEY" // Add your Vertex virtual key
});

(async () => {
    // Cancel a fine-tuning job
    const cancelledJob = await portkey.fineTuning.jobs.cancel(
        "job_id" // The ID of the fine-tuning job to cancel
    );
    console.log(cancelledJob);
})();
from openai import OpenAI
from portkey_ai import PORTKEY_GATEWAY_URL, createHeaders

# OpenAI SDK client routed through the Portkey gateway to Vertex AI.
openai = OpenAI(
    api_key='OPENAI_API_KEY',
    base_url=PORTKEY_GATEWAY_URL,
    default_headers=createHeaders(
        virtual_key="VERTEX_VIRTUAL_KEY",
        api_key="PORTKEY_API_KEY"
    )
)

# Cancel a fine-tuning job
cancelled_job = openai.fine_tuning.jobs.cancel(
    "job_id"  # The ID of the fine-tuning job to cancel
)
print(cancelled_job)
import OpenAI from 'openai';
import { PORTKEY_GATEWAY_URL, createHeaders } from 'portkey-ai';

// OpenAI SDK client routed through the Portkey gateway.
const client = new OpenAI({
    apiKey: 'OPENAI_API_KEY',
    baseURL: PORTKEY_GATEWAY_URL,
    defaultHeaders: createHeaders({
        virtualKey: "VERTEX_VIRTUAL_KEY",
        apiKey: "PORTKEY_API_KEY"
    })
});

// Cancel the fine-tuning job identified by this ID.
async function main() {
    const cancelled = await client.fineTuning.jobs.cancel("job_id");
    console.log(cancelled);
}

main();
# Cancel an in-progress fine-tuning job by substituting its ID in the URL.
curl -X POST --header 'x-portkey-api-key: <portkey_api_key>' \
--header 'x-portkey-virtual-key: <vertex_virtual_key>' \
'https://api.portkey.ai/v1/fine_tuning/jobs/<job_id>/cancel'
Refer to Google Vertex AI’s fine-tuning documentation for more information on the parameters and options available.
Was this page helpful?