From 87c2280200653cfc83b3b9fed10021e864b88f7d Mon Sep 17 00:00:00 2001 From: Zubairuddin Mohammed Date: Wed, 31 Dec 2025 02:27:26 +0530 Subject: [PATCH 1/9] Create command for ft cli --- .../internal/cmd/converter.go | 36 ++++++++- .../internal/cmd/operations.go | 49 +++++++++--- .../internal/fine_tuning_yaml/yaml.go | 2 +- .../internal/providers/openai/conversions.go | 32 ++++++++ .../internal/providers/openai/provider.go | 44 +++++++++-- .../internal/services/finetune_service.go | 79 ++++++++++++++++--- .../azure.ai.finetune/pkg/models/finetune.go | 5 ++ 7 files changed, 213 insertions(+), 34 deletions(-) diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/converter.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/converter.go index e6cbdbef2c5..8a368569d1f 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/cmd/converter.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/converter.go @@ -7,10 +7,42 @@ import ( "github.com/openai/openai-go/v3" FTYaml "azure.ai.finetune/internal/fine_tuning_yaml" + "azure.ai.finetune/pkg/models" ) +func ConvertYAMLToInternalJobParams(config *FTYaml.FineTuningConfig, trainingFileID, validationFileID string) (models.CreateFineTuningRequest, error) { + jobParams := models.CreateFineTuningRequest{ + BaseModel: config.Model, + TrainingDataID: trainingFileID, + } + + if validationFileID != "" { + jobParams.ValidationDataID = validationFileID + } + + if config.Suffix != nil { + jobParams.Suffix = *config.Suffix + } + + if config.Seed != nil { + jobParams.Seed = *config.Seed + } + + // Set metadata if provided + if len(config.Metadata) > 0 { + jobParams.Metadata = make(map[string]string) + for k, v := range config.Metadata { + jobParams.Metadata[k] = v + } + } + + //TODO Need to set hyperparameters, method, integrations + return jobParams, nil +} + +// TODO Get rid of this method // ConvertYAMLToJobParams converts a YAML fine-tuning configuration to OpenAI job parameters -func ConvertYAMLToJobParams(config *FTYaml.FineTuningConfig, trainingFileID, validationFileID string) (openai.FineTuningJobNewParams, error) { +func ConvertYAMLToOpenAiJobParams(config *FTYaml.FineTuningConfig, trainingFileID, validationFileID string) (openai.FineTuningJobNewParams, error) { jobParams := openai.FineTuningJobNewParams{ Model: openai.FineTuningJobNewParamsModel(config.Model), TrainingFile: trainingFileID, @@ -30,7 +62,7 @@ func ConvertYAMLToJobParams(config *FTYaml.FineTuningConfig, trainingFileID, val } // Set metadata if provided - if config.Metadata != nil && len(config.Metadata) > 0 { + if len(config.Metadata) > 0 { jobParams.Metadata = make(map[string]string) for k, v := range config.Metadata { jobParams.Metadata[k] = v diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go index 9c2ce3d6894..83c6bcc1eea 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go @@ -67,7 +67,7 @@ func newOperationSubmitCommand() *cobra.Command { var filename string cmd := &cobra.Command{ Use: "submit", - Short: "Submit fine tuning job", + Short: "submit fine tuning job", RunE: func(cmd *cobra.Command, args []string) error { ctx := azdext.WithAccessToken(cmd.Context()) @@ -82,47 +82,70 @@ func newOperationSubmitCommand() *cobra.Command { } defer azdClient.Close() + // Show spinner while creating job + spinner := ux.NewSpinner(&ux.SpinnerOptions{ + Text: "creating fine-tuning job...", + }) 
+ if err := spinner.Start(ctx); err != nil { + fmt.Printf("failed to start spinner: %v\n", err) + } + // Parse and validate the YAML configuration file - color.Green("Parsing configuration file...") + color.Green("parsing configuration file...") config, err := FTYaml.ParseFineTuningConfig(filename) if err != nil { + _ = spinner.Stop(ctx) + fmt.Println() return err } - // Upload training file + fineTuneSvc, err := services.NewFineTuningService(ctx, azdClient, nil) + if err != nil { + _ = spinner.Stop(ctx) + fmt.Println() + return err + } - trainingFileID, err := JobWrapper.UploadFileIfLocal(ctx, azdClient, config.TrainingFile) + trainingFileID, err := fineTuneSvc.UploadTrainingFile(ctx, config.TrainingFile) if err != nil { + _ = spinner.Stop(ctx) + fmt.Println() return fmt.Errorf("failed to upload training file: %w", err) } // Upload validation file if provided var validationFileID string - if config.ValidationFile != "" { - validationFileID, err = JobWrapper.UploadFileIfLocal(ctx, azdClient, config.ValidationFile) + if config.ValidationFile != nil && *config.ValidationFile != "" { + validationFileID, err = fineTuneSvc.UploadValidationFile(ctx, *config.ValidationFile) if err != nil { + _ = spinner.Stop(ctx) + fmt.Println() return fmt.Errorf("failed to upload validation file: %w", err) } } // Create fine-tuning job - // Convert YAML configuration to OpenAI job parameters - jobParams, err := ConvertYAMLToJobParams(config, trainingFileID, validationFileID) + // Convert YAML configuration to service layer job parameters + ftRequest, err := ConvertYAMLToInternalJobParams(config, trainingFileID, validationFileID) if err != nil { + _ = spinner.Stop(ctx) + fmt.Println() return fmt.Errorf("failed to convert configuration to job parameters: %w", err) } // Submit the fine-tuning job using CreateJob from JobWrapper - job, err := JobWrapper.CreateJob(ctx, azdClient, jobParams) + job, err := fineTuneSvc.CreateFineTuningJob(ctx, &ftRequest) if err != nil { + _ = spinner.Stop(ctx) + fmt.Println() return err } // Print success message fmt.Println(strings.Repeat("=", 120)) - color.Green("\nSuccessfully submitted fine-tuning Job!\n") - fmt.Printf("Job ID: %s\n", job.Id) - fmt.Printf("Model: %s\n", job.Model) + color.Green("\nsuccessfully submitted fine-tuning Job!\n") + fmt.Printf("Job ID: %s\n", job.ID) + fmt.Printf("Base Model: %s\n", job.BaseModel) fmt.Printf("Status: %s\n", job.Status) fmt.Printf("Created: %s\n", job.CreatedAt) if job.FineTunedModel != "" { @@ -289,7 +312,7 @@ func newOperationListCommand() *cobra.Command { jobs, err := fineTuneSvc.ListFineTuningJobs(ctx, limit, after) _ = spinner.Stop(ctx) if err != nil { - fmt.Println() + fmt.Println() return err } diff --git a/cli/azd/extensions/azure.ai.finetune/internal/fine_tuning_yaml/yaml.go b/cli/azd/extensions/azure.ai.finetune/internal/fine_tuning_yaml/yaml.go index 3f4099273b4..9a99d6e5503 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/fine_tuning_yaml/yaml.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/fine_tuning_yaml/yaml.go @@ -24,7 +24,7 @@ type FineTuningConfig struct { TrainingFile string `yaml:"training_file"` // Optional: Path to validation file - ValidationFile string `yaml:"validation_file,omitempty"` + ValidationFile *string `yaml:"validation_file,omitempty"` // Optional: Fine-tuning method configuration (supervised, dpo, or reinforcement) Method MethodConfig `yaml:"method,omitempty"` diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go 
b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go index bcf0ccdd9e7..5a937cc7ca8 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go @@ -47,3 +47,35 @@ func convertOpenAIJobToModel(openaiJob openai.FineTuningJob) *models.FineTuningJ CreatedAt: utils.UnixTimestampToUTC(openaiJob.CreatedAt), } } + +// ConvertYAMLToJobParams converts a YAML fine-tuning configuration to OpenAI job parameters +func convertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningRequest) (openai.FineTuningJobNewParams, error) { + jobParams := openai.FineTuningJobNewParams{ + Model: openai.FineTuningJobNewParamsModel(config.BaseModel), + TrainingFile: config.TrainingDataID, + } + + if config.ValidationDataID != "" { + jobParams.ValidationFile = openai.String(config.ValidationDataID) + } + + // Set optional fields + if config.Suffix != "" { + jobParams.Suffix = openai.String(config.Suffix) + } + + if config.Seed != 0 { + jobParams.Seed = openai.Int(config.Seed) + } + + // Set metadata if provided + if len(config.Metadata) > 0 { + jobParams.Metadata = make(map[string]string) + for k, v := range config.Metadata { + jobParams.Metadata[k] = v + } + } + + //TODO Need to set hyperparameters, method, integrations + return jobParams, nil +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go index 5de45c00feb..ff2023ccd24 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go @@ -5,6 +5,8 @@ package openai import ( "context" + "fmt" + "os" "azure.ai.finetune/pkg/models" "github.com/openai/openai-go/v3" @@ -24,11 +26,18 @@ func NewOpenAIProvider(client *openai.Client) *OpenAIProvider { // CreateFineTuningJob creates a new fine-tuning job via OpenAI API func (p *OpenAIProvider) CreateFineTuningJob(ctx context.Context, req *models.CreateFineTuningRequest) (*models.FineTuningJob, error) { - // TODO: Implement - // 1. Convert domain model to OpenAI SDK format - // 2. Call OpenAI SDK CreateFineTuningJob - // 3. 
Convert OpenAI response to domain model - return nil, nil + + params, err := convertInternalJobParamToOpenAiJobParams(req) + if err != nil { + return nil, fmt.Errorf("failed to convert internal model to openai: %w", err) + } + + job, err := p.client.FineTuning.Jobs.New(ctx, params) + if err != nil { + return nil, fmt.Errorf("failed to create fine-tuning job: %w", err) + } + + return convertOpenAIJobToModel(*job), nil } // GetFineTuningStatus retrieves the status of a fine-tuning job @@ -95,8 +104,29 @@ func (p *OpenAIProvider) CancelJob(ctx context.Context, jobID string) (*models.F // UploadFile uploads a file for fine-tuning func (p *OpenAIProvider) UploadFile(ctx context.Context, filePath string) (string, error) { - // TODO: Implement - return "", nil + if filePath == "" { + return "", fmt.Errorf("file path cannot be empty") + } + + file, err := os.Open(filePath) + if err != nil { + return "", fmt.Errorf("failed to open file %s: %w", filePath, err) + } + defer file.Close() + + uploadedFile, err := p.client.Files.New(ctx, openai.FileNewParams{ + File: file, + Purpose: openai.FilePurposeFineTune, + }) + + if err != nil { + return "", fmt.Errorf("failed to upload file: %w", err) + } + if uploadedFile == nil || uploadedFile.ID == "" { + return "", fmt.Errorf("uploaded file is empty") + } + + return uploadedFile.ID, nil } // GetUploadedFile retrieves information about an uploaded file diff --git a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go index 03ae59d508f..061ce5b7d17 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go @@ -6,6 +6,7 @@ package services import ( "context" "fmt" + "os" "azure.ai.finetune/internal/providers" "azure.ai.finetune/internal/providers/factory" @@ -40,13 +41,34 @@ func NewFineTuningService(ctx context.Context, azdClient *azdext.AzdClient, stat // CreateFineTuningJob creates a new fine-tuning job with business validation func (s *fineTuningServiceImpl) CreateFineTuningJob(ctx context.Context, req *models.CreateFineTuningRequest) (*models.FineTuningJob, error) { - // TODO: Implement - // 1. Validate request (model exists, data size valid, etc.) - // 2. Call provider.CreateFineTuningJob() - // 3. Transform any errors to standardized ErrorDetail - // 4. Persist job to state store - // 5. 
Return job - return nil, nil + // Validate request + if req == nil { + return nil, fmt.Errorf("request cannot be nil") + } + if req.BaseModel == "" { + return nil, fmt.Errorf("base model is required") + } + if req.TrainingDataID == "" { + return nil, fmt.Errorf("training file is required") + } + + // Call provider with retry logic + var job *models.FineTuningJob + err := utils.RetryOperation(ctx, utils.DefaultRetryConfig(), func() error { + var err error + job, err = s.provider.CreateFineTuningJob(ctx, req) + return err + }) + if err != nil { + return nil, fmt.Errorf("failed to create fine-tuning job: %w", err) + } + + // Persist job to state store + if err := s.stateStore.SaveJob(ctx, job); err != nil { + return nil, fmt.Errorf("failed to persist job: %w", err) + } + + return job, nil } // GetFineTuningStatus retrieves the current status of a job @@ -111,14 +133,49 @@ func (s *fineTuningServiceImpl) CancelJob(ctx context.Context, jobID string) (*m // UploadTrainingFile uploads and validates a training file func (s *fineTuningServiceImpl) UploadTrainingFile(ctx context.Context, filePath string) (string, error) { - // TODO: Implement - return "", nil + if filePath == "" { + return "", fmt.Errorf("training file path cannot be empty") + } + uploadedFileId, err := s._uploadFile(ctx, filePath) + if err != nil || uploadedFileId == "" { + return "", fmt.Errorf("failed to upload training file: %w", err) + } + return uploadedFileId, nil } // UploadValidationFile uploads and validates a validation file func (s *fineTuningServiceImpl) UploadValidationFile(ctx context.Context, filePath string) (string, error) { - // TODO: Implement - return "", nil + if filePath == "" { + return "", nil // Validation file is optional + } + uploadedFileId, err := s._uploadFile(ctx, filePath) + if err != nil || uploadedFileId == "" { + return "", fmt.Errorf("failed to upload validation file: %w", err) + } + return uploadedFileId, nil +} + +func (s *fineTuningServiceImpl) _uploadFile(ctx context.Context, filePath string) (string, error) { + // validate file existence + fileInfo, err := os.Stat(filePath) + if err != nil { + if os.IsNotExist(err) { + return "", fmt.Errorf("file does not exist: %s", filePath) + } + return "", fmt.Errorf("failed to stat file %s: %w", filePath, err) + } + if fileInfo.IsDir() { + return "", fmt.Errorf("path is a directory, not a file: %s", filePath) + } + + // upload file with retry + uploadedFileId := "" + err = utils.RetryOperation(ctx, utils.DefaultRetryConfig(), func() error { + var err error + uploadedFileId, err = s.provider.UploadFile(ctx, filePath) + return err + }) + return uploadedFileId, err } // PollJobUntilCompletion polls a job until it completes or fails diff --git a/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go b/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go index a8b6b11b237..f3786433c9b 100644 --- a/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go +++ b/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go @@ -47,6 +47,11 @@ type CreateFineTuningRequest struct { BaseModel string TrainingDataID string ValidationDataID string + Suffix string + Seed int64 + Metadata map[string]string + Method interface{} // Can be dpo, supervised or reinforcement + Integrations []interface{} Hyperparameters *Hyperparameters } From 7709f1fd55abfb70cbc9cd86e7c758720586b19e Mon Sep 17 00:00:00 2001 From: Zubairuddin Mohammed Date: Fri, 2 Jan 2026 11:10:56 +0530 Subject: [PATCH 2/9] handling null pointer for state --- 
.../internal/services/finetune_service.go | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go index 061ce5b7d17..8714cd19ade 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go @@ -63,9 +63,11 @@ func (s *fineTuningServiceImpl) CreateFineTuningJob(ctx context.Context, req *mo return nil, fmt.Errorf("failed to create fine-tuning job: %w", err) } - // Persist job to state store - if err := s.stateStore.SaveJob(ctx, job); err != nil { - return nil, fmt.Errorf("failed to persist job: %w", err) + // Persist job to state store if available + if s.stateStore != nil { + if err := s.stateStore.SaveJob(ctx, job); err != nil { + return nil, fmt.Errorf("failed to persist job: %w", err) + } } return job, nil From e19f8be07778bdc00168ee6c592802be3e5c2c32 Mon Sep 17 00:00:00 2001 From: Zubairuddin Mohammed Date: Sat, 3 Jan 2026 02:57:33 +0530 Subject: [PATCH 3/9] removing changes from old converter and yaml logic --- .../internal/cmd/converter.go | 36 ++----------------- .../internal/fine_tuning_yaml/yaml.go | 2 +- 2 files changed, 3 insertions(+), 35 deletions(-) diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/converter.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/converter.go index 8a368569d1f..e6cbdbef2c5 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/cmd/converter.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/converter.go @@ -7,42 +7,10 @@ import ( "github.com/openai/openai-go/v3" FTYaml "azure.ai.finetune/internal/fine_tuning_yaml" - "azure.ai.finetune/pkg/models" ) -func ConvertYAMLToInternalJobParams(config *FTYaml.FineTuningConfig, trainingFileID, validationFileID string) (models.CreateFineTuningRequest, error) { - jobParams := models.CreateFineTuningRequest{ - BaseModel: config.Model, - TrainingDataID: trainingFileID, - } - - if validationFileID != "" { - jobParams.ValidationDataID = validationFileID - } - - if config.Suffix != nil { - jobParams.Suffix = *config.Suffix - } - - if config.Seed != nil { - jobParams.Seed = *config.Seed - } - - // Set metadata if provided - if len(config.Metadata) > 0 { - jobParams.Metadata = make(map[string]string) - for k, v := range config.Metadata { - jobParams.Metadata[k] = v - } - } - - //TODO Need to set hyperparameters, method, integrations - return jobParams, nil -} - -// TODO Get rid of this method // ConvertYAMLToJobParams converts a YAML fine-tuning configuration to OpenAI job parameters -func ConvertYAMLToOpenAiJobParams(config *FTYaml.FineTuningConfig, trainingFileID, validationFileID string) (openai.FineTuningJobNewParams, error) { +func ConvertYAMLToJobParams(config *FTYaml.FineTuningConfig, trainingFileID, validationFileID string) (openai.FineTuningJobNewParams, error) { jobParams := openai.FineTuningJobNewParams{ Model: openai.FineTuningJobNewParamsModel(config.Model), TrainingFile: trainingFileID, @@ -62,7 +30,7 @@ func ConvertYAMLToOpenAiJobParams(config *FTYaml.FineTuningConfig, trainingFileI } // Set metadata if provided - if len(config.Metadata) > 0 { + if config.Metadata != nil && len(config.Metadata) > 0 { jobParams.Metadata = make(map[string]string) for k, v := range config.Metadata { jobParams.Metadata[k] = v diff --git a/cli/azd/extensions/azure.ai.finetune/internal/fine_tuning_yaml/yaml.go 
b/cli/azd/extensions/azure.ai.finetune/internal/fine_tuning_yaml/yaml.go index 9a99d6e5503..3f4099273b4 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/fine_tuning_yaml/yaml.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/fine_tuning_yaml/yaml.go @@ -24,7 +24,7 @@ type FineTuningConfig struct { TrainingFile string `yaml:"training_file"` // Optional: Path to validation file - ValidationFile *string `yaml:"validation_file,omitempty"` + ValidationFile string `yaml:"validation_file,omitempty"` // Optional: Fine-tuning method configuration (supervised, dpo, or reinforcement) Method MethodConfig `yaml:"method,omitempty"` From 2a65f17b10df5e8dbb8b34529237377e4430c953 Mon Sep 17 00:00:00 2001 From: Zubairuddin Mohammed Date: Sat, 3 Jan 2026 03:23:03 +0530 Subject: [PATCH 4/9] pr review changes + scheme implementation for job params --- .../internal/cmd/operations.go | 37 +- .../internal/providers/openai/conversions.go | 327 +++++++++++++++++- .../internal/providers/openai/provider.go | 48 ++- .../internal/services/finetune_service.go | 50 ++- .../internal/services/interface.go | 7 +- .../internal/utils/common.go | 18 + .../internal/utils/parser.go | 30 ++ .../azure.ai.finetune/pkg/models/finetune.go | 217 +++++++++++- 8 files changed, 647 insertions(+), 87 deletions(-) create mode 100644 cli/azd/extensions/azure.ai.finetune/internal/utils/common.go create mode 100644 cli/azd/extensions/azure.ai.finetune/internal/utils/parser.go diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go index 83c6bcc1eea..30868e1166f 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go @@ -12,9 +12,9 @@ import ( "github.com/fatih/color" "github.com/spf13/cobra" - FTYaml "azure.ai.finetune/internal/fine_tuning_yaml" "azure.ai.finetune/internal/services" JobWrapper "azure.ai.finetune/internal/tools" + Utils "azure.ai.finetune/internal/utils" ) func newOperationCommand() *cobra.Command { @@ -92,7 +92,7 @@ func newOperationSubmitCommand() *cobra.Command { // Parse and validate the YAML configuration file color.Green("parsing configuration file...") - config, err := FTYaml.ParseFineTuningConfig(filename) + config, err := Utils.ParseCreateFineTuningRequestConfig(filename) if err != nil { _ = spinner.Stop(ctx) fmt.Println() @@ -106,35 +106,8 @@ func newOperationSubmitCommand() *cobra.Command { return err } - trainingFileID, err := fineTuneSvc.UploadTrainingFile(ctx, config.TrainingFile) - if err != nil { - _ = spinner.Stop(ctx) - fmt.Println() - return fmt.Errorf("failed to upload training file: %w", err) - } - - // Upload validation file if provided - var validationFileID string - if config.ValidationFile != nil && *config.ValidationFile != "" { - validationFileID, err = fineTuneSvc.UploadValidationFile(ctx, *config.ValidationFile) - if err != nil { - _ = spinner.Stop(ctx) - fmt.Println() - return fmt.Errorf("failed to upload validation file: %w", err) - } - } - - // Create fine-tuning job - // Convert YAML configuration to service layer job parameters - ftRequest, err := ConvertYAMLToInternalJobParams(config, trainingFileID, validationFileID) - if err != nil { - _ = spinner.Stop(ctx) - fmt.Println() - return fmt.Errorf("failed to convert configuration to job parameters: %w", err) - } - // Submit the fine-tuning job using CreateJob from JobWrapper - job, err := fineTuneSvc.CreateFineTuningJob(ctx, &ftRequest) + job, err := 
fineTuneSvc.CreateFineTuningJob(ctx, config) if err != nil { _ = spinner.Stop(ctx) fmt.Println() @@ -145,7 +118,7 @@ func newOperationSubmitCommand() *cobra.Command { fmt.Println(strings.Repeat("=", 120)) color.Green("\nsuccessfully submitted fine-tuning Job!\n") fmt.Printf("Job ID: %s\n", job.ID) - fmt.Printf("Base Model: %s\n", job.BaseModel) + fmt.Printf("Model: %s\n", job.BaseModel) fmt.Printf("Status: %s\n", job.Status) fmt.Printf("Created: %s\n", job.CreatedAt) if job.FineTunedModel != "" { @@ -153,6 +126,8 @@ func newOperationSubmitCommand() *cobra.Command { } fmt.Println(strings.Repeat("=", 120)) + _ = spinner.Stop(ctx) + fmt.Println() return nil }, } diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go index 5a937cc7ca8..fabc6acd06e 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go @@ -4,9 +4,13 @@ package openai import ( + "encoding/json" + "strings" + "azure.ai.finetune/internal/utils" "azure.ai.finetune/pkg/models" "github.com/openai/openai-go/v3" + "github.com/openai/openai-go/v3/shared/constant" ) // OpenAI Status Constants - matches OpenAI SDK values @@ -37,8 +41,8 @@ func mapOpenAIStatusToJobStatus(openaiStatus openai.FineTuningJobStatus) models. } } -// convertOpenAIJobToModel converts OpenAI SDK job to domain model -func convertOpenAIJobToModel(openaiJob openai.FineTuningJob) *models.FineTuningJob { +// ConvertOpenAIJobToModel converts OpenAI SDK job to domain model +func ConvertOpenAIJobToModel(openaiJob openai.FineTuningJob) *models.FineTuningJob { return &models.FineTuningJob{ ID: openaiJob.ID, Status: mapOpenAIStatusToJobStatus(openaiJob.Status), @@ -48,24 +52,24 @@ func convertOpenAIJobToModel(openaiJob openai.FineTuningJob) *models.FineTuningJ } } -// ConvertYAMLToJobParams converts a YAML fine-tuning configuration to OpenAI job parameters -func convertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningRequest) (openai.FineTuningJobNewParams, error) { +// Converts the internal create finetuning request model to OpenAI job parameters +func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningRequest) (*openai.FineTuningJobNewParams, error) { jobParams := openai.FineTuningJobNewParams{ Model: openai.FineTuningJobNewParamsModel(config.BaseModel), - TrainingFile: config.TrainingDataID, + TrainingFile: config.TrainingFile, } - if config.ValidationDataID != "" { - jobParams.ValidationFile = openai.String(config.ValidationDataID) + if config.ValidationFile != nil && *config.ValidationFile != "" { + jobParams.ValidationFile = openai.String(*config.ValidationFile) } // Set optional fields - if config.Suffix != "" { - jobParams.Suffix = openai.String(config.Suffix) + if config.Suffix != nil && *config.Suffix != "" { + jobParams.Suffix = openai.String(*config.Suffix) } - if config.Seed != 0 { - jobParams.Seed = openai.Int(config.Seed) + if config.Seed != nil { + jobParams.Seed = openai.Int(*config.Seed) } // Set metadata if provided @@ -76,6 +80,303 @@ func convertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq } } - //TODO Need to set hyperparameters, method, integrations - return jobParams, nil + // Set hyperparameters if provided + if config.Method.Type == "supervised" && config.Method.Supervised != nil { + hp := config.Method.Supervised.Hyperparameters + supervisedMethod := 
openai.SupervisedMethodParam{ + Hyperparameters: openai.SupervisedHyperparameters{}, + } + + if hp.BatchSize != nil { + if batchSize := convertHyperparameterToInt(hp.BatchSize); batchSize != nil { + supervisedMethod.Hyperparameters.BatchSize = openai.SupervisedHyperparametersBatchSizeUnion{ + OfInt: openai.Int(*batchSize), + } + } else if strVal, ok := hp.BatchSize.(string); ok && strVal == "auto" { + supervisedMethod.Hyperparameters.BatchSize = openai.SupervisedHyperparametersBatchSizeUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + if hp.LearningRateMultiplier != nil { + if lr := convertHyperparameterToFloat(hp.LearningRateMultiplier); lr != nil { + supervisedMethod.Hyperparameters.LearningRateMultiplier = openai.SupervisedHyperparametersLearningRateMultiplierUnion{ + OfFloat: openai.Float(*lr), + } + } else if strVal, ok := hp.LearningRateMultiplier.(string); ok && strVal == "auto" { + supervisedMethod.Hyperparameters.LearningRateMultiplier = openai.SupervisedHyperparametersLearningRateMultiplierUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + if hp.Epochs != nil { + if epochs := convertHyperparameterToInt(hp.Epochs); epochs != nil { + supervisedMethod.Hyperparameters.NEpochs = openai.SupervisedHyperparametersNEpochsUnion{ + OfInt: openai.Int(*epochs), + } + } else if strVal, ok := hp.Epochs.(string); ok && strVal == "auto" { + supervisedMethod.Hyperparameters.NEpochs = openai.SupervisedHyperparametersNEpochsUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + jobParams.Method = openai.FineTuningJobNewParamsMethod{ + Type: "supervised", + Supervised: supervisedMethod, + } + + } else if config.Method.Type == "dpo" && config.Method.DPO != nil { + hp := config.Method.DPO.Hyperparameters + dpoMethod := openai.DpoMethodParam{ + Hyperparameters: openai.DpoHyperparameters{}, + } + + if hp.BatchSize != nil { + if batchSize := convertHyperparameterToInt(hp.BatchSize); batchSize != nil { + dpoMethod.Hyperparameters.BatchSize = openai.DpoHyperparametersBatchSizeUnion{ + OfInt: openai.Int(*batchSize), + } + } + } else if strVal, ok := hp.BatchSize.(string); ok && strVal == "auto" { + dpoMethod.Hyperparameters.BatchSize = openai.DpoHyperparametersBatchSizeUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + + if hp.LearningRateMultiplier != nil { + if lr := convertHyperparameterToFloat(hp.LearningRateMultiplier); lr != nil { + dpoMethod.Hyperparameters.LearningRateMultiplier = openai.DpoHyperparametersLearningRateMultiplierUnion{ + OfFloat: openai.Float(*lr), + } + } else if strVal, ok := hp.LearningRateMultiplier.(string); ok && strVal == "auto" { + dpoMethod.Hyperparameters.LearningRateMultiplier = openai.DpoHyperparametersLearningRateMultiplierUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + if hp.Epochs != nil { + if epochs := convertHyperparameterToInt(hp.Epochs); epochs != nil { + dpoMethod.Hyperparameters.NEpochs = openai.DpoHyperparametersNEpochsUnion{ + OfInt: openai.Int(*epochs), + } + } else if strVal, ok := hp.Epochs.(string); ok && strVal == "auto" { + dpoMethod.Hyperparameters.NEpochs = openai.DpoHyperparametersNEpochsUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + if hp.Beta != nil { + if beta := convertHyperparameterToFloat(hp.Beta); beta != nil { + dpoMethod.Hyperparameters.Beta = openai.DpoHyperparametersBetaUnion{ + OfFloat: openai.Float(*beta), + } + } else if strVal, ok := hp.Beta.(string); ok && strVal == "auto" { + dpoMethod.Hyperparameters.Beta = 
openai.DpoHyperparametersBetaUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + jobParams.Method = openai.FineTuningJobNewParamsMethod{ + Type: "dpo", + Dpo: dpoMethod, + } + + } else if config.Method.Type == "reinforcement" && config.Method.Reinforcement != nil { + hp := config.Method.Reinforcement.Hyperparameters + reinforcementMethod := openai.ReinforcementMethodParam{ + Hyperparameters: openai.ReinforcementHyperparameters{}, + } + + if hp.BatchSize != nil { + if batchSize := convertHyperparameterToInt(hp.BatchSize); batchSize != nil { + reinforcementMethod.Hyperparameters.BatchSize = openai.ReinforcementHyperparametersBatchSizeUnion{ + OfInt: openai.Int(*batchSize), + } + } else if strVal, ok := hp.BatchSize.(string); ok && strVal == "auto" { + reinforcementMethod.Hyperparameters.BatchSize = openai.ReinforcementHyperparametersBatchSizeUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + if hp.LearningRateMultiplier != nil { + if lr := convertHyperparameterToFloat(hp.LearningRateMultiplier); lr != nil { + reinforcementMethod.Hyperparameters.LearningRateMultiplier = openai.ReinforcementHyperparametersLearningRateMultiplierUnion{ + OfFloat: openai.Float(*lr), + } + } else if strVal, ok := hp.LearningRateMultiplier.(string); ok && strVal == "auto" { + reinforcementMethod.Hyperparameters.LearningRateMultiplier = openai.ReinforcementHyperparametersLearningRateMultiplierUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + if hp.Epochs != nil { + if epochs := convertHyperparameterToInt(hp.Epochs); epochs != nil { + reinforcementMethod.Hyperparameters.NEpochs = openai.ReinforcementHyperparametersNEpochsUnion{ + OfInt: openai.Int(*epochs), + } + } else if strVal, ok := hp.Epochs.(string); ok && strVal == "auto" { + reinforcementMethod.Hyperparameters.NEpochs = openai.ReinforcementHyperparametersNEpochsUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + if hp.ComputeMultiplier != nil { + if compute := convertHyperparameterToFloat(hp.ComputeMultiplier); compute != nil { + reinforcementMethod.Hyperparameters.ComputeMultiplier = openai.ReinforcementHyperparametersComputeMultiplierUnion{ + OfFloat: openai.Float(*compute), + } + } else if strVal, ok := hp.ComputeMultiplier.(string); ok && strVal == "auto" { + reinforcementMethod.Hyperparameters.ComputeMultiplier = openai.ReinforcementHyperparametersComputeMultiplierUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + if hp.EvalInterval != nil { + if evalSteps := convertHyperparameterToInt(hp.EvalInterval); evalSteps != nil { + reinforcementMethod.Hyperparameters.EvalInterval = openai.ReinforcementHyperparametersEvalIntervalUnion{ + OfInt: openai.Int(*evalSteps), + } + } else if strVal, ok := hp.EvalInterval.(string); ok && strVal == "auto" { + reinforcementMethod.Hyperparameters.EvalInterval = openai.ReinforcementHyperparametersEvalIntervalUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + if hp.EvalSamples != nil { + if evalSamples := convertHyperparameterToInt(hp.EvalSamples); evalSamples != nil { + reinforcementMethod.Hyperparameters.EvalSamples = openai.ReinforcementHyperparametersEvalSamplesUnion{ + OfInt: openai.Int(*evalSamples), + } + } else if strVal, ok := hp.EvalSamples.(string); ok && strVal == "auto" { + reinforcementMethod.Hyperparameters.EvalSamples = openai.ReinforcementHyperparametersEvalSamplesUnion{ + OfAuto: constant.ValueOf[constant.Auto](), + } + } + } + + if hp.ReasoningEffort != "" { + 
reinforcementMethod.Hyperparameters.ReasoningEffort = getReasoningEffortValue(hp.ReasoningEffort) + } + + grader := config.Method.Reinforcement.Grader + if grader != nil { + // Convert grader to JSON and unmarshal to ReinforcementMethodGraderUnionParam + graderJSON, err := json.Marshal(grader) + if err != nil { + return nil, err + } + + var graderUnion openai.ReinforcementMethodGraderUnionParam + err = json.Unmarshal(graderJSON, &graderUnion) + if err != nil { + return nil, err + } + reinforcementMethod.Grader = graderUnion + } + + jobParams.Method = openai.FineTuningJobNewParamsMethod{ + Type: "reinforcement", + Reinforcement: reinforcementMethod, + } + } + + // Set integrations if provided + if len(config.Integrations) > 0 { + var integrations []openai.FineTuningJobNewParamsIntegration + + for _, integration := range config.Integrations { + if integration.Type == "" || integration.Type == "wandb" { + + wandbConfigJSON, err := json.Marshal(integration.Config) + if err != nil { + return nil, err + } + + var wandbConfig openai.FineTuningJobNewParamsIntegrationWandb + err = json.Unmarshal(wandbConfigJSON, &wandbConfig) + if err != nil { + return nil, err + } + integrations = append(integrations, openai.FineTuningJobNewParamsIntegration{ + Type: "wandb", + Wandb: wandbConfig, + }) + } + } + + if len(integrations) > 0 { + jobParams.Integrations = integrations + } + } + + return &jobParams, nil +} + +// convertHyperparameterToInt converts interface{} hyperparameter to *int64 +func convertHyperparameterToInt(value interface{}) *int64 { + if value == nil { + return nil + } + switch v := value.(type) { + case int: + val := int64(v) + return &val + case int64: + return &v + case float64: + val := int64(v) + return &val + case string: + // "auto" string handled separately + return nil + default: + return nil + } +} + +// convertHyperparameterToFloat converts interface{} hyperparameter to *float64 +func convertHyperparameterToFloat(value interface{}) *float64 { + if value == nil { + return nil + } + switch v := value.(type) { + case int: + val := float64(v) + return &val + case int64: + val := float64(v) + return &val + case float64: + return &v + case string: + // "auto" string handled separately + return nil + default: + return nil + } +} + +func getReasoningEffortValue(effort string) openai.ReinforcementHyperparametersReasoningEffort { + + switch strings.ToLower(effort) { + case "low": + return openai.ReinforcementHyperparametersReasoningEffortLow + case "medium": + return openai.ReinforcementHyperparametersReasoningEffortMedium + case "high": + return openai.ReinforcementHyperparametersReasoningEffortHigh + default: + return openai.ReinforcementHyperparametersReasoningEffortDefault + } } diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go index ff2023ccd24..32ce9396a53 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go @@ -7,8 +7,10 @@ import ( "context" "fmt" "os" + "time" "azure.ai.finetune/pkg/models" + "github.com/azure/azure-dev/cli/azd/pkg/ux" "github.com/openai/openai-go/v3" ) @@ -27,17 +29,17 @@ func NewOpenAIProvider(client *openai.Client) *OpenAIProvider { // CreateFineTuningJob creates a new fine-tuning job via OpenAI API func (p *OpenAIProvider) CreateFineTuningJob(ctx context.Context, req *models.CreateFineTuningRequest) (*models.FineTuningJob, error) { - 
params, err := convertInternalJobParamToOpenAiJobParams(req) + params, err := ConvertInternalJobParamToOpenAiJobParams(req) if err != nil { return nil, fmt.Errorf("failed to convert internal model to openai: %w", err) } - job, err := p.client.FineTuning.Jobs.New(ctx, params) + job, err := p.client.FineTuning.Jobs.New(ctx, *params) if err != nil { return nil, fmt.Errorf("failed to create fine-tuning job: %w", err) } - return convertOpenAIJobToModel(*job), nil + return ConvertOpenAIJobToModel(*job), nil } // GetFineTuningStatus retrieves the status of a fine-tuning job @@ -60,7 +62,7 @@ func (p *OpenAIProvider) ListFineTuningJobs(ctx context.Context, limit int, afte var jobs []*models.FineTuningJob for _, job := range jobList.Data { - finetuningJob := convertOpenAIJobToModel(job) + finetuningJob := ConvertOpenAIJobToModel(job) jobs = append(jobs, finetuningJob) } return jobs, nil @@ -108,9 +110,18 @@ func (p *OpenAIProvider) UploadFile(ctx context.Context, filePath string) (strin return "", fmt.Errorf("file path cannot be empty") } + // Show spinner while creating job + spinner := ux.NewSpinner(&ux.SpinnerOptions{ + Text: "uploading the file using openai provider", + }) + if err := spinner.Start(ctx); err != nil { + fmt.Printf("failed to start spinner: %v\n", err) + } + file, err := os.Open(filePath) if err != nil { - return "", fmt.Errorf("failed to open file %s: %w", filePath, err) + _ = spinner.Stop(ctx) + return "", fmt.Errorf("\nfailed to open file %s: %w", filePath, err) } defer file.Close() @@ -120,10 +131,33 @@ func (p *OpenAIProvider) UploadFile(ctx context.Context, filePath string) (strin }) if err != nil { - return "", fmt.Errorf("failed to upload file: %w", err) + _ = spinner.Stop(ctx) + return "", fmt.Errorf("\nfailed to upload file: %w", err) } + if uploadedFile == nil || uploadedFile.ID == "" { - return "", fmt.Errorf("uploaded file is empty") + _ = spinner.Stop(ctx) + return "", fmt.Errorf("\nuploaded file is empty") + } + + // Poll for file processing status + fmt.Print("Waiting for file to be processed") + for { + f, err := p.client.Files.Get(ctx, uploadedFile.ID) + if err != nil { + _ = spinner.Stop(ctx) + return "", fmt.Errorf("\nfailed to check file status: %w", err) + } + if f.Status == openai.FileObjectStatusProcessed { + _ = spinner.Stop(ctx) + break + } + if f.Status == openai.FileObjectStatusError { + _ = spinner.Stop(ctx) + return "", fmt.Errorf("\nfile processing failed with status: %s", f.Status) + } + fmt.Print(".") + time.Sleep(2 * time.Second) } return uploadedFile.ID, nil diff --git a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go index 8714cd19ade..ae7370ec7bd 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go @@ -11,8 +11,10 @@ import ( "azure.ai.finetune/internal/providers" "azure.ai.finetune/internal/providers/factory" "azure.ai.finetune/internal/utils" + Utils "azure.ai.finetune/internal/utils" "azure.ai.finetune/pkg/models" "github.com/azure/azure-dev/cli/azd/pkg/azdext" + "github.com/fatih/color" ) // Ensure fineTuningServiceImpl implements FineTuningService interface @@ -48,10 +50,36 @@ func (s *fineTuningServiceImpl) CreateFineTuningJob(ctx context.Context, req *mo if req.BaseModel == "" { return nil, fmt.Errorf("base model is required") } - if req.TrainingDataID == "" { + if req.TrainingFile == "" { return nil, 
fmt.Errorf("training file is required") } + if Utils.IsLocalFilePath(req.TrainingFile) { + color.Green("uploading training file...") + + trainingDataID, err := s.UploadFile(ctx, Utils.GetLocalFilePath(req.TrainingFile)) + if err != nil { + return nil, fmt.Errorf("failed to upload training file: %w", err) + } + req.TrainingFile = trainingDataID + } else { + color.Yellow("Provided training file is non-local, skipping upload...") + } + + // Upload validation file if provided + if req.ValidationFile != nil && *req.ValidationFile != "" { + if Utils.IsLocalFilePath(*req.ValidationFile) { + color.Green("uploading validation file...") + validationDataID, err := s.UploadFile(ctx, Utils.GetLocalFilePath(*req.ValidationFile)) + if err != nil { + return nil, fmt.Errorf("failed to upload validation file: %w", err) + } + req.ValidationFile = &validationDataID + } else { + color.Yellow("Provided validation file is non-local, skipping upload...") + } + } + // Call provider with retry logic var job *models.FineTuningJob err := utils.RetryOperation(ctx, utils.DefaultRetryConfig(), func() error { @@ -133,26 +161,14 @@ func (s *fineTuningServiceImpl) CancelJob(ctx context.Context, jobID string) (*m return nil, nil } -// UploadTrainingFile uploads and validates a training file -func (s *fineTuningServiceImpl) UploadTrainingFile(ctx context.Context, filePath string) (string, error) { - if filePath == "" { - return "", fmt.Errorf("training file path cannot be empty") - } - uploadedFileId, err := s._uploadFile(ctx, filePath) - if err != nil || uploadedFileId == "" { - return "", fmt.Errorf("failed to upload training file: %w", err) - } - return uploadedFileId, nil -} - -// UploadValidationFile uploads and validates a validation file -func (s *fineTuningServiceImpl) UploadValidationFile(ctx context.Context, filePath string) (string, error) { +// UploadFile uploads and validates a file +func (s *fineTuningServiceImpl) UploadFile(ctx context.Context, filePath string) (string, error) { if filePath == "" { - return "", nil // Validation file is optional + return "", fmt.Errorf("file path cannot be empty") } uploadedFileId, err := s._uploadFile(ctx, filePath) if err != nil || uploadedFileId == "" { - return "", fmt.Errorf("failed to upload validation file: %w", err) + return "", fmt.Errorf("failed to upload file: %w", err) } return uploadedFileId, nil } diff --git a/cli/azd/extensions/azure.ai.finetune/internal/services/interface.go b/cli/azd/extensions/azure.ai.finetune/internal/services/interface.go index e2b0d63c7e5..78d2fa5f39c 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/services/interface.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/services/interface.go @@ -38,11 +38,8 @@ type FineTuningService interface { // CancelJob cancels a job with proper state validation CancelJob(ctx context.Context, jobID string) (*models.FineTuningJob, error) - // UploadTrainingFile uploads and validates a training file - UploadTrainingFile(ctx context.Context, filePath string) (string, error) - - // UploadValidationFile uploads and validates a validation file - UploadValidationFile(ctx context.Context, filePath string) (string, error) + // UploadFile uploads and validates a file + UploadFile(ctx context.Context, filePath string) (string, error) // PollJobUntilCompletion polls a job until it completes or fails PollJobUntilCompletion(ctx context.Context, jobID string, intervalSeconds int) (*models.FineTuningJob, error) diff --git a/cli/azd/extensions/azure.ai.finetune/internal/utils/common.go 
b/cli/azd/extensions/azure.ai.finetune/internal/utils/common.go new file mode 100644 index 00000000000..491e04a4a22 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/utils/common.go @@ -0,0 +1,18 @@ +package utils + +func IsLocalFilePath(fileID string) bool { + if fileID == "" { + return false + } + if len(fileID) > 6 && fileID[:6] == "local:" { + return true + } + return false +} + +func GetLocalFilePath(fileID string) string { + if IsLocalFilePath(fileID) { + return fileID[6:] + } + return fileID +} diff --git a/cli/azd/extensions/azure.ai.finetune/internal/utils/parser.go b/cli/azd/extensions/azure.ai.finetune/internal/utils/parser.go new file mode 100644 index 00000000000..8c487a1b7a2 --- /dev/null +++ b/cli/azd/extensions/azure.ai.finetune/internal/utils/parser.go @@ -0,0 +1,30 @@ +package utils + +import ( + "fmt" + "os" + + "azure.ai.finetune/pkg/models" + "github.com/braydonk/yaml" +) + +func ParseCreateFineTuningRequestConfig(filePath string) (*models.CreateFineTuningRequest, error) { + // Read the YAML file + yamlFile, err := os.ReadFile(filePath) + if err != nil { + return nil, fmt.Errorf("failed to read config file %s: %w", filePath, err) + } + + // Parse YAML into config struct + var config models.CreateFineTuningRequest + if err := yaml.Unmarshal(yamlFile, &config); err != nil { + return nil, fmt.Errorf("failed to parse YAML config: %w", err) + } + + // Validate the configuration + if err := config.Validate(); err != nil { + return nil, fmt.Errorf("invalid configuration: %w", err) + } + + return &config, nil +} diff --git a/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go b/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go index f3786433c9b..4b474c16229 100644 --- a/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go +++ b/cli/azd/extensions/azure.ai.finetune/pkg/models/finetune.go @@ -3,7 +3,10 @@ package models -import "time" +import ( + "fmt" + "time" +) // JobStatus represents the status of a fine-tuning job type JobStatus string @@ -18,6 +21,15 @@ const ( StatusPaused JobStatus = "paused" ) +// Represents the type of method used for fine-tuning +type MethodType string + +const ( + Supervised MethodType = "supervised" + DPO MethodType = "dpo" + Reinforcement MethodType = "reinforcement" +) + // FineTuningJob represents a vendor-agnostic fine-tuning job type FineTuningJob struct { // Core identification @@ -42,19 +54,6 @@ type FineTuningJob struct { ErrorDetails *ErrorDetail } -// CreateFineTuningRequest represents a request to create a fine-tuning job -type CreateFineTuningRequest struct { - BaseModel string - TrainingDataID string - ValidationDataID string - Suffix string - Seed int64 - Metadata map[string]string - Method interface{} // Can be dpo, supervised or reinforcement - Integrations []interface{} - Hyperparameters *Hyperparameters -} - // Hyperparameters represents fine-tuning hyperparameters type Hyperparameters struct { BatchSize int64 @@ -108,3 +107,193 @@ type CheckpointMetrics struct { FullValidLoss float64 FullValidMeanTokenAccuracy float64 } + +// CreateFineTuningRequest represents a request to create a fine-tuning job +type CreateFineTuningRequest struct { + // Required: The name of the model to fine-tune + BaseModel string `yaml:"model"` + + // Required: Path to training file + // Format: "file-id" or "local:/path/to/file.jsonl" + TrainingFile string `yaml:"training_file"` + + // Optional: Path to validation file + ValidationFile *string `yaml:"validation_file,omitempty"` + + // Optional: Suffix for the 
fine-tuned model name (up to 64 characters) + // Example: "custom-model-name" produces "ft:gpt-4o-mini:openai:custom-model-name:7p4lURel" + Suffix *string `yaml:"suffix,omitempty"` + + // Optional: Random seed for reproducibility + Seed *int64 `yaml:"seed,omitempty"` + + // Optional: Custom metadata for the fine-tuning job + // Max 16 key-value pairs, keys max 64 chars, values max 512 chars + Metadata map[string]string `yaml:"metadata,omitempty"` + + // Optional: Fine-tuning method configuration (supervised, dpo, or reinforcement) + Method MethodConfig `yaml:"method,omitempty"` + + // Optional: Integrations to enable (e.g., wandb for Weights & Biases) + Integrations []Integration `yaml:"integrations,omitempty"` + + // Optional: Additional request body fields not covered by standard config + ExtraBody map[string]interface{} `yaml:"extra_body,omitempty"` +} + +// MethodConfig represents fine-tuning method configuration +type MethodConfig struct { + // Type of fine-tuning method: "supervised", "dpo", or "reinforcement" + Type string `yaml:"type"` + + // Supervised fine-tuning configuration + Supervised *SupervisedConfig `yaml:"supervised,omitempty"` + + // Direct Preference Optimization (DPO) configuration + DPO *DPOConfig `yaml:"dpo,omitempty"` + + // Reinforcement learning fine-tuning configuration + Reinforcement *ReinforcementConfig `yaml:"reinforcement,omitempty"` +} + +// SupervisedConfig represents supervised fine-tuning method configuration +// Suitable for standard supervised learning tasks +type SupervisedConfig struct { + Hyperparameters HyperparametersConfig `yaml:"hyperparameters,omitempty"` +} + +// DPOConfig represents Direct Preference Optimization (DPO) configuration +// DPO is used for preference-based fine-tuning +type DPOConfig struct { + Hyperparameters HyperparametersConfig `yaml:"hyperparameters,omitempty"` +} + +// ReinforcementConfig represents reinforcement learning fine-tuning configuration +// Suitable for reasoning models that benefit from reinforcement learning +type ReinforcementConfig struct { + // Grader configuration for reinforcement learning (evaluates model outputs) + Grader map[string]interface{} `yaml:"grader,omitempty"` + + // Hyperparameters specific to reinforcement learning + Hyperparameters HyperparametersConfig `yaml:"hyperparameters,omitempty"` +} + +// HyperparametersConfig represents hyperparameter configuration +// Values can be integers, floats, or "auto" for automatic configuration +type HyperparametersConfig struct { + // Number of training epochs + // Can be: integer (1-10), "auto" + Epochs interface{} `yaml:"epochs,omitempty"` + + // Batch size for training + // Can be: integer (1, 8, 16, 32, 64, 128), "auto" + BatchSize interface{} `yaml:"batch_size,omitempty"` + + // Learning rate multiplier + // Can be: float (0.1-2.0), "auto" + LearningRateMultiplier interface{} `yaml:"learning_rate_multiplier,omitempty"` + + // Weight for prompt loss in supervised learning (0.0-1.0) + PromptLossWeight *float64 `yaml:"prompt_loss_weight,omitempty"` + + // Beta parameter for DPO (temperature-like parameter) + // Can be: float, "auto" + Beta interface{} `yaml:"beta,omitempty"` + + // Compute multiplier for reinforcement learning + // Multiplier on amount of compute used for exploring search space during training + // Can be: float, "auto" + ComputeMultiplier interface{} `yaml:"compute_multiplier,omitempty"` + + // Reasoning effort level for reinforcement learning with reasoning models + // Options: "low", "medium", "high" + ReasoningEffort string 
`yaml:"reasoning_effort,omitempty"` + + // Evaluation interval for reinforcement learning + // Number of training steps between evaluation runs + // Can be: integer, "auto" + EvalInterval interface{} `yaml:"eval_interval,omitempty"` + + // Evaluation samples for reinforcement learning + // Number of evaluation samples to generate per training step + // Can be: integer, "auto" + EvalSamples interface{} `yaml:"eval_samples,omitempty"` +} + +// Integration represents integration configuration (e.g., Weights & Biases) +type Integration struct { + // Type of integration: "wandb" (Weights & Biases), etc. + Type string `yaml:"type"` + + // Integration-specific configuration (API keys, project names, etc.) + Config map[string]interface{} `yaml:"config,omitempty"` +} + +// Validate checks if the configuration is valid +func (c CreateFineTuningRequest) Validate() error { + // Validate required fields + if c.BaseModel == "" { + return fmt.Errorf("model is required") + } + + if c.TrainingFile == "" { + return fmt.Errorf("training_file is required") + } + + // Validate method if provided + if c.Method.Type != "" { + if c.Method.Type != string(Supervised) && c.Method.Type != string(DPO) && c.Method.Type != string(Reinforcement) { + return fmt.Errorf("invalid method type: %s (must be 'supervised', 'dpo', or 'reinforcement')", c.Method.Type) + } + + // Validate method-specific configuration + switch c.Method.Type { + case string(Supervised): + if c.Method.Supervised == nil { + return fmt.Errorf("supervised method requires 'supervised' configuration block") + } + case string(DPO): + if c.Method.DPO == nil { + return fmt.Errorf("dpo method requires 'dpo' configuration block") + } + case string(Reinforcement): + if c.Method.Reinforcement == nil { + return fmt.Errorf("reinforcement method requires 'reinforcement' configuration block") + } + } + } + + // Validate integrations if provided + if len(c.Integrations) > 0 { + for _, integration := range c.Integrations { + if integration.Type == "" { + return fmt.Errorf("integration type is required if integrations are specified") + } + if integration.Config == nil { + return fmt.Errorf("integration of type '%s' requires 'config' block", integration.Type) + } + } + } + + // Validate suffix length if provided + if c.Suffix != nil && len(*c.Suffix) > 64 { + return fmt.Errorf("suffix exceeds maximum length of 64 characters: %d", len(*c.Suffix)) + } + + // Validate metadata constraints + if c.Metadata != nil { + if len(c.Metadata) > 16 { + return fmt.Errorf("metadata exceeds maximum of 16 key-value pairs: %d", len(c.Metadata)) + } + for k, v := range c.Metadata { + if len(k) > 64 { + return fmt.Errorf("metadata key exceeds maximum length of 64 characters: %s", k) + } + if len(v) > 512 { + return fmt.Errorf("metadata value exceeds maximum length of 512 characters for key: %s", k) + } + } + } + + return nil +} From 51d38dd072f759095f7037b5587f395b255e3e71 Mon Sep 17 00:00:00 2001 From: Zubairuddin Mohammed Date: Sat, 3 Jan 2026 04:32:14 +0530 Subject: [PATCH 5/9] a few formatting changes --- .../extensions/azure.ai.finetune/internal/cmd/operations.go | 2 +- .../azure.ai.finetune/internal/providers/openai/provider.go | 5 +++-- .../azure.ai.finetune/internal/services/finetune_service.go | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go index 30868e1166f..0e7a319ceb7 100644 --- 
a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go @@ -91,7 +91,7 @@ func newOperationSubmitCommand() *cobra.Command { } // Parse and validate the YAML configuration file - color.Green("parsing configuration file...") + color.Green("\nparsing configuration file...") config, err := Utils.ParseCreateFineTuningRequestConfig(filename) if err != nil { _ = spinner.Stop(ctx) diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go index 32ce9396a53..a337da15ad1 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go @@ -11,6 +11,7 @@ import ( "azure.ai.finetune/pkg/models" "github.com/azure/azure-dev/cli/azd/pkg/ux" + "github.com/fatih/color" "github.com/openai/openai-go/v3" ) @@ -141,7 +142,7 @@ func (p *OpenAIProvider) UploadFile(ctx context.Context, filePath string) (strin } // Poll for file processing status - fmt.Print("Waiting for file to be processed") + color.Yellow("\nWaiting for file to be processed") for { f, err := p.client.Files.Get(ctx, uploadedFile.ID) if err != nil { @@ -156,7 +157,7 @@ func (p *OpenAIProvider) UploadFile(ctx context.Context, filePath string) (strin _ = spinner.Stop(ctx) return "", fmt.Errorf("\nfile processing failed with status: %s", f.Status) } - fmt.Print(".") + color.Yellow(".") time.Sleep(2 * time.Second) } diff --git a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go index ae7370ec7bd..1fd86e00fa4 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go @@ -69,14 +69,14 @@ func (s *fineTuningServiceImpl) CreateFineTuningJob(ctx context.Context, req *mo // Upload validation file if provided if req.ValidationFile != nil && *req.ValidationFile != "" { if Utils.IsLocalFilePath(*req.ValidationFile) { - color.Green("uploading validation file...") + color.Green("\nuploading validation file...") validationDataID, err := s.UploadFile(ctx, Utils.GetLocalFilePath(*req.ValidationFile)) if err != nil { return nil, fmt.Errorf("failed to upload validation file: %w", err) } req.ValidationFile = &validationDataID } else { - color.Yellow("Provided validation file is non-local, skipping upload...") + color.Yellow("\nProvided validation file is non-local, skipping upload...") } } From 602816af3047d7f16cd4b29a1257e6c124171038 Mon Sep 17 00:00:00 2001 From: Zubairuddin Mohammed Date: Sat, 3 Jan 2026 04:45:01 +0530 Subject: [PATCH 6/9] removing ofAuto value for hyperparameters --- .../internal/providers/openai/conversions.go | 53 ------------------- .../internal/providers/openai/provider.go | 1 - 2 files changed, 54 deletions(-) diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go index fabc6acd06e..3d8c8e3dabf 100644 --- a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go +++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go @@ -10,7 +10,6 @@ import ( "azure.ai.finetune/internal/utils" "azure.ai.finetune/pkg/models" "github.com/openai/openai-go/v3" - 
"github.com/openai/openai-go/v3/shared/constant" ) // OpenAI Status Constants - matches OpenAI SDK values @@ -92,10 +91,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq supervisedMethod.Hyperparameters.BatchSize = openai.SupervisedHyperparametersBatchSizeUnion{ OfInt: openai.Int(*batchSize), } - } else if strVal, ok := hp.BatchSize.(string); ok && strVal == "auto" { - supervisedMethod.Hyperparameters.BatchSize = openai.SupervisedHyperparametersBatchSizeUnion{ - OfAuto: constant.ValueOf[constant.Auto](), - } } } @@ -104,10 +99,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq supervisedMethod.Hyperparameters.LearningRateMultiplier = openai.SupervisedHyperparametersLearningRateMultiplierUnion{ OfFloat: openai.Float(*lr), } - } else if strVal, ok := hp.LearningRateMultiplier.(string); ok && strVal == "auto" { - supervisedMethod.Hyperparameters.LearningRateMultiplier = openai.SupervisedHyperparametersLearningRateMultiplierUnion{ - OfAuto: constant.ValueOf[constant.Auto](), - } } } @@ -116,10 +107,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq supervisedMethod.Hyperparameters.NEpochs = openai.SupervisedHyperparametersNEpochsUnion{ OfInt: openai.Int(*epochs), } - } else if strVal, ok := hp.Epochs.(string); ok && strVal == "auto" { - supervisedMethod.Hyperparameters.NEpochs = openai.SupervisedHyperparametersNEpochsUnion{ - OfAuto: constant.ValueOf[constant.Auto](), - } } } @@ -140,10 +127,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq OfInt: openai.Int(*batchSize), } } - } else if strVal, ok := hp.BatchSize.(string); ok && strVal == "auto" { - dpoMethod.Hyperparameters.BatchSize = openai.DpoHyperparametersBatchSizeUnion{ - OfAuto: constant.ValueOf[constant.Auto](), - } } if hp.LearningRateMultiplier != nil { @@ -151,10 +134,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq dpoMethod.Hyperparameters.LearningRateMultiplier = openai.DpoHyperparametersLearningRateMultiplierUnion{ OfFloat: openai.Float(*lr), } - } else if strVal, ok := hp.LearningRateMultiplier.(string); ok && strVal == "auto" { - dpoMethod.Hyperparameters.LearningRateMultiplier = openai.DpoHyperparametersLearningRateMultiplierUnion{ - OfAuto: constant.ValueOf[constant.Auto](), - } } } @@ -163,10 +142,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq dpoMethod.Hyperparameters.NEpochs = openai.DpoHyperparametersNEpochsUnion{ OfInt: openai.Int(*epochs), } - } else if strVal, ok := hp.Epochs.(string); ok && strVal == "auto" { - dpoMethod.Hyperparameters.NEpochs = openai.DpoHyperparametersNEpochsUnion{ - OfAuto: constant.ValueOf[constant.Auto](), - } } } @@ -175,10 +150,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq dpoMethod.Hyperparameters.Beta = openai.DpoHyperparametersBetaUnion{ OfFloat: openai.Float(*beta), } - } else if strVal, ok := hp.Beta.(string); ok && strVal == "auto" { - dpoMethod.Hyperparameters.Beta = openai.DpoHyperparametersBetaUnion{ - OfAuto: constant.ValueOf[constant.Auto](), - } } } @@ -198,10 +169,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq reinforcementMethod.Hyperparameters.BatchSize = openai.ReinforcementHyperparametersBatchSizeUnion{ OfInt: openai.Int(*batchSize), } - } else if strVal, ok := hp.BatchSize.(string); ok && strVal == "auto" { - reinforcementMethod.Hyperparameters.BatchSize = 
-					OfAuto: constant.ValueOf[constant.Auto](),
-				}
 			}
 		}
 
@@ -210,10 +177,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq
 				reinforcementMethod.Hyperparameters.LearningRateMultiplier = openai.ReinforcementHyperparametersLearningRateMultiplierUnion{
 					OfFloat: openai.Float(*lr),
 				}
-			} else if strVal, ok := hp.LearningRateMultiplier.(string); ok && strVal == "auto" {
-				reinforcementMethod.Hyperparameters.LearningRateMultiplier = openai.ReinforcementHyperparametersLearningRateMultiplierUnion{
-					OfAuto: constant.ValueOf[constant.Auto](),
-				}
 			}
 		}
 
@@ -222,10 +185,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq
 				reinforcementMethod.Hyperparameters.NEpochs = openai.ReinforcementHyperparametersNEpochsUnion{
 					OfInt: openai.Int(*epochs),
 				}
-			} else if strVal, ok := hp.Epochs.(string); ok && strVal == "auto" {
-				reinforcementMethod.Hyperparameters.NEpochs = openai.ReinforcementHyperparametersNEpochsUnion{
-					OfAuto: constant.ValueOf[constant.Auto](),
-				}
 			}
 		}
 
@@ -234,10 +193,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq
 				reinforcementMethod.Hyperparameters.ComputeMultiplier = openai.ReinforcementHyperparametersComputeMultiplierUnion{
 					OfFloat: openai.Float(*compute),
 				}
-			} else if strVal, ok := hp.ComputeMultiplier.(string); ok && strVal == "auto" {
-				reinforcementMethod.Hyperparameters.ComputeMultiplier = openai.ReinforcementHyperparametersComputeMultiplierUnion{
-					OfAuto: constant.ValueOf[constant.Auto](),
-				}
 			}
 		}
 
@@ -246,10 +201,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq
 				reinforcementMethod.Hyperparameters.EvalInterval = openai.ReinforcementHyperparametersEvalIntervalUnion{
 					OfInt: openai.Int(*evalSteps),
 				}
-			} else if strVal, ok := hp.EvalInterval.(string); ok && strVal == "auto" {
-				reinforcementMethod.Hyperparameters.EvalInterval = openai.ReinforcementHyperparametersEvalIntervalUnion{
-					OfAuto: constant.ValueOf[constant.Auto](),
-				}
 			}
 		}
 
@@ -258,10 +209,6 @@ func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningReq
 				reinforcementMethod.Hyperparameters.EvalSamples = openai.ReinforcementHyperparametersEvalSamplesUnion{
 					OfInt: openai.Int(*evalSamples),
 				}
-			} else if strVal, ok := hp.EvalSamples.(string); ok && strVal == "auto" {
-				reinforcementMethod.Hyperparameters.EvalSamples = openai.ReinforcementHyperparametersEvalSamplesUnion{
-					OfAuto: constant.ValueOf[constant.Auto](),
-				}
 			}
 		}
diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go
index a337da15ad1..e27775fe6b5 100644
--- a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go
+++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go
@@ -142,7 +142,6 @@ func (p *OpenAIProvider) UploadFile(ctx context.Context, filePath string) (strin
 	}
 
 	// Poll for file processing status
-	color.Yellow("\nWaiting for file to be processed")
 	for {
 		f, err := p.client.Files.Get(ctx, uploadedFile.ID)
 		if err != nil {

From 372b83896cd028cddea03d5bc3eb442a43126f8b Mon Sep 17 00:00:00 2001
From: Zubairuddin Mohammed
Date: Sat, 3 Jan 2026 04:54:55 +0530
Subject: [PATCH 7/9] more formatting changes

---
 cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go
index 0e7a319ceb7..d2f96dd84ee 100644
--- a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go
+++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go
@@ -115,7 +115,7 @@ func newOperationSubmitCommand() *cobra.Command {
 			}
 
 			// Print success message
-			fmt.Println(strings.Repeat("=", 120))
+			fmt.Println("\n", strings.Repeat("=", 120))
 			color.Green("\nsuccessfully submitted fine-tuning Job!\n")
 			fmt.Printf("Job ID: %s\n", job.ID)
 			fmt.Printf("Model: %s\n", job.BaseModel)

From 3a1b7d828e3bf5c5d07f0fdd677fc784ee56708a Mon Sep 17 00:00:00 2001
From: Zubairuddin Mohammed
Date: Sun, 4 Jan 2026 19:46:02 +0530
Subject: [PATCH 8/9] adding command line parameters + formatting

---
 .../internal/cmd/operations.go                | 74 ++++++++++++-----
 .../internal/providers/openai/provider.go     |  2 +-
 .../internal/services/finetune_service.go     |  4 +-
 3 files changed, 57 insertions(+), 23 deletions(-)

diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go
index d2f96dd84ee..89ec02c266e 100644
--- a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go
+++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go
@@ -15,6 +15,7 @@ import (
 	"azure.ai.finetune/internal/services"
 	JobWrapper "azure.ai.finetune/internal/tools"
 	Utils "azure.ai.finetune/internal/utils"
+	"azure.ai.finetune/pkg/models"
 )
 
 func newOperationCommand() *cobra.Command {
@@ -65,15 +66,18 @@ func formatFineTunedModel(model string) string {
 
 func newOperationSubmitCommand() *cobra.Command {
 	var filename string
+	var model string
+	var trainingFile string
+	var validationFile string
+	var suffix string
+	var seed int64
 	cmd := &cobra.Command{
 		Use:   "submit",
 		Short: "submit fine tuning job",
 		RunE: func(cmd *cobra.Command, args []string) error {
 			ctx := azdext.WithAccessToken(cmd.Context())
-
-			// Validate filename is provided
-			if filename == "" {
-				return fmt.Errorf("config file is required, use -f or --file flag")
+			if filename == "" && (model == "" || trainingFile == "") {
+				return fmt.Errorf("either config file or model and training-file parameters are required")
 			}
 
 			azdClient, err := azdext.NewAzdClient()
@@ -90,13 +94,36 @@ func newOperationSubmitCommand() *cobra.Command {
 				fmt.Printf("failed to start spinner: %v\n", err)
 			}
 
-			// Parse and validate the YAML configuration file
-			color.Green("\nparsing configuration file...")
-			config, err := Utils.ParseCreateFineTuningRequestConfig(filename)
-			if err != nil {
-				_ = spinner.Stop(ctx)
-				fmt.Println()
-				return err
+			// Parse and validate the YAML configuration file if provided
+			var config *models.CreateFineTuningRequest
+			if filename != "" {
+				color.Green("\nparsing configuration file...")
+				config, err = Utils.ParseCreateFineTuningRequestConfig(filename)
+				if err != nil {
+					_ = spinner.Stop(ctx)
+					fmt.Println()
+					return err
+				}
+			} else {
+				config = &models.CreateFineTuningRequest{}
+			}
+
+			// Override config values with command-line parameters if provided
+			if model != "" {
+				config.BaseModel = model
+			}
+			if trainingFile != "" {
+
+				config.TrainingFile = trainingFile
+			}
+			if validationFile != "" {
+				config.ValidationFile = &validationFile
+			}
+			if suffix != "" {
+				config.Suffix = &suffix
+			}
+			if seed != 0 {
+				config.Seed = &seed
 			}
 
 			fineTuneSvc, err := services.NewFineTuningService(ctx, azdClient, nil)
@@ -108,14 +135,15 @@ func newOperationSubmitCommand() *cobra.Command {
 
 			// Submit the fine-tuning job using CreateJob from JobWrapper
 			job, err := fineTuneSvc.CreateFineTuningJob(ctx, config)
+			_ = spinner.Stop(ctx)
+			fmt.Println()
+
 			if err != nil {
-				_ = spinner.Stop(ctx)
-				fmt.Println()
 				return err
 			}
 
 			// Print success message
-			fmt.Println("\n", strings.Repeat("=", 120))
+			fmt.Println("\n", strings.Repeat("=", 60))
 			color.Green("\nsuccessfully submitted fine-tuning Job!\n")
 			fmt.Printf("Job ID: %s\n", job.ID)
 			fmt.Printf("Model: %s\n", job.BaseModel)
@@ -124,16 +152,22 @@ func newOperationSubmitCommand() *cobra.Command {
 			if job.FineTunedModel != "" {
 				fmt.Printf("Fine-tuned: %s\n", job.FineTunedModel)
 			}
-			fmt.Println(strings.Repeat("=", 120))
-
-			_ = spinner.Stop(ctx)
-			fmt.Println()
+			fmt.Println(strings.Repeat("=", 60))
 
 			return nil
 		},
 	}
 
-	cmd.Flags().StringVarP(&filename, "file", "f", "", "Path to the config file")
-
+	cmd.Flags().StringVarP(&filename, "file", "f", "", "Path to the config file.")
+	cmd.Flags().StringVarP(&model, "model", "m", "", "Base model to fine-tune. Overrides config file. Required if --file is not provided")
+	cmd.Flags().StringVarP(&trainingFile, "training-file", "t", "", "Training file ID or local path. Use 'local:' prefix for local paths. Required if --file is not provided")
+	cmd.Flags().StringVarP(&validationFile, "validation-file", "v", "", "Validation file ID or local path. Use 'local:' prefix for local paths.")
+	cmd.Flags().StringVarP(&suffix, "suffix", "s", "", "An optional string of up to 64 characters that will be added to your fine-tuned model name. Overrides config file.")
+	cmd.Flags().Int64VarP(&seed, "seed", "r", 0, "Random seed for reproducibility of the job. If a seed is not specified, one will be generated for you. Overrides config file.")
+
+	// Either a config file or at least the `model` and `training-file` flags must be provided
+	cmd.MarkFlagFilename("file", "yaml", "yml")
+	cmd.MarkFlagsOneRequired("file", "model")
+	cmd.MarkFlagsRequiredTogether("model", "training-file")
 	return cmd
 }
diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go
index e27775fe6b5..b261e67431d 100644
--- a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go
+++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go
@@ -113,7 +113,7 @@ func (p *OpenAIProvider) UploadFile(ctx context.Context, filePath string) (strin
 	// Show spinner while creating job
 	spinner := ux.NewSpinner(&ux.SpinnerOptions{
-		Text: "uploading the file using openai provider",
+		Text: "uploading the file for fine-tuning",
 	})
 	if err := spinner.Start(ctx); err != nil {
 		fmt.Printf("failed to start spinner: %v\n", err)
 	}
diff --git a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go
index 1fd86e00fa4..7de78ea649e 100644
--- a/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go
+++ b/cli/azd/extensions/azure.ai.finetune/internal/services/finetune_service.go
@@ -55,7 +55,7 @@ func (s *fineTuningServiceImpl) CreateFineTuningJob(ctx context.Context, req *mo
 	}
 
 	if Utils.IsLocalFilePath(req.TrainingFile) {
-		color.Green("uploading training file...")
+		color.Green("\nuploading training file...")
 
 		trainingDataID, err := s.UploadFile(ctx, Utils.GetLocalFilePath(req.TrainingFile))
 		if err != nil {
@@ -63,7 +63,7 @@ func (s *fineTuningServiceImpl) CreateFineTuningJob(ctx context.Context, req *mo
 		}
 		req.TrainingFile = trainingDataID
 	} else {
-		color.Yellow("Provided training file is non-local, skipping upload...")
+		color.Yellow("\nProvided training file is non-local, skipping upload...")
 	}
 
 	// Upload validation file if provided

From 0db86afaaa1a64cdacf32e3f8e7e95ed3ed7fbc2 Mon Sep 17 00:00:00 2001
From: Zubairuddin Mohammed
Date: Mon, 5 Jan 2026 11:56:48 +0530
Subject: [PATCH 9/9] minor merge fixes

---
 .../extensions/azure.ai.finetune/internal/cmd/operations.go | 3 +--
 .../internal/providers/openai/conversions.go                | 5 +++--
 .../azure.ai.finetune/internal/providers/openai/provider.go | 4 ++--
 3 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go
index 1c3f0c7fcac..e90329f35a8 100644
--- a/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go
+++ b/cli/azd/extensions/azure.ai.finetune/internal/cmd/operations.go
@@ -13,7 +13,6 @@ import (
 	"github.com/azure/azure-dev/cli/azd/pkg/azdext"
 	"github.com/azure/azure-dev/cli/azd/pkg/ux"
 
-	FTYaml "azure.ai.finetune/internal/fine_tuning_yaml"
 	"azure.ai.finetune/internal/services"
 	JobWrapper "azure.ai.finetune/internal/tools"
 	"azure.ai.finetune/internal/utils"
@@ -100,7 +99,7 @@ func newOperationSubmitCommand() *cobra.Command {
 			var config *models.CreateFineTuningRequest
 			if filename != "" {
 				color.Green("\nparsing configuration file...")
-				config, err = Utils.ParseCreateFineTuningRequestConfig(filename)
+				config, err = utils.ParseCreateFineTuningRequestConfig(filename)
 				if err != nil {
 					_ = spinner.Stop(ctx)
 					fmt.Println()
diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go
index ae156d166a4..f27372483f1 100644
--- a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go
+++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/conversions.go
@@ -6,6 +6,7 @@ package openai
 
 import (
 	"encoding/json"
 	"strings"
+
 	"github.com/openai/openai-go/v3"
 	"github.com/openai/openai-go/v3/packages/pagination"
@@ -123,9 +124,9 @@ func convertOpenAIJobCheckpointsToModel(checkpointsPage *pagination.CursorPage[o
 		HasMore: checkpointsPage.HasMore,
 	}
 }
-
+
 // Converts the internal create finetuning request model to OpenAI job parameters
-func ConvertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningRequest) (*openai.FineTuningJobNewParams, error) {
+func convertInternalJobParamToOpenAiJobParams(config *models.CreateFineTuningRequest) (*openai.FineTuningJobNewParams, error) {
 	jobParams := openai.FineTuningJobNewParams{
 		Model:        openai.FineTuningJobNewParamsModel(config.BaseModel),
 		TrainingFile: config.TrainingFile,
diff --git a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go
index ca1ec4013e0..5ed9c1404a9 100644
--- a/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go
+++ b/cli/azd/extensions/azure.ai.finetune/internal/providers/openai/provider.go
@@ -30,7 +30,7 @@ func NewOpenAIProvider(client *openai.Client) *OpenAIProvider {
 
 // CreateFineTuningJob creates a new fine-tuning job via OpenAI API
 func (p *OpenAIProvider) CreateFineTuningJob(ctx context.Context, req *models.CreateFineTuningRequest) (*models.FineTuningJob, error) {
-	params, err := ConvertInternalJobParamToOpenAiJobParams(req)
+	params, err := convertInternalJobParamToOpenAiJobParams(req)
 	if err != nil {
 		return nil, fmt.Errorf("failed to convert internal model to openai: %w", err)
 	}
 
@@ -40,7 +40,7 @@ func (p *OpenAIProvider) CreateFineTuningJob(ctx context.Context, req *models.Cr
 		return nil, fmt.Errorf("failed to create fine-tuning job: %w", err)
 	}
 
-	return ConvertOpenAIJobToModel(*job), nil
+	return convertOpenAIJobToModel(*job), nil
 }
 
 // GetFineTuningStatus retrieves the status of a fine-tuning job