Adding hyperparam_opt

2023-04-15 11:51:13 +00:00
parent 1f24df1b8f
commit a57bf235da
2 changed files with 50 additions and 0 deletions

hyperparam_opt.py

@@ -0,0 +1,49 @@
from clearml.automation import UniformParameterRange, UniformIntegerParameterRange, DiscreteParameterRange
from clearml.automation import HyperParameterOptimizer
from clearml.automation.optuna import OptimizerOptuna
from clearml import Task

task = Task.init(
    project_name='SpoterEmbedding',
    task_name='Automatic Hyper-Parameter Optimization',
    task_type=Task.TaskTypes.optimizer,
    reuse_last_task_id=False,
)

optimizer = HyperParameterOptimizer(
    # the task to optimize; it must already exist in the system so it can be cloned
    base_task_id='b0acd4bca5d447a28a882a65cdf3be3e',
    # the hyperparameter search space
    hyper_parameters=[
        # number of training epochs
        UniformIntegerParameterRange('Args/epochs', 200, 800),
        # learning rate
        UniformParameterRange('Args/lr', 0.000001, 0.1),
        # optimizer used by the training task
        DiscreteParameterRange('Args/optimizer', ['ADAM', 'SGD']),
        # embedding vector length
        UniformIntegerParameterRange('Args/vector_length', 10, 100),
    ],
    # the objective metric to optimize: maximize the validation silhouette coefficient
    objective_metric_title='silhouette_coefficient',
    objective_metric_series='val',
    objective_metric_sign='max',
    # use Optuna as the underlying search strategy
    optimizer_class=OptimizerOptuna,
    # execution settings: the queue for cloned trial tasks, plus time and job budgets
    execution_queue='default',
    optimization_time_limit=360,  # maximum time for the entire optimization, in minutes
    compute_time_limit=480,  # maximum total compute time across all jobs, in minutes
    total_max_jobs=20,
    min_iteration_per_job=0,
    max_iteration_per_job=150000,
)

# enqueue this controller task on an agent; the local process exits here
task.execute_remotely(queue_name='default', exit_process=True)

# on the agent, execution continues past execute_remotely, so the optimization
# must be started explicitly, awaited, and then stopped
optimizer.start()
optimizer.wait()
optimizer.stop()

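For context, the 'Args/epochs', 'Args/lr', 'Args/optimizer', and 'Args/vector_length' names above assume the base task (the one cloned for each trial) registers its hyperparameters via argparse, which Task.init connects automatically under the 'Args' section, and reports the objective as a scalar with title 'silhouette_coefficient' and series 'val'. A minimal sketch of that contract follows; the argument names match the search space above, but the defaults, task_name, and training loop are hypothetical:

import argparse

from clearml import Task

parser = argparse.ArgumentParser()
parser.add_argument('--epochs', type=int, default=400)        # hypothetical default
parser.add_argument('--lr', type=float, default=0.001)        # hypothetical default
parser.add_argument('--optimizer', type=str, default='ADAM')  # hypothetical default
parser.add_argument('--vector_length', type=int, default=64)  # hypothetical default

# Task.init hooks argparse, so each cloned trial sees the values the optimizer chose
task = Task.init(project_name='SpoterEmbedding', task_name='train_embedding')  # task_name hypothetical
args = parser.parse_args()

for epoch in range(args.epochs):
    # ... train one epoch and evaluate (omitted) ...
    score = 0.0  # placeholder: validation silhouette coefficient for this epoch
    # this scalar is what objective_metric_title/series/sign above point at
    task.get_logger().report_scalar(
        title='silhouette_coefficient', series='val', value=score, iteration=epoch)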
View File

@@ -12,3 +12,4 @@ clearml==1.10.3
torch
torchvision
tqdm==4.54.1
optuna==3.1.1
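The optuna pin is added because clearml.automation.optuna's OptimizerOptuna imports the optuna package at runtime; the requirement makes that dependency explicit. An optional fail-fast guard, shown here as a sketch, could sit near the top of hyperparam_opt.py:

# optional sketch: fail fast with a clear message when the backend is missing
try:
    import optuna  # noqa: F401  (needed by clearml.automation.optuna.OptimizerOptuna)
except ImportError as exc:
    raise SystemExit('optuna is required for OptimizerOptuna: pip install optuna==3.1.1') from exc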