Adding hyperparam_opt
This commit contains:
hyperparam_opt.py (new file, 46 lines added)
@@ -0,0 +1,46 @@
|
||||
from clearml.automation import UniformParameterRange, UniformIntegerParameterRange, DiscreteParameterRange
|
||||
from clearml.automation import HyperParameterOptimizer
|
||||
from clearml.automation.optuna import OptimizerOptuna
|
||||
|
||||
from clearml import Task
|
||||
|
||||
task = Task.init(
|
||||
project_name='SpoterEmbedding',
|
||||
task_name='Automatic Hyper-Parameter Optimization',
|
||||
task_type=Task.TaskTypes.optimizer,
|
||||
reuse_last_task_id=False
|
||||
)
|
||||
|
||||
|
||||
optimizer = HyperParameterOptimizer(
|
||||
# specifying the task to be optimized, task must be in system already so it can be cloned
|
||||
base_task_id="b0acd4bca5d447a28a882a65cdf3be3e",
|
||||
# setting the hyperparameters to optimize
|
||||
hyper_parameters=[
|
||||
# epochs
|
||||
UniformIntegerParameterRange('Args/epochs', 200, 800),
|
||||
# learning rate
|
||||
UniformParameterRange('Args/lr', 0.000001, 0.1),
|
||||
# optimizer
|
||||
DiscreteParameterRange('Args/optimizer', ['ADAM', 'SGD']),
|
||||
# vector length
|
||||
UniformIntegerParameterRange('Args/vector_length', 10, 100),
|
||||
],
|
||||
# setting the objective metric we want to maximize/minimize
|
||||
objective_metric_title='silhouette_coefficient',
|
||||
objective_metric_series='val',
|
||||
objective_metric_sign='max',
|
||||
|
||||
# setting optimizer
|
||||
optimizer_class=OptimizerOptuna,
|
||||
|
||||
# configuring optimization parameters
|
||||
execution_queue='default',
|
||||
optimization_time_limit=360,
|
||||
compute_time_limit=480,
|
||||
total_max_jobs=20,
|
||||
min_iteration_per_job=0,
|
||||
max_iteration_per_job=150000,
|
||||
)
|
||||
|
||||
task.execute_remotely(queue_name='default', exit_process=True)
|
||||
@@ -12,3 +12,4 @@ clearml==1.10.3
torch
torchvision
tqdm==4.54.1
optuna==3.1.1
||||
Reference in New Issue
Block a user