diff --git a/__pycache__/model.cpython-39.pyc b/__pycache__/model.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4320d7de9ba253882f078b871ef8dc31a137d2a9
Binary files /dev/null and b/__pycache__/model.cpython-39.pyc differ
diff --git a/__pycache__/utils.cpython-39.pyc b/__pycache__/utils.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4baf7c40eed4d077b7dafe81aef6c84a5949c341
Binary files /dev/null and b/__pycache__/utils.cpython-39.pyc differ
diff --git a/controller.py b/controller.py
index f46291fd38fe4b10f87da4e4af038fdf75279c95..bef163e8d9e7f2a334df551c849826fd537da78f 100644
--- a/controller.py
+++ b/controller.py
@@ -37,19 +37,6 @@ pipe.add_step(name='stage_train',
               "General/dataset_project": "${stage_data.parameters.General/dataset_project}"}
 )
 
-model = step_two(
-    dataset_name_training=dataset_name_training,
-    dataset_name_test=dataset_name_test,
-    dataset_project=dataset_project,
-    epochs=10,
-    train_batch_size=256,
-    validation_batch_size=256,
-    train_num_workers=0,
-    validation_num_workers=0,
-    resize=28,
-    lr=1e-3
-)
-
 # Starting the pipeline
 # pipe.start_locally()
 pipe.start(queue='test')
diff --git a/stage_one.py b/stage_one.py
index 8a6a220afd4db724932d4b94f4c93a909c0bdaa7..7c82e6706ade0b7bb614bb662df4070a25c0ab54 100644
--- a/stage_one.py
+++ b/stage_one.py
@@ -1,8 +1,8 @@
 from clearml import Task, Dataset
 
-Task.add_requirements("./requirements.txt")
 
 task = Task.init(project_name="pipeline", task_name="pipeline step 1 dataset artifact")
+task.add_requirements("./requirements.txt")
 
 # only create the task, it will be executed remotely later
 task.execute_remotely()
diff --git a/stage_two.py b/stage_two.py
index 368a61488facbfb9319290faa7445f6578b50c59..288f1f00b5124866ce1a00aa3e0b10252214b20d 100644
--- a/stage_two.py
+++ b/stage_two.py
@@ -75,9 +75,9 @@ def validate(model, testloader, criterion):
     epoch_acc = 100. * (valid_running_correct / len(testloader.dataset))
     return epoch_loss, epoch_acc
 
-Task.add_requirements("./requirements.txt")
 
 task = Task.init(project_name="pipeline", task_name="pipeline step 2 train model")
+task.add_requirements("./requirements.txt")
 
 # only create the task, we will actually execute it later
 task.execute_remotely()
@@ -95,6 +95,8 @@ args = {
     'lr': 1e-3
 }
 
+task.connect(args)
+
 mnist_train = Dataset.get(
     dataset_name=args['train_dataset_name'], dataset_project=args['dataset_project']
 ).get_local_copy()