Skip to content

Commit

Permalink
removed todo notes and modified tests to generate observations file
Browse files Browse the repository at this point in the history
  • Loading branch information
kylajones committed Dec 1, 2023
1 parent 24e8066 commit 00fa765
Show file tree
Hide file tree
Showing 4 changed files with 5 additions and 6 deletions.
Binary file added .DS_Store
Binary file not shown.
2 changes: 1 addition & 1 deletion linfa/tests/test_discr_05_LF_nodiscr_HFData_TP15.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def run_test():
exp.surr_pre_it = 1000 # int: Number of pre-training iterations for surrogate model
exp.surr_upd_it = 1000 # int: Number of iterations for the surrogate model update

exp.annealing = False # TODO : turn this on eventually
exp.annealing = False
exp.calibrate_interval = 300 # int: How often to update the surrogate model (default 1000)
exp.budget = 216 # int: Total number of true model evaluations
exp.surr_folder = "./"
Expand Down
7 changes: 3 additions & 4 deletions linfa/tests/test_discr_07_LF_withdiscr_HFData_TP15.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def run_test():
exp.activation_fn = 'relu' # str: Activation function used (default 'relu')
exp.input_order = 'sequential' # str: Input order for create_mask (default 'sequential')
exp.batch_norm_order = True # bool: Order to decide if batch_norm is used (default True)
exp.save_interval = 5000 # int: How often to sample from normalizing flow
exp.save_interval = 1000 # int: How often to sample from normalizing flow

exp.input_size = 2 # int: Dimensionality of input (default 2)
exp.batch_size = 200 # int: Number of samples generated (default 100)
Expand All @@ -29,14 +29,13 @@ def run_test():
exp.lr_decay = 0.9999 # float: Learning rate decay (default 0.9999)
exp.log_interal = 10 # int: How often to show loss stat (default 10)

#### HAD TO TURN THIS OFF FOR NOW
exp.run_nofas = True # normalizing flow with adaptive surrogate
exp.surrogate_type = 'discrepancy' # type of surrogate we are using
exp.surr_pre_it = 1000 # int: Number of pre-training iterations for surrogate model
exp.surr_upd_it = 2000 # int: Number of iterations for the surrogate model update
exp.calibrate_interval = 1000 # int: How often the surrogate model is updated

exp.annealing = False # TODO : turn this on eventually
exp.annealing = False
exp.budget = 216 # int: Total number of true model evaluations
exp.surr_folder = "./"
exp.use_new_surr = True
Expand Down Expand Up @@ -178,7 +177,7 @@ def generate_data(use_true_model=False,num_observations=50):
# Main code
if __name__ == "__main__":

generate_data(use_true_model=True,num_observations=1)
generate_data(use_true_model=True, num_observations=1)

run_test()

Expand Down
2 changes: 1 addition & 1 deletion linfa/tests/test_prior.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def run_test():
exp.activation_fn = 'relu' # str: Activation function used (default 'relu')
exp.input_order = 'sequential' # str: Input order for create_mask (default 'sequential')
exp.batch_norm_order = True # bool: Order to decide if batch_norm is used (default True)
exp.save_interval = 5000 # int: How often to sample from normalizing flow
exp.save_interval = 1000 # int: How often to sample from normalizing flow

exp.input_size = 2 # int: Dimensionality of input (default 2)
exp.batch_size = 200 # int: Number of samples generated (default 100)
Expand Down

0 comments on commit 00fa765

Please sign in to comment.