I ran PIRATE with a simulated dataset, and the process was killed because it ran out of memory. Here are screenshots of the terminal:
My code looks like this:
```python
# Standard imports; the PIRATE-specific names used below (GP, Function, Experiment,
# parse_args_non_adaptive, get_primitive_set, add_memoization, DifferentialResidual,
# SymbolicRegression, pirate_operator, GradientError, DivergenceError) come from the
# PIRATE package -- their import lines were not included in the original snippet.
import operator
import os
import pickle
import random
from time import time

import numpy as np
import pandas as pd
import torch


# Function for making experiments
def make_experiments(dataset):
    x = dataset["t"].values
    funcs = {}
    for col in ["C", "T", "dCdt"]:
        y = dataset[col].values
        gp = GP(x[:, np.newaxis], y[:, np.newaxis])
        gp.train(num_epochs=1, learning_rate=0.1)
        gp.predict_mode()
        # Extract function
        # Sorry for the kludge :(
        f = Function(gp.model.get_mean_function())
        funcs[col] = f
    experiments = [
        Experiment(
            {"C": funcs["C"], "T": funcs["T"]},
            dataset[["t"]],
            inhomogeneous=funcs["dCdt"],
        )
    ]
    fitness_threshold = 1
    return experiments, fitness_threshold


def main(testing=False):
    args = parse_args_non_adaptive(testing)
    if not args.no_write:
        result_file = os.path.join(
            os.path.dirname(__file__),
            "..",
            "output",
            "ex_data",
            "results",
            "n_%i_s_%i.txt" % (args.n_train, args.seed),
        )
        result_dir = os.path.dirname(result_file)
        if not os.path.isdir(result_dir):
            os.makedirs(result_dir)
        if os.path.isfile(result_file) and not args.overwrite:
            print("Already found %s; exit." % result_file)
            exit()

    # RNG seeds
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)

    # Dataset & experiment
    # Import dataset
    folder = ''  # enter folder with dataset
    filename = folder + 'poly2d_v2_' + str(0)  # noiseless dataset
    with open(filename, 'rb') as f:
        Cn, dC, T, t = pickle.load(f)

    # Flatten the data
    tt = np.tile(t, len(T))
    TT = np.repeat(T, len(t))
    Cnf = Cn.flatten()
    dCf = dC.flatten()

    # Convert to PyTorch tensors
    t = torch.from_numpy(tt)
    T = torch.from_numpy(TT)
    Cn = torch.from_numpy(Cnf)
    dC = torch.from_numpy(dCf)

    # Convert the dataset to a dataframe
    dataset = pd.DataFrame({"t": t, "T": T, "C": Cn, "dCdt": dC})
    # system = Elliptic(x1_limit=[0.1, 0.5], x2_limit=[0.25, 0.75])
    # dataset = system.sample(args.n_train ** 2)
    # dataset["solution"] += 1.0

    # Make experiments
    experiments, fitness_threshold = make_experiments(dataset)
    # experiments = [Experiment({"C": Function(Cn), "T": Function(T)}, data=dataset[["t"]], inhomogeneous=Function(dC))]
    # fitness_threshold = 0.1

    # Do symbolic regression
    t_start = time()
    operators = (
        operator.add,
        operator.neg,
        operator.mul,
        pirate_operator.Gradient([0]),
        pirate_operator.Divergence([0]),
        # pirate_operator.ScalarGradient(1)
    )
    primitive_set = get_primitive_set(operators, variable_names=("C", "T"))
    expected_exceptions = (GradientError, DivergenceError)
    fitness_function = add_memoization(DifferentialResidual)(
        experiments,
        primitive_set,
        expected_exceptions=expected_exceptions,
        differential_operators=["grad"],
    )
    symbolic_regression = SymbolicRegression(
        primitive_set,
        fitness_function,
        population=512,
        mating_probability=0.2,
        mutation_probability=0.8,
    )
    symbolic_regression.run(
        iterations=2 if testing else 50,
        verbose=True,
        fitness_threshold=(0.99 * fitness_threshold,),
    )
    t_elapsed = time() - t_start
    print("Elapsed time = {}".format(t_elapsed))


if __name__ == "__main__":
    main()
```


