diff --git a/compile.sh b/compile.sh
new file mode 100755
index 00000000..4dd39af9
--- /dev/null
+++ b/compile.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+#SBATCH --job-name=boost
+#SBATCH --output=boost.log
+
+OPTIMIZATION_METHOD="cma"
+OPTIMIZATION_CONFIG="{ \"maxiter\": 2}"
+
+python3 compiling.py --backend numpy --path "./results/XXZ_5seeds/sgd_10q_6l_27/" \
+    --epoch 500 --steps 2 --optimization_method $OPTIMIZATION_METHOD \
+    --optimization_config "$OPTIMIZATION_CONFIG"
diff --git a/compiling.py b/compiling.py
index 8db5ea6e..c85d5426 100644
--- a/compiling.py
+++ b/compiling.py
@@ -35,27 +35,35 @@ def dump_config(config: dict, path):
-    config["path"] = config["path"].name
+    config["path"] = config["path"]
     config["db_rotation"] = config["db_rotation"].value
-    (path / "config.json").write_text(json.dumps(config))
+    (path / "config.json").write_text(json.dumps(config, indent=4))


 def main(args):
     """VQE training."""
-    path = args.path
+    path = pathlib.Path(args.path)
     dump_path = (
         path
-        / f"{args.db_rotation.name}_{args.optimization_method}_{args.epoch}e_{args.steps}s_{args.gd_steps}gds"
+        / f"{args.db_rotation.name}_{args.optimization_method}_{args.epoch}e_{args.steps}s"
     )
     dump_path.mkdir(parents=True, exist_ok=True)
+    config = json.loads((path / OPTIMIZATION_FILE).read_text())
     dump_config(deepcopy(vars(args)), path=dump_path)
+
+    if args.optimization_config is None:
+        opt_options = {}
+    else:
+        opt_options = json.loads(args.optimization_config)
+
     try:
         params = np.load(path / f"parameters/params_ite{args.epoch}.npy")
     except FileNotFoundError:
         params = np.array(
             np.load(path / PARAMS_FILE, allow_pickle=True).tolist()[0][args.epoch]
         )
+
     nqubits = config["nqubits"]
     nlayers = config["nlayers"]
     vqe_backend = construct_backend(backend=config["backend"])
@@ -96,10 +104,14 @@ def main(args):
     )

     metadata = {}
-    print_report(report(vqe, hamiltonian, gci))
+    this_report = report(vqe, hamiltonian, gci)
+    metadata[0] = this_report
+
     for gci_step_nmb in range(args.steps):
         logging.info(
-            f"Optimizing GCI step {gci_step_nmb+1} with optimizer {args.optimization_method}"
+            "\n################################################################################\n"
+            + f"Optimizing GCI step {gci_step_nmb+1} with optimizer {args.optimization_method}"
+            + "\n################################################################################\n"
         )
         it = time.time()
         if args.optimization_method == "sgd":
@@ -108,12 +120,13 @@ def main(args):
                 mode_dbr_list=[args.db_rotation],
                 step_grid=np.linspace(1e-5, 2e-2, 30),
                 lr_range=(1e-3, 1),
-                nmb_gd_epochs=args.gd_steps,
+                nmb_gd_epochs=opt_options["gd_epochs"],
                 threshold=1e-4,
                 max_eval_gd=30,
                 please_be_visual=False,
                 save_path="gci_step",
             )
+
         else:
             if gci_step_nmb == 0:
                 p0 = [0.01]
@@ -125,7 +138,7 @@ def main(args):
                 params=p0,
                 gci=gci,
                 method=args.optimization_method,
-                maxiter=20,
+                **opt_options,
             )
             best_s = optimized_params[0]
             best_b = optimized_params[1:]
@@ -137,12 +150,15 @@ def main(args):
             eo_d_params=eo_d.params,
         )
         logging.info(f"Total optimization time required: {time.time() - it} seconds")
-        metadata[gci_step_nmb] = report(vqe, hamiltonian, gci) | step_data
         gci.mode_double_bracket_rotation = args.db_rotation
         gci.eo_d = eo_d
         gci(best_s)
-        print_report(report(vqe, hamiltonian, gci))
-    (dump_path / "boosting_data.json").write_text(json.dumps(metadata))
+
+        this_report = report(vqe, hamiltonian, gci)
+        print_report(this_report)
+        metadata[gci_step_nmb + 1] = this_report | step_data
+
+    (dump_path / "boosting_data.json").write_text(json.dumps(metadata, indent=4))


 def report(vqe, hamiltonian, gci):
@@ -207,19 +223,11 @@ def print_report(report: dict):
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(description="Boosting VQE with DBI.")
     parser.add_argument("--backend", default="qibojit", type=str, help="Qibo backend")
-    parser.add_argument(
-        "--path",
-        type=pathlib.Path,
-        default=pathlib.Path("XXZ_5seeds/moreonXXZ/sgd_10q_7l_42"),
-        help="Output folder",
-    )
+    parser.add_argument("--path", type=str, help="Output folder")
     parser.add_argument(
         "--epoch", default=-1, type=int, help="VQE epoch where DBI will be applied."
     )
     parser.add_argument("--steps", default=2, type=int, help="DBI steps")
-    parser.add_argument(
-        "--gd_steps", default=1, type=int, help="Gradient descent steps"
-    )
     parser.add_argument("--order", default=2, type=int, help="Suzuki-Trotter order")
     parser.add_argument(
         "--db_rotation",
@@ -231,5 +239,10 @@ def print_report(report: dict):
     parser.add_argument(
         "--optimization_method", default="sgd", type=str, help="Optimization method"
     )
+    parser.add_argument(
+        "--optimization_config",
+        type=str,
+        help="Options to customize the optimizer training.",
+    )
     args = parser.parse_args()
     main(args)
diff --git a/run_sgd.sh b/run_sgd.sh
deleted file mode 100755
index 53749849..00000000
--- a/run_sgd.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-#SBATCH --job-name=boostvqe
-#SBATCH --output=boostvqe.log
-
-NQUBITS=4
-NLAYERS=2
-
-DBI_STEPS=2
-NBOOST=2
-BOOST_FREQUENCY=10
-
-NSHOTS=10000
-TOL=1e-8
-ACC=0.5
-
-OPTIMIZER="sgd"
-BACKEND="tensorflow"
-OPTIMIZER_OPTIONS="{ \"optimizer\": \"Adagrad\", \"learning_rate\": 0.1, \"nmessage\": 1, \"nepochs\": $BOOST_FREQUENCY }"
-
-python main.py --nqubits $NQUBITS --nlayers $NLAYERS --optimizer $OPTIMIZER \
-    --output_folder results/debugging --backend $BACKEND --tol $TOL \
-    --dbi_step $DBI_STEPS --seed 42 \
-    --boost_frequency $BOOST_FREQUENCY --nboost $NBOOST \
-    --optimizer_options "$OPTIMIZER_OPTIONS"
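Usage note: compile.sh above exercises the new --optimization_config flag with the cma optimizer, where the parsed JSON is forwarded as keyword arguments to the optimizer call (e.g. maxiter). When --optimization_method is sgd, the gradient-descent epoch count previously supplied by the removed --gd_steps flag is instead read from the same JSON under the "gd_epochs" key, so an sgd run needs that key in its config. A minimal invocation sketch for the sgd case, reusing the path from compile.sh; the gd_epochs value of 1 is only illustrative:

    OPTIMIZATION_METHOD="sgd"
    OPTIMIZATION_CONFIG="{ \"gd_epochs\": 1 }"

    python3 compiling.py --backend numpy --path "./results/XXZ_5seeds/sgd_10q_6l_27/" \
        --epoch 500 --steps 2 --optimization_method $OPTIMIZATION_METHOD \
        --optimization_config "$OPTIMIZATION_CONFIG"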