Skip to content

Commit

Permalink
Enabling shift change by passing a double to multiply pi with the 'shift-scalar' key
Browse files Browse the repository at this point in the history

Signed-off-by: Daniel Claudino <[email protected]>
  • Loading branch information
danclaudino committed Apr 1, 2021
1 parent 786010d commit 29e139d
Show file tree
Hide file tree
Showing 3 changed files with 73 additions and 40 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@ class ParameterShiftGradient : public AlgorithmGradientStrategy {
std::shared_ptr<Observable> obs; // Hamiltonian (or any) observable
std::function<std::shared_ptr<CompositeInstruction>(std::vector<double>)>
kernel_evaluator;
double shiftScalar = 0.25;

public:
bool initialize(const HeterogeneousMap parameters) override {
Expand All @@ -47,6 +48,11 @@ class ParameterShiftGradient : public AlgorithmGradientStrategy {
parameters.get<std::function<std::shared_ptr<CompositeInstruction>(
std::vector<double>)>>("kernel-evaluator");
}

if (parameters.keyExists<double>("shift-scalar")) {
shiftScalar = parameters.get<double>("shift-scalar");
}

return true;
}

Expand All @@ -61,13 +67,13 @@ class ParameterShiftGradient : public AlgorithmGradientStrategy {
std::vector<std::shared_ptr<CompositeInstruction>> gradientInstructions;

// loop over parameters
for (int param = 0; param < x.size(); param++) {
for (int param = 0; param < x.size(); param++) {
// parameter shift sign
for (double sign : {1.0, -1.0}) {
for (double sign : {1.0, -1.0}) {

// parameter shift
auto tmpX = x;
tmpX[param] += sign * xacc::constants::pi / 4.0;
tmpX[param] += sign * xacc::constants::pi * shiftScalar;

// get instructions for shifted parameter
std::vector<std::shared_ptr<CompositeInstruction>> kernels;
Expand Down Expand Up @@ -128,7 +134,7 @@ class ParameterShiftGradient : public AlgorithmGradientStrategy {
}

// gradient is (<+> - <->)
dx[gradTerm] = plusGradElement - minusGradElement;
dx[gradTerm] = (plusGradElement - minusGradElement);
shift += 2 * nInstructionsElement[gradTerm];
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,8 @@ TEST(GradientStrategiesTester, checkCentralDifference) {
ansatz2->addInstruction(provider2->createInstruction(
"Ry", std::vector<std::size_t>{0}, {InstructionParameter("x0")}));

auto centralDifference = xacc::getService<AlgorithmGradientStrategy>(
"central");
auto centralDifference =
xacc::getService<AlgorithmGradientStrategy>("central");
centralDifference->initialize({std::make_pair("observable", observable2)});
auto gradientInstructions =
centralDifference->getGradientExecutions(ansatz2, {0.0});
Expand All @@ -90,8 +90,8 @@ TEST(GradientStrategiesTester, checkForwardDifference) {
ansatz3->addInstruction(provider3->createInstruction(
"Ry", std::vector<std::size_t>{0}, {InstructionParameter("x0")}));

auto forwardDifference = xacc::getService<AlgorithmGradientStrategy>(
"forward");
auto forwardDifference =
xacc::getService<AlgorithmGradientStrategy>("forward");
forwardDifference->initialize({std::make_pair("observable", observable3)});
auto gradientInstructions =
forwardDifference->getGradientExecutions(ansatz3, {0.0});
Expand All @@ -116,8 +116,8 @@ TEST(GradientStrategiesTester, checkBackwardDifference) {
ansatz4->addInstruction(provider4->createInstruction(
"Ry", std::vector<std::size_t>{0}, {InstructionParameter("x0")}));

auto backwardDifference = xacc::getService<AlgorithmGradientStrategy>(
"backward");
auto backwardDifference =
xacc::getService<AlgorithmGradientStrategy>("backward");
backwardDifference->initialize({std::make_pair("observable", observable4)});
auto gradientInstructions =
backwardDifference->getGradientExecutions(ansatz4, {0.0});
Expand All @@ -129,18 +129,18 @@ TEST(GradientStrategiesTester, checkBackwardDifference) {
}

TEST(GradientStrategiesTester, checkDeuteronVQE) {
// Use Qpp accelerator
auto accelerator = xacc::getAccelerator("qpp");
EXPECT_EQ(accelerator->name(), "qpp");

// Create the N=2 deuteron Hamiltonian
auto H_N_2 = xacc::quantum::getObservable(
"pauli", std::string("5.907 - 2.1433 X0X1 "
"- 2.1433 Y0Y1"
"+ .21829 Z0 - 6.125 Z1"));

auto optimizer = xacc::getOptimizer("nlopt", {{"nlopt-optimizer", "l-bfgs"}});
xacc::qasm(R"(
// Use Qpp accelerator
auto accelerator = xacc::getAccelerator("qpp");
EXPECT_EQ(accelerator->name(), "qpp");

// Create the N=2 deuteron Hamiltonian
auto H_N_2 = xacc::quantum::getObservable(
"pauli", std::string("5.907 - 2.1433 X0X1 "
"- 2.1433 Y0Y1"
"+ .21829 Z0 - 6.125 Z1"));

auto optimizer = xacc::getOptimizer("nlopt", {{"nlopt-optimizer", "l-bfgs"}});
xacc::qasm(R"(
.compiler xasm
.circuit deuteron_ansatz
.parameters theta
Expand All @@ -149,25 +149,53 @@ TEST(GradientStrategiesTester, checkDeuteronVQE) {
Ry(q[1], theta);
CNOT(q[1],q[0]);
)");
auto ansatz = xacc::getCompiled("deuteron_ansatz");
auto ansatz = xacc::getCompiled("deuteron_ansatz");

// Get the VQE Algorithm and initialize it
auto vqe = xacc::getAlgorithm("vqe");
vqe->initialize({{"ansatz", ansatz},
{"observable", H_N_2},
{"accelerator", accelerator},
{"optimizer", optimizer},
{"gradient_strategy", "parameter-shift"}});

// Allocate some qubits and execute
auto buffer = xacc::qalloc(2);
xacc::set_verbose(true);
vqe->execute(buffer);

// Expected result: -1.74886
EXPECT_NEAR((*buffer)["opt-val"].as<double>(), -1.74886, 1e-4);
}

TEST(GradientStrategiesTester, checkYanPSproblem) {

  // Regression test for the parameter-shift gradient with a non-default
  // shift: the shift applied to each parameter is pi * shiftScalar
  // (here pi/8) instead of the default pi/4.
  int nLayer = 7;
  double param = 2.0 / nLayer, shiftScalar = 1.0 / 8.0;
  auto H = xacc::quantum::getObservable("pauli", std::string("Z0"));
  auto acc = xacc::getAccelerator("qpp");

  // Gradient-based optimizer; start from a known initial parameter so the
  // run is deterministic.
  std::vector<double> init = {param};
  auto opt = xacc::getOptimizer(
      "nlopt", {{"algorithm", "l-bfgs"}, {"initial-parameters", init}});

  // Ansatz: nLayer repeated Rx(x0) rotations on qubit 0, all sharing the
  // single variational parameter x0.
  auto provider = xacc::getService<xacc::IRProvider>("quantum");
  auto ansatz = provider->createComposite("initial-state");

  ansatz->addVariable("x0");
  for (int i = 0; i < nLayer; i++) {
    ansatz->addInstruction(provider->createInstruction("Rx", {0}, {"x0"}));
  }

  // VQE forwards its full parameter map to the gradient strategy, so the
  // "shift-scalar" key reaches ParameterShiftGradient::initialize.
  auto objFxn = xacc::getService<xacc::Algorithm>("vqe");
  objFxn->initialize({{"accelerator", acc},
                      {"observable", H},
                      {"ansatz", ansatz},
                      {"optimizer", opt},
                      {"shift-scalar", shiftScalar},
                      {"gradient_strategy", "parameter-shift"}});
  auto q = xacc::qalloc(1);
  objFxn->execute(q);
}

int main(int argc, char **argv) {
Expand Down
5 changes: 2 additions & 3 deletions quantum/plugins/algorithms/vqe/vqe.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -65,8 +65,7 @@ bool VQE::initialize(const HeterogeneousMap &parameters) {
optimizer->isGradientBased()) {
gradientStrategy = xacc::getService<AlgorithmGradientStrategy>(
parameters.getString("gradient_strategy"));
gradientStrategy->initialize(
{std::make_pair("observable", xacc::as_shared_ptr(observable))});
gradientStrategy->initialize(parameters);
}

if ((parameters.stringExists("gradient_strategy") ||
Expand All @@ -81,7 +80,7 @@ bool VQE::initialize(const HeterogeneousMap &parameters) {
gradientStrategy == nullptr) {
// No gradient strategy was provided, just use autodiff.
gradientStrategy = xacc::getService<AlgorithmGradientStrategy>("autodiff");
gradientStrategy->initialize({{"observable", observable}});
gradientStrategy->initialize(parameters);
}
return true;
}
Expand Down

0 comments on commit 29e139d

Please sign in to comment.