Support for V2 primitives #843

Merged Nov 11, 2024 · 94 commits

Commits
009c399  Update README.md (FrancescaSchiav, Feb 29, 2024)
5096546  Merge branch 'qiskit-community:main' into main (OkuyanBoga, Mar 1, 2024)
32219fb  Generalize the Einstein summation signature (edoaltamura, Mar 14, 2024)
17d8c33  Add reno (edoaltamura, Mar 15, 2024)
d6f688d  Update Copyright (edoaltamura, Mar 15, 2024)
034785b  Rename and add test (edoaltamura, Mar 20, 2024)
04b886d  Update Copyright (edoaltamura, Mar 20, 2024)
11cde5f  Merge branch 'qiskit-community:main' into main (OkuyanBoga, Apr 3, 2024)
aea890d  Merge pull request #18 from OkuyanBoga/torch_issue716 (OkuyanBoga, Apr 3, 2024)
7b2e9be  Add docstring for `test_get_einsum_signature` (edoaltamura, Apr 3, 2024)
6a8a136  Correct spelling (edoaltamura, Apr 3, 2024)
31a826e  Disable spellcheck for comments (edoaltamura, Apr 3, 2024)
5b4f617  Add `docstring` in pylint dict (edoaltamura, Apr 3, 2024)
b0d0590  Delete example in docstring (edoaltamura, Apr 3, 2024)
240d02f  Add Einstein in pylint dict (edoaltamura, Apr 3, 2024)
f8c32dd  Add full use case in einsum dict (edoaltamura, Apr 8, 2024)
34322b2  Spelling and type ignore (edoaltamura, Apr 8, 2024)
94ec48c  Spelling and type ignore (edoaltamura, Apr 8, 2024)
16c8454  Spelling and type ignore (edoaltamura, Apr 8, 2024)
00130f2  Spelling and type ignore (edoaltamura, Apr 8, 2024)
e045c16  Spelling and type ignore (edoaltamura, Apr 8, 2024)
22d94ce  Remove for loop in einsum function and remove Literal arguments (1/2) (edoaltamura, Apr 24, 2024)
95dd9df  Remove for loop in einsum function and remove Literal arguments (1/2) (edoaltamura, Apr 24, 2024)
4cbf0c3  Remove for loop in einsum function and remove Literal arguments (2/2) (edoaltamura, Apr 24, 2024)
c4dca19  Update RuntimeError msg (edoaltamura, Apr 30, 2024)
d5ed96b  Update RuntimeError msg - line too long (edoaltamura, Apr 30, 2024)
d6f3d47  Trigger CI (edoaltamura, May 2, 2024)
5ed6345  Merge branch 'main' into main (edoaltamura, May 2, 2024)
9ccc3a2  Merge branch 'qiskit-community:main' into main (edoaltamura, Jun 7, 2024)
3f669b0  Merge branch 'qiskit-community:main' into main (edoaltamura, Jun 19, 2024)
3846d4d  Merge algos, globals.random to fix (edoaltamura, Jul 11, 2024)
070aa81  Fixed `algorithms_globals` (edoaltamura, Jul 11, 2024)
ddc160f  Import /tests and run CI locally (edoaltamura, Jul 11, 2024)
c5a55ad  Fix copyrights and some spellings (edoaltamura, Jul 11, 2024)
f4d49eb  Ignore mypy in 8 instances (edoaltamura, Jul 18, 2024)
2d1209f  Merge spell dicts (edoaltamura, Jul 29, 2024)
2735810  Black reformatting (edoaltamura, Jul 29, 2024)
840c270  Black reformatting (edoaltamura, Jul 31, 2024)
c2f726a  Add reno (edoaltamura, Jul 31, 2024)
cf2d6b0  Merge remote-tracking branch 'origin/main' into migrate-algo (edoaltamura, Jul 31, 2024)
5976830  Lint sanitize (edoaltamura, Jul 31, 2024)
5e07acc  Pylint (edoaltamura, Jul 31, 2024)
b997bb0  Pylint (edoaltamura, Jul 31, 2024)
c464459  Pylint (edoaltamura, Jul 31, 2024)
51610a1  Pylint (edoaltamura, Jul 31, 2024)
c42688c  Fix relative imports in tutorials (edoaltamura, Jul 31, 2024)
db9b03f  Fix relative imports in tutorials (edoaltamura, Jul 31, 2024)
21badc4  Remove algorithms from Jupyter magic methods (edoaltamura, Jul 31, 2024)
e8628cc  Temporarily disable "Run stable tutorials" tests (edoaltamura, Aug 1, 2024)
da63c5b  Change the docstrings with imports from qiskit_algorithms (edoaltamura, Aug 1, 2024)
0c5825c  Styling (edoaltamura, Aug 1, 2024)
c0974f9  Update qiskit_machine_learning/optimizers/gradient_descent.py (edoaltamura, Aug 1, 2024)
7490b38  Update qiskit_machine_learning/optimizers/optimizer_utils/learning_ra… (edoaltamura, Aug 1, 2024)
d38154b  Add more tests for utils (edoaltamura, Aug 2, 2024)
fe021e9  Add more tests for optimizers: adam, bobyqa, gsls and imfil (edoaltamura, Aug 2, 2024)
51e3ea7  Fix random seed for volatile optimizers (edoaltamura, Aug 2, 2024)
fb4fc39  Fix random seed for volatile optimizers (edoaltamura, Aug 2, 2024)
3cb3850  Add more tests (edoaltamura, Aug 2, 2024)
3da9109  Pylint dict (edoaltamura, Aug 2, 2024)
d34c4c9  Activate scikit-quant-0.8.2 (edoaltamura, Aug 2, 2024)
1f6ca7a  Remove scikit-quant methods (edoaltamura, Aug 2, 2024)
b5875a3  Remove scikit-quant methods (2) (edoaltamura, Aug 2, 2024)
800cca4  Edit the release notes and Qiskit version 1+ (edoaltamura, Aug 5, 2024)
e98200a  Edit the release notes and Qiskit version 1+ (edoaltamura, Aug 5, 2024)
f349f7c  Add Qiskit 1.0 upgrade in reno (edoaltamura, Aug 5, 2024)
154d6a7  Add Qiskit 1.0 upgrade in reno (edoaltamura, Aug 5, 2024)
c728400  Add Qiskit 1.0 upgrade in reno (edoaltamura, Aug 5, 2024)
3294731  Apply line breaks (edoaltamura, Aug 6, 2024)
9e53371  Restructure line breaks (edoaltamura, Aug 6, 2024)
2bbb57c  Added support for SamplerV2 primitives (#49) (OkuyanBoga, Nov 7, 2024)
1712ebe  Added support for EstimatorV2 primitives (#48) (OkuyanBoga, Nov 7, 2024)
2bf2668  Pulled changes from main (OkuyanBoga, Nov 8, 2024)
e52575b  Quick fix (OkuyanBoga, Nov 8, 2024)
805a6b1  bugfix for V1 (OkuyanBoga, Nov 8, 2024)
9a6574b  formatting (oscar-wallis, Nov 8, 2024)
1d03d4f  Prep-ing for 0.8 (#53) (oscar-wallis, Nov 8, 2024)
79e9b2e  Merge remote-tracking branch 'upstream/main' into update-V2 (edoaltamura, Nov 8, 2024)
5606dd6  Update test_qbayesian (OkuyanBoga, Nov 8, 2024)
45bc6f8  Bugfixing the test_gradient (oscar-wallis, Nov 8, 2024)
e69c03d  Fixing an Options error with sampler_gradient (oscar-wallis, Nov 8, 2024)
56dc948  Merge branch 'update-V2' of https://github.com/OkuyanBoga/hc-qiskit-m… (oscar-wallis, Nov 8, 2024)
bd41778  Linting and formatting (edoaltamura, Nov 8, 2024)
3622bc2  Add reno (edoaltamura, Nov 8, 2024)
4844894  Fix dict typing definition (edoaltamura, Nov 8, 2024)
e386aaf  Fix mypy (edoaltamura, Nov 8, 2024)
2527ea7  Issue deprecation warnings (edoaltamura, Nov 8, 2024)
6c6efc5  Update skip test message (edoaltamura, Nov 8, 2024)
da04d85  Update deprecation warning for qbayesian.py (edoaltamura, Nov 8, 2024)
9472261  Update deprecation warning for qbayesian.py (edoaltamura, Nov 8, 2024)
fc716fe  Add headers in deprecation.py (edoaltamura, Nov 8, 2024)
e0c6b7d  Add headers in deprecation.py (edoaltamura, Nov 8, 2024)
0f09f4f  Add headers in deprecation.py (edoaltamura, Nov 8, 2024)
6a44cbe  Correct spelling (edoaltamura, Nov 8, 2024)
5d653b0  Add spelling `msg` (edoaltamura, Nov 8, 2024)
Changes from 1 commit: "Ignore mypy in 8 instances"
edoaltamura committed Jul 18, 2024
Verified: this commit was created on GitHub.com and signed with GitHub's verified signature (the key has since expired).
commit f4d49eb089a91344bd7e598181c7d9a76b448b8e
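Note that every suppression in this commit is scoped to a specific mypy error code rather than being a blanket ignore. A minimal standalone sketch (not from this PR) of the difference:

from typing import Optional

x: Optional[int] = None

# A bare "# type: ignore" would hide every error on this line; scoping it
# to a code silences only that diagnostic and keeps the rest of the check:
y: int = x  # type: ignore[assignment]

# Running mypy with --warn-unused-ignores additionally flags any ignore
# comment that no longer suppresses anything, so stale suppressions surface.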
qiskit_machine_learning/algorithms/trainable_model.py (6 changes: 3 additions & 3 deletions)

@@ -244,7 +244,7 @@ def _choose_initial_point(self) -> np.ndarray:
             An array as an initial point
         """
         if self._warm_start and self._fit_result is not None:
-            self._initial_point = self._fit_result.x
+            self._initial_point = self._fit_result.x  # type: ignore[assignment]
         elif self._initial_point is None:
             self._initial_point = algorithm_globals.random.random(self._neural_network.num_weights)
         return self._initial_point

@@ -287,13 +287,13 @@ def _minimize(self, function: ObjectiveFunction) -> OptimizerResult:

         initial_point = self._choose_initial_point()
         if callable(self._optimizer):
-            optimizer_result = self._optimizer(
+            optimizer_result = self._optimizer(  # type: ignore[call-arg]
                 fun=objective, x0=initial_point, jac=function.gradient
             )
         else:
             optimizer_result = self._optimizer.minimize(
                 fun=objective,
                 x0=initial_point,
-                jac=function.gradient,
+                jac=function.gradient,  # type: ignore[arg-type]
             )
         return optimizer_result
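For context on the `callable(self._optimizer)` branch above: the model accepts either an optimizer object with a `.minimize()` method or a bare SciPy-style callable, which is invoked directly. A minimal sketch of the callable form, with the toy `objective` as an illustrative stand-in for the model's loss:

from functools import partial

import numpy as np
from scipy.optimize import minimize

def objective(weights: np.ndarray) -> float:
    # Toy objective standing in for the model's training loss.
    return float(np.sum(weights**2))

# A plain callable optimizer: called directly rather than via .minimize().
callable_opt = partial(minimize, method="COBYLA", options={"maxiter": 25})
result = callable_opt(fun=objective, x0=np.full(3, 0.5))
print(result.x, result.fun)  # optimal point and optimal value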
qiskit_machine_learning/kernels/algorithms/quantum_kernel_trainer.py (file header not captured; path inferred from the hunk contents)

@@ -199,27 +199,27 @@ def fit(

         # Randomly initialize the initial point if one was not passed
         if self._initial_point is None:
-            self._initial_point = algorithm_globals.random.random(num_params)
+            self._initial_point = algorithm_globals.random.random(num_params)  # type: ignore[assignment]

         # Perform kernel optimization
         loss_function = partial(
             self._loss.evaluate, quantum_kernel=self.quantum_kernel, data=data, labels=labels
         )
         if callable(self._optimizer):
-            opt_results = self._optimizer(fun=loss_function, x0=self._initial_point)
+            opt_results = self._optimizer(fun=loss_function, x0=self._initial_point)  # type: ignore[call-arg, arg-type]
         else:
             opt_results = self._optimizer.minimize(
                 fun=loss_function,
-                x0=self._initial_point,
+                x0=self._initial_point,  # type: ignore[arg-type]
             )

         # Return kernel training results
         result = QuantumKernelTrainerResult()
         result.optimizer_evals = opt_results.nfev
         result.optimal_value = opt_results.fun
-        result.optimal_point = opt_results.x
+        result.optimal_point = opt_results.x  # type: ignore[assignment]
         result.optimal_parameters = dict(
-            zip(self.quantum_kernel.training_parameters, opt_results.x)
+            zip(self.quantum_kernel.training_parameters, opt_results.x)  # type: ignore[arg-type]
         )

         # Return the QuantumKernel in optimized state
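The result-packing lines rely on the optimizer result exposing `nfev`, `fun`, and `x` regardless of which branch produced it; SciPy's `OptimizeResult` uses the same field names, e.g.:

from scipy.optimize import minimize

res = minimize(lambda p: (p[0] - 1.0) ** 2, x0=[0.0], method="COBYLA")
print(res.nfev, res.fun, res.x)  # evaluations, optimal value, optimal point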
qiskit_machine_learning/kernels/fidelity_quantum_kernel.py (8 changes: 4 additions & 4 deletions)

@@ -229,8 +229,8 @@ def _get_kernel_entries(
             job = self._fidelity.run(
                 [self._feature_map] * num_circuits,
                 [self._feature_map] * num_circuits,
-                left_parameters,
-                right_parameters,
+                left_parameters,  # type: ignore[arg-type]
+                right_parameters,  # type: ignore[arg-type]
             )
             kernel_entries = job.result().fidelities
         else:

@@ -249,8 +249,8 @@ def _get_kernel_entries(
                 job = self._fidelity.run(
                     [self._feature_map] * (end_idx - start_idx),
                     [self._feature_map] * (end_idx - start_idx),
-                    chunk_left_parameters,
-                    chunk_right_parameters,
+                    chunk_left_parameters,  # type: ignore[arg-type]
+                    chunk_right_parameters,  # type: ignore[arg-type]
                 )
                 # Extend the kernel_entries list with the results from this chunk
                 kernel_entries.extend(job.result().fidelities)
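The second hunk sits in the batched path, which splits the parameter arrays into fixed-size chunks (bounded, if I read the surrounding code correctly, by the kernel's `max_circuits_per_job` setting) and submits one fidelity job per chunk. A standalone sketch of that slicing pattern, with illustrative names:

import numpy as np

def chunk_indices(num_entries: int, max_per_job: int):
    """Yield (start, end) pairs covering range(num_entries) in chunks."""
    for start in range(0, num_entries, max_per_job):
        yield start, min(start + max_per_job, num_entries)

left_parameters = np.random.rand(10, 2)
right_parameters = np.random.rand(10, 2)

for start_idx, end_idx in chunk_indices(len(left_parameters), max_per_job=4):
    chunk_left_parameters = left_parameters[start_idx:end_idx]
    chunk_right_parameters = right_parameters[start_idx:end_idx]
    # one fidelity job would be submitted per chunk here
    print(start_idx, end_idx, chunk_left_parameters.shape, chunk_right_parameters.shape)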
qiskit_machine_learning/neural_networks/estimator_qnn.py (4 changes: 2 additions & 2 deletions)

@@ -270,10 +270,10 @@ def _backward(

         job = None
         if self._input_gradients:
-            job = self.gradient.run(circuits, observables, param_values)
+            job = self.gradient.run(circuits, observables, param_values)  # type: ignore[arg-type]
         elif len(parameter_values[0]) > self._num_inputs:
             params = [self._circuit.parameters[self._num_inputs :]] * num_circuits
-            job = self.gradient.run(circuits, observables, param_values, parameters=params)
+            job = self.gradient.run(circuits, observables, param_values, parameters=params)  # type: ignore[arg-type]

         if job is not None:
             try:
qiskit_machine_learning/neural_networks/sampler_qnn.py (4 changes: 2 additions & 2 deletions)

@@ -414,10 +414,10 @@ def _backward(

         job = None
         if self._input_gradients:
-            job = self.gradient.run(circuits, parameter_values)
+            job = self.gradient.run(circuits, parameter_values)  # type: ignore[arg-type]
         elif len(parameter_values[0]) > self._num_inputs:
             params = [self._circuit.parameters[self._num_inputs :]] * num_samples
-            job = self.gradient.run(circuits, parameter_values, parameters=params)
+            job = self.gradient.run(circuits, parameter_values, parameters=params)  # type: ignore[arg-type]

         if job is not None:
             try:
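In both QNN `_backward` hunks, the `parameters=params` argument restricts differentiation to the trainable weights by slicing the first `num_inputs` entries off the circuit's parameter list. A standalone sketch of that slicing (circuit and names are illustrative; the positional slice works because `QuantumCircuit.parameters` is sorted by parameter name):

from qiskit.circuit import ParameterVector, QuantumCircuit

num_inputs = 2
inputs = ParameterVector("in", num_inputs)  # "in..." sorts before "w..."
weights = ParameterVector("w", 3)

qc = QuantumCircuit(1)
for p in [*inputs, *weights]:
    qc.ry(p, 0)

# qc.parameters is sorted by name, so the first num_inputs entries are the
# inputs; slicing them off leaves only the trainable weight parameters.
weight_params = list(qc.parameters)[num_inputs:]
assert weight_params == list(weights)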
test/algorithms/classifiers/test_neural_network_classifier.py (4 changes: 2 additions & 2 deletions)

@@ -61,9 +61,9 @@ def _create_optimizer(self, opt: str) -> Optimizer | None:
         if opt == "bfgs":
             optimizer = L_BFGS_B(maxiter=5)
         elif opt == "cobyla":
-            optimizer = COBYLA(maxiter=25)
+            optimizer = COBYLA(maxiter=25)  # type: ignore[assignment]
         elif opt == "callable":
-            optimizer = partial(minimize, method="COBYLA", options={"maxiter": 25})
+            optimizer = partial(minimize, method="COBYLA", options={"maxiter": 25})  # type: ignore[assignment]
         else:
             optimizer = None
test/algorithms/regressors/test_neural_network_regressor.py (4 changes: 2 additions & 2 deletions)

@@ -80,9 +80,9 @@ def _create_regressor(
         if opt == "bfgs":
             optimizer = L_BFGS_B(maxiter=5)
         elif opt == "cobyla":
-            optimizer = COBYLA(maxiter=25)
+            optimizer = COBYLA(maxiter=25)  # type: ignore[assignment]
         elif opt == "callable":
-            optimizer = partial(minimize, method="COBYLA", options={"maxiter": 25})
+            optimizer = partial(minimize, method="COBYLA", options={"maxiter": 25})  # type: ignore[assignment]
         else:
             optimizer = None
test/kernels/test_fidelity_qkernel.py (4 changes: 2 additions & 2 deletions)

@@ -279,7 +279,7 @@ def create_fidelity_circuit(
             ) -> QuantumCircuit:
                 raise NotImplementedError()

-            def _run(
+            def _run(  # type: ignore[override]
                 self,
                 circuits_1: QuantumCircuit | Sequence[QuantumCircuit],
                 circuits_2: QuantumCircuit | Sequence[QuantumCircuit],

@@ -294,7 +294,7 @@ def _run(

             @staticmethod
             def _call(fidelities, options) -> StateFidelityResult:
-                return StateFidelityResult(fidelities, [], {}, options)
+                return StateFidelityResult(fidelities, [], {}, options)  # type: ignore[arg-type]

         with self.subTest("No PSD enforcement"):
             kernel = FidelityQuantumKernel(fidelity=MockFidelity(), enforce_psd=False)
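Finally, on the `# type: ignore[override]` in the `MockFidelity` stub: mypy's `[override]` code fires when a subclass method's signature is incompatible with the base class, which is deliberate here since the mock redefines `_run` for test purposes. A minimal standalone illustration of the same error code (toy classes, not from this PR):

class Base:
    def run(self, values: list[float]) -> float:
        return sum(values)

class Stub(Base):
    # Adding a required parameter breaks substitutability, so mypy reports
    # error code [override]; the scoped ignore acknowledges it on purpose.
    def run(self, values: list[float], extra: str) -> float:  # type: ignore[override]
        return 0.0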