From b619f73320813646d1951d6ecacdf62809f5c365 Mon Sep 17 00:00:00 2001 From: Corey Ostrove Date: Fri, 21 Feb 2025 14:23:22 -0700 Subject: [PATCH 1/6] Forward simulator casting for FPR Minor bugfix for per-germ global FPR which uses some germ selection primitives that require the matrix forward simulator. --- pygsti/algorithms/fiducialpairreduction.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/pygsti/algorithms/fiducialpairreduction.py b/pygsti/algorithms/fiducialpairreduction.py index 80956fd2e..a6c17386a 100644 --- a/pygsti/algorithms/fiducialpairreduction.py +++ b/pygsti/algorithms/fiducialpairreduction.py @@ -19,22 +19,20 @@ import scipy.linalg as _sla from math import ceil -import time from pygsti import baseobjs as _baseobjs from pygsti import circuits as _circuits from pygsti.circuits import circuitconstruction as _gsc from pygsti.modelmembers.operations import EigenvalueParamDenseOp as _EigenvalueParamDenseOp -from pygsti.tools import apply_aliases_to_circuits as _apply_aliases_to_circuits from pygsti.tools import remove_duplicates as _remove_duplicates from pygsti.tools import slicetools as _slct from pygsti.tools.legacytools import deprecate as _deprecated_fn +from pygsti.forwardsims import MatrixForwardSimulator as _MatrixForwardSimulator -from pygsti.algorithms.germselection import construct_update_cache, minamide_style_inverse_trace, compact_EVD, compact_EVD_via_SVD, germ_set_spanning_vectors +from pygsti.algorithms.germselection import construct_update_cache, minamide_style_inverse_trace, compact_EVD, germ_set_spanning_vectors from pygsti.algorithms import scoring as _scoring -from pygsti.tools.matrixtools import print_mx import warnings @@ -1658,6 +1656,10 @@ def find_sufficient_fiducial_pairs_per_germ_global(target_model, prep_fiducials, `prep_fiducials` and `meas_fiducials`). 
""" + if not isinstance(target_model.sim, _MatrixForwardSimulator): + target_model = target_model.copy() + target_model.sim = 'matrix' + printer = _baseobjs.VerbosityPrinter.create_printer(verbosity) #if no germ_vector_spanning_set is passed in compute it here. From 0b13c44497a9401b0954a0e78b98435f6e620e77 Mon Sep 17 00:00:00 2001 From: Corey Ostrove Date: Fri, 21 Feb 2025 14:24:58 -0700 Subject: [PATCH 2/6] Fix nondeterministic germ selection bug Fix a bug that was introduced in a recent PR related to the fact that the germ list can in some instances be None (correctly so). --- pygsti/algorithms/germselection.py | 20 +++++++++++--------- test/unit/algorithms/test_germselection.py | 3 ++- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/pygsti/algorithms/germselection.py b/pygsti/algorithms/germselection.py index 163cc6875..7e20bf639 100644 --- a/pygsti/algorithms/germselection.py +++ b/pygsti/algorithms/germselection.py @@ -409,15 +409,17 @@ def find_germs(target_model, randomize=True, randomization_strength=1e-2, #force the line labels on each circuit to match the state space labels for the target model. #this is suboptimal for many-qubit models, so will probably want to revisit this. 
#TODO finalGermList = [] - for ckt in germList: - if ckt._static: - new_ckt = ckt.copy(editable=True) - new_ckt.line_labels = target_model.state_space.state_space_labels - new_ckt.done_editing() - finalGermList.append(new_ckt) - else: - ckt.line_labels = target_model.state_space.state_space_labels - finalGermList.append(ckt) + if germList is not None: + for ckt in germList: + if ckt._static: + new_ckt = ckt.copy(editable=True) + new_ckt.line_labels = target_model.state_space.state_space_labels + new_ckt.done_editing() + finalGermList.append(new_ckt) + else: + ckt.line_labels = target_model.state_space.state_space_labels + finalGermList.append(ckt) + return finalGermList diff --git a/test/unit/algorithms/test_germselection.py b/test/unit/algorithms/test_germselection.py index 6415717bb..d2cac0cec 100644 --- a/test/unit/algorithms/test_germselection.py +++ b/test/unit/algorithms/test_germselection.py @@ -147,7 +147,8 @@ class GenerateGermsTester(GermSelectionData, BaseCase): def test_generate_germs_with_candidate_germ_counts(self): germs = germsel.find_germs( self.mdl_target_noisy, randomize=False, - candidate_germ_counts={3: 'all upto', 4: 10, 5: 10, 6: 10} + candidate_germ_counts={3: 'all upto', 4: 10, 5: 10, 6: 10}, + candidate_seed=1234 ) # TODO assert correctness From 86a89185b569982b518c36e58d56c9b26eb6d947 Mon Sep 17 00:00:00 2001 From: Corey Ostrove Date: Fri, 21 Feb 2025 14:43:36 -0700 Subject: [PATCH 3/6] Minor bugfix Minor pgg FPR bugfix --- pygsti/algorithms/fiducialpairreduction.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygsti/algorithms/fiducialpairreduction.py b/pygsti/algorithms/fiducialpairreduction.py index a6c17386a..b31831a30 100644 --- a/pygsti/algorithms/fiducialpairreduction.py +++ b/pygsti/algorithms/fiducialpairreduction.py @@ -1689,11 +1689,11 @@ def find_sufficient_fiducial_pairs_per_germ_global(target_model, prep_fiducials, #if precomputed_jacobians is None then make sure we pass in None for each germ #hack 
this in without branching by constructing a dictionary of Nones. if precomputed_jacobians is None: - precomputed_jacobians = {germ:None for germ in germ_vector_spanning_set.keys()} + precomputed_jacobians = {germ:None for germ in germ_vector_spanning_set[0].keys()} printer.log("------ Per Germ Global Fiducial Pair Reduction --------") with printer.progress_logging(1): - for i, (germ, germ_vector_list) in enumerate(germ_vector_spanning_set.items()): + for i, (germ, germ_vector_list) in enumerate(germ_vector_spanning_set[0].items()): candidate_solution_list, best_score = get_per_germ_fid_pairs_global(prep_fiducials, meas_fiducials, prep_povm_tuples, target_model, germ, germ_vector_list, mem_limit, printer, dof_per_povm, inv_trace_tol, From 1a37743617d7b795c931ded01f41a5cf89273936 Mon Sep 17 00:00:00 2001 From: Corey Ostrove Date: Fri, 21 Feb 2025 17:04:47 -0700 Subject: [PATCH 4/6] Add fix to germ_set_spanning_vectors Same fix as was needed in the pgg fpr function. --- pygsti/algorithms/germselection.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pygsti/algorithms/germselection.py b/pygsti/algorithms/germselection.py index 7e20bf639..7902842cf 100644 --- a/pygsti/algorithms/germselection.py +++ b/pygsti/algorithms/germselection.py @@ -4533,6 +4533,10 @@ def germ_set_spanning_vectors(target_model, germ_list, assume_real=False, float_ amplificational properties of the reduced vector set. """ printer = _baseobjs.VerbosityPrinter.create_printer(verbosity) + + if not isinstance(target_model.sim, _MatrixForwardSimulator): + target_model = target_model.copy() + target_model.sim = 'matrix' #Add some checks related to the option to switch up data types: if not assume_real: From cc979cd3ee6ebaeac70b396eee50679b21221f93 Mon Sep 17 00:00:00 2001 From: Corey Ostrove Date: Fri, 21 Feb 2025 17:17:16 -0700 Subject: [PATCH 5/6] Proper fix for pgg fpr Proper fix for the tuple unpacking in pgg fpr. 
--- pygsti/algorithms/fiducialpairreduction.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pygsti/algorithms/fiducialpairreduction.py b/pygsti/algorithms/fiducialpairreduction.py index b31831a30..d321c1a29 100644 --- a/pygsti/algorithms/fiducialpairreduction.py +++ b/pygsti/algorithms/fiducialpairreduction.py @@ -1676,7 +1676,7 @@ def find_sufficient_fiducial_pairs_per_germ_global(target_model, prep_fiducials, if germ_set_spanning_kwargs is not None: used_kwargs.update(germ_set_spanning_kwargs) - germ_vector_spanning_set = germ_set_spanning_vectors(target_model, germs, + germ_vector_spanning_set, _ = germ_set_spanning_vectors(target_model, germs, float_type=float_type, evd_tol = evd_tol, verbosity=verbosity, @@ -1689,11 +1689,11 @@ def find_sufficient_fiducial_pairs_per_germ_global(target_model, prep_fiducials, #if precomputed_jacobians is None then make sure we pass in None for each germ #hack this in without branching by constructing a dictionary of Nones. if precomputed_jacobians is None: - precomputed_jacobians = {germ:None for germ in germ_vector_spanning_set[0].keys()} + precomputed_jacobians = {germ:None for germ in germ_vector_spanning_set.keys()} printer.log("------ Per Germ Global Fiducial Pair Reduction --------") with printer.progress_logging(1): - for i, (germ, germ_vector_list) in enumerate(germ_vector_spanning_set[0].items()): + for i, (germ, germ_vector_list) in enumerate(germ_vector_spanning_set.items()): candidate_solution_list, best_score = get_per_germ_fid_pairs_global(prep_fiducials, meas_fiducials, prep_povm_tuples, target_model, germ, germ_vector_list, mem_limit, printer, dof_per_povm, inv_trace_tol, From cf1b5a702a617e1edd048a2dbb32ca041cae8048 Mon Sep 17 00:00:00 2001 From: Corey Ostrove Date: Sun, 23 Feb 2025 13:52:18 -0700 Subject: [PATCH 6/6] Check for fastcalc import errors Add a check for the fastcalc cython module and add fallback for when that isn't present. 
--- .../operations/lindbladcoefficients.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/pygsti/modelmembers/operations/lindbladcoefficients.py b/pygsti/modelmembers/operations/lindbladcoefficients.py index b5a430ad6..c19986626 100644 --- a/pygsti/modelmembers/operations/lindbladcoefficients.py +++ b/pygsti/modelmembers/operations/lindbladcoefficients.py @@ -4,17 +4,23 @@ import collections as _collections import copy as _copy import warnings as _warnings - from pygsti.tools import lindbladtools as _lt from pygsti.tools import matrixtools as _mt from pygsti.tools import optools as _ot -from pygsti.tools import fastcalc as _fc from pygsti.baseobjs.basis import Basis as _Basis, BuiltinBasis as _BuiltinBasis from pygsti.modelmembers import term as _term from pygsti.baseobjs.polynomial import Polynomial as _Polynomial from pygsti.baseobjs.nicelyserializable import NicelySerializable as _NicelySerializable - from functools import lru_cache +try: + from pygsti.tools import fastcalc as _fc + triu_indices = _fc.fast_triu_indices +except ImportError: + msg = 'Could not import cython module `fastcalc`. This may indicate that your cython extensions for pyGSTi failed to '\ +'properly build. Lack of cython extensions can result in significant performance degradation so we recommend trying to rebuild them. '\ 'Falling back to numpy implementation for triu_indices.' 
+ _warnings.warn(msg) + triu_indices = _np.triu_indices IMAG_TOL = 1e-7 # tolerance for imaginary part being considered zero @@ -814,7 +820,7 @@ def from_vector(self, v): cache_mx = self._cache_mx - params_upper_indices = _fc.fast_triu_indices(num_bels) + params_upper_indices = triu_indices(num_bels) params_upper = 1j*params[params_upper_indices] params_lower = (params.T)[params_upper_indices] @@ -837,7 +843,7 @@ def from_vector(self, v): elif self._param_mode == "elements": # params mx stores block_data (hermitian) directly #params holds block_data real and imaginary parts directly - params_upper_indices = _fc.fast_triu_indices(num_bels) + params_upper_indices = triu_indices(num_bels) params_upper = -1j*params[params_upper_indices] params_lower = (params.T)[params_upper_indices]