How to generate error bars for 2Q-GST

In [1]:
import pygsti
import pickle
import time

#If we were using MPI
# from mpi4py import MPI
# comm = MPI.COMM_WORLD
comm = None

#Load the 2-qubit results (if you don't have this file, run the 2Q-GST example)
with open("example_files/easy_2q_results.pkl","rb") as f:
    results = pickle.load(f)

#Set a memory limit
print("Memory limit was = ", results.estimates['default'].parameters.get('memLimit',"none given"))
results.estimates['default'].parameters['memLimit'] = 2.5*(1024.0)**3 # 2.5GB
print("Memory limit is now = ", results.estimates['default'].parameters['memLimit'])
Memory limit was =  3221225472
Memory limit is now =  2684354560.0
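Before starting the expensive step, it can help to confirm that the loaded Results object contains the estimate and gauge optimization used below. The next cell is just a sanity-check sketch; the attribute names (estimates, gatesets, goparameters) are assumed from this version of pyGSTi.

In [ ]:
# Sanity check (sketch -- attribute names assumed for this pyGSTi version):
# list the estimates and the gauge-optimized gatesets each estimate holds.
print("Estimates:", list(results.estimates.keys()))
est = results.estimates['default']
print("Gauge-optimized gatesets:", list(est.gatesets.keys()))      # expect 'go0' among these
print("Gauge-opt parameter sets:", list(est.goparameters.keys()))  # labels usable as factory keys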
In [2]:
# error bars in reports require the presence of a fully-initialized
# "confidence region factory" within the relevant Estimate object.
# In most cases "fully-initialized" means that a Hessian has been 
# computed and projected onto the non-gauge space.
start = time.time()

# initialize a factory for the 'go0' gauge optimization within the 'default' estimate
crfact = results.estimates['default'].add_confidence_region_factory('go0', 'final')
crfact.compute_hessian(comm=comm) #optionally use multiple processors
crfact.project_hessian('intrinsic error')

end = time.time()
print("Total time=%f hours" % ((end - start) / 3600.0))
Evaltree generation (deriv) w/mem limit = 2.48GB
 mem(1 subtrees, 1,1 param-grps, 1 proc-grps) in 0s = 6773.17GB (6773.17GB fc)
Created evaluation tree with 1 subtrees.  Will divide 1 procs into 1 (subtree-processing)
 groups of ~1 procs each, to distribute over (1616,1616) params (taken as 1616,4 param groups of ~1,404 params).
 Memory estimate = 2.08GB (cache=1317, wrtLen1=1, wrtLen2=404, subsPerProc=1).
rank 0: 38.7419s: block 0/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 120.189s: block 1/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 178.593s: block 2/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 234.334s: block 3/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 290.545s: block 4/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 338.526s: block 5/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 385.31s: block 6/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 427.241s: block 7/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 463.751s: block 8/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 493.799s: block 9/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 523.684s: block 10/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 553.939s: block 11/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 584.543s: block 12/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 614.543s: block 13/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 644.817s: block 14/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 676.939s: block 15/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 710.985s: block 16/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 743.661s: block 17/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 776.32s: block 18/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 809.27s: block 19/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 843.08s: block 20/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 875.849s: block 21/4043, sub-tree 0/1, sub-tree-len = 1317
rank 0: 909.451s: block 22/4043, sub-tree 0/1, sub-tree-len = 1317
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-2-d4c9d370f215> in <module>()
      7 # initialize a factory for the 'go0' gauge optimization within the 'default' estimate
      8 crfact = results.estimates['default'].add_confidence_region_factory('go0', 'final')
----> 9 crfact.compute_hessian(comm=comm) #optionally use multiple processors
     10 crfact.project_hessian('intrinsic error')
     11 

/Users/enielse/research/pyGSTi/packages/pygsti/objects/confidenceregionfactory.py in compute_hessian(self, comm, memLimit)
    232                                           minProbClip, probClipInterval, radius,
    233                                           comm=comm, memLimit=memLimit, verbosity=vb,
--> 234                                           gateLabelAliases=aliases)
    235 
    236             nonMarkRadiusSq = max( 2*(_tools.logl_max(gateset, dataset)

/Users/enielse/research/pyGSTi/packages/pygsti/tools/likelihoodfns.py in logl_hessian(gateset, dataset, gatestring_list, minProbClip, probClipInterval, radius, poissonPicture, check, comm, memLimit, gateLabelAliases, verbosity)
    629         k,kmax = 0,len(mySliceTupList)
    630         for (slice1,slice2,hprobs,dprobs12) in gateset.bulk_hprobs_by_block(
--> 631             evalSubTree, mySliceTupList, True, blkComm):
    632             rank = comm.Get_rank() if (comm is not None) else 0
    633 

/Users/enielse/research/pyGSTi/packages/pygsti/objects/gatematrixcalc.py in bulk_hprobs_by_block(self, evalTree, wrtSlicesList, bReturnDProbs12, comm)
   2658             hProdCache = self._compute_hproduct_cache(
   2659                 evalTree, prodCache, dProdCache1, dProdCache2,
-> 2660                 scaleCache, comm, wrtSlice1, wrtSlice2)
   2661             hGs = evalTree.final_view(hProdCache, axis=0)
   2662 

/Users/enielse/research/pyGSTi/packages/pygsti/objects/gatematrixcalc.py in _compute_hproduct_cache(self, evalTree, prodCache, dProdCache1, dProdCache2, scaleCache, comm, wrtSlice1, wrtSlice2)
   1084             dLdR_sym = dLdRa + _np.swapaxes(dLdRb,0,1)
   1085 
-> 1086             hProdCache[i] = _np.dot(hL, R) + dLdR_sym + _np.transpose(_np.dot(L,hR),(1,2,0,3))
   1087 
   1088             scale = scaleCache[i] - (scaleCache[iLeft] + scaleCache[iRight])

KeyboardInterrupt: 

Note: the cell above was executed only for demonstration purposes and was intentionally interrupted from the keyboard, since computing the full Hessian would take an impractically long time on a single processor.
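In practice the Hessian computation is run on many processors via MPI, as the commented-out lines in the first cell suggest. Below is a minimal standalone-script sketch assembled only from the calls already shown in this example; the script name and processor count are arbitrary, and it would be launched with something like "mpiexec -n 4 python compute_2q_errorbars.py".

# compute_2q_errorbars.py  (hypothetical script name)
# Launch with, e.g.:  mpiexec -n 4 python compute_2q_errorbars.py
import pickle
import pygsti
from mpi4py import MPI

comm = MPI.COMM_WORLD

# Load the 2-qubit results and set a memory limit, as in the notebook cells above
with open("example_files/easy_2q_results.pkl", "rb") as f:
    results = pickle.load(f)
results.estimates['default'].parameters['memLimit'] = 2.5*(1024.0)**3  # 2.5GB

# Compute and project the Hessian, distributing the work over the MPI processes
crfact = results.estimates['default'].add_confidence_region_factory('go0', 'final')
crfact.compute_hessian(comm=comm)
crfact.project_hessian('intrinsic error')

# Only the root process writes the updated results back to disk
if comm.Get_rank() == 0:
    with open("example_files/easy_2q_results_withCI.pkl", "wb") as f:
        pickle.dump(results, f)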

In [ ]:
# Write the results (now carrying the confidence region factory) back to file
with open("example_files/easy_2q_results_withCI.pkl","wb") as f:
    pickle.dump(results, f)
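Once the Hessian has been computed and projected as above, reports generated from these results can include error bars by requesting a confidence level. The cell below is a sketch, assuming the pygsti.report.create_standard_report function and its confidenceLevel argument available in this pyGSTi version; the output path and title are arbitrary.

In [ ]:
# Sketch: generate a report whose tables carry 95% confidence intervals,
# drawn from the confidence region factory attached to the 'default' estimate.
# (Function name and arguments assumed for this pyGSTi version.)
pygsti.report.create_standard_report(results, "example_files/easy_2q_report",
                                     title="2Q GST with error bars",
                                     confidenceLevel=95)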
In [ ]: