@@ -274,7 +274,7 @@ def tensor_contract(
274
274
If `True`, return the exponent of the result, log10, as well as the
275
275
rescaled 'mantissa'. Useful for very large or small values.
276
276
exponent : float, optional
277
- If supplied, a base exponent to add to the result exponent .
277
+ If supplied, an overall base exponent to scale the result by .
278
278
contract_opts
279
279
Passed to ``cotengra.array_contract``.
280
280
@@ -3593,8 +3593,8 @@ def _tensor_network_gate_inds_basic(
3593
3593
trn = tr_L @ tr_Q
3594
3594
3595
3595
# if singular values are returned (``absorb=None``) check if we should
3596
- # return them via ``info``, e.g. for ``SimpleUpdate`
3597
- if maybe_svals and info is not None :
3596
+ # return them further via ``SimpleUpdate``
3597
+ if maybe_svals and ( info is not None ) :
3598
3598
s = next (iter (maybe_svals )).data
3599
3599
info ["singular_values" , bix ] = s
3600
3600
@@ -7189,9 +7189,9 @@ def gauge_all_simple(
7189
7189
tol : float, optional
7190
7190
The convergence tolerance for the singular values.
7191
7191
smudge : float, optional
7192
- The smudge factor to add to the singular values when gauging.
7192
+ A small value to add to the singular values when gauging.
7193
7193
power : float, optional
7194
- The power to raise the singular values to when gauging.
7194
+ A power to raise the singular values to when gauging.
7195
7195
damping : float, optional
7196
7196
The damping factor to apply to the gauging updates.
7197
7197
gauges : dict, optional
@@ -8218,8 +8218,14 @@ def contract_compressed(
8218
8218
Whether to compress pairs of tensors that are effectively matrices.
8219
8219
compress_exclude : set[int], optional
8220
8220
An explicit set of tensor ids to exclude from compression.
8221
- equalize_norms : bool or float, optional
8222
- Whether to equalize the norms of the tensors after each operation.
8221
+ strip_exponent : bool, optional
8222
+ Whether to strip an overall exponent, log10, from the *final*
8223
+ contraction. If a TensorNetwork is returned, this exponent is
8224
+ accumulated in the `exponent` attribute. If a Tensor or scalar is
8225
+ returned, the exponent is returned separately.
8226
+ equalize_norms : bool or "auto", optional
8227
+ Whether to equalize the norms of the tensors *during* the
8228
+ contraction. By default ("auto") this follows `strip_exponent`.
8223
8229
The overall scaling is accumulated, log10, into `tn.exponent`. If
8224
8230
`True`, at the end this exponent is redistributed. If a float,
8225
8231
this is the target norm to equalize tensors to, e.g. `1.0`, and the
@@ -8846,11 +8852,10 @@ def contract(
8846
8852
output_inds = None ,
8847
8853
optimize = None ,
8848
8854
get = None ,
8849
- backend = None ,
8850
- preserve_tensor = False ,
8851
8855
max_bond = None ,
8852
8856
strip_exponent = False ,
8853
- exponent = True ,
8857
+ preserve_tensor = False ,
8858
+ backend = None ,
8854
8859
inplace = False ,
8855
8860
** kwargs ,
8856
8861
):
@@ -8897,20 +8902,17 @@ def contract(
8897
8902
with detailed information such as flop cost. The symbol-map is
8898
8903
also added to the ``quimb_symbol_map`` attribute.
8899
8904
8900
- backend : {'auto', 'numpy', 'jax', 'cupy', 'tensorflow', ...}, optional
8901
- Which backend to use to perform the contraction. Supplied to
8902
- `cotengra`.
8905
+ strip_exponent : bool, optional
8906
+ Whether to strip an overall exponent, log10, from the *final*
8907
+ contraction. If a TensorNetwork is returned, this exponent is
8908
+ accumulated in the `exponent` attribute. If a Tensor or scalar is
8909
+ returned, the exponent is returned separately.
8903
8910
preserve_tensor : bool, optional
8904
8911
Whether to return a tensor regardless of whether the output object
8905
8912
is a scalar (has no indices) or not.
8906
- strip_exponent : bool, optional
8907
- If contracting the entire tensor network, whether to strip a log10
8908
- exponent and return it separately. This is useful for very large or
8909
- small values.
8910
- exponent : float, optional
8911
- The current exponent to scale the whole contraction by. If ``True``
8912
- this taken from `tn.exponent`. If `False` then this is ignored.
8913
- If a float, this is the exponent to use.
8913
+ backend : {'auto', 'numpy', 'jax', 'cupy', 'tensorflow', ...}, optional
8914
+ Which backend to use to perform the contraction. Supplied to
8915
+ `cotengra`.
8914
8916
inplace : bool, optional
8915
8917
Whether to perform the contraction inplace. This is only valid
8916
8918
if not all tensors are contracted (which doesn't produce a TN).
@@ -8945,8 +8947,6 @@ def contract(
8945
8947
raise NotImplementedError
8946
8948
if kwargs .pop ("backend" , None ) is not None :
8947
8949
raise NotImplementedError
8948
- if exponent is not True :
8949
- raise NotImplementedError
8950
8950
8951
8951
return self .contract_compressed (
8952
8952
max_bond = max_bond ,
@@ -8959,9 +8959,6 @@ def contract(
8959
8959
# contraction pattern (e.g. 1D along the line)
8960
8960
if self ._CONTRACT_STRUCTURED :
8961
8961
8962
- if exponent is not True :
8963
- raise NotImplementedError
8964
-
8965
8962
if (tags is ...) or isinstance (tags , slice ):
8966
8963
return self .contract_structured (
8967
8964
tags ,
@@ -8973,15 +8970,10 @@ def contract(
8973
8970
# contracting everything to single output
8974
8971
if all_tags and not inplace :
8975
8972
8976
- if exponent is True :
8977
- exponent = self .exponent
8978
- elif exponent is False :
8979
- exponent = 0.0
8980
-
8981
8973
return tensor_contract (
8982
8974
* self .tensor_map .values (),
8983
8975
strip_exponent = strip_exponent ,
8984
- exponent = exponent ,
8976
+ exponent = self . exponent ,
8985
8977
** kwargs
8986
8978
)
8987
8979
@@ -8999,9 +8991,9 @@ def contract_cumulative(
8999
8991
self ,
9000
8992
tags_seq ,
9001
8993
output_inds = None ,
9002
- preserve_tensor = False ,
9003
8994
strip_exponent = False ,
9004
8995
equalize_norms = "auto" ,
8996
+ preserve_tensor = False ,
9005
8997
inplace = False ,
9006
8998
** contract_opts ,
9007
8999
):
@@ -9018,6 +9010,14 @@ def contract_cumulative(
9018
9010
The indices to specify as outputs of the contraction. If not given,
9019
9011
and the tensor network has no hyper-indices, these are computed
9020
9012
automatically as every index appearing once.
9013
+ strip_exponent : bool, optional
9014
+ Whether to strip an overall exponent, log10, from the *final*
9015
+ contraction. If a TensorNetwork is returned, this exponent is
9016
+ accumulated in the `exponent` attribute. If a Tensor or scalar is
9017
+ returned, the exponent is returned separately.
9018
+ equalize_norms : bool or "auto", optional
9019
+ Whether to equalize the norms of the tensors *during* the
9020
+ contraction. By default ("auto") this follows `strip_exponent`.
9021
9021
preserve_tensor : bool, optional
9022
9022
Whether to return a tensor regardless of whether the output object
9023
9023
is a scalar (has no indices) or not.
0 commit comments