This repository was archived by the owner on Mar 23, 2025. It is now read-only.

Commit 0ebbd7b

Merge branch 'master' of github.com:ITensor/NDTensors.jl
2 parents: dfad216 + a75f5ca

19 files changed: +714 -649 lines

Project.toml

Lines changed: 6 additions & 1 deletion
@@ -1,16 +1,19 @@
 name = "NDTensors"
 uuid = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
 authors = ["Matthew Fishman <mfishman@flatironinstitute.org>"]
-version = "0.1.18"
+version = "0.1.19"
 
 [deps]
 Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
+Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
 HDF5 = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 Requires = "ae029012-a4dd-5104-9daa-d747884805df"
 StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
 Strided = "5e0ebb24-38b0-5f93-81fe-25c709ecae67"
+TimerOutputs = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
+TupleTools = "9d95972d-f1c8-5527-a6e0-b4b365fa01f6"
 
 [compat]
 Compat = "2.1, 3"
@@ -19,4 +22,6 @@ HDF5 = "0.12, 0.13, 0.14"
 Requires = "1.1"
 StaticArrays = "0.12, 1.0"
 Strided = "0.3, 1"
+TimerOutputs = "0.5.5"
+TupleTools = "1.2.0"
 julia = "1.3"
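The version bump to 0.1.19 comes with three new dependencies, Dictionaries.jl, TimerOutputs.jl, and TupleTools.jl, each with a [compat] bound added above. As a rough sketch of the kind of keyed storage Dictionaries.jl provides (the key and value types below are illustrative placeholders, not taken from this commit):

using Dictionaries

# An insertion-ordered dictionary type; lookups hash the key on every access,
# which is why caching a tuple's hash (see block.jl below) can pay off.
offsets = Dictionary{NTuple{2,UInt}, Int}()
insert!(offsets, (UInt(1), UInt(2)), 0)   # add a new key => value pair
offsets[(UInt(1), UInt(2))]               # 0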

src/NDTensors.jl

Lines changed: 21 additions & 1 deletion
@@ -1,17 +1,25 @@
 module NDTensors
 
 using Compat
+using Dictionaries
 using Random
 using LinearAlgebra
 using StaticArrays
 using HDF5
 using Requires
 using Strided
+using TimerOutputs
+using TupleTools
+
+using Base:
+  @propagate_inbounds,
+  ReshapedArray
 
 #####################################
-# Exports
+# Imports and exports
 #
 include("exports.jl")
+include("imports.jl")
 
 #####################################
 # DenseTensor and DiagTensor
@@ -33,6 +41,7 @@ include("svd.jl")
 # BlockSparseTensor
 #
 include("blocksparse/blockdims.jl")
+include("blocksparse/block.jl")
 include("blocksparse/blockoffsets.jl")
 include("blocksparse/blocksparse.jl")
 include("blocksparse/blocksparsetensor.jl")
@@ -45,6 +54,17 @@ include("blocksparse/linearalgebra.jl")
 #
 include("empty.jl")
 
+#####################################
+# Deprecations
+#
+include("deprecated.jl")
+
+#####################################
+# A global timer used with TimerOutputs.jl
+#
+
+const GLOBAL_TIMER = TimerOutput()
+
 #####################################
 # Optional TBLIS contraction backend
 #
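The new GLOBAL_TIMER constant is a TimerOutputs.jl timer object. A minimal sketch of how such a timer is typically used (the section label and the timed expression are illustrative and not part of this commit; @timeit and print_timer are standard TimerOutputs.jl API):

using TimerOutputs

const GLOBAL_TIMER = TimerOutput()

# Accumulate timing and allocation statistics under a named section.
@timeit GLOBAL_TIMER "contract" begin
  A = rand(100, 100)
  B = rand(100, 100)
  A * B
end

# Print the table of accumulated timings.
print_timer(GLOBAL_TIMER)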

src/blocksparse/block.jl

Lines changed: 153 additions & 0 deletions
@@ -0,0 +1,153 @@
+
+#
+# Block
+#
+
+struct Block{N}
+  data::NTuple{N, UInt}
+  hash::UInt
+  function Block{N}(data::NTuple{N, UInt}) where {N}
+    h = _hash(data)
+    return new{N}(data, h)
+  end
+  function Block{0}(::Tuple{})
+    h = _hash(())
+    return new{0}((), h)
+  end
+end
+
+#
+# Constructors
+#
+
+Block{N}(t::Tuple{Vararg{<:Any, N}}) where {N} =
+  Block{N}(UInt.(t))
+
+Block{N}(I::CartesianIndex{N}) where {N} = Block{N}(I.I)
+
+Block{N}(v::MVector{N}) where {N} = Block{N}(Tuple(v))
+
+Block{N}(v::SVector{N}) where {N} = Block{N}(Tuple(v))
+
+Block(b::Block) = b
+
+Block(I::CartesianIndex{N}) where {N} = Block{N}(I)
+
+Block(v::MVector{N}) where {N} = Block{N}(v)
+
+Block(v::SVector{N}) where {N} = Block{N}(v)
+
+Block(t::NTuple{N, UInt}) where {N} = Block{N}(t)
+
+Block(t::Tuple{Vararg{<:Any, N}}) where {N} = Block{N}(t)
+
+Block(::Tuple{}) where {N} = Block{0}(())
+
+Block(I::Integer...) = Block(I)
+
+#
+# Conversions
+#
+
+CartesianIndex(b::Block) = CartesianIndex(Tuple(b))
+
+Tuple(b::Block{N}) where {N} = NTuple{N, UInt}(b.data)
+
+convert(::Type{Block}, I::CartesianIndex{N}) where {N} = Block{N}(I.I)
+
+convert(::Type{Block{N}}, I::CartesianIndex{N}) where {N} = Block{N}(I.I)
+
+convert(::Type{Block}, t::Tuple) where {N} = Block(t)
+
+convert(::Type{Block{N}}, t::Tuple) where {N} = Block{N}(t)
+
+#
+# Getting and setting fields
+#
+
+gethash(b::Block) = b.hash[]
+
+sethash!(b::Block, h::UInt) = (b.hash[] = h; return b)
+
+#
+# Basic functions
+#
+
+length(::Block{N}) where {N} = N
+
+iterate(b::Block, args...) = iterate(b.data, args...)
+
+using Base: @propagate_inbounds
+@propagate_inbounds function getindex(b::Block, i::Integer)
+  return b.data[i]
+end
+
+@propagate_inbounds setindex(b::Block{N}, val, i::Integer) where {N} =
+  Block{N}(setindex(b.data, UInt(val), i))
+
+ValLength(::Type{<:Block{N}}) where {N} = Val{N}
+
+deleteat(b::Block, pos) = Block(deleteat(Tuple(b), pos))
+
+insertafter(b::Block, val, pos) =
+  Block(insertafter(Tuple(b), UInt.(val), pos))
+
+getindices(b::Block, I) = getindices(Tuple(b), I)
+
+#
+# Hashing
+#
+
+# Borrowed from:
+# https://github.com/JuliaLang/julia/issues/37073
+# This is the same as Julia's Base tuple hash, but is
+# a bit faster.
+_hash(t::Tuple) = _hash(t, zero(UInt))
+_hash(::Tuple{}, h::UInt) = h + Base.tuplehash_seed
+using Base.Cartesian: @nexprs
+@generated function _hash(b::NTuple{N}, h::UInt) where {N}
+  quote
+    out = h + Base.tuplehash_seed
+    @nexprs $N i -> out = hash(b[$N-i+1], out)
+  end
+end
+# Stop inlining after some number of arguments to avoid code blowup
+function _hash(t::Base.Any16, h::UInt)
+  out = h + Base.tuplehash_seed
+  for i = length(t):-1:1
+    out = hash(t[i], out)
+  end
+  return out
+end
+
+hash(b::Block) = UInt(b.hash)
+hash(b::Block, h::UInt) = h + hash(b)
+
+#
+# Custom NTuple{N, Int} hashes
+# These are faster, but have a lot of collisions
+#
+
+# Borrowed from:
+# https://stackoverflow.com/questions/20511347/a-good-hash-function-for-a-vector
+# This seems to have a lot of clashes
+#function Base.hash(b::Block, seed::UInt)
+#  h = UInt(0x9e3779b9)
+#  for n in b
+#    seed ⊻= n + h + (seed << 6) + (seed >> 2)
+#  end
+#  return seed
+#end
+
+# Borrowed from:
+# http://www.docjar.com/html/api/java/util/Arrays.java.html
+# Could also consider using the CPython tuple hash:
+# https://github.com/python/cpython/blob/0430dfac629b4eb0e899a09b899a494aa92145f6/Objects/tupleobject.c#L406
+#function Base.hash(b::Block, h::UInt)
+#  h += Base.tuplehash_seed
+#  for n in b
+#    h = 31 * h + n ⊻ (n >> 32)
+#  end
+#  return h
+#end
+
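The key design point of Block is that the hash of the coordinate tuple is computed once in the inner constructor and stored next to the data, so hash(b) just reads the cached field instead of rehashing the tuple, which matters when blocks are used heavily as dictionary keys. A minimal usage sketch, assuming the Base functions extended here (length, getindex, hash, and so on) are wired up through the new imports.jl file:

using NDTensors: Block

b = Block(2, 3)        # coordinates are converted to UInt and hashed once
length(b)              # 2
b[1]                   # UInt(2)
hash(b)                # returns the hash cached at construction

# Blocks behave like cheap, hashable keys, e.g. for offset lookups
# (this Dict is purely illustrative, not how NDTensors stores offsets):
offsets = Dict{Block{2}, Int}(b => 0)
offsets[Block(2, 3)]   # 0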

src/blocksparse/blockdims.jl

Lines changed: 8 additions & 19 deletions
@@ -1,9 +1,3 @@
-export BlockDims,
-       blockdim,
-       blockdims,
-       nblocks,
-       blockindex
-
 """
 BlockDim
 
@@ -107,34 +101,30 @@ function blockdim(ind::BlockDim,
 end
 
 """
-blockdim(::BlockDims,block,::Integer)
+    blockdim(::BlockDims,block,::Integer)
 
 The size of the specified block in the specified
 dimension.
 """
-function blockdim(inds,
-                  block,
-                  i::Integer)
+function blockdim(inds, block, i::Integer)
   return blockdim(inds[i],block[i])
 end
 
 """
-blockdims(::BlockDims,block)
+    blockdims(::BlockDims,block)
 
 The size of the specified block.
 """
-function blockdims(inds,
-                   block)
+function blockdims(inds, block)
   return ntuple(i->blockdim(inds,block,i),ValLength(inds))
 end
 
 """
-blockdim(::BlockDims,block)
+    blockdim(::BlockDims,block)
 
 The total size of the specified block.
 """
-function blockdim(inds,
-                  block)
+function blockdim(inds, block)
   return prod(blockdims(inds,block))
 end
 
@@ -164,8 +154,7 @@ end
 
 # Given a CartesianIndex in the range dims(T), get the block it is in
 # and the index within that block
-function blockindex(T,
-                    i::Vararg{Int,N}) where {ElT,N}
+function blockindex(T, i::Vararg{Int,N}) where {ElT,N}
   # Start in the (1,1,...,1) block
   current_block_loc = @MVector ones(Int,N)
   current_block_dims = blockdims(T,Tuple(current_block_loc))
@@ -177,7 +166,7 @@ function blockindex(T,
       current_block_dims = blockdims(T,Tuple(current_block_loc))
     end
   end
-  return Tuple(block_index),Block{N}(current_block_loc)
+  return Tuple(block_index), Block{N}(current_block_loc)
 end
 
 blockindex(T) = (),Block{0}()
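blockindex starts in the (1,1,...,1) block and walks outward along each dimension, subtracting block extents until the given Cartesian index falls inside the current block, then returns the intra-block index together with the Block it landed in. A minimal one-dimensional sketch of the same walk (illustrative only, not the NDTensors implementation):

# Given the extents of the blocks along one dimension and a global index,
# return (index_within_block, block_number).
function blockindex_1d(blockextents, i::Int)
  block = 1
  while i > blockextents[block]
    i -= blockextents[block]
    block += 1
  end
  return i, block
end

blockindex_1d((2, 3, 4), 4)   # (2, 2): the 4th entry overall is entry 2 of block 2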
