Relax some shape checks for rank-1 tensors #625

Merged
2 commits merged on Mar 2, 2024
11 changes: 9 additions & 2 deletions src/arraymancer/tensor/private/p_accessors_macros_write.nim
@@ -24,6 +24,13 @@ import ../../laser/private/nested_containers,
# #########################################################################
# Slicing macros - write access

## `RelaxedRankOne` is a compile-time variable exposed to the user to recover the old
## behavior of how rank-1 tensors are treated in mutating slices.
## If set to `false` using `-d:RelaxedRankOne=false`, the shapes of rank-1
## arrays / seqs / tensors used in slice assignments have to match the slice exactly.
## If it is `true`, only the number of input elements has to match, which gives a more
## convenient interface.
const RelaxedRankOne* {.booldefine.} = true

# #########################################################################
# Setting a single value

@@ -80,7 +87,7 @@ template slicerMutImpl_oa[T](t: var Tensor[T], slices: openArray[SteppedSlice],

var sliced = t.slicer(slices)
when compileOption("boundChecks"):
check_shape(sliced, oa)
check_shape(sliced, oa, relaxed_rank1_check = RelaxedRankOne)

var data = toSeq(flatIter(oa))
when compileOption("boundChecks"):
@@ -140,7 +147,7 @@ template slicerMutImpl_T[T](t: var Tensor[T], slices: openArray[SteppedSlice], t
var sliced = t.slicer(slices)

when compileOption("boundChecks"):
check_shape(sliced, t2)
check_shape(sliced, t2, relaxed_rank1_check = RelaxedRankOne)

apply2_inline(sliced, t2):
y
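
For illustration, here is a minimal sketch (not code from this PR) of what the relaxed check changes at the call site, assuming arraymancer's usual slicing semantics where an integer index keeps its dimension:

import arraymancer

var t = zeros[int]([2, 3])
# With the default -d:RelaxedRankOne=true, a rank-1 input is accepted as long as its
# element count matches the sliced region, even though the slice t[0, _] has shape
# [1, 3] while the input has shape [3]:
t[0, _] = [10, 20, 30]
# Compiling with -d:RelaxedRankOne=false restores the strict behavior: with bound
# checks enabled, the same assignment raises an IndexDefect because the shapes
# [1, 3] and [3] are not identical.
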
13 changes: 12 additions & 1 deletion src/arraymancer/tensor/private/p_checks.nim
@@ -93,14 +93,25 @@ func check_start_end*(a, b: int, dim_size: int) {.inline.} =
". Slicing must be done between 0 (inclusive) and " &
$dim_size & " (exclusive).")

func check_shape*(a: Tensor; b: Tensor|openArray) {.inline.}=
func check_shape*(a: Tensor; b: Tensor|openArray;
relaxed_rank1_check: static[bool] = false) {.inline.} =
## Compare the shapes of `a` and `b`. If `relaxed_rank1_check` is true, a rank-1 `b`
## is accepted whenever its number of elements matches that of `a`.

when b is Tensor:
let b_shape = b.shape
else:
let b_shape = b.getShape()

when relaxed_rank1_check:
# When b has rank 1 (tensor or flat array/seq), only compare element counts
# (i.e. let a rank-1 tensor of size n "fit" into a [1, n] rank-2 slice, for example)
when b is Tensor:
let b_rank = b.rank
else:
let b_rank = b_shape.len
if b_rank == 1 and b.len == a.len:
return

if unlikely(a.shape != b_shape):
raise newException(IndexDefect, "Your tensors or openArrays do not have the same shape: " &
$a.shape &
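
For reference, the relaxed rule boils down to a small shape-compatibility predicate. The following is a standalone sketch (a hypothetical shapesCompatible helper for illustration, not part of arraymancer's API):

import std/sequtils

# Hypothetical predicate mirroring the logic above: in relaxed mode a rank-1 input is
# accepted whenever its element count matches the target's; otherwise the shapes must
# be identical.
proc shapesCompatible(target, input: seq[int], relaxed: bool): bool =
  if relaxed and input.len == 1 and input[0] == foldl(target, a * b, 1):
    return true
  result = target == input

assert shapesCompatible(@[1, 3], @[3], relaxed = true)      # a rank-1 input of size 3 fits a [1, 3] slice
assert not shapesCompatible(@[1, 3], @[3], relaxed = false) # strict mode compares shapes exactly
assert not shapesCompatible(@[1, 3], @[4], relaxed = true)  # element counts still have to match
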
42 changes: 40 additions & 2 deletions tests/tensor/test_fancy_indexing.nim
@@ -122,6 +122,31 @@ proc main() =

check: y == exp

test "Masked assign tensor or openArray via fancy indexing":
block: # y[y > 50] = np.array([-100, -200])
var y = x.clone()
# Assign a tensor
y[y >. 50] = [-100, -200].toTensor()

let exp = [[ 4, -100, 2],
[ 3, 4, -200],
[ 1, 8, 7],
[ 8, 6, 8]].toTensor()

check: y == exp

block: # y[y > 50] = [-100, -200]
var y = x.clone()
# Assign an openArray
y[y >. 50] = [-100, -200]

let exp = [[ 4, -100, 2],
[ 3, 4, -200],
[ 1, 8, 7],
[ 8, 6, 8]].toTensor()

check: y == exp

test "Masked axis assign value via fancy indexing":
block: # y[:, y.sum(axis = 0) > 50] = -100
var y = x.clone()
@@ -146,11 +171,11 @@ proc main() =
check: y == exp

test "Masked axis assign tensor via fancy indexing - invalid Numpy syntaxes":
block: # y[:, y.sum(axis = 0) > 50] = np.array([10, 20, 30, 40])
block: # y[:, y.sum(axis = 0) > 50] = np.array([[10, 20, 30, 40]])
var y = x.clone()

expect(IndexDefect):
y[_, y.sum(axis = 0) >. 50] = [10, 20, 30, 40].toTensor()
y[_, y.sum(axis = 0) >. 50] = [[10, 20, 30, 40]].toTensor()

test "Masked axis assign broadcastable 1d tensor via fancy indexing":
block: # y[:, y.sum(axis = 0) > 50] = np.array([[10], [20], [30], [40]])
@@ -175,6 +200,19 @@

check: y == exp

block:
# Assigning a rank-1 tensor into an axis of the same size is supported
# Note that this is not supported by numpy
Review comment (Collaborator):
Does numpy have a good reason not to support this maybe?

var y = x.clone()
y[_, y.sum(axis = 0) >. 50] = [10, 20, 30, 40].toTensor()

let exp = [[ 4, 10, 10],
[ 3, 20, 20],
[ 1, 30, 30],
[ 8, 40, 40]].toTensor()

check: y == exp

# TODO - only broadcastable tensor assign are supported at the moment
# test "Masked axis assign multidimensional tensor via fancy indexing":
# block: # y[:, y.sum(axis = 0) > 50] = np.array([[10, 50], [20, 60], [30, 70], [40, 80]])