Commit

rebase
huaiyuzh committed Jul 12, 2024
2 parents 5a0883b + cbb4ab1 commit b3b8fc6
Showing 30 changed files with 721 additions and 62 deletions.
14 changes: 7 additions & 7 deletions .github/ci_expected_accuracy/check_expected.py
@@ -48,19 +48,19 @@
passed_models.append([model_name, test_accuracy])
if refer_accuracy == "N/A":
new_models.append([model_name, test_accuracy])
refer_data.loc[refer_data.tail(1).index.tolist()[0] + 1,:] = "N/A"
refer_data.at[refer_data.tail(1).index, "name"] = model_name
refer_data.at[refer_data.tail(1).index, args.dtype] = test_accuracy
refer_data.loc[len(refer_data),:] = "N/A"
refer_data.at[len(refer_data) - 1, "name"] = model_name
refer_data.at[len(refer_data) - 1, args.dtype] = test_accuracy
elif 'pass' not in refer_accuracy:
new_pass_models.append([model_name, test_accuracy])
refer_data.at[refer_row[0], args.dtype] = test_accuracy
else:
if refer_accuracy == "N/A":
new_models.append([model_name, test_accuracy])
real_failed_models.append([model_name, test_accuracy])
refer_data.loc[refer_data.tail(1).index.tolist()[0] + 1,:] = "N/A"
refer_data.at[refer_data.tail(1).index, "name"] = model_name
refer_data.at[refer_data.tail(1).index, args.dtype] = test_accuracy
refer_data.loc[len(refer_data),:] = "N/A"
refer_data.at[len(refer_data) - 1, "name"] = model_name
refer_data.at[len(refer_data) - 1, args.dtype] = test_accuracy
elif "pass" in refer_accuracy:
real_failed_models.append([model_name, test_accuracy])
else:
@@ -80,7 +80,7 @@
print("Pass rate: {:.2f}%".format(len(passed_models) / len(model_names) * 100))

if len(new_pass_models + new_models) > 0:
print("NOTE: New models result, please update the reference", new_pass_models)
print("NOTE: New models result, please update the reference", new_pass_models, new_models)
if args.update:
refer_data.to_csv(refer_file, sep=',', encoding='utf-8', index=False)
print("Updated. Now, confirm the changes to .csvs and `git add` them if satisfied.")
64 changes: 45 additions & 19 deletions .github/scripts/apply_torch_pr.py
@@ -25,7 +25,7 @@

# check reverted PR is in current code base or not
def check_reverted_reopen(pr_info):
git_cmd = "git log nightly -n 1 2>&1 |grep 'nightly release' |head -1 |sed 's/.*(//;s/).*//' || git rev-parse HEAD"
git_cmd = "((git log -n 1 2>&1 |grep 'nightly release' |head -1 |sed 's/.*(//;s/).*//' || true) && git rev-parse HEAD) |head -n 1"
git_info = subprocess.Popen(git_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
main_commit = git_info.communicate()[0].decode("utf-8").replace("\n", "")
revert_cmd = "cur_cmt=$(git rev-parse HEAD) && git fetch origin main > /dev/null 2>&1 && " + \
@@ -40,6 +40,39 @@ def check_reverted_reopen(pr_info):
reverted = False
return reverted

def check_merged(pr_info):
git_cmd = "((git log -n 1 2>&1 |grep 'nightly release' |head -1 |sed 's/.*(//;s/).*//' || true) && git rev-parse HEAD) |head -n 1"
git_info = subprocess.Popen(git_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
main_commit = git_info.communicate()[0].decode("utf-8").replace("\n", "")
merge_cmd = "cur_cmt=$(git rev-parse HEAD) && git fetch origin main > /dev/null 2>&1 && " + \
"git checkout " + main_commit + " > /dev/null 2>&1 && " + \
"git log |grep 'resolved: " + pr_info["html_url"] + "' || true && " + \
"git checkout $cur_cmt > /dev/null 2>&1"
merge_info = subprocess.Popen(merge_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
merge_msg = merge_info.communicate()[0].decode("utf-8")
if "resolved: " + pr_info["html_url"] in merge_msg:
merged = True
else:
merged = False
return merged

def apply_pr(pr_info, re_apply_msg):
# get pr diff
pr_file = pr_info["diff_url"].split("/")[-1]
urllib.request.urlretrieve(pr_info["diff_url"], pr_file)
# apply diff
apply_cmd = "git apply --3way " + pr_file + " && rm -f " + pr_file
apply_info = subprocess.Popen(apply_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
apply_message = apply_info.communicate()[0].decode("utf-8")
apply_status = apply_info.returncode
# apply status
if apply_status == 0:
print("{} {}, applied got SUCCESSFUL".format(pr_info["diff_url"], re_apply_msg))
else:
print("{} {}, applied got FAILED".format(pr_info["diff_url"], apply_message))
print(apply_status, apply_message)
exit(1)


# headers = {'Authorization': 'Bearer ' + args.token} if args.token != None else args.token
pr_list = args.pr_list + args.extra_pr_list
@@ -53,32 +86,25 @@ def check_reverted_reopen(pr_info):
if pr_info["state"].lower() == "open":
# for reverted PR
reverted_id = next((item["id"] for item in pr_info["labels"] if item["name"] == "Reverted"), -1)
re_apply_msg = ""
re_apply_msg = "is opened"
if reverted_id != -1:
reverted = check_reverted_reopen(pr_info)
# skip if PR not reverted but re-open in current code base
if not reverted:
print("{} is re-open but not reverted, no need to apply".format(pr_info["diff_url"]))
continue
else:
re_apply_msg = "is re-opened & reverted,"
# get pr diff
pr_file = pr_info["diff_url"].split("/")[-1]
urllib.request.urlretrieve(pr_info["diff_url"], pr_file)
# apply diff
apply_cmd = "git apply --3way " + pr_file + " && rm -f " + pr_file
apply_info = subprocess.Popen(apply_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
apply_message = apply_info.communicate()[0].decode("utf-8")
apply_status = apply_info.returncode
# apply status
if apply_status == 0:
print("{} {} applied got SUCCESSFUL".format(pr_info["diff_url"], re_apply_msg))
else:
print("{} applied got FAILED".format(pr_info["diff_url"]))
print(apply_status, apply_message)
exit(1)
re_apply_msg = "is re-opened and reverted,"
apply_pr(pr_info, re_apply_msg)
elif pr_info["state"].lower() == "closed":
print("{} is ClOSED, no need to apply".format(pr_info["diff_url"]))
merged_id = next((item["id"] for item in pr_info["labels"] if item["name"] == "Merged"), -1)
re_apply_msg = "is closed but not merged"
if merged_id != -1:
merged = check_merged(pr_info)
if merged:
print("{} is closed and merged, no need to apply".format(pr_info["diff_url"]))
continue
apply_pr(pr_info, re_apply_msg)
else:
print("{} is {}, no need to apply".format(pr_info["diff_url"], pr_info["state"]))
exit(1)
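A note on the compound git command shared by check_reverted_reopen and check_merged: it prefers the upstream commit id embedded in the latest "nightly release (<sha>)" commit subject and falls back to the current HEAD, keeping only the first line of output. A rough standalone sketch, assuming it runs inside a git checkout (shell=True because of the pipes and subshell):

import subprocess

# Same idea as git_cmd above: use the sha recorded in a "nightly release (<sha>)"
# subject if one exists, otherwise fall back to HEAD; "|| true" keeps the subshell
# from failing when grep finds nothing, and "head -n 1" picks the first candidate.
git_cmd = (
    "((git log -n 1 2>&1 |grep 'nightly release' |head -1 |sed 's/.*(//;s/).*//' || true)"
    " && git rev-parse HEAD) |head -n 1"
)

main_commit = subprocess.run(
    git_cmd, shell=True, capture_output=True, text=True
).stdout.strip()

print("resolved base commit:", main_commit)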
48 changes: 48 additions & 0 deletions src/ATen/native/xpu/BinaryOps.cpp
@@ -9,6 +9,7 @@
#include <ATen/native/xpu/sycl/BinaryKernels.h>
#include <ATen/native/xpu/sycl/BinaryMiscBackwardOpsKernels.h>
#include <ATen/native/xpu/sycl/BinaryRemainderKernel.h>
#include <ATen/native/xpu/sycl/CopysignKernel.h>
#include <ATen/native/xpu/sycl/GcdLcmKernels.h>
#include <ATen/native/xpu/sycl/MaxMinElementwiseKernels.h>

@@ -477,4 +478,51 @@ Tensor XPUNativeFunctions::sigmoid_backward(
return iter.output();
}

Tensor XPUNativeFunctions::atan2(const Tensor& self, const Tensor& other) {
Tensor out;
TensorIterator iter;
iter.build_borrowing_binary_float_op(out, self, other);
native::xpu::atan2_kernel(iter);
return iter.output();
}

Tensor& XPUNativeFunctions::atan2_(Tensor& self, const Tensor& other) {
TensorIterator iter;
iter.build_borrowing_binary_float_op(self, self, other);
native::xpu::atan2_kernel(iter);
return self;
}

Tensor& XPUNativeFunctions::atan2_out(
const Tensor& self,
const Tensor& other,
Tensor& out) {
TensorIterator iter;
iter.build_borrowing_binary_float_op(out, self, other);
native::xpu::atan2_kernel(iter);
return out;
}

Tensor& XPUNativeFunctions::copysign_out(
const Tensor& self,
const Tensor& other,
Tensor& out) {
TensorIterator iter;
iter.build_borrowing_binary_float_op(out, self, other);
native::xpu::copysign_kernel(iter);
return out;
}

Tensor& XPUNativeFunctions::copysign_(Tensor& self, const Tensor& other) {
return XPUNativeFunctions::copysign_out(self, other, self);
}

Tensor XPUNativeFunctions::copysign(const Tensor& self, const Tensor& other) {
Tensor out;
TensorIterator iter;
iter.build_borrowing_binary_float_op(out, self, other);
native::xpu::copysign_kernel(iter);
return iter.output();
}

} // namespace at
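A quick usage sketch for the newly registered atan2 and copysign entry points, assuming a recent PyTorch build with the XPU backend (torch.xpu) and falling back to CPU otherwise:

import torch

device = "xpu" if torch.xpu.is_available() else "cpu"

a = torch.randn(4, device=device)
b = torch.randn(4, device=device)

print(torch.atan2(a, b))     # quadrant-aware arctangent of a / b
print(torch.copysign(a, b))  # magnitude of a, sign of b
print(a.copysign_(b))        # in-place variant; the XPU path above routes it through copysign_out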
1 change: 1 addition & 0 deletions src/ATen/native/xpu/Indexing.cpp
@@ -43,4 +43,5 @@ Tensor XPUNativeFunctions::index_select(
auto out = at::empty({0}, self.options());
return index_select_out(self, dim, index, out);
}

} // namespace at
63 changes: 61 additions & 2 deletions src/ATen/native/xpu/Loss.cpp
@@ -1,14 +1,24 @@
#include <ATen/ATen.h>
#include <ATen/core/Reduction.h>
#include <ATen/core/Tensor.h>
#include <ATen/xpu/XPUNativeFunctions.h>

#include <ATen/native/xpu/sycl/BinaryMiscOpsKernels.h>
#include <ATen/native/xpu/sycl/PointwiseOpsKernels.h>
#include <ATen/xpu/XPUNativeFunctions.h>
#include <comm/RegisterUtils.h>

namespace at {

static inline at::Tensor apply_loss_reduction(
const at::Tensor& unreduced,
int64_t reduction) {
if (reduction == at::Reduction::Mean) {
return unreduced.mean();
} else if (reduction == at::Reduction::Sum) {
return unreduced.sum();
}
return unreduced;
}

Tensor& XPUNativeFunctions::mse_loss_out(
const Tensor& input,
const Tensor& target,
@@ -69,4 +79,53 @@ Tensor& XPUNativeFunctions::mse_loss_backward_out(
return grad_input;
}

Tensor XPUNativeFunctions::huber_loss(
const Tensor& input,
const Tensor& target,
int64_t reduction,
double delta) {
TORCH_CHECK(
delta > 0, "huber_loss does not support non-positive values for delta.")
Tensor loss = at::empty_like(input);
auto iter = TensorIterator::borrowing_binary_op(loss, input, target);
native::xpu::huber_kernel(iter, delta);
return apply_loss_reduction(loss, reduction);
}

Tensor& XPUNativeFunctions::huber_loss_out(
const Tensor& input,
const Tensor& target,
int64_t reduction,
double delta,
Tensor& result) {
TORCH_CHECK(
delta > 0, "huber_loss does not support non-positive values for delta.")
auto iter = TensorIterator::borrowing_binary_op(result, input, target);
native::xpu::huber_kernel(iter, delta);
if (reduction != Reduction::None) {
auto reduced = apply_loss_reduction(result, reduction);
result.resize_({});
result.copy_(reduced);
}
return result;
}

Tensor& XPUNativeFunctions::huber_loss_backward_out(
const Tensor& grad_output,
const Tensor& input,
const Tensor& target,
int64_t reduction,
double delta,
Tensor& grad_input) {
auto norm = (reduction == Reduction::Mean) ? (1. / input.numel()) : 1.;
auto iter = at::TensorIteratorConfig()
.add_output(grad_input)
.add_const_input(input)
.add_const_input(target)
.add_const_input(grad_output)
.build();
native::xpu::huber_backward_kernel(iter, norm, delta);
return grad_input;
}

} // namespace at
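A sketch that exercises the new huber_loss path end to end, again assuming an available XPU device with a CPU fallback; delta must be positive, matching the TORCH_CHECK above:

import torch
import torch.nn.functional as F

device = "xpu" if torch.xpu.is_available() else "cpu"

input = torch.randn(8, device=device, requires_grad=True)
target = torch.randn(8, device=device)

loss = F.huber_loss(input, target, reduction="mean", delta=1.0)
loss.backward()  # the Mean reduction uses the 1/numel scaling from huber_loss_backward_out

print(loss.item(), input.grad.shape)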
4 changes: 4 additions & 0 deletions src/ATen/native/xpu/TensorAdvancedIndexing.cpp
@@ -1396,4 +1396,8 @@ Tensor& XPUNativeFunctions::gather_out(
return out;
}

Tensor XPUNativeFunctions::count_nonzero(const Tensor& self, IntArrayRef dims) {
return (self != 0).sum(dims);
}

} // namespace at
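The new count_nonzero lowers to (self != 0).sum(dims), so a quick equivalence check reads (XPU assumed available, CPU fallback otherwise):

import torch

device = "xpu" if torch.xpu.is_available() else "cpu"

x = torch.tensor([[0., 1., 2.],
                  [3., 0., 0.]], device=device)

# Both calls should agree, since the implementation above is literally (self != 0).sum(dims).
print(torch.count_nonzero(x, dim=1))  # -> tensor([2, 1])
print((x != 0).sum(dim=1))            # -> tensor([2, 1])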
