Add cross-layer energy paper (#270)
* Add cross-layer energy paper

* Add braces to AI
jaywonchung authored Aug 6, 2024
1 parent d65df6b commit fdb5245
Showing 3 changed files with 21 additions and 0 deletions.
14 changes: 14 additions & 0 deletions source/_data/SymbioticLab.bib
@@ -1739,6 +1739,20 @@ @Article{crosslayer-energy:arxiv24
}
}
@inproceedings{crosslayer-energy:eecs24,
author = {Jae-Won Chung and Nishil Talati and Mosharaf Chowdhury},
booktitle = {Energy-Efficient Computing for Science Workshop},
title = {Toward Cross-Layer Energy Optimizations in {AI} Systems},
year = {2024},
publist_confkey = {EECS'24},
publist_link = {paper || crosslayer-energy-eecs24.pdf},
publist_topic = {Systems + AI},
publist_topic = {Energy-Efficient Systems},
publist_abstract = {The ``AI for Science, Energy, and Security'' report from DOE outlines a significant focus on developing and optimizing artificial intelligence workflows for a foundational impact on a broad range of DOE missions. With the pervasive usage of artificial intelligence (AI) and machine learning (ML) tools and techniques, their energy efficiency is likely to become the gating factor toward adoption. This is because generative AI (GenAI) models are massive energy hogs: for instance, training a 200-billion parameter large language model (LLM) at Amazon is estimated to have taken 11.9 GWh, which is enough to power more than a thousand average U.S. households for a year. Inference consumes even more energy, because a model trained once serves millions. Given this scale, high energy efficiency is key to addressing the power delivery problem of constructing and operating new supercomputers and datacenters specialized for AI workloads. In that regard, we outline software- and architecture-level research challenges and opportunities, setting the stage for creating cross-layer energy optimizations in AI systems.
}
}
@Article{fedtrans:arxiv24,
author = {Yuxuan Zhu and Jiachen Liu and Mosharaf Chowdhury and Fan Lai},
title = {{FedTrans}: Efficient Federated Learning Over Heterogeneous Clients via Model Transformation},
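As a side note, the abstract's "more than a thousand households" comparison is easy to verify. A minimal sanity check, assuming an average U.S. household uses roughly 10,600 kWh of electricity per year (an EIA estimate; this figure is not part of the entry itself):

# Rough sanity check of the 11.9 GWh figure quoted in the abstract.
# Assumption: average U.S. household consumption of ~10,600 kWh/year (EIA estimate).
training_energy_kwh = 11.9e6        # 11.9 GWh expressed in kWh
household_kwh_per_year = 10_600     # assumed annual consumption per household
households_powered = training_energy_kwh / household_kwh_per_year
print(f"~{households_powered:.0f} households for a year")  # ~1123, i.e. "more than a thousand"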
Binary file not shown.
7 changes: 7 additions & 0 deletions source/publications/index.md
@@ -324,6 +324,13 @@ venues:
name: 1st Workshop on Sustainable Computer Systems Design and Implementation
date: 2022-07-10
url: https://hotcarbon.org/
EECS:
category: Workshops
occurrences:
- key: EECS'24
name: Energy-Efficient Computing for Science Workshop
date: 2024-09-09
url: https://web.cvent.com/event/a3dd901a-699e-408c-8a84-81445e6ea64f
'USENIX ;login:':
category: Journals
occurrences:
