2024
EMNLP Main
Scalable Data Ablation Approximations for Language Models through Modular Training and Merging
@inproceedings{na2024scalable,
  title = {Scalable Data Ablation Approximations for Language Models through Modular Training and Merging},
  author = {Na, Clara and Magnusson, Ian and Jha, Ananya Harsh and Sherborne, Tom and Strubell, Emma and Dodge, Jesse and Dasigi, Pradeep},
  year = {2024},
  eprint = {2410.15661},
  archiveprefix = {arXiv},
  primaryclass = {cs.CL},
  selected = {true},
  booktitle = {The 2024 Conference on Empirical Methods in Natural Language Processing},
  abbr = {EMNLP Main},
  pdf = {scalable-data-ablations.pdf},
  bibtex_show = {true},
  arxiv = {2410.15661},
  equal_contribution = {0}
}
2023
EMNLP Findings
Energy and Carbon Considerations of Fine-Tuning BERT
@inproceedings{wang2023energy,
  title = {Energy and Carbon Considerations of Fine-Tuning BERT},
  author = {Wang, Xiaorong and Na, Clara and Strubell, Emma and Friedler, Sorelle and Luccioni, Sasha},
  booktitle = {Findings of the Association for Computational Linguistics: EMNLP 2023},
  year = {2023},
  eprint = {2311.10267},
  archiveprefix = {arXiv},
  primaryclass = {cs.CL},
  equal_contribution = {2},
  abbr = {EMNLP Findings},
  pdf = {energy_and_carbon.pdf},
  selected = {true},
  arxiv = {2311.10267},
  bibtex_show = {true}
}
EMNLP Main
To Build Our Future, We Must Know Our Past: Contextualizing Paradigm Shifts in Natural Language Processing
@inproceedings{gururaja2023build,
  title = {To Build Our Future, We Must Know Our Past: Contextualizing Paradigm Shifts in Natural Language Processing},
  author = {Gururaja, Sireesh and Bertsch, Amanda and Na, Clara and Widder, David Gray and Strubell, Emma},
  year = {2023},
  eprint = {2310.07715},
  archiveprefix = {arXiv},
  primaryclass = {cs.CL},
  selected = {true},
  booktitle = {The 2023 Conference on Empirical Methods in Natural Language Processing},
  abbr = {EMNLP Main},
  pdf = {to_build_our_future.pdf},
  bibtex_show = {true},
  arxiv = {2310.07715},
  equal_contribution = {3}
}
EMNLP Main
The Framework Tax: Disparities Between Inference Efficiency in Research and Deployment
@inproceedings{fernandez2023framework,
  title = {The Framework Tax: Disparities Between Inference Efficiency in Research and Deployment},
  author = {Fernandez, Jared and Kahn, Jacob and Na, Clara and Bisk, Yonatan and Strubell, Emma},
  year = {2023},
  eprint = {2302.06117},
  archiveprefix = {arXiv},
  primaryclass = {cs.LG},
  abbr = {EMNLP Main},
  selected = {true},
  booktitle = {The 2023 Conference on Empirical Methods in Natural Language Processing},
  arxiv = {2302.06117},
  pdf = {the_framework_tax.pdf},
  bibtex_show = {true},
  equal_contribution = {0}
}
2022
EMNLP Findings
Train Flat, Then Compress: Sharpness-Aware Minimization Learns More Compressible Models
@inproceedings{na-etal-2022-train,
  title = {Train Flat, Then Compress: Sharpness-Aware Minimization Learns More Compressible Models},
  author = {Na, Clara and Mehta, Sanket Vaibhav and Strubell, Emma},
  editor = {Goldberg, Yoav and Kozareva, Zornitsa and Zhang, Yue},
  booktitle = {Findings of the Association for Computational Linguistics: EMNLP 2022},
  month = dec,
  year = {2022},
  address = {Abu Dhabi, United Arab Emirates},
  publisher = {Association for Computational Linguistics},
  url = {https://aclanthology.org/2022.findings-emnlp.361},
  doi = {10.18653/v1/2022.findings-emnlp.361},
  pages = {4909--4936},
  abbr = {EMNLP Findings},
  pdf = {train-flat-then-compress.pdf},
  poster = {poster-trainflat.pdf},
  selected = {true},
  arxiv = {2205.12694},
  html = {https://aclanthology.org/2022.findings-emnlp.361},
  bibtex_show = {true}
}
Virtual Task Selection in Meta-Learning for Domain Generalization in Semantic Parsing
Less Is More? In Patents, Design Transformations that Add Occur More Often Than Those that Subtract
This article examines how design transformations are described in one specific but important context: patents. Using text analytics, we examined term frequency and term frequency-inverse document frequency across 33,100 full patents from 2017 sourced from the US Patent and Trademark Office. Using a corpus-based approach, we developed lexicons to capture two general types of design transformation: addition and subtraction. In the patent data we collected and analyzed, addition design transformations were more common than subtraction design transformations (2.7:1). This ratio of addition to subtraction was higher than the ratios found in non-design texts (1:2.5). While patents represent only one area of design, and the patent texts we analyzed were not necessarily written by designers themselves, something about the process that produces patents leads to far greater use of addition than subtraction. We discuss possible reasons for and implications of these findings.
@inproceedings{10.1007/978-3-030-90625-2_16,
  author = {Stenger, Katelyn and Na, Clara and Klotz, Leidy},
  editor = {Gero, John S.},
  title = {Less Is More? In Patents, Design Transformations that Add Occur More Often Than Those that Subtract},
  booktitle = {Design Computing and Cognition'20},
  year = {2020},
  publisher = {Springer International Publishing},
  address = {Cham},
  pages = {283--295},
  isbn = {978-3-030-90625-2},
  html = {https://link.springer.com/chapter/10.1007/978-3-030-90625-2_16},
  bibtex_show = {true},
  equal_contribution = {0}
}