diff --git a/app/routes.py b/app/routes.py
index 0e2d85ca3e..208f7c84f1 100644
--- a/app/routes.py
+++ b/app/routes.py
@@ -141,6 +141,11 @@ def emnlp():
     return render_template('alps2022.html')
 
 
+@bp.route('/adapters/')
+def adapters_lib():
+    return render_template('adapters_lib.html')
+
+
 @bp.app_errorhandler(404)
 def error_404(error):
     return render_template('errors/404.html'), 404
diff --git a/app/static/images/adapters_feature_table.png b/app/static/images/adapters_feature_table.png
new file mode 100644
index 0000000000..4933e44f22
Binary files /dev/null and b/app/static/images/adapters_feature_table.png differ
diff --git a/app/templates/adapters_lib.html b/app/templates/adapters_lib.html
new file mode 100644
index 0000000000..d69862a0e6
--- /dev/null
+++ b/app/templates/adapters_lib.html
@@ -0,0 +1,102 @@
+{% extends 'base.html' %}
+{% set active_page = "adapters" %}
+
+{% block header %}
+<div>
+    <h1>
+        {% block title %} Adapters {% endblock %}
+    </h1>
+    <p>
+        A Unified Library for Parameter-Efficient and Modular Transfer Learning
+    </p>
+</div>
+{% endblock %}
+
+
+{% block content %}
+
+<div>
+    <h3>Abstract</h3>
+    <p>
+        We introduce Adapters, an open-source library that unifies
+        parameter-efficient and modular transfer learning in large language
+        models. By integrating 10 diverse adapter methods into a unified
+        interface, Adapters offers ease of use and flexible configuration.
+        Our library allows researchers and practitioners to leverage adapter
+        modularity through composition blocks, enabling the design of complex
+        adapter setups. We demonstrate the library's efficacy by evaluating
+        its performance against full fine-tuning on various NLP tasks.
+        Adapters provides a powerful tool for addressing the challenges of
+        conventional fine-tuning paradigms and promoting more efficient and
+        modular transfer learning.
+    </p>
+</div>
+
+<div>
+    <h3>Package</h3>
+    <a href="https://pypi.org/project/adapters/">
+        PyPI Package
+    </a>
+    <p>
+        The Adapters package can be installed via pip:
+    </p>
+    <pre>
+pip install adapters
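+
+# Optional alternative (an illustrative addition, not part of the original
+# page): install the latest development version from source, assuming the
+# adapter-hub/adapters GitHub repository.
+pip install git+https://github.com/adapter-hub/adapters.git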
+    </pre>
+</div>
+
+<div>
+    <h3>Demo</h3>
+    <a href="...">
+        Screencast Video
+    </a>
+    <p>
+        Example Usage:
+    </p>
+    <pre>
+import adapters
+from transformers import BertModel
+
+model = BertModel.from_pretrained("bert-base-uncased")
+# attach adapter functionality to the plain Transformers model
+adapters.init(model)
+
+# add a sequential bottleneck adapter named "my-adapter"
+model.add_adapter("my-adapter", config="seq_bn")
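+
+# Illustrative next steps (an assumption, not shown on the original page):
+# activate the adapter and freeze all other model weights for training...
+model.train_adapter("my-adapter")
+# ...or simply activate it for inference.
+model.set_active_adapters("my-adapter")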
+    </pre>
+</div>
+
+<div>
+    <h3>Code</h3>
+    <a href="https://github.com/adapter-hub/adapters">
+        Code Repository
+    </a>
+</div>
+
+<div>
+    <h3>Features</h3>
+    <p>
+        Feature comparison between the initial AdapterHub release and the proposed Adapters library:
+    </p>
+    <img src="{{ url_for('static', filename='images/adapters_feature_table.png') }}"
+         alt="Feature comparison between the initial AdapterHub release and the Adapters library">
+</div>
+
+{% endblock %}
diff --git a/requirements.txt b/requirements.txt
index ec11f39f82..261c1d530c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,7 @@ Flask-Assets == 2.0
 Flask-Markdown == 0.3
 Flask-SQLAlchemy == 2.4.1
 Frozen-Flask == 0.15
-jsmin == 2.2.2
+jsmin == 3.0.1
 pygments == 2.7.4
 python-markdown-math == 0.8
 requests == 2.23