Add adapter roberta-base-multinli_pfeiffer version AdapterFusion
- ._adapter_config.json +0 -0
- README.md +61 -0
- adapter_config.json +41 -0
- pytorch_adapter.bin +3 -0
._adapter_config.json
ADDED
Binary file (220 Bytes).
README.md
ADDED
@@ -0,0 +1,61 @@
---
tags:
- adapter-transformers
- adapterhub:nli/multinli
- roberta
license: "apache-2.0"
---

# Adapter `roberta-base-multinli_pfeiffer` for roberta-base

Pfeiffer adapter trained on the MultiNLI dataset.

**This adapter was created for usage with the [Adapters](https://github.com/Adapter-Hub/adapters) library.**

## Usage

First, install `adapters`:

```
pip install -U adapters
```

Now, the adapter can be loaded and activated like this:

```python
from adapters import AutoAdapterModel

model = AutoAdapterModel.from_pretrained("roberta-base")
adapter_name = model.load_adapter("AdapterHub/roberta-base-multinli_pfeiffer")
model.set_active_adapters(adapter_name)
```

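The adapter is published without a prediction head (see the next section), so for three-way NLI classification a head has to be added and fine-tuned on top of it. The snippet below is a minimal sketch, not part of the original card: the head name, label order, and example sentences are assumptions, and the freshly added head is randomly initialized until it is trained.

```python
import torch
from transformers import AutoTokenizer
from adapters import AutoAdapterModel

tokenizer = AutoTokenizer.from_pretrained("roberta-base")
model = AutoAdapterModel.from_pretrained("roberta-base")

adapter_name = model.load_adapter("AdapterHub/roberta-base-multinli_pfeiffer")
# Add a (randomly initialized) three-way classification head; it must be
# fine-tuned before its predictions mean anything. Label order is assumed.
id2label = {0: "entailment", 1: "neutral", 2: "contradiction"}
model.add_classification_head(adapter_name, num_labels=3, id2label=id2label)
model.set_active_adapters(adapter_name)

premise = "A soccer game with multiple males playing."
hypothesis = "Some men are playing a sport."
inputs = tokenizer(premise, hypothesis, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 3)
print(id2label[int(logits.argmax(dim=-1))])
```

The printed label is only meaningful after fine-tuning, e.g. starting from `model.train_adapter(adapter_name)`, which freezes the base model so that only the adapter (and head) parameters are updated.
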
## Architecture & Training

- Adapter architecture: pfeiffer
- Prediction head: None
- Dataset: [MultiNLI](https://github.com/NYU-MLL/multiNLI)

## Author Information

- Author name(s): Jonas Pfeiffer
- Author email: [email protected]
- Author links: [Website](https://pfeiffer.ai), [GitHub](https://github.com/JoPfeiff), [Twitter](https://twitter.com/@PfeiffJo)

## Citation

```bibtex
@article{Pfeiffer2020AdapterFusion,
  author  = {Pfeiffer, Jonas and Kamath, Aishwarya and R{\"{u}}ckl{\'{e}}, Andreas and Cho, Kyunghyun and Gurevych, Iryna},
  journal = {arXiv preprint},
  title   = {{AdapterFusion}: Non-Destructive Task Composition for Transfer Learning},
  url     = {https://arxiv.org/pdf/2005.00247.pdf},
  year    = {2020}
}
```

*This adapter has been auto-imported from https://github.com/Adapter-Hub/Hub/blob/master/adapters/ukp/roberta-base-multinli_pfeiffer.yaml*.
adapter_config.json
ADDED
@@ -0,0 +1,41 @@
{
  "config": {
    "adapter_residual_before_ln": false,
    "cross_adapter": false,
    "dropout": 0.0,
    "factorized_phm_W": true,
    "factorized_phm_rule": false,
    "hypercomplex_nonlinearity": "glorot-uniform",
    "init_weights": "bert",
    "inv_adapter": null,
    "inv_adapter_reduction_factor": null,
    "is_parallel": false,
    "learn_phm": true,
    "leave_out": [],
    "ln_after": false,
    "ln_before": false,
    "mh_adapter": false,
    "non_linearity": "relu",
    "original_ln_after": true,
    "original_ln_before": true,
    "output_adapter": true,
    "phm_bias": true,
    "phm_c_init": "normal",
    "phm_dim": 4,
    "phm_init_range": 0.0001,
    "phm_layer": false,
    "phm_rank": 1,
    "reduction_factor": 16,
    "residual_before_ln": true,
    "scaling": 1.0,
    "shared_W_phm": false,
    "shared_phm_rule": true,
    "use_gating": false
  },
  "hidden_size": 768,
  "model_class": "RobertaAdapterModel",
  "model_name": "roberta-base",
  "model_type": "roberta",
  "name": "multinli",
  "version": "0.2.0"
}
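For orientation, this configuration describes the library's sequential bottleneck ("Pfeiffer") adapter: a single down-/up-projection inserted after the feed-forward block, with a reduction factor of 16 and ReLU non-linearity. The sketch below, which is not part of the original files, shows how an adapter with the same key settings could be added from scratch; the adapter name `multinli_scratch` is an assumption, and `SeqBnConfig` is the newer name the `adapters` library uses for the Pfeiffer configuration.

```python
from adapters import AutoAdapterModel, SeqBnConfig

model = AutoAdapterModel.from_pretrained("roberta-base")

# Sequential bottleneck (Pfeiffer-style) adapter matching the key settings
# from adapter_config.json: bottleneck after the FFN, reduction factor 16,
# ReLU non-linearity.
config = SeqBnConfig(reduction_factor=16, non_linearity="relu")
model.add_adapter("multinli_scratch", config=config)  # hypothetical name
model.train_adapter("multinli_scratch")  # freeze the base model, train only the adapter
```
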
pytorch_adapter.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:740197394f0c275f32d125e627e88dad37f9831740e8327fcdc345a7784765eb
size 3595046
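`pytorch_adapter.bin` is stored as a Git LFS pointer rather than as the raw weights: `oid` is the SHA-256 digest of the actual binary and `size` its length in bytes. As a rough illustration (not part of the original files), a downloaded copy could be checked against the pointer like this; the local path is an assumption.

```python
import hashlib
import os

# Values copied from the LFS pointer above.
EXPECTED_OID = "740197394f0c275f32d125e627e88dad37f9831740e8327fcdc345a7784765eb"
EXPECTED_SIZE = 3595046

path = "pytorch_adapter.bin"  # hypothetical path to the resolved (non-pointer) file

assert os.path.getsize(path) == EXPECTED_SIZE, "size does not match the LFS pointer"

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)
assert sha256.hexdigest() == EXPECTED_OID, "checksum does not match the LFS pointer"
print("pytorch_adapter.bin matches the LFS pointer")
```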