From fb9b50dbc8f9179726785fb6ad7a054258581511 Mon Sep 17 00:00:00 2001
From: Jael Gu
Date: Sat, 2 Apr 2022 15:36:27 +0800
Subject: [PATCH] Update

Signed-off-by: Jael Gu
---
 README.md | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index 705363f..a9b6a85 100644
--- a/README.md
+++ b/README.md
@@ -16,7 +16,7 @@ operation, which scales quadratically with the sequence length. To address
 this we introduce the Longformer with an attention mechanism that scales
 linearly with sequence length, making it easy to process documents of
 thousands of tokens or longer[2].
 
-## Reference
+### References
 
 [1].https://huggingface.co/docs/transformers/v4.16.2/en/model_doc/longformer#transformers.LongformerConfig
@@ -34,7 +34,7 @@ from towhee import dc
 
 dc.stream(["Hello, world."])
-    .text_embedding.longformer(model_name="allenai/longformer-base-4096")
+    .text_embedding.longformer(model_name="allenai/longformer-base-4096")
     .show()
 ```
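For context beyond the patch itself: the operator wraps the Hugging Face Longformer model documented in reference [1] of the README. Below is a minimal sketch of that underlying API, using the same `allenai/longformer-base-4096` checkpoint as the README example; it assumes the `transformers` package (with PyTorch) is installed and is illustrative only, not part of this patch.

```python
# Minimal sketch of the Hugging Face Longformer API that the towhee
# operator wraps (see reference [1]); illustrative, not part of this patch.
from transformers import LongformerModel, LongformerTokenizer

# Same checkpoint as the README example; this model accepts
# sequences of up to 4096 tokens.
tokenizer = LongformerTokenizer.from_pretrained("allenai/longformer-base-4096")
model = LongformerModel.from_pretrained("allenai/longformer-base-4096")

inputs = tokenizer("Hello, world.", return_tensors="pt")
outputs = model(**inputs)

# Token-level embeddings: shape (batch_size, sequence_length, hidden_size).
print(outputs.last_hidden_state.shape)
```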