YuPeng0214 committed
Commit 5af741b · verified · Parent: a7d7526

Upload README.md with huggingface_hub

Files changed (1)
1. README.md: +6 -5
README.md CHANGED
@@ -95,6 +95,7 @@ We provide detailed parameters and environment configurations so that you can ru
 - Accelerate: 1.3.0
 - Datasets: 3.2.0
 - Tokenizers: 0.21.2
+- mteb: 1.38.30
 #### Transformers model load arguments
 torch_dtype=torch.bfloat16<br>
 attn_implementation='sdpa'<br>
@@ -133,11 +134,11 @@ This is a general script that can be used to evaluate other huggingface embeddin
 ```
 from sentence_transformers import SentenceTransformer
 
-model = SentenceTransformer("QZhou-Embedding")
+model = SentenceTransformer("Kingsoft-LLM/QZhou-Embedding")
 
 model = SentenceTransformer(
-    "QZhou-Embedding",
-    model_kwargs={"device_map": "auto", "trust_remote_code": True},
+    "Kingsoft-LLM/QZhou-Embedding",
+    model_kwargs={"device_map": "cuda", "trust_remote_code": True},
     tokenizer_kwargs={"padding_side": "left", "trust_remote_code": True},
     trust_remote_code=True
 )
@@ -195,8 +196,8 @@ documents = [
 
 input_texts = queries + documents
 
-tokenizer = AutoTokenizer.from_pretrained('QZhou-Embedding', padding_side='left', trust_remote_code=True)
-model = AutoModel.from_pretrained('QZhou-Embedding', trust_remote_code=True, device_map='auto')
+tokenizer = AutoTokenizer.from_pretrained('/home/yupeng5/yupeng/output_models/output/publish/QZhou-Embedding', padding_side='left', trust_remote_code=True)
+model = AutoModel.from_pretrained('/home/yupeng5/yupeng/output_models/output/publish/QZhou-Embedding', trust_remote_code=True, device_map='cuda')
 
 batch_dict = tokenizer(
     input_texts,
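
For reference, the load arguments touched by this diff can be exercised with a short, self-contained snippet. This is a minimal usage sketch, not part of the README: the query and document strings are illustrative, and it assumes `sentence-transformers` 3.x (for `model.similarity`) plus an available CUDA device, matching the `device_map="cuda"` setting introduced in this commit.

```
# Minimal usage sketch (not from the README): load the model with the updated
# arguments from this commit and embed a few illustrative strings.
from sentence_transformers import SentenceTransformer

model = SentenceTransformer(
    "Kingsoft-LLM/QZhou-Embedding",
    model_kwargs={"device_map": "cuda", "trust_remote_code": True},
    tokenizer_kwargs={"padding_side": "left", "trust_remote_code": True},
    trust_remote_code=True,
)

# Illustrative inputs; the README defines its own `queries` and `documents`.
queries = ["What is the capital of France?"]
documents = ["Paris is the capital and largest city of France."]

query_embeddings = model.encode(queries)        # shape: (num_queries, hidden_size)
document_embeddings = model.encode(documents)   # shape: (num_documents, hidden_size)

# Query-document similarity matrix (cosine by default in sentence-transformers 3.x).
print(model.similarity(query_embeddings, document_embeddings))
```

Note that switching `device_map` from `"auto"` to `"cuda"` places the whole model on a single CUDA device instead of letting Accelerate distribute the weights across available devices.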