{
  "architectures": [
    "MolLLaMA"
  ],
  "model_type": "mol_llama",
  "graph_encoder_config": {
    "encoder_types": [
      "unimol",
      "moleculestm"
    ],
    "moleculestm_config": {
      "repo_id": "chao1224/MoleculeSTM",
      "filename": "demo/demo_checkpoints_Graph/molecule_model.pth"
    },
    "unimol_config": {
      "repo_id": "dptech/Uni-Mol-Models",
      "dictionary_filename": "mol.dict.txt",
      "weights_filename": "mol_pre_no_h_220816.pt"
    }
  },
  "blending_module_config": {
    "num_heads": 8,
    "num_layers": 4
  },
  "llm_config": {
    "llm_model": "meta-llama/Llama-2-7b-chat-hf",
    "lora_config": {
      "lora_alpha": 32,
      "lora_dropout": 0.1,
      "r": 8
    }
  },
  "qformer_config": {
    "bert_name": "allenai/scibert_scivocab_uncased",
    "num_query_tokens": 8,
    "cross_attention_freq": 2,
    "embed_dim": 256
  }
}
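
Because `mol_llama` is not a built-in `transformers` model type, this file is most easily inspected with the standard library. Below is a minimal sketch that reads the configuration above and pulls out a few of its fields; the local filename `config.json` is an assumption for illustration, not something stated by the file itself.

```python
import json

# Read the configuration shown above; "config.json" is an assumed local path.
with open("config.json") as f:
    cfg = json.load(f)

assert cfg["model_type"] == "mol_llama"
print(cfg["graph_encoder_config"]["encoder_types"])  # ['unimol', 'moleculestm']
print(cfg["llm_config"]["lora_config"])              # {'lora_alpha': 32, 'lora_dropout': 0.1, 'r': 8}
print(cfg["qformer_config"]["num_query_tokens"])     # 8
```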
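
The `blending_module_config` block (`num_heads: 8`, `num_layers: 4`) suggests a transformer stack that fuses the UniMol and MoleculeSTM token sequences. The following is a hypothetical sketch of such a module under that assumption; the class name, the `hidden_dim` value, and the concatenate-then-self-attend design are illustrative guesses, not the model's confirmed implementation.

```python
import torch
import torch.nn as nn

class BlendingModule(nn.Module):
    """Assumed design: concatenate both encoders' tokens, then self-attend."""

    def __init__(self, hidden_dim: int = 512, num_heads: int = 8, num_layers: int = 4):
        super().__init__()
        # hidden_dim=512 is an assumption; the config does not specify it.
        layer = nn.TransformerEncoderLayer(
            d_model=hidden_dim, nhead=num_heads, batch_first=True
        )
        self.encoder = nn.TransformerEncoder(layer, num_layers=num_layers)

    def forward(self, unimol_tokens: torch.Tensor, stm_tokens: torch.Tensor) -> torch.Tensor:
        # Concatenate the two token sequences along the sequence axis and
        # let self-attention mix ("blend") the two representations.
        tokens = torch.cat([unimol_tokens, stm_tokens], dim=1)  # (B, N1+N2, D)
        return self.encoder(tokens)

# Head and layer counts taken from blending_module_config above.
blend = BlendingModule(num_heads=8, num_layers=4)
```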