Spaces:
Running
on
Zero
Running
on
Zero
Advik
committed on
Commit
·
b66720f
1
Parent(s):
7cd2476
minor errors
Browse files- software.py +6 -4
software.py
CHANGED
|
@@ -9,6 +9,7 @@ from scipy.stats import skew, kurtosis, entropy
|
|
| 9 |
from tqdm import tqdm
|
| 10 |
from torch.nn import CrossEntropyLoss
|
| 11 |
from pathlib import Path
|
|
|
|
| 12 |
|
| 13 |
class Diversity:
|
| 14 |
def __init__(self, model, tokenizer, device):
|
|
@@ -102,14 +103,14 @@ class Software:
|
|
| 102 |
else:
|
| 103 |
self.device_bi = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
|
| 104 |
|
| 105 |
-
self.div_tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b", use_fast=False, trust_remote_code=True
|
| 106 |
self.div_model = AutoModelForCausalLM.from_pretrained(
|
| 107 |
-
"tiiuae/falcon-7b", device_map=self.device_div, torch_dtype=torch.float16, trust_remote_code=True
|
| 108 |
)
|
| 109 |
|
| 110 |
-
self.bi_tokenizer = AutoTokenizer.from_pretrained("google/gemma-1.1-2b-it", use_fast=False, trust_remote_code=True
|
| 111 |
self.bi_model = AutoModelForCausalLM.from_pretrained(
|
| 112 |
-
"google/gemma-1.1-2b-it", device_map=self.device_bi, torch_dtype=torch.float16, trust_remote_code=True
|
| 113 |
)
|
| 114 |
|
| 115 |
self.diveye = Diversity(self.div_model, self.div_tokenizer, self.device_div)
|
|
@@ -128,6 +129,7 @@ class Software:
|
|
| 128 |
texts.append(obj["text"])
|
| 129 |
return ids, texts
|
| 130 |
|
|
|
|
| 131 |
def evaluate(self, text):
|
| 132 |
diveye_features = self.diveye.compute_features(text)
|
| 133 |
biscope_features = self.biscope.detect_single_sample(text)
|
|
|
|
| 9 |
from tqdm import tqdm
|
| 10 |
from torch.nn import CrossEntropyLoss
|
| 11 |
from pathlib import Path
|
| 12 |
+
import spaces
|
| 13 |
|
| 14 |
class Diversity:
|
| 15 |
def __init__(self, model, tokenizer, device):
|
|
|
|
| 103 |
else:
|
| 104 |
self.device_bi = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
|
| 105 |
|
| 106 |
+
self.div_tokenizer = AutoTokenizer.from_pretrained("tiiuae/falcon-7b", use_fast=False, trust_remote_code=True)
|
| 107 |
self.div_model = AutoModelForCausalLM.from_pretrained(
|
| 108 |
+
"tiiuae/falcon-7b", device_map=self.device_div, torch_dtype=torch.float16, trust_remote_code=True
|
| 109 |
)
|
| 110 |
|
| 111 |
+
self.bi_tokenizer = AutoTokenizer.from_pretrained("google/gemma-1.1-2b-it", use_fast=False, trust_remote_code=True)
|
| 112 |
self.bi_model = AutoModelForCausalLM.from_pretrained(
|
| 113 |
+
"google/gemma-1.1-2b-it", device_map=self.device_bi, torch_dtype=torch.float16, trust_remote_code=True
|
| 114 |
)
|
| 115 |
|
| 116 |
self.diveye = Diversity(self.div_model, self.div_tokenizer, self.device_div)
|
|
|
|
| 129 |
texts.append(obj["text"])
|
| 130 |
return ids, texts
|
| 131 |
|
| 132 |
+
@spaces.GPU
|
| 133 |
def evaluate(self, text):
|
| 134 |
diveye_features = self.diveye.compute_features(text)
|
| 135 |
biscope_features = self.biscope.detect_single_sample(text)
|