sentenceGen.py
import random
import math
import numpy as np
import re
from transformers import BertTokenizer, pipeline


class SentenceGenerator:
    def __init__(self, templates_path, reserved_path, device=-1):
        # BERT tokenizer and fill-mask pipeline used to propose replacement words.
        self.tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
        self.unmasker = pipeline('fill-mask', model='bert-base-uncased', device=device)
        # Reserved words are never replaced; one word per line.
        with open(reserved_path, 'r') as file:
            text = file.read()
        self.reserved_words = text.split('\n')
        # Template sentences, one per line, stored pre-tokenized.
        with open(templates_path, 'r', encoding='utf-8') as file:
            text = file.read()
        sentences = text.split('\n')
        self.templates = [self.tokenize(sentence) for sentence in sentences]
    def tokenize(self, sentence):
        return self.tokenizer.basic_tokenizer.tokenize(sentence, never_split=self.tokenizer.all_special_tokens)

    def joinSentence(self, words):
        # Re-join tokens into a sentence and undo the extra spacing that
        # tokenization added around punctuation, possessives, and quoted spans.
        sentence = ' '.join(words)
        sentence = sentence.replace(' ,', ',')
        sentence = sentence.replace(' .', '.')
        sentence = sentence.replace(" ' s", "'s")
        sentence = re.sub(r'"\s([^"]*)\s"', r'"\1"', sentence)
        # Single-quoted spans are normalized to double quotes.
        sentence = re.sub(r"'\s([^']*)\s'", r'"\1"', sentence)
        return sentence
    def transform_sentence(self, sentence, temperature=1):
        # Pick a random non-reserved word, mask it, and let BERT propose replacements.
        idx = [i for i, word in enumerate(sentence) if word not in self.reserved_words]
        if len(idx) <= 0:
            return sentence
        mid = idx[random.randrange(len(idx))]
        masked_sentence = ' '.join([word if i != mid else '[MASK]' for i, word in enumerate(sentence)])
        candidates = self.unmasker(masked_sentence)
        # Drop 'the', words already present in the sentence, and non-word tokens.
        candidates = [c for c in candidates
                      if c['token_str'] != 'the'
                      and c['token_str'] not in sentence
                      and re.match(r'^\w+', c['token_str']) is not None]
        if len(candidates) == 0:
            return sentence
        # Re-weight the candidate scores with a temperature (higher temperature
        # flattens the distribution), then sample one replacement.
        scores = [item['score'] for item in candidates]
        logits = [math.log(s) for s in scores]
        scores = [math.exp(logit / temperature) for logit in logits]
        score_sum = sum(scores)
        scores = [s / score_sum for s in scores]
        index = np.random.choice(len(scores), p=scores)
        nw = candidates[index]['token_str']
        new_sentence = [word if i != mid else nw for i, word in enumerate(sentence)]
        return new_sentence
    def generate(self, temperature=1, change_rate=0.5):
        # Start from a random template and apply a randomly scaled number of
        # single-word substitutions (log-normal multiplier around
        # n_vary_word * change_rate, at least one change).
        template = self.templates[random.randrange(len(self.templates))]
        n_vary_word = len([word for word in template if word not in self.reserved_words])
        n_changes = max(1, round(math.exp(np.random.randn() * 0.4) * n_vary_word * change_rate))
        words = template
        for i in range(n_changes):
            words = self.transform_sentence(words, temperature)
        return self.joinSentence(words)
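

# Example usage (a minimal sketch, not part of the original file):
# 'templates.txt' and 'reserved.txt' are hypothetical paths; the former holds
# one template sentence per line, the latter one protected word per line.
# device=-1 runs the fill-mask pipeline on CPU; pass a GPU index to use CUDA.
if __name__ == '__main__':
    generator = SentenceGenerator('templates.txt', 'reserved.txt', device=-1)
    for _ in range(3):
        print(generator.generate(temperature=1.2, change_rate=0.5))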