script.py
# !pip install groq transformers huggingface_hub
import os

from transformers import pipeline
from huggingface_hub import login
from groq import Groq

# Hugging Face login -- read the token from the environment instead of
# hard-coding a secret in the script (set HF_TOKEN before running)
login(token=os.environ["HF_TOKEN"])

# Groq API key, likewise taken from the environment (set GROQ_API_KEY)
client = Groq(
    api_key=os.environ["GROQ_API_KEY"],
)
# Replace with your specific model if it's available on Hugging Face
classifier = pipeline("ner", model="bigcode/starpii", aggregation_strategy="simple")
text = input("Enter your prompt: ")
result = classifier(text)
print(result)
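# For reference, the aggregated NER output is a list of dicts with
# 'entity_group', 'score', 'word', 'start', and 'end' keys; a hypothetical
# example (values are illustrative, not real model output):
# [{'entity_group': 'NAME', 'score': 0.99, 'word': 'Alice', 'start': 11, 'end': 16}]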
def replace_pii(text, result):
    # Sort entities by start index in descending order to handle replacements
    # from the end of the string
    result = sorted(result, key=lambda x: x['start'], reverse=True)
    for entity in result:
        placeholder = entity['entity_group']
        start = entity['start']
        end = entity['end']
        # Replace the PII in the text, and add spaces if necessary
        if start > 0 and text[start - 1] != ' ':
            placeholder = ' ' + placeholder
        if end < len(text) and text[end] != ' ':
            placeholder = placeholder + ' '
        text = text[:start] + placeholder + text[end:]
    return text
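# Sketch of the intended effect (hypothetical input/output, assuming the model
# tags the address as an EMAIL entity):
#   replace_pii("Contact me at alice@example.com", result)
#   -> "Contact me at EMAIL"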
modified_text = replace_pii(text, result)
print(modified_text)
# Groq
chat_completion = client.chat.completions.create(
    messages=[
        {
            "role": "user",
            "content": "Repeat whatever I write next after the full stop. " + modified_text,
        }
    ],
    model="llama3-8b-8192",
)
print(chat_completion.choices[0].message.content)