import argparse
import asyncio
import csv
import os
import random

import nltk
from pythonosc import dispatcher, osc_server, udp_client
from transformers import pipeline, Pipeline
# Make NLTK look in this user's data directory for tokenizer/tagger models.
nltk.data.path.append('C:/Users/Florence Nightingale/AppData/Roaming/nltk_data')
# CSV file where every incoming sentence and its classification are appended.
# NOTE(review): the comment originally here said "Initialize global message
# counter", which does not match this line — the `new_message` counter used
# later is presumably defined elsewhere in the full file; confirm.
file_path = r'C:\Users\Florence Nightingale\Documents\Florence Nightingale Living Portrait\InputHistory.csv'
# NOTE(review): the rest of the code reads these as Config.<NAME>; they are
# presumably attributes of a Config class whose header is not visible in this
# chunk — confirm against the full file.
TABLET_IP = "192.168.0.3"  # Unity tablet client address
OFFENSIVE_MODEL = "unitary/toxic-bert"  # toxicity classifier checkpoint
CONTEXT_MODEL = "facebook/bart-large-mnli"  # zero-shot NLI model checkpoint
ZERO_SHOT_LABELS = ["angel", "hero", "feminist", "rebel"]  # candidate persona labels
class FlorenceAttributes:
    """Holds the four ranked zero-shot labels for a sentence and their scores.

    Slots are ranked primary..quaternary by descending classifier score.
    `ambiguous` flags a classification the caller considers inconclusive.
    """

    # Candidate labels, shared with the zero-shot classifier configuration.
    LABELS = Config.ZERO_SHOT_LABELS

    def __init__(self, primary=None, secondary=None, tertiary=None, quaternary=None, primary_value=None,
                 secondary_value=None, tertiary_value=None, quaternary_value=None, ambiguous=False):
        # BUG FIX: the original never stored `primary` and `tertiary`.
        self.primary = primary
        self.secondary = secondary
        self.tertiary = tertiary
        self.quaternary = quaternary
        self.primary_value = primary_value
        self.secondary_value = secondary_value
        self.tertiary_value = tertiary_value
        self.quaternary_value = quaternary_value
        self.ambiguous = ambiguous

    def __str__(self):
        # BUG FIX: this f-string sat on a bare `return` inside __init__,
        # which is invalid there — it is clearly the string representation.
        return (f'PRIMARY: "{self.primary}" = {self.primary_value}, '
                f'SECONDARY: "{self.secondary}" = {self.secondary_value}, '
                f'TERTIARY: "{self.tertiary}" = {self.tertiary_value}, '
                f'QUATERNARY: "{self.quaternary}" = {self.quaternary_value}')

    def has_null_labels(self):
        """Return True if any label slot is unset or empty-ish."""
        # BUG FIX: also treat an actual None (the constructor default) as null,
        # not only "" and the literal string "None".
        return any(label in (None, "", "None")
                   for label in (self.primary, self.secondary, self.tertiary, self.quaternary))

    def has_null_values(self):
        """Return True if any score slot is 0 or unset."""
        return any(value in (0, None)
                   for value in (self.primary_value, self.secondary_value,
                                 self.tertiary_value, self.quaternary_value))

    def fill_missing_labels(self):
        """Assign each *unset* slot a distinct, unused label from LABELS.

        BUG FIX: the original unconditionally overwrote every slot and drew
        with random.choice without removing the drawn label, so two slots
        could receive the same label — and it raised IndexError
        (random.choice([])) once the free pool ran out.
        """
        used_labels = {label for label in (self.primary, self.secondary,
                                           self.tertiary, self.quaternary) if label}
        free_labels = [label for label in self.LABELS if label not in used_labels]
        random.shuffle(free_labels)  # randomize assignment order, no repeats
        for slot in ("primary", "secondary", "tertiary", "quaternary"):
            if not getattr(self, slot) and free_labels:
                setattr(self, slot, free_labels.pop())

    def reorganize_data(self):
        """Return "label: value" strings for every known label with a score.

        The final element is the "/new_message" counter entry.
        """
        labels_values = zip((self.primary, self.secondary, self.tertiary, self.quaternary),
                            (self.primary_value, self.secondary_value,
                             self.tertiary_value, self.quaternary_value))
        result = [f"{label}: {value}" for label, value in labels_values
                  if label in self.LABELS and value is not None]
        # NOTE(review): `new_message` is presumably the module-level message
        # counter mentioned at the top of the file — confirm it is defined.
        result.append(f"/new_message {new_message}")
        return result  # BUG FIX: the list was built but never returned

    def generate_final_message(self, word_response):
        """Format the full state (labels, scores, counter, word) for OSC/logging."""
        # NOTE(review): `new_message` is read from module scope — confirm.
        return (f"{self.primary} = {self.primary_value}, "
                f"{self.secondary} = {self.secondary_value}, "
                f"{self.tertiary} = {self.tertiary_value}, "
                f"{self.quaternary} = {self.quaternary_value}, "
                f"newMessage = {new_message}, "
                f"wordResponse = {word_response}")
def parse_args():
    """Parse command-line options for the OSC bridge.

    NOTE(review): the original `def` line is missing from this chunk; this
    header is reconstructed — confirm the real function name against the
    full file before merging, since the entry point presumably calls it.

    Returns:
        argparse.Namespace with local/TouchDesigner/Unity IPs and ports.
    """
    parser = argparse.ArgumentParser(description="OSC Server for handling Unity messages")
    parser.add_argument("--local_ip", default=Config.LOCAL_IP, help="The IP of the python script")
    # BUG FIX: ports must be ints for the UDP server; the original left
    # --local_port as a string when supplied on the command line.
    parser.add_argument("--local_port", type=int, default=Config.LOCAL_PORT, help="The port of the python script")
    # NOTE(review): --td_ip defaults to Config.LOCAL_IP, not a TD-specific
    # address — confirm this is intentional.
    parser.add_argument("--td_ip", default=Config.LOCAL_IP, help="The IP of the Touch Designer app")
    parser.add_argument("--td_port", type=int, default=Config.TD_PORT, help="The port of the Touch Designer app")
    parser.add_argument("--unity_ip", default=Config.TABLET_IP, help="The IP of the Unity client")
    parser.add_argument("--unity_port", type=int, default=Config.TABLET_PORT, help="The port of the Unity client")
    return parser.parse_args()
async def is_offensive(word, offensive_classifier):
    """Return True when the toxicity model labels *word* 'toxic' with score > 0.5.

    Args:
        word: token to check.
        offensive_classifier: callable returning a list of {'label', 'score'} dicts.
    """
    # BUG FIX: transformers pipelines are synchronous callables — awaiting
    # the call raised TypeError. The function stays a coroutine so existing
    # `await is_offensive(...)` call sites keep working.
    result = offensive_classifier(word)
    print(f"Checking if word '{word}' is offensive: {result}")
    offensive = any(res['label'] == 'toxic' and res['score'] > 0.5 for res in result)
    return offensive  # BUG FIX: the flag was computed but never returned
async def is_contextually_relevant(word, context_classifier):
    """Return True when the zero-shot model scores *word* above 0.3 for any configured label.

    Args:
        word: token to check.
        context_classifier: zero-shot pipeline returning a dict with a 'scores' list.
    """
    # BUG FIX: transformers pipelines are synchronous callables — awaiting
    # the call raised TypeError. Kept async so call sites are unchanged.
    result = context_classifier(word, candidate_labels=Config.ZERO_SHOT_LABELS)
    print(f"Checking if word '{word}' is contextually relevant: {result}")
    # Adjusted threshold for relevance.
    relevant = any(score > 0.3 for score in result['scores'])
    return relevant  # BUG FIX: the flag was computed but never returned
async def extract_word_response(sentence, offensive_classifier, context_classifier):
    """Pick the first non-offensive, contextually relevant noun/adjective in *sentence*.

    Returns:
        The chosen word, or " " when no candidate qualifies.
    """
    words = nltk.word_tokenize(sentence)
    tagged_words = nltk.pos_tag(words)
    print(f"Tagged words: {tagged_words}")
    # Keep adjectives and (proper) nouns that the toxicity model does not flag.
    relevant_words = [word for word, pos in tagged_words if
                      pos in ('JJ', 'JJR', 'JJS', 'NN', 'NNS', 'NNP', 'NNPS')
                      and not await is_offensive(word, offensive_classifier)]
    print(f"Relevant Adjectives, Proper Nouns, and Nouns: {relevant_words}")
    for word in relevant_words:
        if await is_contextually_relevant(word, context_classifier):
            print(f"Chosen word: {word}")
            # BUG FIX: the chosen word was printed but never returned, so the
            # function always fell through to the blank fallback.
            return word
    return " "  # Return blank if no relevant word is found or if it's offensive
async def analyze_sentence(sentence):
    """Score *sentence* against the configured labels.

    Returns:
        dict mapping label -> integer percentage (score * 100, truncated).
    """
    classifier = pipeline("zero-shot-classification", model=Config.CONTEXT_MODEL)
    # BUG FIX: pipelines are synchronous — the original `await` raised TypeError.
    result = classifier(sentence, candidate_labels=Config.ZERO_SHOT_LABELS)
    weightings = {label: int(score * 100) for label, score in zip(result['labels'], result['scores'])}
    return weightings  # BUG FIX: the dict was computed but never returned
async def classify_sentence(sentence, offensive_classifier, context_classifier):
    """Classify *sentence* and pick a response word.

    Returns:
        (final_message, FlorenceAttributes, new_message, word_response).
    """
    weightings = await analyze_sentence(sentence)
    # Rank the four labels by descending score.
    sorted_weightings = sorted(weightings.items(), key=lambda item: item[1], reverse=True)
    primary, primary_value = sorted_weightings[0]
    secondary, secondary_value = sorted_weightings[1]
    tertiary, tertiary_value = sorted_weightings[2]
    quaternary, quaternary_value = sorted_weightings[3]
    # BUG FIX: the original constructor call passed only the four values,
    # omitted the labels, and was missing its closing parenthesis.
    attr1 = FlorenceAttributes(
        primary=primary,
        secondary=secondary,
        tertiary=tertiary,
        quaternary=quaternary,
        primary_value=primary_value,
        secondary_value=secondary_value,
        tertiary_value=tertiary_value,
        quaternary_value=quaternary_value,
    )
    word_response = await extract_word_response(sentence, offensive_classifier, context_classifier)
    print(f"Weightings: {weightings}, Word response: {word_response}")
    # NOTE(review): `new_message` is presumably the module-level message
    # counter mentioned at the top of the file — confirm it exists and is
    # updated somewhere in the full file.
    return str(attr1.generate_final_message(word_response)).strip(), attr1, new_message, word_response
async def handle_osc_message(unity_client, td_client, unused_addr, offensive_classifier, context_classifier, sentence):
    """Handle one "/florence_sentence" OSC message from Unity.

    Classifies the sentence, appends it to the CSV history file, echoes the
    composed response back to the Unity client, and fans the individual
    attribute values out to the Touch Designer client.
    """
    print("I received message from Unity")
    response, attribute, new_msg, word_response = await classify_sentence(sentence, offensive_classifier, context_classifier)
    print(f"Received response from classify_sentence: {response}, {attribute}, {new_msg}, {word_response}")
    # Persist the raw sentence plus its classification for the input history.
    save_string_to_csv(file_path, f"{sentence} , {attribute}, {word_response}")
    unity_client.send_message("/response", response)
    # One OSC address per ranked label (e.g. "/angel"), carrying its score.
    td_client.send_message(f"/{attribute.primary}", attribute.primary_value)
    td_client.send_message(f"/{attribute.secondary}", attribute.secondary_value)
    td_client.send_message(f"/{attribute.tertiary}", attribute.tertiary_value)
    td_client.send_message(f"/{attribute.quaternary}", attribute.quaternary_value)
    td_client.send_message("/newMessage", new_msg)
    td_client.send_message("/wordResponse", word_response)
    td_client.send_message("/message", f"Message received: '{sentence}'")
def save_string_to_csv(file_path, string_to_save):
    """Append *string_to_save* as a single-column row to the CSV at *file_path*.

    Creates the parent directory on first use.
    """
    directory = os.path.dirname(file_path)
    # BUG FIX: os.makedirs('') raises FileNotFoundError when file_path is a
    # bare filename with no directory component — only create when present.
    if directory:
        os.makedirs(directory, exist_ok=True)
    # newline='' per the csv module docs; explicit encoding avoids depending
    # on the platform default.
    with open(file_path, mode='a', newline='', encoding='utf-8') as file:
        writer = csv.writer(file)
        writer.writerow([string_to_save])
# NOTE(review): this run of statements appears to be the interior of the
# program's entry point; `args` is presumably the namespace returned by the
# argument parser defined above — confirm against the full file.
unity_client = udp_client.SimpleUDPClient(args.unity_ip, args.unity_port)
td_client = udp_client.SimpleUDPClient(args.td_ip, args.td_port)
# Load both models once at startup rather than per message.
offensive_classifier = pipeline("text-classification", model=Config.OFFENSIVE_MODEL)
context_classifier = pipeline("zero-shot-classification", model=Config.CONTEXT_MODEL)
disp = dispatcher.Dispatcher()
# Bridge python-osc's synchronous callback into the async handler: each
# incoming "/florence_sentence" message spins up its own event loop.
disp.map("/florence_sentence", lambda unused_addr, *osc_args: asyncio.run(handle_osc_message(unity_client, td_client, unused_addr, offensive_classifier, context_classifier, *osc_args)))
server = osc_server.ThreadingOSCUDPServer((args.local_ip, args.local_port), disp)
print(f"Serving on {server.server_address}")
if __name__ == "__main__":