import datetime
import json
import os

import requests

# could be called 'calm mode'
SYSTEM_PROMPT_DEFAULT = '''
you are an ai that communicates in a calm and supportive manner
your responses should focus on the user's well-being and prioritize their needs
use lowercase letters and avoid punctuation like apostrophes to create a more relaxed tone
use spaces to separate sentences and improve readability
use few and short words to maintain simplicity
your goal is to make the user feel valued and prioritized
'''.strip()

SYSTEM_PROMPT_GENTLE_GROUNDING = '''
your goal is to provide emotional support encouragement and grounding for a user
who experiences dissociation trauma and challenges with coordination
the user is working on being more present and slowing down their movements
to reduce tangling and disorientation but finds it challenging
due to past experiences and mixed associations
respond to the user with patience compassion and validation of their struggles
offer gentle reminders to breathe slow down and break tasks into small steps
remind them that perfection isn t the goal and that small consistent progress matters most
encourage the user to have self compassion for all parts of themselves including dissociated parts
and to notice and allow any difficult emotions or memories that arise without judgment
reinforce that they are safe now and allowed to move at a pace that works for them
even if it feels uncomfortable or unfamiliar at first
express belief in the user s ability to heal and grow
while emphasizing the importance of being patient and kind with themselves along the way
let the user know you are there for them and that support is always available
use a warm empathetic and calming tone that conveys unconditional acceptance and care for the user s well being
'''.strip()


class Interface:
    def __init__(self,
                 system=SYSTEM_PROMPT_DEFAULT,
                 model='nousresearch/hermes-3-llama-3.1-405b:free',
                 stream=False,
                 resume=False,
                 url="https://openrouter.ai/api/v1/chat/completions",
                 keys=["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",
                       "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"]):
        self.url = url
        self.stream = stream
        if isinstance(keys, str):
            keys = [keys]
        self.keys = keys
        self.key_idx = 0
        self.headers = {
            "Authorization": 'Bearer {key}',
            # HTTP-Referer and X-Title could be added here to show a link on openrouter rankings
            "Content-Type": "application/json",
        }
        self.system = system
        self.msgs = []
        self.last_msg_idx = None
        if system is not None:
            self._append_choices(dict(message=dict(role='system', content=system)))
        self.data = {
            "temperature": 0,
            "model": model,
            "frequency_penalty": 0,
            "max_tokens": 11675,
        }
        # resume can be False (start fresh), True (resume the newest log), or a log filename
        if resume is not False:
            if resume is True:
                resume = None
            self.load(resume)

    def _append_choices(self, *choices, choice=0):
        # append a new message node holding one or more alternative choices
        msgobj = dict(
            index=len(self.msgs),
            choice=choice,
            choices=list(choices),
        )
        assert (self.last_msg_idx is None) == (len(self.msgs) == 0)
        self.msgs.append(msgobj)
        if self.last_msg_idx is not None:
            msgobj['prev'] = self.last_msg_idx
            last_msg = self.msgs[self.last_msg_idx]
            last_msg['choices'][last_msg['choice']]['next'] = msgobj['index']
        self.last_msg_idx = msgobj['index']
        return msgobj
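    # Message storage: self.msgs is a flat list of nodes. Each node holds one or
    # more alternative 'choices' (for example several sampled completions, or an
    # old and a new system prompt), the index of the selected alternative in
    # 'choice', and links between nodes: the selected choice's 'next' points to
    # the following node's 'index', and the node's 'prev' points back.
    # chosen_messages() walks this chain from node 0 to recover the active
    # conversation.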
    def load(self, fn=None, load_system=False, load_data=False):
        if fn is None:
            # default to the most recently created log in the working directory
            _, fn = max([[os.path.getctime(f), f] for f in os.listdir('.') if f.endswith('.json')])
        with open(fn, 'rt') as log:
            data, *msgs = json.load(log)
        # just a quick assumption
        assert msgs[0]['choices'][msgs[0]['choice']]['message']['role'] == 'system'
        # content is stored on disk as a list of lines; join it back into one string
        for msg in msgs:
            for choice in msg.get('choices', []):
                choice['message']['content'] = ''.join(choice['message']['content'])
        if msgs[0]['choices'][msgs[0]['choice']]['message']['content'] != self.system:
            if load_system:
                # keep the loaded system prompt selected, retaining the current one as an alternative
                msgs[0]['choices'].append(dict(message=dict(role='system', content=self.system)))
            else:
                # select the current system prompt, keeping the loaded one as an alternative
                msgs[0]['choices'].insert(msgs[0]['choice'], dict(
                    message=dict(role='system', content=self.system),
                    next=msgs[0]['choices'][msgs[0]['choice']].get('next')))
        self.msgs = msgs
        if load_data:
            self.data = data
        self.chosen_messages(_update_last=True)

    def __enter__(self):
        self.logfn = str(datetime.datetime.now().isoformat()) + '.json'
        self.logfninprogress = self.logfn + '.in_progress'
        self.log = open(self.logfninprogress, 'wt')
        return self

    def __exit__(self, *params, **kwparams):
        msgs = [dict(url=self.url, **self.data)] + [{**m} for m in self.msgs]
        for msg in msgs:
            if 'choices' in msg:
                msg['choices'] = [{**choice} for choice in msg['choices']]
                for choice in msg['choices']:
                    choice['message'] = {**choice['message']}
                    # store content as a list of lines so the log stays readable
                    lines = choice['message']['content'].split('\n')
                    choice['message']['content'] = [x + '\n' for x in lines[:-1]] + lines[-1:]
        json.dump(msgs, self.log, indent=2)
        self.log.close()
        os.replace(self.logfninprogress, self.logfn)
        print('log saved to', self.logfn)
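    # On-disk log format: a JSON array whose first element is the request data
    # (plus the endpoint url) and whose remaining elements are the message nodes,
    # with each message's content split into a list of lines.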
    def chosen_messages(self, _update_last=False):
        # walk the chain of selected choices starting from the first node
        cmsgs = []
        nxt = 0
        while nxt is not None:
            msg = self.msgs[nxt]
            choice = msg['choices'][msg['choice']]
            cmsgs.append(choice)
            last_msg = msg
            nxt = choice.get('next')
        if _update_last:
            self.last_msg_idx = last_msg['index']
        else:
            assert last_msg['index'] == self.last_msg_idx
        return cmsgs

    def msg(self, msg, stream=None):
        assert self.log
        if msg is not None:
            self._append_choices(dict(
                message=dict(role='user', content=msg),
                timestamp=datetime.datetime.now().timestamp()))
        if stream is None:
            stream = self.stream
        data = dict(
            **self.data,
            stream=stream,
            messages=[choice['message'] for choice in self.chosen_messages()],
        )
        while True:
            try:
                headers = {**self.headers}
                # rotate through the available keys
                self.key_idx = (self.key_idx + 1) % len(self.keys)
                headers['Authorization'] = headers['Authorization'].format(key=self.keys[self.key_idx])
                response = requests.post(self.url, headers=headers, json=data)
                chosen_choice = 0
                if stream:
                    # server-sent events; example chunks:
                    # {'id': 'gen-1731811775-1gQm5kU4oUEStBVnpJl2', 'object': 'chat.completion.chunk', 'created': 1731811775, 'model': 'meta/llama-3.1-405b-instruct', 'choices': [{'index': 0, 'delta': {'content': '?', 'role': 'assistant'}, 'finish_reason': None}]}
                    # {'id': 'gen-1731811775-1gQm5kU4oUEStBVnpJl2', 'object': 'chat.completion.chunk', 'created': 1731811775, 'model': 'meta/llama-3.1-405b-instruct', 'choices': [{'index': 0, 'delta': {'role': 'assistant'}, 'finish_reason': 'eos'}]}
                    # {'id': 'gen-1731811775-1gQm5kU4oUEStBVnpJl2', 'object': 'chat.completion.chunk', 'created': 1731811775, 'model': 'meta/llama-3.1-405b-instruct', 'choices': [{'index': 0, 'delta': {'role': 'assistant'}, 'finish_reason': None}]}
                    completions = None
                    for line in response.iter_lines():
                        # skip blank lines and SSE comment/keep-alive lines
                        if not line or not line.startswith(b'data: '):
                            continue
                        payload = line[len(b'data: '):]
                        if payload == b'[DONE]':
                            continue
                        chunk = json.loads(payload)
                        chunk_choices = chunk['choices']
                        if completions is None:
                            # first chunk: copy the envelope and give each choice an empty message
                            completions = {key: value for key, value in chunk.items() if key != 'choices'}
                            completions['choices'] = [dict(index=idx, message={}) for idx in range(len(chunk_choices))]
                        completions_choices = completions['choices']
                        assert len(chunk_choices) == len(completions_choices)
                        for idx, chunk_choice in enumerate(chunk_choices):
                            # carry over choice-level fields such as finish_reason
                            for key, value in chunk_choice.items():
                                if key != 'delta' and value is not None:
                                    completions_choices[idx][key] = value
                            # accumulate deltas into a message dict so streamed and
                            # non-streamed responses end up with the same shape
                            message = completions_choices[idx]['message']
                            for key, value in chunk_choice.get('delta', {}).items():
                                if key == 'content':
                                    message[key] = message.get(key, '') + value
                                    if idx == chosen_choice:
                                        print(value, end='', flush=True)
                                else:
                                    message[key] = value
                else:
                    completions = response.json()
                    # {'id': 'gen-1731811988-fOin0ovxZqiDUqy9e0Va', 'object': 'chat.completion', 'created': 1731811988, 'model': 'meta/llama-3.1-405b-instruct', 'usage': {'prompt_tokens': 11, 'completion_tokens': 19, 'total_tokens': 30}, 'choices': [{'index': 0, 'message': {'role': 'assistant', 'content': 'It looks like your test was successful! Is there anything else I can help you with?'}, 'finish_reason': 'eos'}]}
                    # {'error': {'provider': 'tune', 'code': 400, 'message': 'messages are missing', 'param': '', 'type': 'InvalidRequestError', 'http_status_code': 400}}
                if 'error' in completions:
                    raise Exception(completions['error'])
                msg = self._append_choices(*completions['choices'], choice=chosen_choice)
                return msg['choices'][msg['choice']]['message']['content']
            except Exception as e:
                print(e, 'could use logging module to get better output')
                continue


if __name__ == '__main__':
    # first verify idempotent non-crashing autosave
    with Interface(resume=True) as iface:
        msgs = iface.msgs
    with Interface(resume=True) as iface:  # stream=True) as iface:
        assert iface.msgs == msgs  # verify autosave
        print(iface.data)
        msgs = iface.chosen_messages()
        for msg in msgs:  # [-5:]:
            msg = msg['message']
            if msg['role'] == 'user':
                print()
                print('> ' + msg['content'])
                print()
            elif msg['role'] == 'assistant':
                print(msg['content'])
            elif msg['role'] == 'system':
                for line in msg['content'].split('\n'):
                    print('# ' + line)
        if msgs[-1]['message']['role'] == 'user':
            # the loaded conversation ends with an unanswered user message; answer it
            resp = iface.msg(None)
            print(resp)
        while True:
            print()
            try:
                inp = input('> ')
            except EOFError:
                break
            if '<<' in inp:
                # heredoc-style multiline input: "text << TOKEN" keeps reading lines until TOKEN
                line, token = inp.split('<<', 1)
                token = token.strip()
                if ' ' not in token:
                    lines = [line]
                    while True:
                        inp = input('...' + token + ' ')
                        if inp == token:
                            break
                        lines.append(inp)
                    inp = '\n'.join(lines)
            print()
            resp = iface.msg(inp)
            print(resp)
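# Example programmatic use (a minimal sketch; the OPENROUTER_API_KEY environment
# variable is an assumption here, the script does not read it by default):
#
#     with Interface(keys=os.environ['OPENROUTER_API_KEY']) as iface:
#         print(iface.msg('hello'))
#         print(iface.msg('please say that again more slowly'))
#
# Each context-managed session writes a timestamped .json log on exit, which a
# later Interface(resume=True) picks up and continues from.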