"""ctypes bindings for the voidtools Everything search SDK (Everything64.dll).

Reconstructed from the whitespace-mangled patch "init"
(file: local-llama/everything.py). Windows-only: loads the SDK DLL at
import time from a hard-coded install path.
"""
import ctypes
import datetime
import struct

# Request flags accepted by Everything_SetRequestFlags (Everything SDK).
EVERYTHING_REQUEST_FILE_NAME = 0x00000001
EVERYTHING_REQUEST_PATH = 0x00000002
EVERYTHING_REQUEST_FULL_PATH_AND_FILE_NAME = 0x00000004
EVERYTHING_REQUEST_EXTENSION = 0x00000008
EVERYTHING_REQUEST_SIZE = 0x00000010
EVERYTHING_REQUEST_DATE_CREATED = 0x00000020
EVERYTHING_REQUEST_DATE_MODIFIED = 0x00000040
EVERYTHING_REQUEST_DATE_ACCESSED = 0x00000080
EVERYTHING_REQUEST_ATTRIBUTES = 0x00000100
EVERYTHING_REQUEST_FILE_LIST_FILE_NAME = 0x00000200
EVERYTHING_REQUEST_RUN_COUNT = 0x00000400
EVERYTHING_REQUEST_DATE_RUN = 0x00000800
EVERYTHING_REQUEST_DATE_RECENTLY_CHANGED = 0x00001000
EVERYTHING_REQUEST_HIGHLIGHTED_FILE_NAME = 0x00002000
EVERYTHING_REQUEST_HIGHLIGHTED_PATH = 0x00004000
EVERYTHING_REQUEST_HIGHLIGHTED_FULL_PATH_AND_FILE_NAME = 0x00008000

# Load the 64-bit SDK DLL and declare signatures for the few entry points
# this module calls so ctypes marshals arguments/results correctly.
everything_dll = ctypes.WinDLL("D:\\Everything\\SDK\\dll\\Everything64.dll")
everything_dll.Everything_GetResultDateModified.argtypes = [
    ctypes.c_int, ctypes.POINTER(ctypes.c_ulonglong)]
everything_dll.Everything_GetResultSize.argtypes = [
    ctypes.c_int, ctypes.POINTER(ctypes.c_ulonglong)]
everything_dll.Everything_GetResultFileNameW.argtypes = [ctypes.c_int]
everything_dll.Everything_GetResultFileNameW.restype = ctypes.c_wchar_p
def search(file):
    """Run an Everything query for *file* and return the number of results.

    Bug fix: the original passed the literal string ``"file"`` to
    ``Everything_SetSearchW`` instead of the *file* argument, so every
    call searched for the text 'file'.  It also computed the result
    count and silently discarded it; the count is now returned
    (backward-compatible — old callers ignored the return value).
    """
    everything_dll.Everything_SetSearchW(file)
    everything_dll.Everything_SetRequestFlags(
        EVERYTHING_REQUEST_FILE_NAME
        | EVERYTHING_REQUEST_PATH
        | EVERYTHING_REQUEST_SIZE
        | EVERYTHING_REQUEST_DATE_MODIFIED
    )
    everything_dll.Everything_QueryW(1)  # 1 = block until the query completes
    return everything_dll.Everything_GetNumResults()


# --- Windows FILETIME -> python datetime conversion ------------------------
# https://stackoverflow.com/questions/39481221/convert-datetime-back-to-windows-64-bit-filetime
WINDOWS_TICKS = int(1 / 10**-7)  # 10,000,000 ticks per second (1 tick = 100 ns)
WINDOWS_EPOCH = datetime.datetime.strptime('1601-01-01 00:00:00',
                                           '%Y-%m-%d %H:%M:%S')
POSIX_EPOCH = datetime.datetime.strptime('1970-01-01 00:00:00',
                                         '%Y-%m-%d %H:%M:%S')
EPOCH_DIFF = (POSIX_EPOCH - WINDOWS_EPOCH).total_seconds()  # 11644473600.0
WINDOWS_TICKS_TO_POSIX_EPOCH = EPOCH_DIFF * WINDOWS_TICKS  # 116444736000000000.0


def get_time(filetime):
    """Convert a packed little-endian 64-bit Windows FILETIME to datetime.

    NOTE(review): the original body was truncated in the mangled source
    (it ends at ``winticks = struct.unpack('``); this implementation is
    reconstructed from the Stack Overflow answer cited above — confirm
    against the original file.
    """
    winticks = struct.unpack('<Q', filetime)[0]
    microsecs = winticks / WINDOWS_TICKS
    # Shift from the 1601 Windows epoch to the 1970 POSIX epoch.
    return datetime.datetime.fromtimestamp(microsecs - EPOCH_DIFF)


if __name__ == "__main__":
    # Demo: report how many results the current Everything query holds.
    # (Originally executed at import time; guarded so importing this
    # module is side-effect free.)
    num_results = everything_dll.Everything_GetNumResults()
    print("Result Count: {}".format(num_results))
# --- local-llama/test.py (reconstructed head) ------------------------------
from llama_cpp import Llama
from everytools import EveryTools

import re
import json
import subprocess

# Local Llama-3 chat model; n_gpu_layers=-1 offloads every layer to the GPU.
model = Llama(
    "D:/Llama3/Llama3-8B-Chinese-Chat-q8-v2.gguf",
    verbose=False,
    n_ctx=8192,
    n_threads=4,
    n_gpu_layers=-1,
)

# Original (Chinese): "You are an AI assistant, you can call ..."
# NOTE(review): the prompt looks truncated in the source — confirm.
system_prompt = "你是一个ai助手,你可以调用"

es = EveryTools()


def generate_reponse(_model, _messages, _max_tokens=100):
    """Run one chat completion and return the assistant message text.

    The name keeps the original's "reponse" misspelling so existing
    callers keep working; prefer the correctly spelled alias below
    in new code.

    :param _model: a ``llama_cpp.Llama`` instance
    :param _messages: chat history as a list of role/content dicts
    :param _max_tokens: completion budget (default 100)
    """
    result = _model.create_chat_completion(
        _messages,
        temperature=0.2,
        top_p=0.9,
        stop=["<|eot_id|>", "<|end_of_text|>"],  # Llama-3 end-of-turn markers
        max_tokens=_max_tokens,
    )
    return result["choices"][0]["message"]["content"]


# Backward-compatible, correctly spelled alias.
generate_response = generate_reponse
def interact_chat():
    """Console chat REPL against the local model; type "exit" to quit.

    Keeps the full conversation (system + user + assistant turns) in
    ``messages`` so every completion sees the whole history.
    """
    messages = [
        {
            "role": "system",
            "content": system_prompt,
        },
    ]
    while True:
        user_input = input()
        if user_input == "exit":
            break
        messages.append(
            {
                "role": "user",
                "content": user_input,
            }
        )
        response = generate_reponse(model, messages)
        print("Windows Assistant: ", response)
        # Bug fix: the original appended the raw reply *string* to the
        # history; chat messages must be role/content dicts or the next
        # create_chat_completion call receives a malformed history.
        messages.append({"role": "assistant", "content": response})


def get_path(file):
    """Search Everything for *file*; return up to 3 {name, path} hits as JSON.

    ``es.results()`` presumably returns a pandas DataFrame (it exposes
    ``iterrows``) — verify against the everytools package.
    """
    es.search(file)
    res = es.results()
    rows = []
    for _, record in res.iterrows():
        rows.append({label: value for label, value in record.items()
                     if label in ("path", "name")})
    return json.dumps(rows[:3])


if __name__ == '__main__':
    # OpenAI-style tool schemas the model is told about.
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_path",
                "description": "Get the path of the file",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "file": {
                            "type": "string",
                            "description": "The name of the file, e.g. QQ.exe",
                        }
                    },
                    "required": ["file"],
                },
            }
        },
        {
            "type": "function",
            "function": {
                "name": "opr_cmd",
                "description": "Operate the cmd command and get the output",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "cmd": {
                            "type": "string",
                            "description": "The windows cmd command, e.g. start QQ.exe",
                        }
                    },
                    "required": ["cmd"],
                },
            }
        }
    ]

    # Debugging output to check the structure of tools
    print("Tools:", tools)

    # Generate the list of function names and join them
    try:
        # function_names = " | ".join(
        #     [f'''functions.{tool['function']['name']}''' for tool in tools]
        # )
        function_names = " | ".join(
            [f'''"functions.{tool['function']['name']}:"''' for tool in tools]
        )
        print("Function Names:", function_names)
    except TypeError as e:
        print("Encountered a TypeError:", e)
    except KeyError as e:
        print("Encountered a KeyError:", e)

    # Output the generated function names
    print("Generated function names:", function_names)