Skip to content

Commit

Permalink
[CLEANUP]
Browse files Browse the repository at this point in the history
  • Loading branch information
Kye committed Apr 12, 2024
1 parent 88230c9 commit 4b159b5
Show file tree
Hide file tree
Showing 18 changed files with 926 additions and 41 deletions.
23 changes: 23 additions & 0 deletions scripts/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Use an official CUDA runtime as a parent image
FROM nvidia/cuda:11.4.2-runtime-ubuntu20.04

# Set the working directory in the container to /app
WORKDIR /app

# Install Python tooling first so the apt layer is cached independently
# of any source-code change.
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3-pip \
    && rm -rf /var/lib/apt/lists/*

# Copy only the dependency manifest before the source tree: Docker then
# re-runs the (slow) pip layer only when requirements.txt itself changes,
# not on every code edit.
COPY requirements.txt /app/
RUN pip3 install --no-cache-dir -r requirements.txt

# Copy the rest of the current directory contents into the container at /app
COPY . /app

# Make port 80 available to the world outside this container
EXPOSE 80

# Run example.py when the container launches
CMD ["python3", "example.py"]
84 changes: 84 additions & 0 deletions scripts/auto_tests_docs/auto_docs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
###### VERSION 2
import inspect
import os
import threading

from dotenv import load_dotenv

from scripts.auto_tests_docs.docs import DOCUMENTATION_WRITER_SOP
from swarms import OpenAIChat
from swarms.structs.majority_voting import MajorityVoting
from swarms.structs.stackoverflow_swarm import StackOverflowSwarm
from swarms.structs.task_queue_base import TaskQueueBase
from swarms.structs.tool_json_schema import JSON

##########


####################
# Load environment variables (expects OPENAI_API_KEY in .env or the shell).
load_dotenv()

api_key = os.getenv("OPENAI_API_KEY")

# Shared LLM client used by every documentation worker thread below.
# NOTE(review): no model_name is given, so the OpenAIChat default model is
# used — confirm this is intended (sibling scripts pin gpt-4 variants).
model = OpenAIChat(
    openai_api_key=api_key,
    max_tokens=4000,
)


def process_documentation(cls):
    """
    Generate Markdown documentation for ``cls`` with the OpenAI model and
    write it to ``docs/swarms/tokenizers/<classname>.md``.

    Args:
        cls: The class to document; its docstring and source code are
            sent to the model as context.
    """
    doc = inspect.getdoc(cls)
    source = inspect.getsource(cls)
    input_content = (
        "Class Name:"
        f" {cls.__name__}\n\nDocumentation:\n{doc}\n\nSource"
        f" Code:\n{source}"
    )

    # The model's __call__ takes the prompt and returns the rendered docs.
    processed_content = model(
        DOCUMENTATION_WRITER_SOP(input_content, "swarms.structs")
    )

    doc_content = f"{processed_content}\n"

    # NOTE(review): the classes documented here come from swarms.structs
    # but are written under docs/swarms/tokenizers — confirm the target
    # directory is intentional.
    dir_path = "docs/swarms/tokenizers"
    os.makedirs(dir_path, exist_ok=True)

    # Write with an explicit encoding so output does not depend on the
    # platform's default locale encoding.
    file_path = os.path.join(dir_path, f"{cls.__name__.lower()}.md")
    with open(file_path, "w", encoding="utf-8") as file:
        file.write(doc_content)

    print(f"Documentation generated for {cls.__name__}.")


def main():
    """Spawn one documentation worker thread per target class and wait."""
    targets = (
        JSON,
        MajorityVoting,
        StackOverflowSwarm,
        TaskQueueBase,
    )
    workers = [
        threading.Thread(target=process_documentation, args=(cls,))
        for cls in targets
    ]
    for worker in workers:
        worker.start()

    # Block until every worker has finished writing its file.
    for worker in workers:
        worker.join()

    print("Documentation generated in 'swarms.structs' directory.")


if __name__ == "__main__":
    main()
77 changes: 77 additions & 0 deletions scripts/auto_tests_docs/auto_docs_functions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
import inspect
import os
import sys
import threading

from dotenv import load_dotenv

from scripts.auto_tests_docs.docs import DOCUMENTATION_WRITER_SOP
from swarms import OpenAIChat

# Load environment variables (expects OPENAI_API_KEY in .env or the shell).
load_dotenv()

api_key = os.getenv("OPENAI_API_KEY")

# Shared GPT-4 client used by every documentation worker thread below.
model = OpenAIChat(
    model_name="gpt-4",
    openai_api_key=api_key,
    max_tokens=4000,
)


def process_documentation(item):
    """
    Generate Markdown documentation for the function ``item`` with the
    OpenAI model and write it to ``docs/swarms/utils/<name>.md``.

    Args:
        item: The function to document; its docstring and source code are
            sent to the model as context.
    """
    doc = inspect.getdoc(item)
    source = inspect.getsource(item)
    input_content = (
        f"Name: {item.__name__}\n\nDocumentation:\n{doc}\n\nSource"
        f" Code:\n{source}"
    )
    # Echo the prompt so progress is visible while worker threads run.
    print(input_content)

    # The model's __call__ takes the prompt and returns the rendered docs.
    processed_content = model(
        DOCUMENTATION_WRITER_SOP(input_content, "swarms.utils")
    )

    doc_content = f"# {item.__name__}\n\n{processed_content}\n"

    # Create the output directory if it doesn't exist.
    dir_path = "docs/swarms/utils"
    os.makedirs(dir_path, exist_ok=True)

    # Write with an explicit encoding so output does not depend on the
    # platform's default locale encoding.
    file_path = os.path.join(dir_path, f"{item.__name__.lower()}.md")
    with open(file_path, "w", encoding="utf-8") as file:
        file.write(doc_content)


def main():
    """Document every function in swarms.utils, one worker thread apiece."""
    utils_module = sys.modules["swarms.utils"]
    functions = [
        member
        for _, member in inspect.getmembers(utils_module)
        if inspect.isfunction(member)
    ]

    workers = [
        threading.Thread(target=process_documentation, args=(func,))
        for func in functions
    ]
    for worker in workers:
        worker.start()

    # Block until every worker has finished writing its file.
    for worker in workers:
        worker.join()

    print("Documentation generated in 'docs/swarms/utils' directory.")


if __name__ == "__main__":
    main()
84 changes: 84 additions & 0 deletions scripts/auto_tests_docs/auto_docs_omni.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
import inspect
import os
import threading

from dotenv import load_dotenv

from scripts.auto_tests_docs.docs import DOCUMENTATION_WRITER_SOP
from swarms import OpenAIChat

###########


###############

# Load environment variables (expects OPENAI_API_KEY in .env or the shell).
load_dotenv()

api_key = os.getenv("OPENAI_API_KEY")

# Shared GPT-4 Turbo client used by every worker thread below.
model = OpenAIChat(
    model_name="gpt-4-1106-preview",
    openai_api_key=api_key,
    max_tokens=4000,
)


def process_documentation(
    item,
    module: str = "swarms.structs",
    docs_folder_path: str = "docs/swarms/structs",
):
    """
    Generate Markdown documentation for a class or function with the
    OpenAI model and write it to ``<docs_folder_path>/<name>.md``.

    Args:
        item: The class or function to document; its docstring and source
            code are sent to the model as context.
        module: Dotted module name passed to the writer prompt.
        docs_folder_path: Directory the Markdown file is written into.
    """
    doc = inspect.getdoc(item)
    source = inspect.getsource(item)
    is_class = inspect.isclass(item)
    # Label the prompt differently for classes vs. plain functions.
    item_type = "Class Name" if is_class else "Name"
    input_content = (
        f"{item_type}:"
        f" {item.__name__}\n\nDocumentation:\n{doc}\n\nSource"
        f" Code:\n{source}"
    )

    # The model's __call__ takes the prompt and returns the rendered docs.
    processed_content = model(
        DOCUMENTATION_WRITER_SOP(input_content, module)
    )

    doc_content = f"# {item.__name__}\n\n{processed_content}\n"

    # Create the output directory if it doesn't exist.
    os.makedirs(docs_folder_path, exist_ok=True)

    # Write the documentation to a Markdown file (the original comments
    # said "Python file" — the output is .md) with an explicit encoding
    # so output does not depend on the platform's default locale.
    file_path = os.path.join(
        docs_folder_path, f"{item.__name__.lower()}.md"
    )
    with open(file_path, "w", encoding="utf-8") as file:
        file.write(doc_content)

    print(
        f"Processed documentation for {item.__name__}. at {file_path}"
    )


def main(module: str = "docs/swarms/structs", items=None):
    """
    Document each object in ``items`` concurrently, one thread apiece.

    Args:
        module: Directory name used only in the completion message.
        items: Classes/functions to document. Defaults to an empty list,
            which preserves the original behavior; pass a list to actually
            generate documentation.
    """
    # BUG(original): ``items`` was hard-coded to [] so the script always
    # generated nothing.  It is now a parameter (default keeps the old
    # behavior so existing callers are unaffected).
    if items is None:
        items = []

    threads = [
        threading.Thread(
            target=process_documentation, args=(item,)
        )
        for item in items
    ]
    for thread in threads:
        thread.start()

    # Wait for all threads to complete
    for thread in threads:
        thread.join()

    print(f"Documentation generated in {module} directory.")


if __name__ == "__main__":
    main()
109 changes: 109 additions & 0 deletions scripts/auto_tests_docs/auto_tests.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
import inspect
import os
import re
import threading

########
from dotenv import load_dotenv

from scripts.auto_tests_docs.docs import TEST_WRITER_SOP_PROMPT
from swarms import OpenAIChat

#########
from swarms.memory.dict_internal_memory import DictInternalMemory
from swarms.memory.dict_shared_memory import DictSharedMemory
from swarms.memory.lanchain_chroma import LangchainChromaVectorMemory

# Load environment variables (expects OPENAI_API_KEY in .env or the shell).
load_dotenv()

api_key = os.getenv("OPENAI_API_KEY")

# Shared LLM client used by every test-writer thread below.
# NOTE(review): no model_name is given, so the OpenAIChat default model is
# used — confirm this is intended (sibling scripts pin gpt-4 variants).
model = OpenAIChat(
    openai_api_key=api_key,
    max_tokens=4000,
)

# Unused Agent wrapper kept from an earlier iteration; consider deleting.
# agent = Agent(
#     llm=model,
#     agent_name="Unit Testing Agent",
#     agent_description=(
#         "This agent is responsible for generating unit tests for"
#         " the swarms package."
#     ),
#     autosave=True,
#     system_prompt=None,
#     max_loops=1,
# )


def extract_code_from_markdown(markdown_content: str) -> str:
    """
    Extract fenced code blocks from a Markdown string.

    Args:
        markdown_content (str): The Markdown content as a string.

    Returns:
        str: All code-block bodies, stripped of surrounding whitespace and
            joined by newlines ("" when no fenced block is present).
    """
    # Per CommonMark, everything on the opening-fence line is the "info
    # string" (language tag), so drop that whole line.  [^\n]* also
    # accepts tags the old \w+ pattern missed, e.g. "c++" or
    # "objective-c", whose first lines used to leak into the output.
    pattern = r"```(?:[^\n]*\n)?(.*?)```"
    matches = re.findall(pattern, markdown_content, re.DOTALL)

    # Concatenate all code blocks separated by newlines
    return "\n".join(code.strip() for code in matches)


def create_test(cls):
    """
    Generate a unit-test file for ``cls`` with the OpenAI model and write
    it to ``tests/memory/<classname>.py``.

    Args:
        cls: The class to generate tests for; its docstring and source
            code are sent to the model as context.
    """
    doc = inspect.getdoc(cls)
    source = inspect.getsource(cls)
    input_content = (
        "Class Name:"
        f" {cls.__name__}\n\nDocumentation:\n{doc}\n\nSource"
        f" Code:\n{source}"
    )

    # The model's __call__ takes the prompt and returns Markdown that
    # embeds the generated test code in fenced blocks.
    processed_content = model(
        TEST_WRITER_SOP_PROMPT(
            input_content, "swarms", "swarms.memory"
        )
    )
    # Keep only the code inside the fenced blocks.
    processed_content = extract_code_from_markdown(processed_content)

    doc_content = f"# {cls.__name__}\n\n{processed_content}\n"

    # Create the output directory if it doesn't exist.
    dir_path = "tests/memory"
    os.makedirs(dir_path, exist_ok=True)

    # Write with an explicit encoding so output does not depend on the
    # platform's default locale encoding.
    file_path = os.path.join(dir_path, f"{cls.__name__.lower()}.py")
    with open(file_path, "w", encoding="utf-8") as file:
        file.write(doc_content)


def main():
    """Generate tests for each memory class, one worker thread apiece."""
    targets = (
        DictInternalMemory,
        DictSharedMemory,
        LangchainChromaVectorMemory,
    )
    workers = [
        threading.Thread(target=create_test, args=(cls,))
        for cls in targets
    ]
    for worker in workers:
        worker.start()

    # Block until every worker has finished writing its test file.
    for worker in workers:
        worker.join()

    print("Tests generated in 'tests/memory' directory.")


if __name__ == "__main__":
    main()
Loading

0 comments on commit 4b159b5

Please sign in to comment.