diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 589d4c0..bbbe35f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,4 +1,4 @@ -name: Lint and Test +name: lintandtest on: [push, pull_request] @@ -18,18 +18,20 @@ jobs: uses: Gr1N/setup-poetry@v7 - name: Install dependencies run: poetry install - - name: Run black + - name: Run linters if: always() - run: poetry run task black - - name: Run flake8 - if: always() - run: poetry run task flake8 - - name: Run pylint - if: always() - run: poetry run task pylint + run: poetry run task lint + test: name: Test runs-on: ubuntu-latest + env: + DRBX_APP_KEY: ${{ secrets.DRBX_APP_KEY }} + DRBX_APP_SECRET: ${{ secrets.DRBX_APP_SECRET }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + OAUTH2_REFRESH_TOKEN: ${{ secrets.OAUTH2_REFRESH_TOKEN }} + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} steps: - name: Checkout repository uses: actions/checkout@v2 @@ -43,4 +45,6 @@ jobs: - name: Install dependencies run: poetry install - name: Execute tests - run: poetry run task test \ No newline at end of file + run: poetry run task test + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 \ No newline at end of file diff --git a/README.md b/README.md index 5ba863e..4e9abb4 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,10 @@ -# gator-computational-cloud -A lightweight framework for the execution of scientific workflows with big data +# GCC + +[![codecov](https://codecov.io/gh/Nathandloria/gcc-test/branch/main/graph/badge.svg?token=GQ4AWLJ8KW)](https://codecov.io/gh/Nathandloria/gcc-test) +[![buildstatus](https://github.com/Nathandloria/gcc-test/workflows/lintandtest/badge.svg)](https://github.com/Nathandloria/gcc-test/actions) + +A lightweight framework for the execution of scientific workflows + +## Overview + +A scientific workflow is a cluster of nodes that work together to accomplish an end goal. 
When executing a workflow, especially one associated with big data, there is often a massive amount of data and time overhead to work around. Because of this, there is a need for efficient and easy-to-use software that allows for the execution of workflows on obscure computing resources. This new model mitigates the need for massive infrastructure investments by the party that is executing a workflow. Additionally, the demand for efficient task scheduling solutions is ever-increasing. All of these are issues that can be tackled with the proper implementation of a grid computing system. This grid computing approach combined with efficient task scheduling is the focus of my project: Gator Computational Cloud (GCC). GCC is a lightweight web framework that utilizes a generic task scheduling algorithm to schedule jobs in a cloud environment. This framework intelligently manages dependencies and takes a multi-threaded execution approach to increase efficiency. To execute nodes, GCC takes advantage of the Amazon AWS API to provision virtual machines. Once provisioned, the tool completes the execution and transfers any dependencies to their corresponding VM in real-time utilizing an intelligent socket infrastructure. The goal of the project is to provide a lightweight and user-friendly environment for workflow execution, while also ensuring a powerful and efficient backend that completes a user’s workflow with ease. To achieve this, some preliminary experimentation has taken place to ensure the effectiveness of the tool. 
\ No newline at end of file diff --git a/backend/exec/Drbx.py b/backend/exec/Drbx.py deleted file mode 100644 index 2a7d4a3..0000000 --- a/backend/exec/Drbx.py +++ /dev/null @@ -1,68 +0,0 @@ -import io -import os - -import dropbox - - -class Drbx: - def __init__( - self, drbx_refresh_token: str, drbx_app_key: str, drbx_app_secret: str - ): - """Initialize an instance of the Drbx class.""" - self.drbx_refresh_token = drbx_refresh_token - self.drbx_app_key = drbx_app_key - self.drbx_app_secret = drbx_app_secret - self.drbx = dropbox.Dropbox( - oauth2_refresh_token=self.drbx_refresh_token, - app_key=self.drbx_app_key, - app_secret=self.drbx_app_secret, - ) - - def upload_file(self, local_file_path: str, drbx_file_path: str): - """Upload file to Dropbox using the Dropbox API.""" - chunk_size = 4 * 1024 * 1024 - file_size = os.path.getsize(local_file_path) - - with open(local_file_path, "rb") as f: - if file_size <= chunk_size: - self.drbx.files_upload( - f.read(), drbx_file_path, mode=dropbox.files.WriteMode("overwrite") - ) - else: - upload_session_start_result = self.drbx.files_upload_session_start( - f.read(chunk_size) - ) - cursor = dropbox.files.UploadSessionCursor( - session_id=upload_session_start_result.session_id, offset=f.tell() - ) - commit = dropbox.files.CommitInfo(path=drbx_file_path) - while f.tell() < file_size: - if (file_size - f.tell()) <= chunk_size: - self.drbx.files_upload_session_finish( - f.read(chunk_size), cursor, commit - ) - else: - self.drbx.files_upload_session_append_v2( - f.read(chunk_size), cursor - ) - cursor.offset = f.tell() - - def get_file_contents(self, drbx_file_path: str): - """Get the contents of a file stored in Dropbox as a String.""" - _, result = self.drbx.files_download(drbx_file_path) - with io.BytesIO(result.content) as stream: - return stream.read().decode() - - def get_file_link(self, drbx_file_path: str): - """Get the download link of a file stored in Dropbox as a String.""" - result = 
self.drbx.files_get_temporary_link(drbx_file_path) - return result.link - - def create_folder(self, drbx_folder_path: str): - """Create a folder in Dropbox.""" - self.drbx.files_create_folder_v2(drbx_folder_path) - - def list_files(self, drbx_folder_path: str): - """List the files in a certain directory in Dropbox.""" - result = self.drbx.files_list_folder(drbx_folder_path) - return [f.name for f in result.entries] diff --git a/backend/exec/Ec2.py b/backend/exec/Ec2.py deleted file mode 100644 index dc29695..0000000 --- a/backend/exec/Ec2.py +++ /dev/null @@ -1,71 +0,0 @@ -import boto3 - - -class Ec2: - def __init__(self, aws_access_key_id: str, aws_secret_access_key: str): - """Initialize an instance of the Ec2 class.""" - self.aws_access_key_id = aws_access_key_id - self.aws_secret_access_key = aws_secret_access_key - self.ec2 = boto3.client( - "ec2", - aws_access_key_id=self.aws_access_key_id, - aws_secret_access_key=self.aws_secret_access_key, - region_name="us-east-1", - ) - self.ec2_resource = boto3.resource( - "ec2", - aws_access_key_id=self.aws_access_key_id, - aws_secret_access_key=self.aws_secret_access_key, - region_name="us-east-1", - ) - - def create_key_pair(self, key_pair_name: str): - """Create a key pair object for use in communicating with and creating instances.""" - response = self.ec2.create_key_pair(KeyName=key_pair_name) - return response - - def delete_key_pair(self, key_pair_name: str): - """Delete a key pair object.""" - response = self.ec2.delete_key_pair(KeyName=key_pair_name) - return response - - def create_security_group(self, security_group_name: str): - """Create a security group to establish what connections a virtual machine can have.""" - response = self.ec2.create_security_group( - GroupName=security_group_name, - Description="Security group generated by GCC", - ) - self.ec2.authorize_security_group_ingress( - GroupId=response["GroupId"], - IpPermissions=[ - { - "IpProtocol": "tcp", - "FromPort": 0, - "ToPort": 65535, - 
"IpRanges": [{"CidrIp": "0.0.0.0/0"}], - }, - ], - ) - return response - - def delete_security_group(self, security_group_id: str): - """Delete a security group.""" - response = self.ec2.delete_security_group(GroupId=security_group_id) - return response - - def create_instance(self, key_pair_name: str, security_group_id: str): - """Create an instance in Ec2 with a size of t2.micro and an image of ubuntu 20.04.""" - response = self.ec2.run_instances( - ImageId="ami-09e67e426f25ce0d7", - MinCount=1, - MaxCount=1, - InstanceType="t2.micro", - KeyName=key_pair_name, - SecurityGroupIds=[security_group_id], - ) - return response - - def terminate_instance(self, instance_id: str): - """Terminate an Ec2 instance.""" - response = self.ec2.terminate_instances(InstanceIds=[instance_id]) - return response diff --git a/backend/exec/Node.py b/backend/exec/Node.py deleted file mode 100644 index a7a5068..0000000 --- a/backend/exec/Node.py +++ /dev/null @@ -1,257 +0,0 @@ -import json -import os -import socket -import time -from io import StringIO - -import paramiko as paramiko - -from . 
import Workflow - - -class Node: - def __init__(self, workflow: Workflow, node_id: str, node_dependencies: dict): - """Initialize an instance of the Node class.""" - self.workflow = workflow - self.node_id = node_id - self.node_dependencies = node_dependencies - self.gcc_vm = True - self.ip = None - self.pem = None - self.level = 0 - self.node_dependents = {} - self.receiving_ports = [] - self.sending_ports = [] - self.ins_id = None - self.rae = None - self.sae = None - self.uda = None - self.configure_commands = [] - - def initialize(self): - """Initialize a virtual machine and assign it to the node if a machine - is not already available to the node.""" - security_group = self.workflow.security_group - key_pair = self.workflow.key_pair - retry_count = 0 - - if self.gcc_vm: - result = self.workflow.ec2.create_instance( - key_pair["KeyName"], security_group["GroupId"] - ) - self.ins_id = result["Instances"][0]["InstanceId"] - - ins = self.workflow.ec2.ec2_resource.Instance(id=self.ins_id) - ins.wait_until_running() - - while retry_count <= 20: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - result = sock.connect_ex((ins.public_ip_address, 22)) - if result == 0: - if ins.public_ip_address is not None: - self.ip = ins.public_ip_address - break - else: - time.sleep(10) - else: - while retry_count <= 20: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - result = sock.connect_ex((self.ip, 22)) - if result == 0: - break - else: - time.sleep(10) - - def set_configure_commands(self): - """Set the configuration commands for a specific node on it's virtual machine.""" - if self.workflow.type == 0: - self.configure_commands += [ - f"sudo rm -rf /home/ubuntu/{self.node_id}", - "sudo apt update -qq", - "sudo apt update -qq", - "sudo apt install unzip -y -qq", - "sudo apt install python3-pip -y -qq", - "pip3 install rpyc", - "pip3 install dropbox", - f"wget {self.workflow.drbx.get_file_link(f'/{self.workflow.workflow_name}/nodes/{self.node_id}.zip')} -O 
{self.node_id}.zip", - f"unzip {self.node_id}.zip -d /home/ubuntu", - ] - - elif self.workflow.type == 1: - self.configure_commands += [ - f"sudo rm -rf /home/ubuntu/{self.node_id}", - "sudo apt update -qq", - "sudo apt update -qq", - "sudo apt install unzip -y -qq", - "sudo apt install python3-pip -y -qq", - "pip3 install rpyc", - "pip3 install dropbox", - f"wget {self.workflow.drbx.get_file_link(f'/{self.workflow.workflow_name}/nodes/{self.node_id}.zip')} -O {self.node_id}.zip", - f"unzip {self.node_id}.zip -d /home/ubuntu", - ] - - port = 5001 - receiving_args = [] - sending_args = [] - - if self.node_dependencies is not None: - for node in self.node_dependencies: - receiving_args_dict = {} - if node is not None: - receiving_args_dict["host"] = "0.0.0.0" - receiving_args_dict["port"] = port - receiving_args_dict["outdir"] = "data/in" - receiving_args.append(receiving_args_dict) - self.receiving_ports.append(port) - port += 1 - else: - for file in self.node_dependencies[node].split(","): - if file == "*": - for __file__ in self.workflow.drbx.list_files( - f"/{self.workflow.workflow_name}/data" - ): - self.configure_commands.append( - f"wget {self.workflow.drbx.get_file_link(f'/{self.workflow.workflow_name}/data/{__file__}')} -O /home/ubuntu/{self.node_id}/data/in/{__file__}" - ) - else: - self.configure_commands.append( - f"wget {self.workflow.drbx.get_file_link(f'/{self.workflow.workflow_name}/data/{file}')} -O /home/ubuntu/{self.node_id}/data/in/{file}" - ) - - for node in self.node_dependents: - sending_args_dict = {} - if node is not None: - filedictlist = [] - for file in self.node_dependents[node].split(","): - filedict = {"filename": file, "filedir": "data/out"} - filedictlist.append(filedict) - - sending_args_dict["host"] = self.workflow.nodes_dict[node].ip - sending_args_dict["filedictlist"] = filedictlist - sending_args_dict["port"] = self.workflow.nodes_dict[ - node - ].receiving_ports.pop() - sending_args.append(sending_args_dict) - - self.rae = 
json.dumps(str(receiving_args)) - self.sae = json.dumps(str(sending_args)) - - uda_dict = { - "drbx_refresh_token": self.workflow.user.drbx_refresh_token, - "drbx_app_key": self.workflow.user.drbx_app_key, - "drbx_app_secret": self.workflow.user.drbx_app_secret, - "local_dir_path": "/data/out", - "drbx_dir_path": f"/{self.workflow.workflow_name}/exec/{self.workflow.exec_date_time}/{self.node_id}/data/out", - } - self.uda = json.dumps(str(uda_dict)) - - self.configure_commands += [ - f"cd {self.node_id};pip3 install -r requirements.txt", - "exit", - ] - - def configure(self): - """Execute the configuration commands set in the set_configure_commands() method.""" - key_pair = self.workflow.key_pair - - if self.pem is None: - self.pem = key_pair["KeyMaterial"] - - keyfile = StringIO(self.pem) - mykey = paramiko.RSAKey.from_private_key(keyfile) - client = paramiko.SSHClient() - client.load_system_host_keys() - client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - client.connect(self.ip, username="ubuntu", pkey=mykey) - - log_file = open( - f"{os.getcwd()}/tmp/{self.workflow.temp_dir}/{self.node_id}_logs.txt", - "w+", - ) - - for comm in self.configure_commands: - stdin, stdout, stderr = client.exec_command(comm) - log_file.write(f"\n[{comm}]\n") - log_file.writelines([line for line in stdout]) - - log_file.close() - client.close() - - def execute(self): - """Execute the node's payload on it's virtual machine.""" - key_pair = self.workflow.key_pair - commands = [] - - if self.workflow.type == 0: - if self.node_dependencies is not None: - for node in self.node_dependencies: - for file in self.node_dependencies[node].split(","): - if node is not None: - if file == "*": - for __file__ in self.workflow.drbx.list_files( - f"/{self.workflow.workflow_name}/exec/{self.workflow.exec_date_time}/{node}/data/out" - ): - commands.append( - f"wget 
{self.workflow.drbx.get_file_link(f'/{self.workflow.workflow_name}/exec/{self.workflow.exec_date_time}/{node}/data/out/{__file__}')} -O /home/ubuntu/{self.node_id}/data/in/{__file__}" - ) - else: - commands.append( - f"wget {self.workflow.drbx.get_file_link(f'/{self.workflow.workflow_name}/exec/{self.workflow.exec_date_time}/{node}/data/out/{file}')} -O /home/ubuntu/{self.node_id}/data/in/{file}" - ) - else: - if file == "*": - for __file__ in self.workflow.drbx.list_files( - f"/{self.workflow.workflow_name}/data" - ): - commands.append( - f"wget {self.workflow.drbx.get_file_link(f'/{self.workflow.workflow_name}/data/{__file__}')} -O /home/ubuntu/{self.node_id}/data/in/{__file__}" - ) - else: - commands.append( - f"wget {self.workflow.drbx.get_file_link(f'/{self.workflow.workflow_name}/data/{file}')} -O /home/ubuntu/{self.node_id}/data/in/{file}" - ) - - commands += [ - f"cd {self.node_id};chmod +x run.sh;./run.sh {self.uda}", - "exit", - ] - - elif self.workflow.type == 1: - commands += [ - f"cd {self.node_id};chmod +x run.sh;./run.sh {self.rae} {self.sae} {self.uda}", - "exit", - ] - - if self.pem is None: - self.pem = key_pair["KeyMaterial"] - - keyfile = StringIO(self.pem) - mykey = paramiko.RSAKey.from_private_key(keyfile) - client = paramiko.SSHClient() - client.load_system_host_keys() - client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - client.connect(self.ip, username="ubuntu", pkey=mykey) - - log_file = open( - f"{os.getcwd()}/tmp/{self.workflow.temp_dir}/{self.node_id}_logs.txt", - "a+", - ) - - for comm in commands: - stdin, stdout, stderr = client.exec_command(comm) - log_file.write(f"\n[{comm}]\n") - log_file.writelines([line for line in stdout]) - - log_file.close() - client.close() - - self.workflow.drbx.upload_file( - f"{os.getcwd()}/tmp/{self.workflow.temp_dir}/{self.node_id}_logs.txt", - f"/{self.workflow.workflow_name}/exec/{self.workflow.exec_date_time}/{self.node_id}/{self.node_id}_logs.txt", - ) - - def terminate(self): - 
"""Terminate the node's virtual machine if it was created by GCC.""" - if self.gcc_vm: - if self.ins_id is not None: - self.workflow.ec2.terminate_instance(self.ins_id) diff --git a/backend/exec/User.py b/backend/exec/User.py deleted file mode 100644 index a6f5858..0000000 --- a/backend/exec/User.py +++ /dev/null @@ -1,15 +0,0 @@ -class User: - def __init__( - self, - drbx_refresh_token: str, - drbx_app_key: str, - drbx_app_secret: str, - aws_access_key_id: str, - aws_secret_access_key: str, - ): - """Initialize an instance of the User class. Used to store a user's credentials.""" - self.drbx_refresh_token = drbx_refresh_token - self.drbx_app_key = drbx_app_key - self.drbx_app_secret = drbx_app_secret - self.aws_access_key_id = aws_access_key_id - self.aws_secret_access_key = aws_secret_access_key diff --git a/backend/exec/Workflow.py b/backend/exec/Workflow.py deleted file mode 100644 index 4c6fa1b..0000000 --- a/backend/exec/Workflow.py +++ /dev/null @@ -1,223 +0,0 @@ -import os -import random -import shutil -import threading -import time -from concurrent.futures.thread import ThreadPoolExecutor -from datetime import datetime - -import xmltodict -from botocore.exceptions import ClientError -from xmltodict import OrderedDict - -from . 
import Drbx, Ec2, Node, User - - -class Workflow: - def __init__(self, user: User, workflow_name: str, temp_dir: str): - """Initialize an instance of the Workflow class.""" - self.user = user - self.workflow_name = workflow_name - self.temp_dir = temp_dir - self.ec2 = Ec2.Ec2(user.aws_access_key_id, user.aws_secret_access_key) - self.drbx = Drbx.Drbx( - user.drbx_refresh_token, user.drbx_app_key, user.drbx_app_secret - ) - self.nodes = [] - self.nodes_dict = {} - self.security_group = None - self.key_pair = None - self.exec_date_time = None - self.gcc_vms = 0 - self.type = 0 - - self.time_to_init = 0 - self.time_to_config = 0 - self.time_to_execute = 0 - - def plan(self, available_machines): - """Plan the workflow based on the XML specification file in Dropbox.""" - used_machines = [] - workflow_dict = xmltodict.parse( - self.drbx.get_file_contents(f"/{self.workflow_name}/spec.xml") - ) - plan = {} - - try: - self.type = int(workflow_dict["workflow"]["@type"]) - except KeyError: - pass - for x in workflow_dict["workflow"]["task"]: - try: - __node_dependencies__ = x["dep"] - except KeyError: - __node_dependencies__ = None - try: - __node_vm__ = x["vm"] - except KeyError: - __node_vm__ = None - node_dependencies = {} - node_id = x["@id"] - if __node_dependencies__ is not None: - if type(__node_dependencies__) == str: - node_dependencies[None] = __node_dependencies__ - elif type(__node_dependencies__) == OrderedDict: - node_dependencies[ - __node_dependencies__["@node"] - ] = __node_dependencies__["#text"] - elif type(__node_dependencies__) == list: - for y in __node_dependencies__: - if type(y) == str: - node_dependencies[None] = y - elif type(y) == OrderedDict: - node_dependencies[dict(y)["@node"]] = dict(y)["#text"] - elif __node_dependencies__ is None: - node_dependencies = None - node = Node.Node(self, node_id, node_dependencies) - if __node_vm__ is not None: - if __node_vm__["@pem"] is not None: - node.pem = self.drbx.get_file_contents( - 
f"/{self.workflow_name}/pem/{__node_vm__['@pem']}" - ).strip("\n") - if __node_vm__["#text"] is not None: - node.ip = __node_vm__["#text"] - node.gcc_vm = False - elif len(available_machines) > 0: - machine = available_machines[len(available_machines) - 1] - if machine.pem is not None: - node.pem = machine.pem.strip("\n") - if machine.ip is not None: - node.ip = machine.ip - used_machines.append(available_machines.pop()) - node.gcc_vm = False - else: - self.gcc_vms += 1 - - self.nodes.append(node) - self.nodes_dict[node.node_id] = node - - done_ct = 0 - - while done_ct < len(self.nodes): - done_ct = 0 - for node in self.nodes: - if node.level == 0: - __max__ = 0 - if len(node.node_dependencies.keys()) > 0: - for dependency in node.node_dependencies.keys(): - if dependency is not None: - dep = [ - d for d in self.nodes if d.node_id == dependency - ][0] - if dep.level > __max__: - __max__ = dep.level + 1 - else: - if 1 > __max__: - __max__ = 1 - else: - if 1 > __max__: - __max__ = 1 - - try: - plan[__max__].append(node) - except KeyError: - plan[__max__] = [node] - node.level = __max__ - else: - done_ct += 1 - - for lev in plan: - for node in plan[lev]: - for dep in node.node_dependencies: - if dep is not None: - self.nodes_dict[dep].node_dependents[ - node.node_id - ] = node.node_dependencies[dep] - - return plan, used_machines - - def initialize(self): - """Initialize all virtual machines (if needed) for the workflow's nodes.""" - self.exec_date_time = datetime.now().strftime("%m:%d:%Y-%H:%M:%S") - self.drbx.create_folder(f"/{self.workflow_name}/exec/{self.exec_date_time}") - - if self.gcc_vms > 0: - self.security_group = self.ec2.create_security_group(gen_string()) - self.key_pair = self.ec2.create_key_pair(gen_string()) - - with ThreadPoolExecutor(max_workers=len(self.nodes)) as executor: - for node in self.nodes: - executor.submit(node.initialize) - executor.shutdown() - - def configure(self, plan): - """Configure each virtual machine in the workflow.""" - if 
self.type == 0: - with ThreadPoolExecutor(max_workers=len(self.nodes)) as executor: - for node in self.nodes: - executor.submit(node.set_configure_commands) - executor.shutdown() - - elif self.type == 1: - plan = dict(reversed(list(plan.items()))) - - for level in plan: - with ThreadPoolExecutor(max_workers=len(plan[level])) as executor: - for node in plan[level]: - executor.submit(node.set_configure_commands) - executor.shutdown() - - with ThreadPoolExecutor(max_workers=len(self.nodes)) as executor: - for node in self.nodes: - executor.submit(node.configure) - executor.shutdown() - - def execute(self, plan: dict): - """Execute the plan produced by the plan() method.""" - if self.type == 0: - for level in plan: - with ThreadPoolExecutor(max_workers=len(plan[level])) as executor: - for node in plan[level]: - executor.submit(node.execute) - executor.shutdown() - - if self.type == 1: - plan = dict(reversed(list(plan.items()))) - threads = [] - for level in plan: - for node in plan[level]: - t = threading.Thread(target=node.execute) - threads.append(t) - t.start() - time.sleep(5) - for t in threads: - t.join() - - def complete(self): - """Finish exexution by terminating all newly generated security groups, key pairs, and virtual machines.""" - self.drbx.upload_file( - f"{os.getcwd()}/tmp/{self.temp_dir}/stats.txt", - f"/{self.workflow_name}/exec/{self.exec_date_time}/stats.txt", - ) - shutil.rmtree(f"{os.getcwd()}/tmp/{self.temp_dir}") - - with ThreadPoolExecutor(max_workers=len(self.nodes)) as executor: - for node in self.nodes: - executor.submit(node.terminate) - executor.shutdown() - - if self.key_pair is not None: - self.ec2.delete_key_pair(self.key_pair["KeyName"]) - if self.security_group is not None: - while True: - try: - self.ec2.delete_security_group(self.security_group["GroupId"]) - break - except ClientError: - time.sleep(10) - - -def gen_string(): - """Generate a random 7 character string.""" - choices = 
"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-" - return "".join([random.choice(choices) for _ in range(7)]) diff --git a/frontend/requirements.txt b/frontend/requirements.txt deleted file mode 100644 index 977b14f..0000000 --- a/frontend/requirements.txt +++ /dev/null @@ -1,36 +0,0 @@ -asgiref==3.5.0 -bcrypt==3.2.0 -black==22.1.0 -boto3==1.21.18 -botocore==1.24.18 -certifi==2021.10.8 -cffi==1.15.0 -charset-normalizer==2.0.12 -click==8.0.4 -cryptography==36.0.1 -Django==4.0.3 -django-fernet-fields==0.6 -django-recaptcha==3.0.0 -django-widget-tweaks==1.4.12 -dropbox==11.28.0 -idna==3.3 -jmespath==0.10.0 -mypy-extensions==0.4.3 -mysqlclient==2.1.0 -paramiko==2.9.2 -pathspec==0.9.0 -platformdirs==2.5.1 -ply==3.11 -psutil==5.9.0 -pycparser==2.21 -PyNaCl==1.5.0 -python-dateutil==2.8.2 -python-dotenv==0.19.2 -requests==2.27.1 -s3transfer==0.5.2 -six==1.16.0 -sqlparse==0.4.2 -stone==3.3.1 -tomli==2.0.1 -urllib3==1.26.8 -xmltodict==0.12.0 diff --git a/backend/exec/__init__.py b/gcc_exec/__init__.py similarity index 100% rename from backend/exec/__init__.py rename to gcc_exec/__init__.py diff --git a/gcc_exec/gcc_drbx.py b/gcc_exec/gcc_drbx.py new file mode 100644 index 0000000..943a1b7 --- /dev/null +++ b/gcc_exec/gcc_drbx.py @@ -0,0 +1,102 @@ +"""This file contains the GccDrbx class.""" + +import io +import os +from os.path import dirname, join + +import dropbox +from dotenv import load_dotenv +from dropbox import files + + +class GccDrbx: + """This class contains methods to interface with Dropbox.""" + + __drbx = None + __drbx_app_key = None + __drbx_app_secret = None + + def __init__(self, oauth2_refresh_token: str) -> None: + """Constructor method for a GccDrbx object.""" + env_path = join(dirname(__file__), ".env") + + if os.path.isfile(env_path): + load_dotenv() + + self.__drbx_app_key = os.environ.get("DRBX_APP_KEY") + self.__drbx_app_secret = os.environ.get("DRBX_APP_SECRET") + + self.__drbx = dropbox.Dropbox( + oauth2_refresh_token=oauth2_refresh_token, + 
app_key=self.__drbx_app_key, + app_secret=self.__drbx_app_secret, + ) + + def upload_file( + self, local_file_path: str, drbx_file_path: str + ) -> files.FileMetadata: + """Upload file to Dropbox using the Dropbox API.""" + chunk_size = 4 * 1024 * 1024 + file_size = os.path.getsize(local_file_path) + + with open(local_file_path, "rb") as out_file: + if file_size <= chunk_size: + response = self.__drbx.files_upload( + out_file.read(), + drbx_file_path, + mode=dropbox.files.WriteMode("overwrite"), + ) + else: + upload_session_start_result = self.__drbx.files_upload_session_start( + out_file.read(chunk_size) + ) + cursor = dropbox.files.UploadSessionCursor( + session_id=upload_session_start_result.session_id, + offset=out_file.tell(), + ) + commit = dropbox.files.CommitInfo(path=drbx_file_path) + while out_file.tell() < file_size: + if (file_size - out_file.tell()) <= chunk_size: + response = self.__drbx.files_upload_session_finish( + out_file.read(chunk_size), cursor, commit + ) + else: + self.__drbx.files_upload_session_append_v2( + out_file.read(chunk_size), cursor + ) + cursor.offset = out_file.tell() + return response + + def get_file_contents(self, drbx_file_path: str) -> str: + """Get the contents of a file stored in Dropbox as a String.""" + _, result = self.__drbx.files_download(drbx_file_path) + with io.BytesIO(result.content) as stream: + return stream.read().decode() + + def get_file_link(self, drbx_file_path: str) -> str: + """Get the download link of a file stored in Dropbox as a String.""" + response = self.__drbx.files_get_temporary_link(drbx_file_path) + return response.link + + def create_folder(self, drbx_folder_path: str) -> files.FolderMetadata: + """Create a folder in Dropbox.""" + response = self.__drbx.files_create_folder_v2(drbx_folder_path) + return response + + def list_files(self, drbx_folder_path: str) -> list: + """List the files in a certain directory in Dropbox.""" + result = self.__drbx.files_list_folder(drbx_folder_path) + return 
[f.name for f in result.entries] + + def delete(self, drbx_path: str) -> files.DeleteResult: + """Delete an object that is stored in Dropbox.""" + result = self.__drbx.files_delete_v2(drbx_path) + return result + + def get_drbx_app_key(self) -> str: + """Return the dropbox app key.""" + return self.__drbx_app_key + + def get_drbx_app_secret(self) -> str: + """Return the dropbox app secret.""" + return self.__drbx_app_secret diff --git a/gcc_exec/gcc_ec2.py b/gcc_exec/gcc_ec2.py new file mode 100644 index 0000000..6bcf674 --- /dev/null +++ b/gcc_exec/gcc_ec2.py @@ -0,0 +1,82 @@ +"""This file contains the GccEc2 class.""" +from typing import Any + +import boto3 + + +class GccEc2: + """This class contains methods to interface with EC2.""" + + __ec2 = None + __ec2_resource = None + + def __init__(self, aws_access_key_id: str, aws_secret_access_key: str) -> None: + """Constructor for a GccEc2 object.""" + self.__ec2 = boto3.client( + "ec2", + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + region_name="us-east-1", + ) + self.__ec2_resource = boto3.resource( + "ec2", + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + region_name="us-east-1", + ) + + def create_key_pair(self, key_pair_name: str) -> dict: + """Create a key pair object for use in communicating with and creating instances.""" + response = self.__ec2.create_key_pair(KeyName=key_pair_name) + return response + + def delete_key_pair(self, key_pair_name: str) -> dict: + """Delete a key pair object.""" + response = self.__ec2.delete_key_pair(KeyName=key_pair_name) + return response + + def create_security_group(self, security_group_name: str) -> dict: + """Create a security group to establish what connections a virtual machine can have.""" + response = self.__ec2.create_security_group( + GroupName=security_group_name, + Description="Security group generated by GCC", + ) + self.__ec2.authorize_security_group_ingress( + 
GroupId=response["GroupId"], + IpPermissions=[ + { + "IpProtocol": "tcp", + "FromPort": 0, + "ToPort": 65535, + "IpRanges": [{"CidrIp": "0.0.0.0/0"}], + }, + ], + ) + return response + + def delete_security_group(self, security_group_id: str) -> dict: + """Delete a security group.""" + response = self.__ec2.delete_security_group(GroupId=security_group_id) + return response + + def create_instance(self, key_pair_name: str, security_group_id: str) -> dict: + """Create an instance in Ec2 with a size of t2.micro and an image of ubuntu 20.04.""" + response = self.__ec2.run_instances( + ImageId="ami-09e67e426f25ce0d7", + MinCount=1, + MaxCount=1, + InstanceType="t2.micro", + KeyName=key_pair_name, + SecurityGroupIds=[security_group_id], + ) + return response + + def terminate_instance(self, instance_id: str) -> dict: + """Terminate an Ec2 instance.""" + response = self.__ec2.terminate_instances(InstanceIds=[instance_id]) + return response + + def get_instance_object(self, instance_id: str) -> Any: + """This method returns an Instance object.""" + response = self.__ec2_resource.Instance(id=instance_id) + return response diff --git a/gcc_exec/gcc_node.py b/gcc_exec/gcc_node.py new file mode 100644 index 0000000..5cddcb4 --- /dev/null +++ b/gcc_exec/gcc_node.py @@ -0,0 +1,324 @@ +"""This file contains the GccNode class.""" +# pylint: disable=C0301,R0914,W0612,R1721,R1702,W1514,R0912 +import json +import os +import socket +import time +from io import StringIO +from typing import Any + + +class GccNode: + """This class contains methods to configure and execute a node in a workflow.""" + + __node_virtual_machine = None + __node_level = None + __node_id = None + __node_dependents = None + __node_dependencies = None + __node_config = None + __gcc_workflow_obj = None + + def __init__(self, node_id: str, gcc_workflow_obj: Any) -> None: + """Constructor for a GccNode object.""" + self.__node_id = node_id + self.__gcc_workflow_obj = gcc_workflow_obj + + def 
set_node_level(self, node_level: int) -> None: + """This method sets the level of a node in a workflow plan.""" + self.__node_level = node_level + + def get_node_level(self) -> int: + """This method returns a node's level.""" + return self.__node_level + + def get_node_id(self) -> str: + """This method returns a node's id.""" + return self.__node_id + + def set_node_virtual_machine(self, node_virtual_machine: dict) -> None: + """This method sets a node's virtual machine.""" + self.__node_virtual_machine = node_virtual_machine + + def get_node_virtual_machine(self) -> dict: + """This method returns a node's virtual machine.""" + return self.__node_virtual_machine + + def initialize(self) -> None: + """This method initializes a node's virtual machine if needed.""" + security_group = self.__gcc_workflow_obj.get_gcc_security_group() + key_pair = self.__gcc_workflow_obj.get_gcc_key_pair() + retry_count = 0 + + result = self.__gcc_workflow_obj.get_gcc_ec2_obj().create_instance( + key_pair["KeyName"], security_group["GroupId"] + ) + self.__node_virtual_machine = { + "ip": None, + "pem": key_pair["KeyMaterial"], + "instance_id": result["Instances"][0]["InstanceId"], + } + + instance = self.__gcc_workflow_obj.get_gcc_ec2_obj().get_instance_object( + self.__node_virtual_machine["instance_id"] + ) + instance.wait_until_running() + + while retry_count <= 20: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + result = sock.connect_ex((instance.public_ip_address, 22)) + if result == 0: + if instance.public_ip_address is not None: + self.__node_virtual_machine["ip"] = instance.public_ip_address + break + else: + time.sleep(10) + + def set_config_commands(self) -> None: + """Set the configuration commands for a specific node on its virtual machine.""" + drbx_node_path = f"/{self.__gcc_workflow_obj.get_workflow_dict()['name']}/nodes/{self.__node_id}.zip" + drbx_node_link = self.__gcc_workflow_obj.get_gcc_drbx_obj().get_file_link( + drbx_node_path + ) + + self.__node_config = { 
+ "config_commands": [ + f"sudo rm -rf /home/ubuntu/{self.__node_id}", + "sudo apt update -qq", + "sudo apt update -qq", + "sudo apt install unzip -y -qq", + "sudo apt install python3-pip -y -qq", + "pip3 install rpyc", + "pip3 install dropbox", + f"wget {drbx_node_link} -O {self.__node_id}.zip", + f"unzip {self.__node_id}.zip -d /home/ubuntu", + ], + "receiving_ports": None, + "receiving_args": None, + "sending_args": None, + "receiving_args_str": None, + "sending_args_str": None, + "dropbox_args_str": None, + } + + if self.__gcc_workflow_obj.get_workflow_dict()["type"] == 1: + port = 5001 + + if self.__node_dependencies is not None: + for node_dependency in self.__node_dependencies: + receiving_args_dict = { + "host": "0.0.0.0", + "port": port, + "outdir": "data/in", + } + + if self.__node_config["receiving_args"] is None: + self.__node_config["receiving_args"] = [receiving_args_dict] + elif isinstance(self.__node_config["receiving_args"], list): + self.__node_config["receiving_args"].append(receiving_args_dict) + + if self.__node_config["receiving_ports"] is None: + self.__node_config["receiving_ports"] = [port] + elif isinstance(self.__node_config["receiving_ports"], list): + self.__node_config["receiving_ports"].append(port) + + port += 1 + + gcc_workflow_name = self.__gcc_workflow_obj.get_workflow_dict()[ + "name" + ] + gcc_drbx_obj = self.__gcc_workflow_obj.get_gcc_drbx_obj() + + for node_id in node_dependency: + if node_id is None: + for file in node_dependency[node_id]: + if file == "*": + for __file__ in gcc_drbx_obj.list_files( + f"/{self.__gcc_workflow_obj.get_workflow_dict()['name']}/data" + ): + self.__node_config["config_commands"].append( + f"wget {gcc_drbx_obj.get_file_link(f'/{gcc_workflow_name}/data/{__file__}')} -O /home/ubuntu/{self.__node_id}/data/in/{__file__}" + ) + else: + self.__node_config["config_commands"].append( + f"wget {gcc_drbx_obj.get_file_link(f'/{gcc_workflow_name}/data/{file}')} -O /home/ubuntu/{self.__node_id}/data/in/{file}" 
+ ) + + if self.__node_dependents is not None: + for node_dependent in self.__node_dependents: + filedictlist = [] + + for node_id in node_dependent: + for file in node_dependent[node_id]: + filedict = {"filename": file, "filedir": "data/out"} + filedictlist.append(filedict) + + sending_to_node = self.__gcc_workflow_obj.get_workflow_dict()[ + "nodes" + ][node_id] + + sending_args_dict = { + "host": sending_to_node.get_node_virtual_machine()["ip"], + "filedictlist": filedictlist, + "port": sending_to_node.get_node_config()[ + "receiving_ports" + ].pop(), + } + + if self.__node_config["sending_args"] is None: + self.__node_config["sending_args"] = [sending_args_dict] + elif isinstance(self.__node_config["sending_args"], list): + self.__node_config["sending_args"].append(sending_args_dict) + + if self.__node_config["sending_args"] is None: + self.__node_config["sending_args_str"] = json.dumps(str([])) + elif isinstance(self.__node_config["sending_args"], list): + self.__node_config["sending_args_str"] = json.dumps( + str(self.__node_config["sending_args"]) + ) + + if self.__node_config["receiving_args"] is None: + self.__node_config["receiving_args_str"] = json.dumps(str([])) + elif isinstance(self.__node_config["receiving_args"], list): + self.__node_config["receiving_args_str"] = json.dumps( + str(self.__node_config["receiving_args"]) + ) + + uda_dict = { + "drbx_refresh_token": self.__gcc_workflow_obj.get_gcc_user_obj().get_oauth2_refresh_token(), + "drbx_app_key": self.__gcc_workflow_obj.get_gcc_drbx_obj().get_drbx_app_key(), + "drbx_app_secret": self.__gcc_workflow_obj.get_gcc_drbx_obj().get_drbx_app_secret(), + "local_dir_path": "/data/out", + "drbx_dir_path": f"/{self.__gcc_workflow_obj.get_workflow_dict()['name']}/exec/{self.__gcc_workflow_obj.get_exec_date_time()}/{self.__node_id}/data/out", + } + self.__node_config["dropbox_args_str"] = json.dumps(str(uda_dict)) + + self.__node_config["config_commands"] += [ + f"cd {self.__node_id};pip3 install -r 
requirements.txt", + "exit", + ] + + def configure(self) -> None: + """Execute configuration commands on a virtual machine.""" + keyfile = StringIO(self.__node_virtual_machine["pem"]) + mykey = paramiko.RSAKey.from_private_key(keyfile) + client = paramiko.SSHClient() + client.load_system_host_keys() + client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + client.connect(self.__node_virtual_machine["ip"], username="ubuntu", pkey=mykey) + + with open( + f"{os.getcwd()}/tmp/{self.__gcc_workflow_obj.get_tmp_dir()}/{self.__node_id}_logs.txt", + "w+", + ) as log_file: + for comm in self.__node_config["config_commands"]: + stdin, stdout, stderr = client.exec_command(comm) + + log_file.write(f"\n[{comm}]\n") + log_file.writelines([line for line in stdout]) + + log_file.close() + client.close() + + def execute(self) -> None: + """Execute execution commands on a virtual machine.""" + exec_commands = [] + + if self.__gcc_workflow_obj.get_workflow_dict()["type"] == 1: + exec_commands += [ + f"cd {self.__node_id};chmod +x run.sh;./run.sh {self.__node_config['receiving_args_str']} {self.__node_config['sending_args_str']} {self.__node_config['dropbox_args_str']}", + "exit", + ] + + elif self.__gcc_workflow_obj.get_workflow_dict()["type"] == 0: + gcc_workflow_name = self.__gcc_workflow_obj.get_workflow_dict()["name"] + gcc_drbx_obj = self.__gcc_workflow_obj.get_gcc_drbx_obj() + + if self.__node_dependencies is not None: + for node_dependency in self.__node_dependencies: + for node_id in node_dependency: + if node_id is not None: + for file in node_dependency[node_id]: + if file == "*": + for __file__ in gcc_drbx_obj.list_files( + f"/{gcc_workflow_name}/exec/{self.__gcc_workflow_obj.get_exec_date_time()}/{node_id}/data/out" + ): + exec_commands.append( + f"wget {gcc_drbx_obj.get_file_link(f'/{gcc_workflow_name}/exec/{self.__gcc_workflow_obj.get_exec_date_time()}/{node_id}/data/out/{__file__}')} -O /home/ubuntu/{self.__node_id}/data/in/{__file__}" + ) + else: + 
exec_commands.append( + f"wget {gcc_drbx_obj.get_file_link(f'/{gcc_workflow_name}/exec/{self.__gcc_workflow_obj.get_exec_date_time()}/{node_id}/data/out/{file}')} -O /home/ubuntu/{self.__node_id}/data/in/{file}" + ) + else: + for file in node_dependency[node_id]: + if file == "*": + for __file__ in gcc_drbx_obj.list_files( + f"/{gcc_workflow_name}/data" + ): + exec_commands.append( + f"wget {gcc_drbx_obj.get_file_link(f'/{gcc_workflow_name}/data/{__file__}')} -O /home/ubuntu/{self.__node_id}/data/in/{__file__}" + ) + else: + exec_commands.append( + f"wget {gcc_drbx_obj.get_file_link(f'/{gcc_workflow_name}/data/{file}')} -O /home/ubuntu/{self.__node_id}/data/in/{file}" + ) + + exec_commands += [ + f"cd {self.__node_id};chmod +x run.sh;./run.sh {self.__node_config['dropbox_args_str']}", + "exit", + ] + + keyfile = StringIO(self.__node_virtual_machine["pem"]) + mykey = paramiko.RSAKey.from_private_key(keyfile) + client = paramiko.SSHClient() + client.load_system_host_keys() + client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + client.connect(self.__node_virtual_machine["ip"], username="ubuntu", pkey=mykey) + + with open( + f"{os.getcwd()}/tmp/{self.__gcc_workflow_obj.get_tmp_dir()}/{self.__node_id}_logs.txt", + "a+", + ) as log_file: + for comm in exec_commands: + stdin, stdout, stderr = client.exec_command(comm) + + log_file.write(f"\n[{comm}]\n\n") + log_file.writelines([line for line in stdout]) + + log_file.close() + client.close() + + self.__gcc_workflow_obj.get_gcc_drbx_obj().upload_file( + f"{os.getcwd()}/tmp/{self.__gcc_workflow_obj.get_tmp_dir()}/{self.__node_id}_logs.txt", + f"/{self.__gcc_workflow_obj.get_workflow_dict()['name']}/exec/{self.__gcc_workflow_obj.get_exec_date_time()}/{self.__node_id}/{self.__node_id}_logs.txt", + ) + + def terminate(self) -> None: + """Terminate the virtual machine associated with a node if needed.""" + if self.__node_virtual_machine["instance_id"] is not None: + 
self.__gcc_workflow_obj.get_gcc_ec2_obj().terminate_instance( + self.__node_virtual_machine["instance_id"] + ) + + def set_node_dependencies(self, node_dependencies: list) -> None: + """Set node dependencies.""" + self.__node_dependencies = node_dependencies + + def get_node_dependencies(self) -> list: + """Get node dependencies.""" + return self.__node_dependencies + + def set_node_dependents(self, node_dependents: list) -> None: + """Set node dependents.""" + self.__node_dependents = node_dependents + + def get_node_dependents(self) -> list: + """Get node dependents.""" + return self.__node_dependents + + def get_node_config(self) -> dict: + """Get node configuration.""" + return self.__node_config diff --git a/gcc_exec/gcc_user.py b/gcc_exec/gcc_user.py new file mode 100644 index 0000000..532e844 --- /dev/null +++ b/gcc_exec/gcc_user.py @@ -0,0 +1,32 @@ +"""This file contains the GccUser class.""" + + +class GccUser: + """This class contains getter and setter methods for GCC User credentials""" + + __oauth2_refresh_token = None + __aws_access_key_id = None + __aws_secret_access_key = None + + def __init__( + self, + oauth2_refresh_token: str, + aws_access_key_id: str, + aws_secret_access_key: str, + ) -> None: + """Constructor for a GccUser class object.""" + self.__oauth2_refresh_token = oauth2_refresh_token + self.__aws_access_key_id = aws_access_key_id + self.__aws_secret_access_key = aws_secret_access_key + + def get_oauth2_refresh_token(self) -> str: + """This method returns a users oauth2 refresh token.""" + return self.__oauth2_refresh_token + + def get_aws_access_key_id(self) -> str: + """This method returns a users aws access key id.""" + return self.__aws_access_key_id + + def get_aws_secret_access_key(self) -> str: + """This method returns a users aws secret access key.""" + return self.__aws_secret_access_key diff --git a/gcc_exec/gcc_workflow.py b/gcc_exec/gcc_workflow.py new file mode 100644 index 0000000..6fc4c26 --- /dev/null +++ 
b/gcc_exec/gcc_workflow.py @@ -0,0 +1,454 @@ +"""This file contains the GccWorkflow class.""" +# pylint: disable=R0914,R0912,R0915,R0902,E0401,R1702 +import os +import random +import shutil +import threading +import time +from concurrent.futures.thread import ThreadPoolExecutor +from datetime import datetime +from typing import OrderedDict + +import xmltodict +from botocore.exceptions import ClientError +from gcc_drbx import GccDrbx +from gcc_ec2 import GccEc2 +from gcc_node import GccNode +from gcc_user import GccUser + + +class GccWorkflow: + """This class contains methods to manage a GCC workflow.""" + + __workflow_dict = None + __gcc_user_obj = None + __gcc_ec2_obj = None + __gcc_drbx_obj = None + __gcc_security_group = None + __gcc_key_pair = None + __exec_date_time = None + __tmp_dir = None + + def __init__(self, gcc_user_obj: GccUser, workflow_name: str) -> None: + """Constructor for a GccWorkflow object.""" + self.__gcc_user_obj = gcc_user_obj + self.__gcc_ec2_obj = GccEc2( + self.__gcc_user_obj.get_aws_access_key_id(), + self.__gcc_user_obj.get_aws_secret_access_key(), + ) + self.__gcc_drbx_obj = GccDrbx(self.__gcc_user_obj.get_oauth2_refresh_token()) + self.__workflow_dict = { + "type": 0, + "plan_raw": {}, + "plan_human_readable": {}, + "nodes": {}, + "name": workflow_name, + "machines_initialized": None, + } + + def plan( + self, available_machines: list = None, xml_specification: str = None + ) -> list: + """This method creates an execution plan based on a workflow specification.""" + if xml_specification is None: + xml_specification = xmltodict.parse( + self.__gcc_drbx_obj.get_file_contents( + f"/{self.__workflow_dict['name']}/spec.xml" + ) + ) + + self.__workflow_dict["type"] = int(xml_specification["workflow"]["@type"]) + + used_machines = [] + for node in xml_specification["workflow"]["task"]: + gcc_node_object = GccNode(node["@id"], self) + + try: + gcc_node_vm = node["vm"] + except KeyError: + gcc_node_vm = None + + node_virtual_machine = None + 
+ if gcc_node_vm is not None: + vm_ip = gcc_node_vm["#text"] + + try: + vm_pem = self.__gcc_drbx_obj.get_file_contents( + f"/{self.__workflow_dict['name']}/pem/{gcc_node_vm['@pem']}" + ).strip("\n") + except KeyError: + vm_pem = None + + node_virtual_machine = {"ip": vm_ip, "pem": vm_pem, "instance_id": None} + + gcc_node_object.set_node_virtual_machine(node_virtual_machine) + + elif len(available_machines) > 0: + available_machine = available_machines.pop() + + node_virtual_machine = { + "ip": available_machine.machine_ip, + "pem": available_machine.machine_pem.strip("\n"), + "instance_id": None, + } + + used_machines.append(available_machine) + gcc_node_object.set_node_virtual_machine(node_virtual_machine) + + try: + gcc_node_deps = node["dep"] + except KeyError: + gcc_node_deps = None + + if gcc_node_deps is not None: + if isinstance(gcc_node_deps, str): + node_dependencies = gcc_node_object.get_node_dependencies() + node_dependency_dict = {None: gcc_node_deps.split(",")} + + if node_dependencies is None: + gcc_node_object.set_node_dependencies([node_dependency_dict]) + elif isinstance(node_dependencies, list): + node_dependencies.append(node_dependency_dict) + gcc_node_object.set_node_dependencies(node_dependencies) + + gcc_node_object.set_node_level(0) + + try: + self.__workflow_dict["plan_raw"][0].append(gcc_node_object) + self.__workflow_dict["plan_human_readable"][0].append( + gcc_node_object.get_node_id() + ) + except KeyError: + self.__workflow_dict["plan_raw"][0] = [gcc_node_object] + self.__workflow_dict["plan_human_readable"][0] = [ + gcc_node_object.get_node_id() + ] + + if isinstance(gcc_node_deps, OrderedDict): + try: + gcc_dep_node_id = gcc_node_deps["@node"] + except KeyError: + gcc_dep_node_id = None + + try: + gcc_dep_file_dependencies = gcc_node_deps["#text"].split(",") + except KeyError: + gcc_dep_file_dependencies = None + + if gcc_dep_node_id is not None: + node_dependencies = gcc_node_object.get_node_dependencies() + node_dependency_dict = 
{ + gcc_dep_node_id: gcc_dep_file_dependencies, + } + node_dependents = self.__workflow_dict["nodes"][ + gcc_dep_node_id + ].get_node_dependents() + node_dependents_dict = { + gcc_node_object.get_node_id(): gcc_dep_file_dependencies + } + + if node_dependencies is None: + gcc_node_object.set_node_dependencies( + [node_dependency_dict] + ) + elif isinstance(node_dependencies, list): + node_dependencies.append(node_dependency_dict) + gcc_node_object.set_node_dependencies(node_dependencies) + + if node_dependents is None: + self.__workflow_dict["nodes"][ + gcc_dep_node_id + ].set_node_dependents([node_dependents_dict]) + elif isinstance(node_dependents, list): + node_dependents.append(node_dependents_dict) + self.__workflow_dict["nodes"][ + gcc_dep_node_id + ].set_node_dependents(node_dependents) + + try: + level = ( + self.__workflow_dict["nodes"][ + gcc_dep_node_id + ].get_node_level() + + 1 + ) + except TypeError: + level = 0 + + gcc_node_object.set_node_level(level) + + try: + self.__workflow_dict["plan_raw"][level].append(gcc_node_object) + self.__workflow_dict["plan_human_readable"][level].append( + gcc_node_object.get_node_id() + ) + except KeyError: + self.__workflow_dict["plan_raw"][level] = [gcc_node_object] + self.__workflow_dict["plan_human_readable"][level] = [ + gcc_node_object.get_node_id() + ] + + elif isinstance(gcc_node_deps, list): + max_level = 0 + + for dep in gcc_node_deps: + if isinstance(dep, str): + node_dependencies = gcc_node_object.get_node_dependencies() + node_dependency_dict = {None: dep.split(",")} + + if node_dependencies is None: + gcc_node_object.set_node_dependencies( + [node_dependency_dict] + ) + elif isinstance(node_dependencies, list): + node_dependencies.append(node_dependency_dict) + gcc_node_object.set_node_dependencies(node_dependencies) + + level = 0 + + if gcc_node_object.get_node_level() is None: + gcc_node_object.set_node_level(0) + + elif isinstance(dep, OrderedDict): + try: + gcc_dep_node_id = dep["@node"] + except 
KeyError: + gcc_dep_node_id = None + + try: + gcc_dep_file_dependencies = dep["#text"].split(",") + except KeyError: + gcc_dep_file_dependencies = None + + if gcc_dep_node_id is not None: + node_dependencies = ( + gcc_node_object.get_node_dependencies() + ) + node_dependency_dict = { + gcc_dep_node_id: gcc_dep_file_dependencies, + } + node_dependents = self.__workflow_dict["nodes"][ + gcc_dep_node_id + ].get_node_dependents() + node_dependents_dict = { + gcc_node_object.get_node_id(): gcc_dep_file_dependencies + } + + if node_dependencies is None: + gcc_node_object.set_node_dependencies( + [node_dependency_dict] + ) + elif isinstance(node_dependencies, list): + node_dependencies.append(node_dependency_dict) + gcc_node_object.set_node_dependencies( + node_dependencies + ) + + if node_dependents is None: + self.__workflow_dict["nodes"][ + gcc_dep_node_id + ].set_node_dependents([node_dependents_dict]) + elif isinstance(node_dependents, list): + node_dependents.append(node_dependents_dict) + self.__workflow_dict["nodes"][ + gcc_dep_node_id + ].set_node_dependents(node_dependents) + + try: + level = ( + self.__workflow_dict["nodes"][ + gcc_dep_node_id + ].get_node_level() + + 1 + ) + except TypeError: + level = 0 + + if level > max_level: + max_level = level + + gcc_node_object.set_node_level(max_level) + + try: + self.__workflow_dict["plan_raw"][max_level].append( + gcc_node_object + ) + self.__workflow_dict["plan_human_readable"][max_level].append( + gcc_node_object.get_node_id() + ) + except KeyError: + self.__workflow_dict["plan_raw"][max_level] = [gcc_node_object] + self.__workflow_dict["plan_human_readable"][max_level] = [ + gcc_node_object.get_node_id() + ] + + else: + gcc_node_object.set_node_level(0) + + try: + self.__workflow_dict["plan_raw"][0].append(gcc_node_object) + self.__workflow_dict["plan_human_readable"][0].append( + gcc_node_object.get_node_id() + ) + except KeyError: + self.__workflow_dict["plan_raw"][0] = [gcc_node_object] + 
self.__workflow_dict["plan_human_readable"][0] = [ + gcc_node_object.get_node_id() + ] + + self.__workflow_dict["nodes"][ + gcc_node_object.get_node_id() + ] = gcc_node_object + + if self.__workflow_dict["type"] == 1: + self.__workflow_dict["plan_raw"] = dict( + reversed(list(self.__workflow_dict["plan_raw"].items())) + ) + self.__workflow_dict["plan_human_readable"] = dict( + reversed(list(self.__workflow_dict["plan_human_readable"].items())) + ) + + return used_machines + + def get_workflow_dict(self) -> dict: + """This method returns the __workflow_dict private variable.""" + return self.__workflow_dict + + def get_gcc_key_pair(self) -> dict: + """This method returns the __gcc_key_pair private variable.""" + return self.__gcc_key_pair + + def get_gcc_security_group(self) -> dict: + """This method returns the __gcc_security_group private variable.""" + return self.__gcc_security_group + + def get_gcc_ec2_obj(self) -> GccEc2: + """This method returns the __gcc_ec2_obj private variable.""" + return self.__gcc_ec2_obj + + def configure(self) -> None: + """Set configuration commands and execute them on a virtual machine.""" + if self.__workflow_dict["type"] == 1: + for level in self.__workflow_dict["plan_raw"]: + with ThreadPoolExecutor( + max_workers=len(self.__workflow_dict["plan_raw"][level]) + ) as executor: + for node in self.__workflow_dict["plan_raw"][level]: + executor.submit(node.set_config_commands) + executor.shutdown() + + elif self.__workflow_dict["type"] == 0: + with ThreadPoolExecutor( + max_workers=len(self.__workflow_dict["nodes"]) + ) as executor: + for node in self.__workflow_dict["nodes"]: + executor.submit( + self.__workflow_dict["nodes"][node].set_config_commands + ) + executor.shutdown() + + with ThreadPoolExecutor( + max_workers=len(self.__workflow_dict["nodes"]) + ) as executor: + for node in self.__workflow_dict["nodes"]: + executor.submit(self.__workflow_dict["nodes"][node].configure) + executor.shutdown() + + def initialize(self) -> None: 
+ """Initialize a virtual machine for a node if needed.""" + self.__exec_date_time = datetime.now().strftime("%m:%d:%Y-%H:%M:%S") + self.__gcc_drbx_obj.create_folder( + f"/{self.__workflow_dict['name']}/exec/{self.__exec_date_time}" + ) + self.__tmp_dir = generate_random_string() + + os.makedirs(f"{os.getcwd()}/tmp/{self.__tmp_dir}", exist_ok=True) + + with ThreadPoolExecutor( + max_workers=len(self.__workflow_dict["nodes"]) + ) as executor: + for node in self.__workflow_dict["nodes"]: + if ( + self.__workflow_dict["nodes"][node].get_node_virtual_machine() + is None + ): + if ( + self.__gcc_security_group is None + and self.__gcc_key_pair is None + ): + self.__gcc_security_group = ( + self.__gcc_ec2_obj.create_security_group(self.__tmp_dir) + ) + self.__gcc_key_pair = self.__gcc_ec2_obj.create_key_pair( + self.__tmp_dir + ) + executor.submit(self.__workflow_dict["nodes"][node].initialize) + executor.shutdown() + + def execute(self) -> None: + """This method executes a node payload on a virtual machine.""" + if self.__workflow_dict["type"] == 1: + threads = [] + for level in self.__workflow_dict["plan_raw"]: + for node in self.__workflow_dict["plan_raw"][level]: + thread = threading.Thread(target=node.execute) + threads.append(thread) + thread.start() + time.sleep(5) + for thread in threads: + thread.join() + + elif self.__workflow_dict["type"] == 0: + for level in self.__workflow_dict["plan_raw"]: + with ThreadPoolExecutor( + max_workers=len(self.__workflow_dict["plan_raw"][level]) + ) as executor: + for node in self.__workflow_dict["plan_raw"][level]: + executor.submit(node.execute) + executor.shutdown() + + def complete(self) -> None: + """Delete tmp directory and terminate created instances.""" + shutil.rmtree(f"{os.getcwd()}/tmp/{self.__tmp_dir}") + + with ThreadPoolExecutor( + max_workers=len(self.__workflow_dict["nodes"]) + ) as executor: + for node in self.__workflow_dict["nodes"]: + executor.submit(self.__workflow_dict["nodes"][node].terminate) + 
executor.shutdown() + + if self.__gcc_key_pair is not None: + self.__gcc_ec2_obj.delete_key_pair(self.__gcc_key_pair["KeyName"]) + if self.__gcc_security_group is not None: + while True: + try: + self.__gcc_ec2_obj.delete_security_group( + self.__gcc_security_group["GroupId"] + ) + break + except ClientError: + time.sleep(10) + + def get_gcc_user_obj(self) -> GccUser: + """Return gcc user object.""" + return self.__gcc_user_obj + + def get_exec_date_time(self) -> str: + """Return execution date and time string.""" + return self.__exec_date_time + + def get_gcc_drbx_obj(self) -> GccDrbx: + """Return gcc dropbox object.""" + return self.__gcc_drbx_obj + + def get_tmp_dir(self) -> str: + """Return tmp directory string.""" + return self.__tmp_dir + + +def generate_random_string() -> str: + """Generate a random 7 character string.""" + choices = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-" + return "".join([random.choice(choices) for _ in range(7)]) diff --git a/gcc_exec/main.py b/gcc_exec/main.py new file mode 100644 index 0000000..047bb6c --- /dev/null +++ b/gcc_exec/main.py @@ -0,0 +1,29 @@ +"""This file contains a method to execute a workflow using the cli.""" +# pylint: disable=E0401 +import sys + +from gcc_user import GccUser +from gcc_workflow import GccWorkflow + + +def main( + oauth2_refresh_token: str, + aws_access_key_id: str, + aws_secret_access_key: str, + workflow_name: str, +): + """This method contains code to execute a workflow.""" + gcc_user_obj = GccUser( + oauth2_refresh_token, aws_access_key_id, aws_secret_access_key + ) + gcc_workflow_obj = GccWorkflow(gcc_user_obj, workflow_name) + + gcc_workflow_obj.plan() + gcc_workflow_obj.initialize() + gcc_workflow_obj.configure() + gcc_workflow_obj.execute() + gcc_workflow_obj.complete() + + +if __name__ == "__main__": + main(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4]) diff --git a/frontend/gcc_web/__init__.py b/gcc_exec/tests/__init__.py similarity index 100% rename from 
frontend/gcc_web/__init__.py rename to gcc_exec/tests/__init__.py diff --git a/gcc_exec/tests/data/upload/test_gcc_drbx.txt b/gcc_exec/tests/data/upload/test_gcc_drbx.txt new file mode 100644 index 0000000..c62e2b6 --- /dev/null +++ b/gcc_exec/tests/data/upload/test_gcc_drbx.txt @@ -0,0 +1 @@ +86BsxH3FSIf6z+y4TboJqIPKdLDQ7SwE+8Y2qxPZMf/RH9fYTS5OTGv9ZubisEgsfxb0dKKdLBboJmyosxsorsXitvP44LFpqrYi7rO3C9VJcETT74mcLVScrIIeDvgLlphsJnitECI80JibTNQXgJj8ZtfxjuYq8PHQKKMSHOYTyIpdOahyIsqEedGEapys0RTonoe2G97LVHLjRDP7xNLUQ+sesAFouC5+5GAfrjwx8Ju4N04/b74crhXOSfatUhAGPb7WL4QFPfrrn2jraBwVZQRANXDZFIsXBQlU35W7ZcHPNa4vKGaxPd8PwZMphny2KdPGtHzvF6kA9eZ0x6cnoGpN5KRoDk5KxqCIhrhSXFRMDAY5Tz79RtWh680loQ4KbRutMjPqcnQJQuHxyiZQfeoPouyXioQ1PUrY0s/xN/tbdBOairN6ZOY2IldEvL/AzmlOs4lcoPuUXhgDVuZIF7TVo90Zf/6Zd8ag425sO4mXJfUnMmi/kLLZD16Y6lSDx473oo4iOQo6dtf9YPrxz7WOdP2txV8+ZT/iU/GLFEG2kBWL70K10juQ4PJ4WdOJYCIdubcfNQENVnyV6jt5AFTo5RzNpP9z7M3o4wANobFoj7qWpl7VCIxzJN5eMofDiI9xVELBBmKf7YQtJ0tSKb8bymFfsg/HuHZpfR+eIOe1P4TMaZmD++HEyBNBwwCtN1q+l9K48IJwhvGt9sxcsbO3c9zXpQXa+xCAhtERdyJUoq3Ege/qI9teIenlJdpz6rPVtTYtwHmGpjlA9RQV5EP37ulkUupaMZ+hEDDN/yNddJjZe2WM+2gNPaS/caCEBkwqsAnqinRKGeqMDWEmy9UBGENQI006WAUoHwMNJRSlwgr8OC1wb6S/3E7bYjBoJWIjrIIe4szsYlp2oFF0TRPr6AK7YGvQ6DiUc30Cm6SxYGHEw75wCrveI1dkJaiMYxWYmvOhmOk9NusVuTNRA1JVoHdXjDhSJrN2wdiXrUf4j12fRIuKZ9WcQz0mcTWJHBWnwtQraBJ38I//rDmPqx6GKq021bwpB7NDNkAGRWPH8aJHoNVkOcnwn/5r3uw2wtOqQuIt9CnrkIrW8nLp11LEAotRzjteKc81mhw+h6ovcmEYxHA55BGWEFJOwlGabtmfziQXsU2cjOkHUda/JL90TqnAfJWcHc/BUJlNonTHO7i3tkBTovdpFBTn7FI7qK2yXnrFno/XOWWX6r3irtYHIIroROy0L3wnb4tw9XO/mrKu6SN/eX28viBywKsM7AdevfRPjtn1LPQamvyYg8X14m83PyxOuaGgAInbAlXX/1Ja/cvi/Z2ha2COtdLh6ae9eJ4tqdq9UT0krhkhr1xPTaFhRuTW2MqdvJTYXKrdJuA+QLbeYug8YZTPE4lTSZJlBpUpqGbQiVS8tUYSLXKS1MAJ0ZoG2orZZu30qrjuXZpxQCQIUo9sciGtTsXhcMhbCvI1MYn0d6WXd1VS8jbaYzpCEvBbh4evqnMyhZXo46jA5gy/DK46f0iV3widxWF6IalpdZMSgRfFi2kEULyXh9VUTtDZYCR8Rv63enEFgJNVRzZMADx2eyejVSU9cU441Yaa/aqV2shSvLdOr43fE1wLkc6Ob5qHb5fvQTyK6GqCL87XvhmbMESHjPM+XMHEY2bhEqkbIy5fKr9faDA5DikAqBbU2/soO
DiO1hI4dfqa7zKF3DaKVCJd7oM8n+DvL9mJxrXkKD/2RsXorftr0En/UZq7ri97Z50ICntlv+8l6xIYasr0n95eHfDRGkuUKjnrBGWTJGKB9SQv38XhGFFzHc2jHgvG8t7igcI/OyaKQtxm2oGGrPZJuJaYpPnI1bW0ynBYyQfWGZZi3LpvUOd7YGffwRS2DnkCQle3L6PbTBPpnO3nwmuw39yBJFNpRW8Xw/jBI+tbRlhlzcPbV7cW9k4a48ywTs/RviXgdtX6DYvB4Vkrf6ESUu2nOH2750bG+nDtDqB1QAL5YxAOJ3TSzRfAgTlmcjHa1TlYpsPQLVnplf3b2bP/zLU+WCQiOaduhM9zEyUr8CL17zzGDKcNoVna90jnJxZSaFhSr2TMqLaa2jqrHUJpiPgQqflHVELdLsBMbGxT7a2TYsxC4e1/4E/FcF4q5vY96CI7CgkezzLKg0rdNrBgIxFIf1Jond1Dm2VpgQgN42iSm+9KfUO0P58V6yQfBfL2oceIzz1msHHTyXNhou/NFLmukXnRsbSLqVC/ZC7es7R6FWt8l7gvjOJFA5GLpILV/WSaWij40V0N4i9Lwf3xVx2/4FCoNSIIWm9fiV9d1LbKC5au17RwBoxtxvohHAhDsBlxlfzjr4/j77H962m4NTS9idN13akSAxjQZFribI5GffH3dC1sjn2z0RtC2Yh47b9JFViwyFAAZpzlMsBoVhnVXXadacQ0+P/jRiAKd5mxzJ/l/Lc64V0nKeVAV247W+/2ysI7KRy6dyOM9/iGy1+utbwJSUk2UwxMGmzbDnN/a5j1j47SwZiUzt4g3rnSenEkn8SYufD4P9RGS74QEKe/pRtCW0njPsGoKDRB50GqMpIxTcWFJLJtiJ8J2G3ZnnCJ9hsMiaXeqsH8UIQv/m+To5rEXPxM/B5J2Ieu/oxCCf2V17QCDAbvpmR38Y0sJOhhTqSuFFh1zqp5lO/ocDIzwFMhS5bWjgcJZ7yuy/svZdaKBx/8d/kOJjCFWZgmF0s+69YsE2lWkOwJFNdv4taIkJweoO8zWe20MFI67JFJS4o5Rj07pgBAVAmaSujIVl2qWEZbYNvgrCYWtnMN2EJOJWVlINZTNtgmRv8Umm0TbMMDNe2xVO2oda0nU1cd9eodaTq+km6Wr+YATmKRbV4FcnZrCh5VlQZfxprY1GVBwXh7qGkCwOWgm9CgwQDhFZ1cJr4f3a2iW2AZDSObPJP8aWyVM8jjLV64ujZo1m09QvtNZmu0tfTo8T0vk6nj+1/OioC8EutllorgcO+q8c54t0N+4XsINEhLgxIOV+QOCvPcgP5CS1cvB9zXyKkf5ROkJFqHn+Zc7VcGRvXUB0Q6xMZwvWKIIUaBetTQ27Al0o/LLEow16lfSGty9zjmmpx2cm4hai4d6rdKwPyPdaYl8PhtMQHu0y7kNYLSnwbzT9jjl8HkwupnDe8BWXOSgJ8npvb8HC/hvOp41gu8l0ajTDk1K2pHJji5hYskDJhs9QJJMGlpfqM5kr6NcRMtM7koWxVhZoZC+Jt9dxBglYrzWEHPiArb6ARy5jnfEvuchQzVSPLoQQc7NygG6kByoRi7GvIu2cQ5gD9VfB8GSw0EfAkgJPnBM5FI2w4V/4wkaOlgU/z+y7L4kB/oK3r8prdAAZCgc185tWeHYsltD/ZoE6WQ9tf1tA7yk9nnGVo4sO6CK81CY97lncL0l+5LMW8DJZxG0c887ol30QNregw6yvu3py7RJ9/pUS2sm1wr5KC7c3y9qLVOSDWf/G+6J7G/JOm8koXnbL0XXgQ/PN8LLBnHulsI0amxOlrQVk9VfWw4X2ida07tX+9rY5799BKLKrX2yCY4mN8mUf0+26WKgIxoxG0LEyT5mcT7pXV+xeELn1O9PCvBu9ZTcA7NOxqXdJjtd7UARWYmgpuc+yKxgHf07uUx+Dv2yRSorzK/jVu+zOOVnPKg6ReuQZoSK96ulgs1dlq4T
KuisxyeTQ7SuMTd0gC424ZXsvP6cPJDcVTddnOAokkDKvrGhiplIqqrCA4JE7TYQbrbaobHhnEXwndQqi5SRNYAOFJAynNPz76W5yK9CyZtIWB9d7wFTFT0dm0B+Dm+RZTLTG674bqqwuSi8xnvl2PTeXT1UhuhfSaDGLQxdx1M4atMJo0efcVGGnfGt6PBzMcY7XtFw02n562xKgc4kfZ/7i82cHUFdGZ3ykr6BDLA4MJZSM875h1uXe5RTeY+jAaaBemjduUzJmJXqq1ZDLyFn5vjpbIeX/Y6Fs7c0XewQxzy3AtOWtFRRd7ybUzCaVGjzuYv/f8Hzujqx9f88P9HNInahJMIDCAAMESEkOQyjoagVeyMRkLTXHvUXgnv7DzyoD+zU2o7HjDJ5KHGzy9V4rPo8u0tLJqQy2vfL8qABFeej+SWUffiJ/BJtEaSVXpd3Gig602rY+j6WXOzMOj+L+gkwBGWyp3TLkAlrvey+J+/qAbKnVXPECDY//p99wS4nEN+ee+BTDlZogVcdZ6tzfZE6Qg29G7mSP/X3NvR5C9NTtgpt5CroNiNvVBSg/bAcjQOA2Dsxqre39d/9k2YFGi0y8tpE0UVw0xMXO/2KgohK8gSWFlInvBGdCdXjjeLmKwysNladr4c8biYm3H8MPtqs/GmBLzjb5nVGNCgOc2I6RO5+QXsvACbqGwErPHDuuJ9cpvyYYsYarev/tY5pYWOxtewrGB1gmSPtMMbAQOz0oXrSWInllOHGlCaY1NwDv34wKy5e9hKUBmjb1Adx/heoAMPK9RmfE56OrpY2EIDbi70JIiRl4d4JcgD5vWa6BjcJgG8xI1z86m3vM8geO1iavkNMUQLzHwbrETmMLraJzcZtYAJlHx05PwUaRNK9xK7Zc9xBB2z1jGQc8LjOcL+HiHjHWNo6XPFDmt29vCJMBQiKwEykz4P5MtU3C5VsscB2D0HADNe5UPeQG8nKrpF/4MUCan4YGYrtwbo34vXqsIwBv1B/HXO2wBxyfIiZ+kf1Vo8i55i+P0Vy3gt+BLGXE0+rk8yYTaI9i8mDzbfrMNM4ScXv9Eyghg9/c1YV/1oXFpTb/5KPbcG/E/Vf+KPTgZ84q4f4sdJCowVBBSvp2KavtaxiZZ+bBrjiKTNdF2SK2OXLpj8ts9uopmMEjYmzKcsDlsHYJjCk3uLHYucx+SLv2u/BPsgVl+wdM7Z8k1J9+j2cHu3PPVWQLpx+Z0BhFK2AMi5e0BwC22ZhqLFPxON5iTCamQP4rLcJq/YXYinIj67mz6u3ZA9pI0x2igtlt8EqN4q/o6KyGLJySrZcGlnNHhLvjjbpQtFFZAuLafJ3kWft1WjelDhv0EU/D89Wh5ZWx70XjJdX88pFUXzWLt2DoXFCkLZ1XbQVIp+2fiLFxiakpYStZCf7sK1OFs9gDyS63t+JeRqg67Ph1t+1UaUmn8pcZKPb/VHfz/LzDV4RJfO3k1BgYdyB8Xjl1RguHRzexvEo0NftokMHxn2WIpwvFoReW43XcGp5cY1xohxPj04zwKXWkd9tRxF/qR60HqPI9jJEYnNLzRvw+BYrdtdxl1TpYIGwl73DZcVryl3kJ1MGwqWJlcyMPOuL1iNd9gfEluoMR1oIS1FWjWbBPP6ZfITxneRWr7jKCAkfyyUcPwME8hWT0/+HN+hsMuj39Q3SUizTiRn/M1w5pTSJ1ekyb2fmC2/eMAAz3JioOCk2NMr0EG5IY98QtCJ2RPq5jePNFgN7fPq3cFNWUot4Mw/L/7T9lrpinwOi0Ob+9CzrawDyDKbsLyM5cwJWBNfEU11dZmFyqS7o2cCKnfXYqeKGPee8esjcuToqH2ohUQ+aV2vcN5gJ2b/ZpyCdQTcVayjaa2UGStdN8UFlYPVKxExmdk2YXGdYLruHYCmbUCYU51X2qp9Zl5xfQDfnlHUhEw2rP+s18294m4eAUrp1UriP54aTNrQ8/yKF7FEwA445Q6DpU59Q
o8uHrC+jeuyQgo3mZnh+qrb4rWhKtfsDmqrydKJalVd6E2R/RImesPR+uV9LrYc+mTvVb5gHPGtey7Ab1HFL/DYr3P5oLiKFUmM1aw4hldO2ylm/v2NqU00PVjtc/lqJPI4Ey/Qk+T7D+9FgoQkoEnhOzcAnFjuM0EYkg0XrNcufZ4ZXR+XmfeUuDSRxYZ4Ce6ILg0YPpGJwvT36lNXQg+4Ggt6ExkepI1lall1xRyz3LQpRFd6bS88pgfSN2GdeLo/YIcZk2XJmmOlrfvZrhHG3fa0kfQAt6GrZTDoRdKOYROFMDts8O/WOeiwBTneoXBQCxu9zZMjoKM+I0/0gkfInZaj7nMRve/q1tD6m5/rjznYepSDLuXYPZISfQ4zPWUPJrxVGxY7ADTo8uuIRH0VVAV13Cnw+H5+uB4jd0r/OqKbcL7FE7I5R9ykNXhAbR1JV34vX93DxgZHZhVslPPz+pGy30ymuAvemn1voLlLqcMi4e3Q3DWD+re0+K7H25jBiYkQOp+7SSlFrkmG6mPZD3uD+Avhm3VWMGedn1ulwGJDzGyYPZoOw9/N/NH1miiiMiO4XMYlw7ylhZpM9rC39jqSo71hNqrKr0N4QXbplAyDCEe9invkNKiPBXgixq0fJR00UrevnIbima2l2bKPhB9E6F7wkj3EU96+v2a+FRoKooEqfd/umsPODeAPW7KN28PxYUp/dwF60VspmEAW65YP4N5haeFvMNFkyTssAv6UTy4rTQTjvkp/altp+9IirHqUcJeGIAvq1YRcQ5UnjDiUL7XK1FPdUjJaP3wGtZC/nq/Q+opTJHuutiDfVDNMxR2IUg+cDBGBgzWNHYMVYpB+SzH5VLU3/tPMA9fnnlfkNJQXyjN+1hWZJCHadC0HOF2w3L5IHCiyeWCp4Wdx9DpP/BkvXTyEIKzSu4oGlnEh5njRu6lcCgjSNqa7jMMI2vAKYfZr9cEDX+yVZa/MbXFAjF8CJcQQ2LEDbU6AeBxYtHOKPTTuDXXGn0HZh5gVM8ZHppVU4mH7cjXcBbSC/DfLDeHdr85IOV6dcAvLywWt7aSSS6z6AvkasMZFHIyceQBBX2dCaFKz1XXxvcsExPNB0C1snToPfHmY/ueVfOGNe2ZGjGfgOYH0H2g79Ppx9H0NLlUKBQXxH2xQsw2XAGC1FfizkDL319uT4Yr2lsnbel8TojK4lzAVmwixJfaCfndsi749zx/GAwyCGn1ZEW4BNjzlO7vbAuyMoyM5DZTkXTocDITdUcmf1j7O83wyJX+YcxcYAFVtGB364Va6oXpTscNAbKiBS5vQL74N2c0SGlcgRuKTi0vCoymtoXadSmk1z6SKu3JYEhrAtkhaPgQloIBBIa40+pPUMCs8RGcSIvQJNCueJk4GyFab7V5Pya3NgnyX/Duby1y2inhR521MnjAjX63gS7+13B5Ol/FKRLGZ9gXT1HyEEpoGS0Dfn41QTtWH+QDt0feZoaKFwN5zIspmSmzvn0d4gzXdRUyQlChCodKiNp2+lgn+LQpyNqTK36h4+ZfSWYPStwCMgy2f5vdz3VEdpBQLFMqUhZA5lj48LRsN5CKUF1V6mOs9z8OS8LyR3N4cM5exyV6C5Xp9+bYtWkP9jLesowoM8V4PT3Bz8Nphb2uQFSjUinqElJKPpfBLeX/azhhukbvuZOaTK87GIff7rxXVbUzLqjKywrbvVlq4uQZFg76+8pP3Vg0KOzAhrmBL5pm4PiBhNLmaY3MR3zgQD43nDD/GquWFaW9km6/NT7mfh66eRIlKTdF9dX9dHkdNz3kA2amPotfIOqZVBVWbZzEtdtToGd67yAuzXRNQG9uTg4B3BEyqIn6K+8lZyEmyvL7gLxQeky/19jWcOCte+O7fhf9xz/eNKQ34LMfMBIEHxJjhmGyRCb622SX+hHuwTktdkeQZt8dlDB+aJPzhwRNgrzrEgofmhHEzIfdwxfPlkI1s61L7RdPMTWbS7ycz3
J8HSNmVqpJNdAZtwkcqNEB8UmpORnDXcDm2uqdE7rPxCZYnOikCaYNV+X6NoImK5nDiwXTUojbmhbXdT5wNY8KHgtJP+MmXnF6+y8xJaHTLAYkeCNTlzNRiajUATyNG42HrVSYpwpA6lwkDnPO+/EItVyiJc7I/7PdRKOFs7fa1lRe2RnWKz6OBj+zzyLM1lHMa8YSpjvmJkisY+y7uG0M3P5tjLJmJADkgJINfLdFy3jXSFCIfthHnPskoUaKZgXNDvyarLawT17JNJnJ+69uJRjsR5qI09Wdkmje7rIiMGmKscAqdtCZ5RvTi/WSJzPyHCpck17dmfpSXCRF3hfHa4UpQbDTcoNvcTDYsHoRZ8ux8VoBAqeMaDS0HtwvIfb1vn36OW7GRFYgK2gOGrvEas7J8zWMXBMwW0VmYi/x+NLRPUFoitvzKTI7nAE2I9dhFufOdBGoU1hJ9fpmQ6DguROmgCKqj8JqNXj4XEngbqeTQpsnzYOBCeA+KaWFlJN+zXbNHadllpP/a79SbJ8O0t9N4idr97UWJfPdJEUEHcIkw30XotzcrEvpL+D/2jaKnVctyp/NMxIpXXq8BXi30W+M54KTI6I3WHigX/M/cFikLvT3/gnu4Bo6avMQ+oiTy1vEfHf2qcGkhh33L3lwbJ46RPNzakCsEKS9jyJdVFGKAESNdALVNZOq1vkFkpICEkSOrPLx6dJKm4ajoauV/+LHpfEkFm7NFjgDVELcPv08j11pD73+C6IOX4B9HJy/hMgiD9KRve7ZPqVeSFaKgp1cqPuPrlQvn69Bc7mW4eQHyFzo5hOhWXmp0Reu11tBeado/5Ic9kUX/rF3dcsrAraab92IDDYxQxzAzfONGytCMatVekZEcNHNMH41iRF7uFTSu5w+he9j1ApH363Socdn/6vrbGR+Vy4FQnJ61c75NthMZkfI+zGK9zvsFiHdzyvv5CcsSH0p6zDE6SOqIip05Bf69I+uFzQRltocFBQ1b9mcQtzCbHxk8Vmz99ZwiXWTI7Pz9R+y4oft/603WjbTUxvSi0x49XE5nXrqGgOti26CjGSIazJ2IK+irOvGjM9BLY2wQhmAScV6pOdSp5xAvDetJHe7X3oXm7jURjPrkDY1DJaVJ/XCokw4jR7nokwGgOx56O9bkqgJ8I+Ih7Cv0jJE+PycGVIs6/oRt32RQsfmdIPwZdwysV88IXfCPrD/OyXBa9qoXIyhyIg0Qig2nFxyk33mnCypqFOv1/mf14WMJeVunD9XRhFn750VzfajeAhZF74ENeROt5dhPh0D8IF+YE0QQzxiKs8Jl7wqrGBIjCSpW4jTa7LWpF4k9mFeqNwsobC0oJXReVe8wQq6ZwiBwW6AV4cq5mxENoShjHCnO1vrI98e16K4PyD3URVku6R9uE5wPZqDLFnDjmL3qInSYV+1Mzef9y6yrD7EBtq9LOEQEfEnjIyhxT8/ysBdDHm94j35X5oOT7KPJik5HEt13fvQ5Lq0URHL2O547vJhKaZtQWj45q1fVof4osuLS0RLpt0wU2R/ZPO4korXHMuHaFt/TrFlNaLCwIojSUUi3cbYwpGm8iUNjISIn6iEbBSl9WGUPulaO69OuTgpuwwk2cOvad/pHlMdcTMg2dkGozofAyfAVBL0rX6USb+GB1oYu1zEBMJlpPtcB7x44Tsfxpv7VcpZL7LAaGftBmu8td1RGnQLF812m+X5RYMHzJBq8wI2rFSuec6VOb6zFGwkDPvQC94BPwhsO6bcoN3jnnNpy763geHN4mYQyCkqgt+59HNMqbPKGPK79YYiAd262HsVi073Zk4mYFKy8YCq/Q4oNOX75rcLlNAoAWEki6mGbRE1AD+tlFxJavh4aKMVaUvxA7J0Z/lo4Yie8t5TAtIkt8FIlEqJ4B9+RMrRZYSLRxTwWLg1Qt3U8v1xO1wu2INQF/afdwur2h4ZYNLIIZ6fXZQs/k5pGrEec4ap/sWlenvdwj
fp5b+p3ztWOawRZD/KJMD4Gq+nYxBsSLCF9Td3nBxjwUC3rzSNI6yfW3/5SIBEBXaf8d5luegQ+srwONw8GnBm1LHcPLJLPfI7iBzZDKm9ulpVLnhrlM3BJCPY633j0c4/WVh1zYI9goy9Fppv3vEWdEtg5o3RGY0ysDRCYs2i+oW9s3NtYF+0wkdZxcb20UakmIkq6hnt5Uof34Rw3WoqGqWIU/PQCJ7rs5AMPyYk+4+fimUlBGSUzv6VX1MwhYv/6x4P0bi7G025szpgQBvj8XTutmVPbjv0qngvT \ No newline at end of file diff --git a/gcc_exec/tests/test_gcc_drbx.py b/gcc_exec/tests/test_gcc_drbx.py new file mode 100644 index 0000000..72fe2b5 --- /dev/null +++ b/gcc_exec/tests/test_gcc_drbx.py @@ -0,0 +1,64 @@ +"""This file contains the TestGccDrbx class.""" + +import os +from os.path import dirname, join + +from dotenv import load_dotenv +from dropbox.files import CreateFolderResult, DeleteResult, FileMetadata, FolderMetadata + +from gcc_exec.gcc_drbx import GccDrbx + + +class TestGccDrbx: + """This class contains methods to test the GccDrbx class.""" + + env_path = join(dirname(__file__), ".env") + + if os.path.isfile(env_path): + load_dotenv() + + __gcc_drbx_obj = GccDrbx( + oauth2_refresh_token=os.environ.get("OAUTH2_REFRESH_TOKEN") + ) + __drbx_folder_path = "/.test_gcc_drbx" + __drbx_file_path = f"{__drbx_folder_path}/test_gcc_drbx.txt" + + def test_create_folder(self): + """This method ensures folders are created properly.""" + response = self.__gcc_drbx_obj.create_folder(self.__drbx_folder_path) + + assert isinstance(response, CreateFolderResult) + assert isinstance(response.metadata, FolderMetadata) + assert response.metadata.path_lower == self.__drbx_folder_path + + def test_upload_file(self): + """This method ensures files are uploaded properly.""" + local_file_path = join(dirname(__file__), "data/upload/test_gcc_drbx.txt") + + response = self.__gcc_drbx_obj.upload_file( + local_file_path, self.__drbx_file_path + ) + + assert isinstance(response, FileMetadata) + assert response.path_lower == self.__drbx_file_path + + def test_get_file_contents(self): + """This method ensures file contents are retrieved properly.""" + response = 
self.__gcc_drbx_obj.get_file_contents(self.__drbx_file_path) + + assert isinstance(response, str) + assert len(response.splitlines()) > 0 + + def test_get_file_link(self): + """This method ensures file links are retrieved properly.""" + response = self.__gcc_drbx_obj.get_file_link(self.__drbx_file_path) + + assert isinstance(response, str) + assert "https://" in response and "dl.dropboxusercontent.com" in response + + def test_delete(self): + """This method ensures objects are deleted properly.""" + response = self.__gcc_drbx_obj.delete(self.__drbx_folder_path) + + assert isinstance(response, DeleteResult) + assert response.metadata.path_lower == self.__drbx_folder_path diff --git a/gcc_exec/tests/test_gcc_ec2.py b/gcc_exec/tests/test_gcc_ec2.py new file mode 100644 index 0000000..c9eedaa --- /dev/null +++ b/gcc_exec/tests/test_gcc_ec2.py @@ -0,0 +1,84 @@ +"""This file contains the TestGccEc2 class.""" +# pylint: disable=E0401 +import os +import time +from os.path import dirname, join + +import pytest +from botocore.exceptions import ClientError +from dotenv import load_dotenv + +from gcc_exec.gcc_ec2 import GccEc2 + + +class TestGccEc2: + """This class contains methods to test the GccEc2 class.""" + + env_path = join(dirname(__file__), ".env") + + if os.path.isfile(env_path): + load_dotenv() + + __gcc_ec2_obj = GccEc2( + aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"), + aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY"), + ) + + def test_create_key_pair(self): + """This method ensures key pairs are created properly.""" + response = self.__gcc_ec2_obj.create_key_pair("test_create_key_pair") + + pytest.key_pair_name = response["KeyName"] + + assert isinstance(response, dict) + assert response["KeyPairId"] is not None + + def test_create_security_group(self): + """This method ensures security groups are created properly.""" + response = self.__gcc_ec2_obj.create_security_group( + "test_create_security_group" + ) + + pytest.security_group_id = 
response["GroupId"] + + assert isinstance(response, dict) + assert response["GroupId"] is not None + + def test_create_instance(self): + """This method ensures instances are created properly.""" + response = self.__gcc_ec2_obj.create_instance( + pytest.key_pair_name, pytest.security_group_id + ) + + pytest.instance_id = response["Instances"][0]["InstanceId"] + + assert isinstance(response, dict) + assert response["Instances"][0]["InstanceId"] is not None + + def test_terminate_instance(self): + """This method ensures instances are terminated properly.""" + response = self.__gcc_ec2_obj.terminate_instance(pytest.instance_id) + + assert isinstance(response, dict) + assert response["TerminatingInstances"][0]["InstanceId"] == pytest.instance_id + + def test_delete_key_pair(self): + """This method ensures key pairs are deleted properly.""" + response = self.__gcc_ec2_obj.delete_key_pair(pytest.key_pair_name) + + assert isinstance(response, dict) + assert response["ResponseMetadata"] is not None + + def test_delete_security_group(self): + """This method ensures security groups are deleted properly.""" + while True: + try: + response = self.__gcc_ec2_obj.delete_security_group( + pytest.security_group_id + ) + break + except ClientError: + time.sleep(10) + + assert isinstance(response, dict) + assert response["ResponseMetadata"] is not None diff --git a/frontend/gcc_web/gcc/__init__.py b/gcc_web/__init__.py similarity index 100% rename from frontend/gcc_web/gcc/__init__.py rename to gcc_web/__init__.py diff --git a/frontend/gcc_web/gcc/migrations/__init__.py b/gcc_web/gcc/__init__.py similarity index 100% rename from frontend/gcc_web/gcc/migrations/__init__.py rename to gcc_web/gcc/__init__.py diff --git a/frontend/gcc_web/gcc/admin.py b/gcc_web/gcc/admin.py similarity index 91% rename from frontend/gcc_web/gcc/admin.py rename to gcc_web/gcc/admin.py index e160f15..d6e1350 100644 --- a/frontend/gcc_web/gcc/admin.py +++ b/gcc_web/gcc/admin.py @@ -1,7 +1,7 @@ from django 
import forms from django.contrib import admin -from .models import ExternalAccountCredentials, MachinePool, Machine +from .models import ExternalAccountCredentials, Machine, MachinePool class ExternalAccountCredentialsAdmin(admin.ModelAdmin): diff --git a/frontend/gcc_web/gcc/apps.py b/gcc_web/gcc/apps.py similarity index 100% rename from frontend/gcc_web/gcc/apps.py rename to gcc_web/gcc/apps.py diff --git a/frontend/gcc_web/gcc/forms.py b/gcc_web/gcc/forms.py similarity index 100% rename from frontend/gcc_web/gcc/forms.py rename to gcc_web/gcc/forms.py diff --git a/frontend/gcc_web/gcc/migrations/0001_initial.py b/gcc_web/gcc/migrations/0001_initial.py similarity index 100% rename from frontend/gcc_web/gcc/migrations/0001_initial.py rename to gcc_web/gcc/migrations/0001_initial.py diff --git a/frontend/gcc_web/gcc/migrations/0002_auto_20220103_1054.py b/gcc_web/gcc/migrations/0002_auto_20220103_1054.py similarity index 100% rename from frontend/gcc_web/gcc/migrations/0002_auto_20220103_1054.py rename to gcc_web/gcc/migrations/0002_auto_20220103_1054.py diff --git a/frontend/gcc_web/gcc/migrations/0003_machine_machinepool.py b/gcc_web/gcc/migrations/0003_machine_machinepool.py similarity index 100% rename from frontend/gcc_web/gcc/migrations/0003_machine_machinepool.py rename to gcc_web/gcc/migrations/0003_machine_machinepool.py diff --git a/frontend/gcc_web/gcc/migrations/0004_alter_machine_ip.py b/gcc_web/gcc/migrations/0004_alter_machine_ip.py similarity index 100% rename from frontend/gcc_web/gcc/migrations/0004_alter_machine_ip.py rename to gcc_web/gcc/migrations/0004_alter_machine_ip.py diff --git a/frontend/gcc_web/gcc/migrations/0005_auto_20220110_1447.py b/gcc_web/gcc/migrations/0005_auto_20220110_1447.py similarity index 100% rename from frontend/gcc_web/gcc/migrations/0005_auto_20220110_1447.py rename to gcc_web/gcc/migrations/0005_auto_20220110_1447.py diff --git a/frontend/gcc_web/gcc/migrations/0006_alter_machine_pool.py 
b/gcc_web/gcc/migrations/0006_alter_machine_pool.py similarity index 100% rename from frontend/gcc_web/gcc/migrations/0006_alter_machine_pool.py rename to gcc_web/gcc/migrations/0006_alter_machine_pool.py diff --git a/frontend/gcc_web/gcc/migrations/0007_machine_mac_id.py b/gcc_web/gcc/migrations/0007_machine_mac_id.py similarity index 100% rename from frontend/gcc_web/gcc/migrations/0007_machine_mac_id.py rename to gcc_web/gcc/migrations/0007_machine_mac_id.py diff --git a/frontend/gcc_web/gcc/migrations/0008_auto_20220111_1131.py b/gcc_web/gcc/migrations/0008_auto_20220111_1131.py similarity index 100% rename from frontend/gcc_web/gcc/migrations/0008_auto_20220111_1131.py rename to gcc_web/gcc/migrations/0008_auto_20220111_1131.py diff --git a/frontend/gcc_web/gcc/migrations/0009_machine_status.py b/gcc_web/gcc/migrations/0009_machine_status.py similarity index 100% rename from frontend/gcc_web/gcc/migrations/0009_machine_status.py rename to gcc_web/gcc/migrations/0009_machine_status.py diff --git a/frontend/gcc_web/gcc_web/__init__.py b/gcc_web/gcc/migrations/__init__.py similarity index 100% rename from frontend/gcc_web/gcc_web/__init__.py rename to gcc_web/gcc/migrations/__init__.py diff --git a/frontend/gcc_web/gcc/models.py b/gcc_web/gcc/models.py similarity index 100% rename from frontend/gcc_web/gcc/models.py rename to gcc_web/gcc/models.py diff --git a/frontend/gcc_web/gcc/templates/gcc/index.html b/gcc_web/gcc/templates/gcc/index.html similarity index 100% rename from frontend/gcc_web/gcc/templates/gcc/index.html rename to gcc_web/gcc/templates/gcc/index.html diff --git a/frontend/gcc_web/gcc/templates/gcc/machine_pool.html b/gcc_web/gcc/templates/gcc/machine_pool.html similarity index 100% rename from frontend/gcc_web/gcc/templates/gcc/machine_pool.html rename to gcc_web/gcc/templates/gcc/machine_pool.html diff --git a/frontend/gcc_web/gcc/templates/gcc/signup.html b/gcc_web/gcc/templates/gcc/signup.html similarity index 100% rename from 
frontend/gcc_web/gcc/templates/gcc/signup.html rename to gcc_web/gcc/templates/gcc/signup.html diff --git a/frontend/gcc_web/gcc/templates/gcc/signup_error.html b/gcc_web/gcc/templates/gcc/signup_error.html similarity index 100% rename from frontend/gcc_web/gcc/templates/gcc/signup_error.html rename to gcc_web/gcc/templates/gcc/signup_error.html diff --git a/frontend/gcc_web/gcc/templates/gcc/user_credentials.html b/gcc_web/gcc/templates/gcc/user_credentials.html similarity index 100% rename from frontend/gcc_web/gcc/templates/gcc/user_credentials.html rename to gcc_web/gcc/templates/gcc/user_credentials.html diff --git a/frontend/gcc_web/gcc/templates/gcc/user_home.html b/gcc_web/gcc/templates/gcc/user_home.html similarity index 100% rename from frontend/gcc_web/gcc/templates/gcc/user_home.html rename to gcc_web/gcc/templates/gcc/user_home.html diff --git a/frontend/gcc_web/gcc/templates/gcc/user_workflows.html b/gcc_web/gcc/templates/gcc/user_workflows.html similarity index 100% rename from frontend/gcc_web/gcc/templates/gcc/user_workflows.html rename to gcc_web/gcc/templates/gcc/user_workflows.html diff --git a/frontend/gcc_web/gcc/tests.py b/gcc_web/gcc/tests.py similarity index 100% rename from frontend/gcc_web/gcc/tests.py rename to gcc_web/gcc/tests.py diff --git a/frontend/gcc_web/gcc/urls.py b/gcc_web/gcc/urls.py similarity index 100% rename from frontend/gcc_web/gcc/urls.py rename to gcc_web/gcc/urls.py diff --git a/frontend/gcc_web/gcc/views.py b/gcc_web/gcc/views.py similarity index 94% rename from frontend/gcc_web/gcc/views.py rename to gcc_web/gcc/views.py index d24e259..38734de 100644 --- a/frontend/gcc_web/gcc/views.py +++ b/gcc_web/gcc/views.py @@ -16,9 +16,11 @@ from django.template import loader from dropbox import DropboxOAuth2Flow from dropbox.exceptions import AuthError +from forms import CredentialForm, MachineForm, SignInForm, SignUpForm +from models import ExternalAccountCredentials, Machine, MachinePool -from .forms import SignInForm, 
SignUpForm, CredentialForm, MachineForm -from .models import ExternalAccountCredentials, MachinePool, Machine +from gcc_exec.gcc_user import GccUser +from gcc_exec.gcc_workflow import GccWorkflow def index(request): @@ -28,9 +30,9 @@ def index(request): if form.is_valid(): entered_usr = request.POST.get("user_name") entered_pwd = request.POST.get("user_password") - user = authenticate(username=entered_usr, password=entered_pwd) - if user is not None: - login(request, user) + usr = authenticate(username=entered_usr, password=entered_pwd) + if usr is not None: + login(request, usr) return HttpResponseRedirect("/user-home") else: return HttpResponseRedirect("/") @@ -51,15 +53,15 @@ def signup(request): pwd = request.POST.get("_user_password_") email = request.POST.get("user_email") try: - user = User.objects.create_user(username=usr, password=pwd, email=email) - login(request, user) + usr = User.objects.create_user(username=usr, password=pwd, email=email) + login(request, usr) mp = MachinePool() - mp.user = user + mp.user = usr mp.save() ac = ExternalAccountCredentials() - ac.user = user + ac.user = usr ac.aws_access_key = "" ac.aws_secret_access_key = "" ac.drbx_refresh_token = "" @@ -90,15 +92,15 @@ def signup_error(request): pwd = request.POST.get("_user_password_") email = request.POST.get("user_email") try: - user = User.objects.create_user(username=usr, password=pwd, email=email) - login(request, user) + usr = User.objects.create_user(username=usr, password=pwd, email=email) + login(request, usr) mp = MachinePool - mp.user = user + mp.user = usr mp.save() ac = ExternalAccountCredentials() - ac.user = user + ac.user = usr ac.aws_access_key = "" ac.aws_secret_access_key = "" ac.drbx_refresh_token = "" @@ -302,12 +304,7 @@ def _execute_workflow_(request, workflow_name: str): 0 ].drbx_refresh_token - from .exec import User, Workflow - - temp_dir = gen_string() - os.mkdir(f"{os.getcwd()}/tmp/{temp_dir}") - - user = User.User( + gcc_user_obj = GccUser( 
drbx_refresh_token, os.environ.get("DRBX_APP_KEY"), os.environ.get("DRBX_APP_SECRET"), @@ -315,16 +312,17 @@ def _execute_workflow_(request, workflow_name: str): aws_secret_access_key, ) - workflow = Workflow.Workflow(user, workflow_name, temp_dir) - plan, used_machines = workflow.plan(available_machines) + gcc_workflow_obj = GccWorkflow(gcc_user_obj, workflow_name) + + used_machines = gcc_workflow_obj.plan(available_machines) for machine in used_machines: machine_pool.machines.filter(id=machine.id).update(status="In use") - workflow.initialize() - workflow.configure(plan) - profile_memory(workflow.execute, plan, workflow) - workflow.complete() + gcc_workflow_obj.initialize() + gcc_workflow_obj.configure() + gcc_workflow_obj.execute() + gcc_workflow_obj.complete() for machine in used_machines: machine_pool.machines.filter(id=machine.id).update(status="Available") diff --git a/gcc_web/gcc_web/__init__.py b/gcc_web/gcc_web/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/frontend/gcc_web/gcc_web/asgi.py b/gcc_web/gcc_web/asgi.py similarity index 100% rename from frontend/gcc_web/gcc_web/asgi.py rename to gcc_web/gcc_web/asgi.py diff --git a/frontend/gcc_web/gcc_web/settings.py b/gcc_web/gcc_web/settings.py similarity index 100% rename from frontend/gcc_web/gcc_web/settings.py rename to gcc_web/gcc_web/settings.py diff --git a/frontend/gcc_web/gcc_web/urls.py b/gcc_web/gcc_web/urls.py similarity index 100% rename from frontend/gcc_web/gcc_web/urls.py rename to gcc_web/gcc_web/urls.py diff --git a/frontend/gcc_web/gcc_web/wsgi.py b/gcc_web/gcc_web/wsgi.py similarity index 100% rename from frontend/gcc_web/gcc_web/wsgi.py rename to gcc_web/gcc_web/wsgi.py diff --git a/frontend/gcc_web/manage.py b/gcc_web/manage.py similarity index 100% rename from frontend/gcc_web/manage.py rename to gcc_web/manage.py diff --git a/poetry.lock b/poetry.lock index 6435301..4c0b1dd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -82,14 +82,14 @@ uvloop = ["uvloop 
(>=0.15.2)"] [[package]] name = "boto3" -version = "1.21.30" +version = "1.21.35" description = "The AWS SDK for Python" category = "main" optional = false python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.24.30,<1.25.0" +botocore = ">=1.24.35,<1.25.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.5.0,<0.6.0" @@ -98,7 +98,7 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.24.30" +version = "1.24.35" description = "Low-level, data-driven core of boto 3." category = "main" optional = false @@ -110,7 +110,7 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.13.5)"] +crt = ["awscrt (==0.13.8)"] [[package]] name = "certifi" @@ -144,7 +144,7 @@ unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.1.1" +version = "8.1.2" description = "Composable command line interface toolkit" category = "dev" optional = false @@ -161,6 +161,20 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "coverage" +version = "6.3.2" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "cryptography" version = "36.0.2" @@ -209,16 +223,28 @@ argon2 = ["argon2-cffi (>=19.1.0)"] bcrypt = ["bcrypt"] [[package]] -name = "django-fernet-fields" -version = "0.6" -description = "Fernet-encrypted model fields for Django" +name = "django-appconf" +version = "1.0.5" +description = "A helper class for handling configuration defaults of packaged apps gracefully." 
category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] -cryptography = ">=0.9" -Django = ">=1.11" +django = "*" + +[[package]] +name = "django-cryptography" +version = "1.1" +description = "Easily encrypt data in Django" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cryptography = "*" +Django = "*" +django-appconf = "*" [[package]] name = "django-recaptcha" @@ -241,7 +267,7 @@ python-versions = ">=3.7" [[package]] name = "dropbox" -version = "11.28.0" +version = "11.29.0" description = "Official Dropbox API Client" category = "main" optional = false @@ -459,7 +485,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pylint" -version = "2.13.3" +version = "2.13.4" description = "python code static checker" category = "dev" optional = false @@ -524,6 +550,38 @@ tomli = ">=1.0.0" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +[[package]] +name = "pytest-codecov" +version = "0.4.0" +description = "Pytest plugin for uploading pytest-cov results to codecov.io" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6.0" +pytest-cov = ">=2.11.0" +requests = ">=2.25.1" + +[package.extras] +git = ["GitPython (>=3.1.15)"] + +[[package]] +name = "pytest-cov" +version = "3.0.0" +description = "Pytest plugin for measuring coverage." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -668,7 +726,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [metadata] lock-version = "1.1" python-versions = "^3.10" -content-hash = "e1fd4f62f8bd84b1dfe078dcebfa4cf45a2706bf378062e87eaf6a90a92e70e3" +content-hash = "576e0b6751882ff2c9ea91241ee2d96593ff90dc7bd8e303514adebf089f82a6" [metadata.files] asgiref = [ @@ -725,12 +783,12 @@ black = [ {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, ] boto3 = [ - {file = "boto3-1.21.30-py3-none-any.whl", hash = "sha256:ef210f8e85cdb6d26a38ebad1cfe9cefdef2ab269207e5987653555375a7ef6b"}, - {file = "boto3-1.21.30.tar.gz", hash = "sha256:f0af8f4ef5fe6353c794cd3cce627d469a618b58ace7ca75a63cfd719df615ce"}, + {file = "boto3-1.21.35-py3-none-any.whl", hash = "sha256:4fb810755bca4696effbeb0c6f792295795fae52e895638038dff53965f3f423"}, + {file = "boto3-1.21.35.tar.gz", hash = "sha256:ab6e001ba9de1db986634424abff6c79d938c15d0d2fa3ef95eb0939c120b4f6"}, ] botocore = [ - {file = "botocore-1.24.30-py3-none-any.whl", hash = "sha256:c622751093e3d0bf61343e66d6d06190ef30bf42b1557d5070ca84e9efa06d4b"}, - {file = "botocore-1.24.30.tar.gz", hash = "sha256:af4bdc51eeecbe9fdcdadbed9ad58c5c91380ef30f3560022bbc2ee1d78f0ad6"}, + {file = "botocore-1.24.35-py3-none-any.whl", hash = "sha256:734aa598af5d6bc0351e6ecce4a91b0b6ccf245febfd8d4de8425211aada5f36"}, + {file = "botocore-1.24.35.tar.gz", hash = "sha256:36b5422d8f0c312983582b8b4b056c98e1fd6121cb0b2ddb1f67e882e1ae6867"}, ] certifi = [ {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, @@ -793,13 +851,56 @@ 
charset-normalizer = [ {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, ] click = [ - {file = "click-8.1.1-py3-none-any.whl", hash = "sha256:5e0d195c2067da3136efb897449ec1e9e6c98282fbf30d7f9e164af9be901a6b"}, - {file = "click-8.1.1.tar.gz", hash = "sha256:7ab900e38149c9872376e8f9b5986ddcaf68c0f413cf73678a0bca5547e6f976"}, + {file = "click-8.1.2-py3-none-any.whl", hash = "sha256:24e1a4a9ec5bf6299411369b208c1df2188d9eb8d916302fe6bf03faed227f1e"}, + {file = "click-8.1.2.tar.gz", hash = "sha256:479707fe14d9ec9a0757618b7a100a0ae4c4e236fac5b7f80ca68028141a1a72"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +coverage = [ + {file = "coverage-6.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b27d894748475fa858f9597c0ee1d4829f44683f3813633aaf94b19cb5453cf"}, + {file = "coverage-6.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37d1141ad6b2466a7b53a22e08fe76994c2d35a5b6b469590424a9953155afac"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9987b0354b06d4df0f4d3e0ec1ae76d7ce7cbca9a2f98c25041eb79eec766f1"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26e2deacd414fc2f97dd9f7676ee3eaecd299ca751412d89f40bc01557a6b1b4"}, + {file = "coverage-6.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd8bafa458b5c7d061540f1ee9f18025a68e2d8471b3e858a9dad47c8d41903"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46191097ebc381fbf89bdce207a6c107ac4ec0890d8d20f3360345ff5976155c"}, + {file = 
"coverage-6.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6f89d05e028d274ce4fa1a86887b071ae1755082ef94a6740238cd7a8178804f"}, + {file = "coverage-6.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:58303469e9a272b4abdb9e302a780072c0633cdcc0165db7eec0f9e32f901e05"}, + {file = "coverage-6.3.2-cp310-cp310-win32.whl", hash = "sha256:2fea046bfb455510e05be95e879f0e768d45c10c11509e20e06d8fcaa31d9e39"}, + {file = "coverage-6.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:a2a8b8bcc399edb4347a5ca8b9b87e7524c0967b335fbb08a83c8421489ddee1"}, + {file = "coverage-6.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f1555ea6d6da108e1999b2463ea1003fe03f29213e459145e70edbaf3e004aaa"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5f4e1edcf57ce94e5475fe09e5afa3e3145081318e5fd1a43a6b4539a97e518"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a15dc0a14008f1da3d1ebd44bdda3e357dbabdf5a0b5034d38fcde0b5c234b7"}, + {file = "coverage-6.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b7745788866028adeb1e0eca3bf1101109e2dc58456cb49d2d9b99a8c516e6"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8ce257cac556cb03be4a248d92ed36904a59a4a5ff55a994e92214cde15c5bad"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b0be84e5a6209858a1d3e8d1806c46214e867ce1b0fd32e4ea03f4bd8b2e3359"}, + {file = "coverage-6.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:acf53bc2cf7282ab9b8ba346746afe703474004d9e566ad164c91a7a59f188a4"}, + {file = "coverage-6.3.2-cp37-cp37m-win32.whl", hash = "sha256:8bdde1177f2311ee552f47ae6e5aa7750c0e3291ca6b75f71f7ffe1f1dab3dca"}, + {file = "coverage-6.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b31651d018b23ec463e95cf10070d0b2c548aa950a03d0b559eaa11c7e5a6fa3"}, + {file = 
"coverage-6.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07e6db90cd9686c767dcc593dff16c8c09f9814f5e9c51034066cad3373b914d"}, + {file = "coverage-6.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c6dbb42f3ad25760010c45191e9757e7dce981cbfb90e42feef301d71540059"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76aeef1b95aff3905fb2ae2d96e319caca5b76fa41d3470b19d4e4a3a313512"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cf5cfcb1521dc3255d845d9dca3ff204b3229401994ef8d1984b32746bb45ca"}, + {file = "coverage-6.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fbbdc8d55990eac1b0919ca69eb5a988a802b854488c34b8f37f3e2025fa90d"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ec6bc7fe73a938933d4178c9b23c4e0568e43e220aef9472c4f6044bfc6dd0f0"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9baff2a45ae1f17c8078452e9e5962e518eab705e50a0aa8083733ea7d45f3a6"}, + {file = "coverage-6.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd9e830e9d8d89b20ab1e5af09b32d33e1a08ef4c4e14411e559556fd788e6b2"}, + {file = "coverage-6.3.2-cp38-cp38-win32.whl", hash = "sha256:f7331dbf301b7289013175087636bbaf5b2405e57259dd2c42fdcc9fcc47325e"}, + {file = "coverage-6.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:68353fe7cdf91f109fc7d474461b46e7f1f14e533e911a2a2cbb8b0fc8613cf1"}, + {file = "coverage-6.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b78e5afb39941572209f71866aa0b206c12f0109835aa0d601e41552f9b3e620"}, + {file = "coverage-6.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e21876082ed887baed0146fe222f861b5815455ada3b33b890f4105d806128d"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:34626a7eee2a3da12af0507780bb51eb52dca0e1751fd1471d0810539cefb536"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebf730d2381158ecf3dfd4453fbca0613e16eaa547b4170e2450c9707665ce7"}, + {file = "coverage-6.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd6fe30bd519694b356cbfcaca9bd5c1737cddd20778c6a581ae20dc8c04def2"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:96f8a1cb43ca1422f36492bebe63312d396491a9165ed3b9231e778d43a7fca4"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dd035edafefee4d573140a76fdc785dc38829fe5a455c4bb12bac8c20cfc3d69"}, + {file = "coverage-6.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ca5aeb4344b30d0bec47481536b8ba1181d50dbe783b0e4ad03c95dc1296684"}, + {file = "coverage-6.3.2-cp39-cp39-win32.whl", hash = "sha256:f5fa5803f47e095d7ad8443d28b01d48c0359484fec1b9d8606d0e3282084bc4"}, + {file = "coverage-6.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9548f10d8be799551eb3a9c74bbf2b4934ddb330e08a73320123c07f95cc2d92"}, + {file = "coverage-6.3.2-pp36.pp37.pp38-none-any.whl", hash = "sha256:18d520c6860515a771708937d2f78f63cc47ab3b80cb78e86573b0a760161faf"}, + {file = "coverage-6.3.2.tar.gz", hash = "sha256:03e2a7826086b91ef345ff18742ee9fc47a6839ccd517061ef8fa1976e652ce9"}, +] cryptography = [ {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:4e2dddd38a5ba733be6a025a1475a9f45e4e41139d1321f412c6b360b19070b6"}, {file = "cryptography-36.0.2-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:4881d09298cd0b669bb15b9cfe6166f16fc1277b4ed0d04a22f3d6430cb30f1d"}, @@ -830,8 +931,12 @@ django = [ {file = "Django-4.0.3-py3-none-any.whl", hash = "sha256:1239218849e922033a35d2a2f777cb8bee18bd725416744074f455f34ff50d0c"}, {file = "Django-4.0.3.tar.gz", hash = 
"sha256:77ff2e7050e3324c9b67e29b6707754566f58514112a9ac73310f60cd5261930"}, ] -django-fernet-fields = [ - {file = "django-fernet-fields-0.6.tar.gz", hash = "sha256:7f7e03c86d9473a42031ebade2b15be1484aad18ef5576ddab156c4667e04c4a"}, +django-appconf = [ + {file = "django-appconf-1.0.5.tar.gz", hash = "sha256:be3db0be6c81fa84742000b89a81c016d70ae66a7ccb620cdef592b1f1a6aaa4"}, + {file = "django_appconf-1.0.5-py3-none-any.whl", hash = "sha256:ae9f864ee1958c815a965ed63b3fba4874eec13de10236ba063a788f9a17389d"}, +] +django-cryptography = [ + {file = "django_cryptography-1.1-py2.py3-none-any.whl", hash = "sha256:93702fcf0d75865d55362f20ecd95274c4eef60ccdce46cbdade0420acee07cb"}, ] django-recaptcha = [ {file = "django-recaptcha-3.0.0.tar.gz", hash = "sha256:253197051288923cae675d7eff91b619e3775311292a5dbaf27a8a55ffebc670"}, @@ -842,9 +947,9 @@ django-widget-tweaks = [ {file = "django_widget_tweaks-1.4.12-py3-none-any.whl", hash = "sha256:fe6b17d5d595c63331f300917980db2afcf71f240ab9341b954aea8f45d25b9a"}, ] dropbox = [ - {file = "dropbox-11.28.0-py2-none-any.whl", hash = "sha256:7d03ae41cb855ee211e6beb395649bf883aeba65aac7c589912de3b65409bafb"}, - {file = "dropbox-11.28.0-py3-none-any.whl", hash = "sha256:51359fda86d9c2c56efd825ad738e98f7fa6d783de2637fbc44094ece965b79c"}, - {file = "dropbox-11.28.0.tar.gz", hash = "sha256:1c9983acad41c738394235276c0b3898d02f8c10f342f64a4c0d0885ad716929"}, + {file = "dropbox-11.29.0-py2-none-any.whl", hash = "sha256:2200ad5f42e00ae00d45db4a050fa199fe701ddc979fd1396d2c3e8912476c60"}, + {file = "dropbox-11.29.0-py3-none-any.whl", hash = "sha256:bf81a822e662bd337f4cd33fe39580c0b6ee4781d018ef1b31dcef2f402986f2"}, + {file = "dropbox-11.29.0.tar.gz", hash = "sha256:09b59f962ac28ce5b80d5f870c00c5fe7a637c4ac8d095c7c72fdab1e07376fc"}, ] flake8 = [ {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, @@ -999,8 +1104,8 @@ pyflakes = [ {file = "pyflakes-2.4.0.tar.gz", hash = 
"sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] pylint = [ - {file = "pylint-2.13.3-py3-none-any.whl", hash = "sha256:c8837b6ec6440e3490ab8f066054b0645a516a29ca51ce442f16f7004f711a70"}, - {file = "pylint-2.13.3.tar.gz", hash = "sha256:12ed2520510c40db647e4ec7f747b07e0d669b33ab41479c2a07bb89b92877db"}, + {file = "pylint-2.13.4-py3-none-any.whl", hash = "sha256:8672cf7441b81410f5de7defdf56e2d559c956fd0579652f2e0a0a35bea2d546"}, + {file = "pylint-2.13.4.tar.gz", hash = "sha256:7cc6d0c4f61dff440f9ed8b657f4ecd615dcfe35345953eb7b1dc74afe901d7a"}, ] pynacl = [ {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, @@ -1022,6 +1127,14 @@ pytest = [ {file = "pytest-7.1.1-py3-none-any.whl", hash = "sha256:92f723789a8fdd7180b6b06483874feca4c48a5c76968e03bb3e7f806a1869ea"}, {file = "pytest-7.1.1.tar.gz", hash = "sha256:841132caef6b1ad17a9afde46dc4f6cfa59a05f9555aae5151f73bdf2820ca63"}, ] +pytest-codecov = [ + {file = "pytest-codecov-0.4.0.tar.gz", hash = "sha256:79b88c14c6abafa5e8a017b2d2043e21873bef016dee7d24cc824b5f7b189b91"}, + {file = "pytest_codecov-0.4.0-py3-none-any.whl", hash = "sha256:662549fd1be88e1abc57d1c0b64053936ea507e1cf97c353fc0f0e75b009459b"}, +] +pytest-cov = [ + {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, + {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, +] python-dateutil = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, diff --git a/pyproject.toml b/pyproject.toml index 853e33f..329dfae 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,11 +9,11 @@ python = "^3.10" Django = 
"^4.0.3" python-dotenv = "^0.20.0" django-widget-tweaks = "^1.4.12" -django-fernet-fields = "^0.6" +django-cryptography = "^1.1" django-recaptcha = "^3.0.0" mysqlclient = "^2.1.0" -boto3 = "^1.21.30" -dropbox = "^11.28.0" +boto3 = "^1.21.35" +dropbox = "^11.29.0" xmltodict = "^0.12.0" psutil = "^5.9.0" paramiko = "^2.10.3" @@ -23,14 +23,14 @@ requests = "^2.27.1" black = "^22.3.0" pytest = "^7.1.1" flake8 = "^4.0.1" -pylint = "^2.13.3" +pylint = "^2.13.4" taskipy = "^1.10.1" +pytest-codecov = "^0.4.0" +coverage = "^6.3.2" [tool.taskipy.tasks] -black = { cmd = "black frontend backend --check", help = "Run the black checks for source code format" } -flake8 = { cmd = "flake8 frontend backend --ignore E501,E405,W503", help = "Run the flake8 checks for source code standards" } -pylint = { cmd = "pylint frontend backend --exit-zero", help = "Run the pylint checks for source code linting" } -test = { cmd = "pytest -x -s", help = "Run the test suite to ensure code correctness" } +lint = { cmd = "black gcc_exec --check;flake8 gcc_exec --ignore=E501,W503;pylint gcc_exec", help = "Run the black checks for source code format" } +test = { cmd = "pytest --cov gcc_exec/tests --cov-report xml:coverage.xml", help = "Run the test suite to ensure code correctness" } [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/tests/test_workflow.py b/tests/test_workflow.py deleted file mode 100644 index af93786..0000000 --- a/tests/test_workflow.py +++ /dev/null @@ -1,5 +0,0 @@ -import backend.exec.Workflow -import pytest - -def test_workflow_plan(xml_dict): - assert 1 == 1 \ No newline at end of file