diff --git a/examples/KerasGA/XOR_classification.py b/examples/KerasGA/XOR_classification.py
index 3b0a1e12..2d7e4ee5 100644
--- a/examples/KerasGA/XOR_classification.py
+++ b/examples/KerasGA/XOR_classification.py
@@ -16,8 +16,8 @@ def fitness_func(ga_instanse, solution, sol_idx):
     return solution_fitness
 def on_generation(ga_instance):
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution()[1]))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
 # Build the keras model using the functional API.
 input_layer = tensorflow.keras.layers.Input(2)
@@ -62,23 +62,23 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution()
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 predictions = pygad.kerasga.predict(model=model,
                                     solution=solution,
                                     data=data_inputs)
-print("Predictions : \n", predictions)
+print(f"Predictions : \n{predictions}")
 # Calculate the binary crossentropy for the trained model.
 bce = tensorflow.keras.losses.BinaryCrossentropy()
-print("Binary Crossentropy : ", bce(data_outputs, predictions).numpy())
+print(f"Binary Crossentropy : {bce(data_outputs, predictions).numpy()}")
 # Calculate the classification accuracy for the trained model.
 ba = tensorflow.keras.metrics.BinaryAccuracy()
 ba.update_state(data_outputs, predictions)
 accuracy = ba.result().numpy()
-print("Accuracy : ", accuracy)
+print(f"Accuracy : {accuracy}")
 # model.compile(optimizer="Adam", loss="mse", metrics=["mae"])
diff --git a/examples/KerasGA/cancer_dataset.py b/examples/KerasGA/cancer_dataset.py
index 5aceae6e..f5e87d39 100644
--- a/examples/KerasGA/cancer_dataset.py
+++ b/examples/KerasGA/cancer_dataset.py
@@ -17,8 +17,8 @@ def fitness_func(ga_instanse, solution, sol_idx):
     return solution_fitness
 def on_generation(ga_instance):
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution(ga_instance.last_generation_fitness)[1]))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
 # The dataset path.
 dataset_path = r'../data/Skin_Cancer_Dataset'
@@ -71,8 +71,8 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution(ga_instance.last_generation_fitness)
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 predictions = pygad.kerasga.predict(model=model,
                                     solution=solution,
@@ -81,13 +81,13 @@ def on_generation(ga_instance):
 # Calculate the categorical crossentropy for the trained model.
 cce = tensorflow.keras.losses.CategoricalCrossentropy()
-print("Categorical Crossentropy : ", cce(data_outputs, predictions).numpy())
+print(f"Categorical Crossentropy : {cce(data_outputs, predictions).numpy()}")
 # Calculate the classification accuracy for the trained model.
 ca = tensorflow.keras.metrics.CategoricalAccuracy()
 ca.update_state(data_outputs, predictions)
 accuracy = ca.result().numpy()
-print("Accuracy : ", accuracy)
+print(f"Accuracy : {accuracy}")
 # model.compile(optimizer="Adam", loss="mse", metrics=["mae"])
diff --git a/examples/KerasGA/cancer_dataset_generator.py b/examples/KerasGA/cancer_dataset_generator.py
index 3f8afeb0..9746e907 100644
--- a/examples/KerasGA/cancer_dataset_generator.py
+++ b/examples/KerasGA/cancer_dataset_generator.py
@@ -16,8 +16,8 @@ def fitness_func(ga_instanse, solution, sol_idx):
     return solution_fitness
 def on_generation(ga_instance):
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution(ga_instance.last_generation_fitness)[1]))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
 # The dataset path.
 dataset_path = r'../data/Skin_Cancer_Dataset'
@@ -65,8 +65,8 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution(ga_instance.last_generation_fitness)
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 predictions = pygad.kerasga.predict(model=model,
                                     solution=solution,
@@ -75,13 +75,13 @@ def on_generation(ga_instance):
 # Calculate the categorical crossentropy for the trained model.
 cce = tensorflow.keras.losses.CategoricalCrossentropy()
-print("Categorical Crossentropy : ", cce(data_outputs, predictions).numpy())
+print(f"Categorical Crossentropy : {cce(data_outputs, predictions).numpy()}")
 # Calculate the classification accuracy for the trained model.
 ca = tensorflow.keras.metrics.CategoricalAccuracy()
 ca.update_state(data_outputs, predictions)
 accuracy = ca.result().numpy()
-print("Accuracy : ", accuracy)
+print(f"Accuracy : {accuracy}")
 # model.compile(optimizer="Adam", loss="mse", metrics=["mae"])
diff --git a/examples/KerasGA/image_classification_CNN.py b/examples/KerasGA/image_classification_CNN.py
index 9fb45630..a8084ee9 100644
--- a/examples/KerasGA/image_classification_CNN.py
+++ b/examples/KerasGA/image_classification_CNN.py
@@ -16,8 +16,8 @@ def fitness_func(ga_instanse, solution, sol_idx):
     return solution_fitness
 def on_generation(ga_instance):
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution()[1]))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
 # Build the keras model using the functional API.
 input_layer = tensorflow.keras.layers.Input(shape=(100, 100, 3))
@@ -66,8 +66,8 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution()
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 predictions = pygad.kerasga.predict(model=model,
                                     solution=solution,
@@ -76,13 +76,13 @@ def on_generation(ga_instance):
 # Calculate the categorical crossentropy for the trained model.
 cce = tensorflow.keras.losses.CategoricalCrossentropy()
-print("Categorical Crossentropy : ", cce(data_outputs, predictions).numpy())
+print(f"Categorical Crossentropy : {cce(data_outputs, predictions).numpy()}")
 # Calculate the classification accuracy for the trained model.
 ca = tensorflow.keras.metrics.CategoricalAccuracy()
 ca.update_state(data_outputs, predictions)
 accuracy = ca.result().numpy()
-print("Accuracy : ", accuracy)
+print(f"Accuracy : {accuracy}")
 # model.compile(optimizer="Adam", loss="mse", metrics=["mae"])
diff --git a/examples/KerasGA/image_classification_Dense.py b/examples/KerasGA/image_classification_Dense.py
index 002e36c6..986282a3 100644
--- a/examples/KerasGA/image_classification_Dense.py
+++ b/examples/KerasGA/image_classification_Dense.py
@@ -16,8 +16,8 @@ def fitness_func(ga_instanse, solution, sol_idx):
     return solution_fitness
 def on_generation(ga_instance):
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution()[1]))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
 # Build the keras model using the functional API.
 input_layer = tensorflow.keras.layers.Input(360)
@@ -57,8 +57,8 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution()
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 # Fetch the parameters of the best solution.
 predictions = pygad.kerasga.predict(model=model,
@@ -68,13 +68,13 @@ def on_generation(ga_instance):
 # Calculate the categorical crossentropy for the trained model.
 cce = tensorflow.keras.losses.CategoricalCrossentropy()
-print("Categorical Crossentropy : ", cce(data_outputs, predictions).numpy())
+print(f"Categorical Crossentropy : {cce(data_outputs, predictions).numpy()}")
 # Calculate the classification accuracy for the trained model.
 ca = tensorflow.keras.metrics.CategoricalAccuracy()
 ca.update_state(data_outputs, predictions)
 accuracy = ca.result().numpy()
-print("Accuracy : ", accuracy)
+print(f"Accuracy : {accuracy}")
 # model.compile(optimizer="Adam", loss="mse", metrics=["mae"])
diff --git a/examples/KerasGA/regression_example.py b/examples/KerasGA/regression_example.py
index 2deec1fe..11312c35 100644
--- a/examples/KerasGA/regression_example.py
+++ b/examples/KerasGA/regression_example.py
@@ -17,8 +17,8 @@ def fitness_func(ga_instanse, solution, sol_idx):
     return solution_fitness
 def on_generation(ga_instance):
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution()[1]))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
 # Create the Keras model.
 input_layer = tensorflow.keras.layers.Input(3)
@@ -61,17 +61,17 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution()
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 predictions = pygad.kerasga.predict(model=model,
                                     solution=solution,
                                     data=data_inputs)
-print("Predictions : \n", predictions)
+print(f"Predictions : \n{predictions}")
 mae = tensorflow.keras.losses.MeanAbsoluteError()
 abs_error = mae(data_outputs, predictions).numpy()
-print("Absolute Error : ", abs_error)
+print(f"Absolute Error : {abs_error}")
 # model.compile(optimizer="Adam", loss="mse", metrics=["mae"])
diff --git a/examples/TorchGA/XOR_classification.py b/examples/TorchGA/XOR_classification.py
index f7a2f44a..be2e9e86 100644
--- a/examples/TorchGA/XOR_classification.py
+++ b/examples/TorchGA/XOR_classification.py
@@ -14,8 +14,8 @@ def fitness_func(ga_instanse, solution, sol_idx):
     return solution_fitness
 def on_generation(ga_instance):
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution()[1]))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
 # Create the PyTorch model.
 input_layer = torch.nn.Linear(2, 4)
@@ -68,19 +68,19 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution()
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 predictions = pygad.torchga.predict(model=model,
                                     solution=solution,
                                     data=data_inputs)
-print("Predictions : \n", predictions.detach().numpy())
+print(f"Predictions : \n{predictions.detach().numpy()}")
 # Calculate the binary crossentropy for the trained model.
-print("Binary Crossentropy : ", loss_function(predictions, data_outputs).detach().numpy())
+print(f"Binary Crossentropy : {loss_function(predictions, data_outputs).detach().numpy()}")
 # Calculate the classification accuracy of the trained model.
 a = torch.max(predictions, axis=1)
 b = torch.max(data_outputs, axis=1)
 accuracy = torch.true_divide(torch.sum(a.indices == b.indices), len(data_outputs))
-print("Accuracy : ", accuracy.detach().numpy())
+print(f"Accuracy : {accuracy.detach().numpy()}")
diff --git a/examples/TorchGA/image_classification_CNN.py b/examples/TorchGA/image_classification_CNN.py
index baf1f1bc..295cdc59 100644
--- a/examples/TorchGA/image_classification_CNN.py
+++ b/examples/TorchGA/image_classification_CNN.py
@@ -15,8 +15,8 @@ def fitness_func(ga_instanse, solution, sol_idx):
     return solution_fitness
 def on_generation(ga_instance):
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution()[1]))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
 # Build the PyTorch model.
 input_layer = torch.nn.Conv2d(in_channels=3, out_channels=5, kernel_size=7)
@@ -78,8 +78,8 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution()
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 predictions = pygad.torchga.predict(model=model,
                                     solution=solution,
@@ -87,8 +87,8 @@ def on_generation(ga_instance):
 # print("Predictions : \n", predictions)
 # Calculate the crossentropy for the trained model.
-print("Crossentropy : ", loss_function(predictions, data_outputs).detach().numpy())
+print(f"Crossentropy : {loss_function(predictions, data_outputs).detach().numpy()}")
 # Calculate the classification accuracy for the trained model.
 accuracy = torch.true_divide(torch.sum(torch.max(predictions, axis=1).indices == data_outputs), len(data_outputs))
-print("Accuracy : ", accuracy.detach().numpy())
+print(f"Accuracy : {accuracy.detach().numpy()}")
diff --git a/examples/TorchGA/image_classification_Dense.py b/examples/TorchGA/image_classification_Dense.py
index 91bb4c16..85e8b1f3 100644
--- a/examples/TorchGA/image_classification_Dense.py
+++ b/examples/TorchGA/image_classification_Dense.py
@@ -15,8 +15,8 @@ def fitness_func(ga_instanse, solution, sol_idx):
     return solution_fitness
 def on_generation(ga_instance):
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution()[1]))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
 # Build the PyTorch model using the functional API.
 input_layer = torch.nn.Linear(360, 50)
@@ -64,8 +64,8 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution()
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 predictions = pygad.torchga.predict(model=model,
                                     solution=solution,
@@ -73,8 +73,8 @@ def on_generation(ga_instance):
 # print("Predictions : \n", predictions)
 # Calculate the crossentropy loss of the trained model.
-print("Crossentropy : ", loss_function(predictions, data_outputs).detach().numpy())
+print(f"Crossentropy : {loss_function(predictions, data_outputs).detach().numpy()}")
 # Calculate the classification accuracy for the trained model.
 accuracy = torch.true_divide(torch.sum(torch.max(predictions, axis=1).indices == data_outputs), len(data_outputs))
-print("Accuracy : ", accuracy.detach().numpy())
+print(f"Accuracy : {accuracy.detach().numpy()}")
diff --git a/examples/TorchGA/regression_example.py b/examples/TorchGA/regression_example.py
index 5bf2fc1e..a7feb31a 100644
--- a/examples/TorchGA/regression_example.py
+++ b/examples/TorchGA/regression_example.py
@@ -15,8 +15,8 @@ def fitness_func(ga_instanse, solution, sol_idx):
     return solution_fitness
 def on_generation(ga_instance):
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution()[1]))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
 # Create the PyTorch model.
 input_layer = torch.nn.Linear(3, 2)
@@ -64,13 +64,13 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution()
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 predictions = pygad.torchga.predict(model=model,
                                     solution=solution,
                                     data=data_inputs)
-print("Predictions : \n", predictions.detach().numpy())
+print(f"Predictions : \n{predictions.detach().numpy()}")
 abs_error = loss_function(predictions, data_outputs)
-print("Absolute Error : ", abs_error.detach().numpy())
+print(f"Absolute Error : {abs_error.detach().numpy()}")
diff --git a/examples/clustering/example_clustering_2.py b/examples/clustering/example_clustering_2.py
index 877e3183..9b846541 100644
--- a/examples/clustering/example_clustering_2.py
+++ b/examples/clustering/example_clustering_2.py
@@ -107,9 +107,9 @@ def fitness_func(ga_instance, solution, solution_idx):
 ga_instance.run()
 best_solution, best_solution_fitness, best_solution_idx = ga_instance.best_solution()
-print("Best solution is {bs}".format(bs=best_solution))
-print("Fitness of the best solution is {bsf}".format(bsf=best_solution_fitness))
-print("Best solution found after {gen} generations".format(gen=ga_instance.best_solution_generation))
+print(f"Best solution is {best_solution}")
+print(f"Fitness of the best solution is {best_solution_fitness}")
+print(f"Best solution found after {ga_instance.best_solution_generation} generations")
 cluster_centers, all_clusters_dists, cluster_indices, clusters, clusters_sum_dist = cluster_data(best_solution, best_solution_idx)
diff --git a/examples/clustering/example_clustering_3.py b/examples/clustering/example_clustering_3.py
index 608d54b7..5c0381d8 100644
--- a/examples/clustering/example_clustering_3.py
+++ b/examples/clustering/example_clustering_3.py
@@ -119,9 +119,9 @@ def fitness_func(ga_instance, solution, solution_idx):
 ga_instance.run()
 best_solution, best_solution_fitness, best_solution_idx = ga_instance.best_solution()
-print("Best solution is {bs}".format(bs=best_solution))
-print("Fitness of the best solution is {bsf}".format(bsf=best_solution_fitness))
-print("Best solution found after {gen} generations".format(gen=ga_instance.best_solution_generation))
+print(f"Best solution is {best_solution}")
+print(f"Fitness of the best solution is {best_solution_fitness}")
+print(f"Best solution found after {ga_instance.best_solution_generation} generations")
 cluster_centers, all_clusters_dists, cluster_indices, clusters, clusters_sum_dist = cluster_data(best_solution, best_solution_idx)
diff --git a/examples/cnn/example_image_classification.py b/examples/cnn/example_image_classification.py
index 9b90a8b5..13347ec1 100644
--- a/examples/cnn/example_image_classification.py
+++ b/examples/cnn/example_image_classification.py
@@ -67,6 +67,6 @@
 num_wrong = numpy.where(predictions != train_outputs)[0]
 num_correct = train_outputs.size - num_wrong.size
 accuracy = 100 * (num_correct/train_outputs.size)
-print("Number of correct classifications : {num_correct}.".format(num_correct=num_correct))
-print("Number of wrong classifications : {num_wrong}.".format(num_wrong=num_wrong.size))
-print("Classification accuracy : {accuracy}.".format(accuracy=accuracy))
+print(f"Number of correct classifications : {num_correct}.")
+print(f"Number of wrong classifications : {num_wrong.size}.")
+print(f"Classification accuracy : {accuracy}.")
diff --git a/examples/example.py b/examples/example.py
index f22bc724..72d895cd 100644
--- a/examples/example.py
+++ b/examples/example.py
@@ -25,9 +25,9 @@ def fitness_func(ga_instance, solution, solution_idx):
 last_fitness = 0
 def on_generation(ga_instance):
     global last_fitness
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]))
-    print("Change = {change}".format(change=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1] - last_fitness))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]}")
+    print(f"Change = {ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1] - last_fitness}")
     last_fitness = ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]
 ga_instance = pygad.GA(num_generations=num_generations,
diff --git a/examples/example_logger.py b/examples/example_logger.py
index d38a1791..bbf44e97 100644
--- a/examples/example_logger.py
+++ b/examples/example_logger.py
@@ -29,8 +29,8 @@ def fitness_func(ga_instance, solution, solution_idx):
     return fitness
 def on_generation(ga_instance):
-    ga_instance.logger.info("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    ga_instance.logger.info("Fitness = {fitness}".format(fitness=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]))
+    ga_instance.logger.info(f"Generation = {ga_instance.generations_completed}")
+    ga_instance.logger.info(f"Fitness = {ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]}")
 ga_instance = pygad.GA(num_generations=10,
                        sol_per_pop=40,
diff --git a/examples/example_multi_objective.py b/examples/example_multi_objective.py
index 3fdf35a2..479ba9ba 100644
--- a/examples/example_multi_objective.py
+++ b/examples/example_multi_objective.py
@@ -37,9 +37,9 @@ def fitness_func(ga_instance, solution, solution_idx):
 last_fitness = 0
 def on_generation(ga_instance):
     global last_fitness
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]))
-    print("Change = {change}".format(change=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1] - last_fitness))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]}")
+    print(f"Change = {ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1] - last_fitness}")
     last_fitness = ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]
 ga_instance = pygad.GA(num_generations=num_generations,
@@ -57,15 +57,15 @@ def on_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution(ga_instance.last_generation_fitness)
-print("Parameters of the best solution : {solution}".format(solution=solution))
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Parameters of the best solution : {solution}")
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 prediction = numpy.sum(numpy.array(function_inputs1)*solution)
-print("Predicted output 1 based on the best solution : {prediction}".format(prediction=prediction))
+print(f"Predicted output 1 based on the best solution : {prediction}")
 prediction = numpy.sum(numpy.array(function_inputs2)*solution)
-print("Predicted output 2 based on the best solution : {prediction}".format(prediction=prediction))
+print(f"Predicted output 2 based on the best solution : {prediction}")
 if ga_instance.best_solution_generation != -1:
-    print("Best fitness value reached after {best_solution_generation} generations.".format(best_solution_generation=ga_instance.best_solution_generation))
+    print(f"Best fitness value reached after {ga_instance.best_solution_generation} generations.")
diff --git a/examples/gacnn/example_image_classification.py b/examples/gacnn/example_image_classification.py
index 32fa77ce..daaec5a3 100644
--- a/examples/gacnn/example_image_classification.py
+++ b/examples/gacnn/example_image_classification.py
@@ -108,21 +108,21 @@ def callback_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)
-print("Parameters of the best solution : {solution}".format(solution=solution))
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Parameters of the best solution : {solution}")
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 if ga_instance.best_solution_generation != -1:
-    print("Best fitness value reached after {best_solution_generation} generations.".format(best_solution_generation=ga_instance.best_solution_generation))
+    print(f"Best fitness value reached after {ga_instance.best_solution_generation} generations.")
 # Predicting the outputs of the data using the best solution.
 predictions = GACNN_instance.population_networks[solution_idx].predict(data_inputs=data_inputs)
-print("Predictions of the trained network : {predictions}".format(predictions=predictions))
+print(f"Predictions of the trained network : {predictions}")
 # Calculating some statistics
 num_wrong = numpy.where(predictions != data_outputs)[0]
 num_correct = data_outputs.size - num_wrong.size
 accuracy = 100 * (num_correct/data_outputs.size)
-print("Number of correct classifications : {num_correct}.".format(num_correct=num_correct))
-print("Number of wrong classifications : {num_wrong}.".format(num_wrong=num_wrong.size))
-print("Classification accuracy : {accuracy}.".format(accuracy=accuracy))
+print(f"Number of correct classifications : {num_correct}.")
+print(f"Number of wrong classifications : {num_wrong.size}.")
+print(f"Classification accuracy : {accuracy}.")
diff --git a/examples/gann/example_XOR_classification.py b/examples/gann/example_XOR_classification.py
index 2e3f4652..b5ed6d7a 100644
--- a/examples/gann/example_XOR_classification.py
+++ b/examples/gann/example_XOR_classification.py
@@ -25,9 +25,9 @@ def callback_generation(ga_instance):
     GANN_instance.update_population_trained_weights(population_trained_weights=population_matrices)
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution()[1]))
-    print("Change = {change}".format(change=ga_instance.best_solution()[1] - last_fitness))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
+    print(f"Change = {ga_instance.best_solution()[1] - last_fitness}")
     last_fitness = ga_instance.best_solution()[1].copy()
@@ -107,22 +107,22 @@ def callback_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution()
-print("Parameters of the best solution : {solution}".format(solution=solution))
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Parameters of the best solution : {solution}")
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 if ga_instance.best_solution_generation != -1:
-    print("Best fitness value reached after {best_solution_generation} generations.".format(best_solution_generation=ga_instance.best_solution_generation))
+    print(f"Best fitness value reached after {ga_instance.best_solution_generation} generations.")
 # Predicting the outputs of the data using the best solution.
 predictions = pygad.nn.predict(last_layer=GANN_instance.population_networks[solution_idx],
                                data_inputs=data_inputs)
-print("Predictions of the trained network : {predictions}".format(predictions=predictions))
+print(f"Predictions of the trained network : {predictions}")
 # Calculating some statistics
 num_wrong = numpy.where(predictions != data_outputs)[0]
 num_correct = data_outputs.size - num_wrong.size
 accuracy = 100 * (num_correct/data_outputs.size)
-print("Number of correct classifications : {num_correct}.".format(num_correct=num_correct))
-print("Number of wrong classifications : {num_wrong}.".format(num_wrong=num_wrong.size))
-print("Classification accuracy : {accuracy}.".format(accuracy=accuracy))
\ No newline at end of file
+print(f"Number of correct classifications : {num_correct}.")
+print(f"Number of wrong classifications : {num_wrong.size}.")
+print(f"Classification accuracy : {accuracy}.")
\ No newline at end of file
diff --git a/examples/gann/example_classification.py b/examples/gann/example_classification.py
index b25a6763..a1eb1505 100644
--- a/examples/gann/example_classification.py
+++ b/examples/gann/example_classification.py
@@ -21,9 +21,9 @@ def callback_generation(ga_instance):
     GANN_instance.update_population_trained_weights(population_trained_weights=population_matrices)
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]))
-    print("Change = {change}".format(change=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1] - last_fitness))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
+    print(f"Change = {ga_instance.best_solution()[1] - last_fitness}")
     last_fitness = ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1].copy()
@@ -95,22 +95,22 @@ def callback_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)
-print("Parameters of the best solution : {solution}".format(solution=solution))
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Parameters of the best solution : {solution}")
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 if ga_instance.best_solution_generation != -1:
-    print("Best fitness value reached after {best_solution_generation} generations.".format(best_solution_generation=ga_instance.best_solution_generation))
+    print(f"Best fitness value reached after {ga_instance.best_solution_generation} generations.")
 # Predicting the outputs of the data using the best solution.
 predictions = pygad.nn.predict(last_layer=GANN_instance.population_networks[solution_idx],
                                data_inputs=data_inputs)
-print("Predictions of the trained network : {predictions}".format(predictions=predictions))
+print(f"Predictions of the trained network : {predictions}")
 # Calculating some statistics
 num_wrong = numpy.where(predictions != data_outputs)[0]
 num_correct = data_outputs.size - num_wrong.size
 accuracy = 100 * (num_correct/data_outputs.size)
-print("Number of correct classifications : {num_correct}.".format(num_correct=num_correct))
-print("Number of wrong classifications : {num_wrong}.".format(num_wrong=num_wrong.size))
-print("Classification accuracy : {accuracy}.".format(accuracy=accuracy))
\ No newline at end of file
+print(f"Number of correct classifications : {num_correct}.")
+print(f"Number of wrong classifications : {num_wrong.size}.")
+print(f"Classification accuracy : {accuracy}.")
\ No newline at end of file
diff --git a/examples/gann/example_regression.py b/examples/gann/example_regression.py
index f120bbf9..427176e9 100644
--- a/examples/gann/example_regression.py
+++ b/examples/gann/example_regression.py
@@ -20,9 +20,9 @@ def callback_generation(ga_instance):
     GANN_instance.update_population_trained_weights(population_trained_weights=population_matrices)
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]))
-    print("Change = {change}".format(change=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1] - last_fitness))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
+    print(f"Change = {ga_instance.best_solution()[1] - last_fitness}")
     last_fitness = ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1].copy()
@@ -95,19 +95,19 @@ def callback_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)
-print("Parameters of the best solution : {solution}".format(solution=solution))
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Parameters of the best solution : {solution}")
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 if ga_instance.best_solution_generation != -1:
-    print("Best fitness value reached after {best_solution_generation} generations.".format(best_solution_generation=ga_instance.best_solution_generation))
+    print(f"Best fitness value reached after {ga_instance.best_solution_generation} generations.")
 # Predicting the outputs of the data using the best solution.
 predictions = pygad.nn.predict(last_layer=GANN_instance.population_networks[solution_idx],
                                data_inputs=data_inputs,
                                problem_type="regression")
-print("Predictions of the trained network : {predictions}".format(predictions=predictions))
+print(f"Predictions of the trained network : {predictions}")
 # Calculating some statistics
 abs_error = numpy.mean(numpy.abs(predictions - data_outputs))
-print("Absolute error : {abs_error}.".format(abs_error=abs_error))
+print(f"Absolute error : {abs_error}.")
diff --git a/examples/gann/example_regression_fish.py b/examples/gann/example_regression_fish.py
index 4eb3c0b8..3c5beaa1 100644
--- a/examples/gann/example_regression_fish.py
+++ b/examples/gann/example_regression_fish.py
@@ -21,9 +21,9 @@ def callback_generation(ga_instance):
     GANN_instance.update_population_trained_weights(population_trained_weights=population_matrices)
-    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
-    print("Fitness = {fitness}".format(fitness=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1]))
-    print("Change = {change}".format(change=ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1] - last_fitness))
+    print(f"Generation = {ga_instance.generations_completed}")
+    print(f"Fitness = {ga_instance.best_solution()[1]}")
+    print(f"Change = {ga_instance.best_solution()[1] - last_fitness}")
     last_fitness = ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)[1].copy()
@@ -96,19 +96,19 @@ def callback_generation(ga_instance):
 # Returning the details of the best solution.
 solution, solution_fitness, solution_idx = ga_instance.best_solution(pop_fitness=ga_instance.last_generation_fitness)
-print("Parameters of the best solution : {solution}".format(solution=solution))
-print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
-print("Index of the best solution : {solution_idx}".format(solution_idx=solution_idx))
+print(f"Parameters of the best solution : {solution}")
+print(f"Fitness value of the best solution = {solution_fitness}")
+print(f"Index of the best solution : {solution_idx}")
 if ga_instance.best_solution_generation != -1:
-    print("Best fitness value reached after {best_solution_generation} generations.".format(best_solution_generation=ga_instance.best_solution_generation))
+    print(f"Best fitness value reached after {ga_instance.best_solution_generation} generations.")
 # Predicting the outputs of the data using the best solution.
 predictions = pygad.nn.predict(last_layer=GANN_instance.population_networks[solution_idx],
                                data_inputs=data_inputs,
                                problem_type="regression")
-print("Predictions of the trained network : {predictions}".format(predictions=predictions))
+print(f"Predictions of the trained network : {predictions}")
 # Calculating some statistics
 abs_error = numpy.mean(numpy.abs(predictions - data_outputs))
-print("Absolute error : {abs_error}.".format(abs_error=abs_error))
+print(f"Absolute error : {abs_error}.")
diff --git a/examples/genetic.pkl b/examples/genetic.pkl
deleted file mode 100644
index c82c1568..00000000
Binary files a/examples/genetic.pkl and /dev/null differ
diff --git a/examples/nn/example_XOR_classification.py b/examples/nn/example_XOR_classification.py
index 52086790..f40d97ff 100644
--- a/examples/nn/example_XOR_classification.py
+++ b/examples/nn/example_XOR_classification.py
@@ -46,6 +46,6 @@
 num_wrong = numpy.where(predictions != data_outputs)[0]
 num_correct = data_outputs.size - num_wrong.size
 accuracy = 100 * (num_correct/data_outputs.size)
-print("Number of correct classifications : {num_correct}.".format(num_correct=num_correct))
-print("Number of wrong classifications : {num_wrong}.".format(num_wrong=num_wrong.size))
-print("Classification accuracy : {accuracy}.".format(accuracy=accuracy))
+print(f"Number of correct classifications : {num_correct}.")
+print(f"Number of wrong classifications : {num_wrong.size}.")
+print(f"Classification accuracy : {accuracy}.")
\ No newline at end of file
diff --git a/examples/nn/example_classification.py b/examples/nn/example_classification.py
index 732b92d0..ac5f97a6 100644
--- a/examples/nn/example_classification.py
+++ b/examples/nn/example_classification.py
@@ -46,6 +46,6 @@
 num_wrong = numpy.where(predictions != data_outputs)[0]
 num_correct = data_outputs.size - num_wrong.size
 accuracy = 100 * (num_correct/data_outputs.size)
-print("Number of correct classifications : {num_correct}.".format(num_correct=num_correct))
-print("Number of wrong classifications : {num_wrong}.".format(num_wrong=num_wrong.size))
-print("Classification accuracy : {accuracy}.".format(accuracy=accuracy))
+print(f"Number of correct classifications : {num_correct}.")
+print(f"Number of wrong classifications : {num_wrong.size}.")
+print(f"Classification accuracy : {accuracy}.")
diff --git a/examples/nn/example_regression.py b/examples/nn/example_regression.py
index 5b9d990c..3f0f5af7 100644
--- a/examples/nn/example_regression.py
+++ b/examples/nn/example_regression.py
@@ -43,4 +43,4 @@
 # Calculating some statistics
 abs_error = numpy.mean(numpy.abs(predictions - data_outputs))
-print("Absolute error : {abs_error}.".format(abs_error=abs_error))
+print(f"Absolute error : {abs_error}.")
diff --git a/examples/nn/example_regression_fish.py b/examples/nn/example_regression_fish.py
index a6aa550e..49e6522f 100644
--- a/examples/nn/example_regression_fish.py
+++ b/examples/nn/example_regression_fish.py
@@ -44,4 +44,4 @@
 # Calculating some statistics
 abs_error = numpy.mean(numpy.abs(predictions - data_outputs))
-print("Absolute error : {abs_error}.".format(abs_error=abs_error))
\ No newline at end of file
+print(f"Absolute error : {abs_error}.")
\ No newline at end of file