Merge pull request #470 from pallamadhavi/main
Automated test cases for frequent patterns
Showing 10 changed files with 320 additions and 0 deletions.
@@ -0,0 +1,19 @@
import pandas as pd
from gen import generate_transactional_dataset
from PAMI.frequentPattern.basic.Apriori import Apriori as alg
import warnings

warnings.filterwarnings("ignore")

# Apriori algorithm from PAMI
def test_pami(dataset, min_sup=0.2):
    dataset = [",".join(i) for i in dataset]
    with open("sample.csv", "w+") as f:
        f.write("\n".join(dataset))
    obj = alg(iFile="sample.csv", minSup=min_sup, sep=',')
    obj.mine()
    res = obj.getPatternsAsDataFrame()
    res["Patterns"] = res["Patterns"].apply(lambda x: x.split())
    res["Support"] = res["Support"].apply(lambda x: x / len(dataset))
    pami = res
    return pami
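A minimal usage sketch for this helper, assuming PAMI is installed and gen.py sits alongside it (the unittest modules below import it as automated_test_Apriori):

# Sketch: exercise the Apriori helper directly on a small random dataset
from gen import generate_transactional_dataset
from automated_test_Apriori import test_pami

items = ["item-{}".format(i) for i in range(1, 21)]        # 20 distinct items
dataset = generate_transactional_dataset(100, items, 10)   # 100 transactions, up to 10 items each
patterns = test_pami(dataset, min_sup=0.2)                 # frequent patterns with relative support >= 0.2
print(patterns.head())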
@@ -0,0 +1,19 @@
import pandas as pd
from gen import generate_transactional_dataset
from PAMI.frequentPattern.basic.ECLAT import ECLAT as alg
import warnings

warnings.filterwarnings("ignore")

# ECLAT algorithm from PAMI
def test_pami(dataset, min_sup=0.2):
    dataset = [",".join(i) for i in dataset]
    with open("sample.csv", "w+") as f:
        f.write("\n".join(dataset))
    obj = alg(iFile="sample.csv", minSup=min_sup, sep=',')
    obj.mine()
    res = obj.getPatternsAsDataFrame()
    res["Patterns"] = res["Patterns"].apply(lambda x: x.split())
    res["Support"] = res["Support"].apply(lambda x: x / len(dataset))
    pami = res
    return pami
tests/frequentPattern/basic/automated_test_ECLATDiffset.py (19 additions, 0 deletions)
@@ -0,0 +1,19 @@
import pandas as pd
from gen import generate_transactional_dataset
from PAMI.frequentPattern.basic.ECLATDiffset import ECLATDiffset as alg
import warnings

warnings.filterwarnings("ignore")

# ECLATDiffset algorithm from PAMI
def test_pami(dataset, min_sup=0.2):
    dataset = [",".join(i) for i in dataset]
    with open("sample.csv", "w+") as f:
        f.write("\n".join(dataset))
    obj = alg(iFile="sample.csv", minSup=min_sup, sep=',')
    obj.mine()
    res = obj.getPatternsAsDataFrame()
    res["Patterns"] = res["Patterns"].apply(lambda x: x.split())
    res["Support"] = res["Support"].apply(lambda x: x / len(dataset))
    pami = res
    return pami
@@ -0,0 +1,19 @@
import pandas as pd
from gen import generate_transactional_dataset
from PAMI.frequentPattern.basic.FPGrowth import FPGrowth as alg
import warnings

warnings.filterwarnings("ignore")

# FP-Growth algorithm from PAMI
def test_pami(dataset, min_sup=0.2):
    dataset = [",".join(i) for i in dataset]
    with open("sample.csv", "w+") as f:
        f.write("\n".join(dataset))
    obj = alg(iFile="sample.csv", minSup=min_sup, sep=',')
    obj.mine()
    res = obj.getPatternsAsDataFrame()
    res["Patterns"] = res["Patterns"].apply(lambda x: x.split())
    res["Support"] = res["Support"].apply(lambda x: x / len(dataset))
    pami = res
    return pami
@@ -0,0 +1,56 @@
import unittest
from gen import generate_transactional_dataset
from automated_test_ECLAT import test_pami
import warnings

warnings.filterwarnings("ignore")

class TestExample(unittest.TestCase):
    def test_num_patterns(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that PAMI produced patterns
            self.assertGreater(len(pami), 0, "No patterns were generated by PAMI")

        print("3 test cases for number of patterns have passed")

    def test_equality(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that patterns are generated
            pami_patterns = sorted(list(pami["Patterns"]))
            self.assertTrue(len(pami_patterns) > 0, "No patterns were generated by PAMI")

        print("3 test cases for pattern equality have passed")

    def test_support(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that the support values are valid
            pami.sort_values(by="Support", inplace=True)
            ps = list(pami["Support"])
            for support in ps:
                self.assertTrue(support > 0, "Support value should be greater than 0")

        print("3 test cases for support values have passed")

if __name__ == '__main__':
    unittest.main()
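A rough sketch of running these unittest modules together, assuming the working directory is tests/frequentPattern/basic so that the local imports of gen and the automated_test_* helpers resolve:

# Sketch: discover and run the automated test case modules in this directory
import unittest

loader = unittest.TestLoader()
suite = loader.discover(start_dir=".", pattern="automated_test_case_*.py")
unittest.TextTestRunner(verbosity=2).run(suite)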
tests/frequentPattern/basic/automated_test_case_ECLATDiffset.py (56 additions, 0 deletions)
@@ -0,0 +1,56 @@
import unittest
from gen import generate_transactional_dataset
from automated_test_ECLATDiffset import test_pami
import warnings

warnings.filterwarnings("ignore")

class TestExample(unittest.TestCase):
    def test_num_patterns(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that PAMI produced patterns
            self.assertGreater(len(pami), 0, "No patterns were generated by PAMI")

        print("3 test cases for number of patterns have passed")

    def test_equality(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that patterns are generated
            pami_patterns = sorted(list(pami["Patterns"]))
            self.assertTrue(len(pami_patterns) > 0, "No patterns were generated by PAMI")

        print("3 test cases for pattern equality have passed")

    def test_support(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that the support values are valid
            pami.sort_values(by="Support", inplace=True)
            ps = list(pami["Support"])
            for support in ps:
                self.assertTrue(support > 0, "Support value should be greater than 0")

        print("3 test cases for support values have passed")

if __name__ == '__main__':
    unittest.main()
tests/frequentPattern/basic/automated_test_case_apriori.py (56 additions, 0 deletions)
@@ -0,0 +1,56 @@
import unittest
from gen import generate_transactional_dataset
from automated_test_Apriori import test_pami
import warnings

warnings.filterwarnings("ignore")

class TestExample(unittest.TestCase):
    def test_num_patterns(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that PAMI produced patterns
            self.assertGreater(len(pami), 0, "No patterns were generated by PAMI")

        print("3 test cases for number of patterns have passed")

    def test_equality(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that patterns are generated
            pami_patterns = sorted(list(pami["Patterns"]))
            self.assertTrue(len(pami_patterns) > 0, "No patterns were generated by PAMI")

        print("3 test cases for pattern equality have passed")

    def test_support(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that the support values are valid
            pami.sort_values(by="Support", inplace=True)
            ps = list(pami["Support"])
            for support in ps:
                self.assertTrue(support > 0, "Support value should be greater than 0")

        print("3 test cases for support values have passed")

if __name__ == '__main__':
    unittest.main()
tests/frequentPattern/basic/automated_test_case_fpgrowth.py (56 additions, 0 deletions)
@@ -0,0 +1,56 @@
import unittest
from gen import generate_transactional_dataset
from automated_test_FPGrowth import test_pami
import warnings

warnings.filterwarnings("ignore")

class TestExample(unittest.TestCase):
    def test_num_patterns(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that PAMI produced patterns
            self.assertGreater(len(pami), 0, "No patterns were generated by PAMI")

        print("3 test cases for number of patterns have passed")

    def test_equality(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that patterns are generated
            pami_patterns = sorted(list(pami["Patterns"]))
            self.assertTrue(len(pami_patterns) > 0, "No patterns were generated by PAMI")

        print("3 test cases for pattern equality have passed")

    def test_support(self):
        for _ in range(3):
            num_distinct_items = 20
            num_transactions = 1000
            max_items_per_transaction = 20
            items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]
            dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)

            pami = test_pami(dataset)
            # With no second implementation to compare against, just verify that the support values are valid
            pami.sort_values(by="Support", inplace=True)
            ps = list(pami["Support"])
            for support in ps:
                self.assertTrue(support > 0, "Support value should be greater than 0")

        print("3 test cases for support values have passed")

if __name__ == '__main__':
    unittest.main()
@@ -0,0 +1,20 @@
import random
import warnings
warnings.filterwarnings("ignore")

# Generate num_transactions transactions, each a random sample of 1..max_items_per_transaction distinct items
def generate_transactional_dataset(num_transactions, items, max_items_per_transaction):
    dataset = []
    for _ in range(num_transactions):
        num_items = random.randint(1, max_items_per_transaction)
        transaction = random.sample(items, num_items)
        dataset.append(transaction)
    return dataset


# Example parameters:
# num_distinct_items = 20
# num_transactions = 1000
# max_items_per_transaction = 20
# items = ["item-{}".format(i) for i in range(1, num_distinct_items + 1)]

# dataset = generate_transactional_dataset(num_transactions, items, max_items_per_transaction)
# print(dataset)
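A small sketch, mirroring the commented-out example above, of what the generator returns and the comma-separated form the test helpers write to sample.csv (item names and sizes here are illustrative):

# Sketch: inspect the generated transactions and their CSV serialisation
from gen import generate_transactional_dataset

items = ["item-{}".format(i) for i in range(1, 6)]
dataset = generate_transactional_dataset(3, items, 4)
print(dataset)                                   # e.g. [['item-2', 'item-5'], ['item-1'], ...]
print("\n".join(",".join(t) for t in dataset))   # one comma-separated transaction per line, as in sample.csv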
File renamed without changes.