diff --git a/tests/data/base_file.csv b/tests/data/base_file.csv
index 5ff813f..53b2978 100644
--- a/tests/data/base_file.csv
+++ b/tests/data/base_file.csv
@@ -1,21 +1,21 @@
-network,is_valid,is_subnet_of_extra,test_case
-127.0.0.0/17,FALSE,FALSE,localhost
-0.128.0.0/10,TRUE,FALSE,private network
-206.0.0.0/8,TRUE,FALSE,valid
-105.231.36.0/23,TRUE,FALSE,valid
-108.232.0.0/13,TRUE,FALSE,valid
-110.204.32.0/21,TRUE,FALSE,valid
-113.53.128.0/17,TRUE,FALSE,valid
-121.128.0.0/9,TRUE,FALSE,valid
-122.64.0.0/11,TRUE,FALSE,valid
-131.128.0.0/9,TRUE,FALSE,valid
-138.232.0.0/13,TRUE,FALSE,valid
-140.210.168.0/22,TRUE,FALSE,valid
-145.224.0.0/13,TRUE,FALSE,valid
-152.128.0.0/9,TRUE,FALSE,valid
-162.55.64.0/18,TRUE,FALSE,valid
-165.128.0.0/13,TRUE,FALSE,valid
-174.160.0.0/11,TRUE,FALSE,valid
-109.92.213.0/24,TRUE,FALSE,valid
-244.20.0.0/14,TRUE,FALSE,valid
-244.20.0.0/14,TRUE,FALSE,duplicate
+network,in_result,comment
+127.0.0.0/17,1,merge does not handle sorting out localhost
+0.128.0.0/10,1,private network
+206.0.0.0/8,1,
+105.231.36.0/23,1,
+108.232.0.0/13,1,
+110.204.32.0/21,1,
+113.53.128.0/17,1,
+121.128.0.0/9,1,
+122.64.0.0/11,1,
+131.128.0.0/9,1,
+138.232.0.0/13,1,
+140.210.168.0/22,1,
+145.224.0.0/13,1,
+152.128.0.0/9,1,
+162.55.64.0/18,1,
+165.128.0.0/13,1,
+174.160.0.0/11,1,
+109.92.213.0/24,1,
+244.20.0.0/14,1,
+244.20.0.0/14,1,duplicates within each file are not expected so merge does not handle this
diff --git a/tests/data/extra_file.csv b/tests/data/extra_file.csv
index ffd6efc..43723c3 100644
--- a/tests/data/extra_file.csv
+++ b/tests/data/extra_file.csv
@@ -1,22 +1,22 @@
-network,is_valid,is_subnet_of_base,test_case
-2.11.76.0/22,TRUE,FALSE,valid
-115.7.192.0/19,TRUE,FALSE,valid
-12.132.80.0/20,TRUE,FALSE,valid
-82.64.0.0/11,TRUE,FALSE,valid
-194.48.0.0/17,TRUE,FALSE,valid
-197.216.136.0/21,TRUE,FALSE,valid
-249.0.0.0/9,TRUE,FALSE,valid
-32.17.64.0/20,TRUE,FALSE,valid
-38.66.0.0/17,TRUE,FALSE,valid
-40.240.124.0/22,TRUE,FALSE,valid
-44.26.0.0/19,TRUE,FALSE,valid
-46.99.64.0/20,TRUE,FALSE,valid
-55.48.0.0/15,TRUE,FALSE,valid
-74.0.0.0/10,TRUE,FALSE,valid
-77.120.136.0/22,TRUE,FALSE,valid
-131.128.58.0/23,TRUE,TRUE,valid
-244.20.240.0/20,TRUE,TRUE,valid
-108.232.0.0/16,TRUE,TRUE,valid
-122.68.0.0/14,TRUE,TRUE,valid
-121.128.0.0/10,TRUE,TRUE,valid
-121.128.0.0/10,TRUE,TRUE,duplicate
+network,in_result,comment
+2.11.76.0/22,1,
+115.7.192.0/19,1,
+12.132.80.0/20,1,
+82.64.0.0/11,1,
+194.48.0.0/17,1,
+197.216.136.0/21,1,
+249.0.0.0/9,1,
+32.17.64.0/20,1,
+38.66.0.0/17,1,
+40.240.124.0/22,1,
+44.26.0.0/19,1,
+46.99.64.0/20,1,
+55.48.0.0/15,1,
+74.0.0.0/10,1,
+77.120.136.0/22,1,
+131.128.58.0/23,0,subnet of 131.128.0.0/9 in base file
+244.20.240.0/20,0,subnet of 244.20.0.0/14 in base file
+108.232.0.0/16,0,subnet of 108.232.0.0/13 in base file
+122.68.0.0/14,0,subnet of 122.64.0.0/11 in base file
+121.128.0.0/10,0,subnet of 121.128.0.0/9 in base file
+109.92.213.0/24,0,duplicate of an entry in base file
diff --git a/tests/test_merge.py b/tests/test_merge.py
index 2922d7e..2e9a514 100644
--- a/tests/test_merge.py
+++ b/tests/test_merge.py
@@ -23,17 +23,16 @@ def __read_test_vectors(filepath):
     Fixtures for IP networks are under tests/data. Read them and return the list
     of valid networks and the list of individual subnets in the file.
     '''
-    networks = []
-    subnets = []
+    all_networks = []
+    all_in_result = []
     with open(filepath, "r") as f:
         lines = f.readlines()[1:]
         for line in lines:
-            network, _, is_subnet, test_case = line.split(',')
-            if test_case.strip() == "valid":
-                networks.append(network)
-            if is_subnet.strip() == "TRUE":
-                subnets.append(network)
-    return networks, subnets
+            network, in_result, _comment = line.split(',')
+            all_networks.append(network)
+            if in_result.strip() == "1":
+                all_in_result.append(network)
+    return all_networks, all_in_result
 
 def test_merge_from_fixtures(tmp_path):
     '''
@@ -42,9 +41,9 @@ def test_merge_from_fixtures(tmp_path):
     and subnets are merged into the root network appropriately.
     '''
     testdir = Path(__file__).parent
-    base_nets, base_nets_to_exclude = __read_test_vectors(testdir / "data/base_file.csv")
+    base_nets, base_results = __read_test_vectors(testdir / "data/base_file.csv")
     base_path = tmp_path / "base.txt"
-    extra_nets, extra_nets_to_exclude = __read_test_vectors(testdir / "data/extra_file.csv")
+    extra_nets, extra_results = __read_test_vectors(testdir / "data/extra_file.csv")
     extra_path = tmp_path / "extra.txt"
     # write the networks to disk, generating ASNs for each network
     generate_ip_file(base_path, build_file_lines(base_nets, generate_asns(len(base_nets))))
@@ -54,10 +53,9 @@
     general_merge(base_path, extra_path, None, outpath)
     with open(outpath, "r") as f:
         l = f.readlines()
-    final_networks = {line.split()[0] for line in l}
-    # the unique set of networks, excluding invalid, duplicate, or subnets
-    expected_networks = set(base_nets + extra_nets) - set(base_nets_to_exclude + extra_nets_to_exclude )
-    assert final_networks == expected_networks
+    final_networks = [line.split()[0] for line in l]
+    # the merged list of networks, excluding duplicates and subnets
+    assert sorted(final_networks) == sorted(base_results + extra_results)
 
 
 def test_merge(tmp_path):
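
Note on the new fixture semantics: the `in_result` column encodes that an extra-file entry is expected to drop out of the merged output exactly when it duplicates, or is a subnet of, a network already listed in the base file. The sketch below cross-checks those expectations using only the standard-library `ipaddress` module; it is illustrative only (it is not the project's `general_merge`), and the `tests/data` path is an assumption that the script runs from the repository root.

```python
# Illustrative cross-check of the fixture expectations; not general_merge itself.
import csv
import ipaddress
from pathlib import Path

DATA_DIR = Path("tests/data")  # assumed location, relative to the repository root


def read_fixture(path):
    """Return (network, in_result) pairs from one fixture CSV."""
    with open(path, newline="") as f:
        return [
            (ipaddress.ip_network(row["network"]), row["in_result"] == "1")
            for row in csv.DictReader(f)
        ]


base = read_fixture(DATA_DIR / "base_file.csv")
extra = read_fixture(DATA_DIR / "extra_file.csv")
base_networks = [net for net, _ in base]

for net, expected_in_result in extra:
    # A network counts as a subnet of itself, so subnet_of() covers both the
    # duplicate row and the strict-subnet rows in a single check.
    covered = any(net.subnet_of(b) for b in base_networks)
    assert expected_in_result == (not covered), f"unexpected in_result flag for {net}"
```

Base-file rows are all flagged `in_result=1` (the merge is not expected to filter localhost or intra-file duplicates), so only the extra-file rows are checked here.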