initial commit

This commit is contained in:
Oscar Plaisant 2024-06-25 11:54:21 +02:00
commit 8bd048085a
72 changed files with 1790 additions and 0 deletions

BIN
src/.DS_Store vendored Normal file

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

63
src/borda.py Normal file
View File

@ -0,0 +1,63 @@
import numpy as np
import numba as nb
from itertools import permutations
from fastcache import lru_cache
from tqdm import tqdm
from tprint import tprint
@nb.jit
def simple_borda(rankings: list[list[int]]) -> list[int]:
    """Reference Borda count.

    Each value earns (n - position) points in every ranking, where n is the
    ranking length; values are returned sorted by total points, best first.
    Ties keep the order in which values appear in rankings[0] (stable sort).

    Args:
        rankings: rankings to aggregate; all must contain the same values.

    Returns:
        The values of rankings[0], ordered by descending total points.
    """
    n = len(rankings[0])
    # total points per value, keyed in the order of the first ranking
    totals = dict.fromkeys(rankings[0], 0)
    for ranking in rankings:
        for position, value in enumerate(ranking):
            totals[value] += n - position
    # highest total first; Python's sort is stable, so ties keep key order
    return sorted(totals, key=lambda value: totals[value], reverse=True)
@nb.jit
def borda(rankings: list[list[int]]) -> list[int]:
    """Vectorised Borda count for permutation rankings.

    For a permutation p of 0..n-1, np.argsort(p) is its inverse permutation,
    i.e. it maps each value to its position in p. Summing positions over all
    rankings and arg-sorting the totals orders the values from best (lowest
    summed position, i.e. most Borda points) to worst.

    Args:
        rankings: rankings to aggregate; each row is assumed to be a
            permutation of 0..n-1 (TODO confirm — argsort is only the
            inverse permutation under that assumption).

    Returns:
        Value indices ordered by ascending summed position.
    """
    n_values = len(rankings[0])
    position_totals = np.zeros(n_values)
    for one_ranking in rankings:
        # inverse permutation: value -> position in this ranking
        position_totals = position_totals + np.argsort(one_ranking)
    return np.argsort(position_totals)
if __name__ == '__main__':
    # Local import: islice is only needed by this self-test, and it replaces
    # the hand-rolled `take(idx, iterator)` generator (same semantics:
    # yield the first k items of an iterator).
    from itertools import islice

    # Small smoke test on a fixed set of permutations of 0..4.
    ranks = np.array([[0, 1, 2, 3, 4],
                      [0, 1, 3, 2, 4],
                      [4, 1, 2, 0, 3],
                      [4, 1, 0, 2, 3],
                      [4, 1, 3, 2, 0]])
    print(borda(ranks))

    # Randomised agreement check: sample rows from the first 20000
    # permutations of range(20) and report any profile on which the
    # vectorised `borda` disagrees with the reference `simple_borda`
    # (disagreements can still occur on ties, since np.argsort's default
    # sort is not stable).
    ranks = np.array(list(islice(permutations(range(20)), 20000)))
    for _ in tqdm(range(500)):
        selected_rows = np.random.randint(ranks.shape[0], size=20000)
        selected = ranks[selected_rows, :]
        if (borda(selected) != simple_borda(selected)).any():
            print(*borda(selected))
            print(*simple_borda(selected))

View File

@ -0,0 +1 @@
{"duration": 51.82469201087952, "input_args": {"parameter": "'p_color'", "summed_attribute": "'lo_quantity'", "criterion": "('c_city', 'p_category', 'p_brand', 's_city', 's_region')", "length": "2", "authorized_parameter_values": "('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 
'snow', 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')"}, "time": 1717599488.4139688}

View File

@ -0,0 +1 @@
{"duration": 0.5613329410552979, "input_args": {"parameter": "'p_color'", "summed_attribute": "'lo_quantity'", "criterion": "('c_city', 'p_category')", "length": "2", "authorized_parameter_values": "('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', 'springgreen', 'steelblue', 
'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')"}, "time": 1717683542.5204601}

View File

@ -0,0 +1 @@
{"duration": 0.8030781745910645, "input_args": {"parameter": "'p_color'", "summed_attribute": "'lo_quantity'", "criterion": "('c_city', 'p_category', 'p_brand', 's_city')", "length": "2", "authorized_parameter_values": "('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', 
'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')"}, "time": 1717674857.170329}

View File

@ -0,0 +1 @@
{"duration": 0.0308530330657959, "input_args": {"parameter": "'departure_airport'", "summed_attribute": "'nb_flights'", "criterion": "('departure_hour',)", "length": "3", "authorized_parameter_values": "['ATL', 'ORD', 'DFW', 'DEN', 'LAX', 'IAH', 'LAS', 'SFO', 'PHX', 'MCO', 'SEA', 'CLT', 'MSP', 'LGA', 'DTW', 'EWR', 'BOS', 'BWI', 'SLC', 'JFK']"}, "time": 1717674748.119753}

View File

@ -0,0 +1 @@
{"duration": 0.7943480014801025, "input_args": {"parameter": "'p_color'", "summed_attribute": "'lo_quantity'", "criterion": "('c_city', 'p_category', 'p_brand')", "length": "2", "authorized_parameter_values": "('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', 'springgreen', 
'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')"}, "time": 1717599509.3213089}

View File

@ -0,0 +1 @@
{"duration": 28.039510011672974, "input_args": {"parameter": "'p_color'", "summed_attribute": "'lo_quantity'", "criterion": "('c_city', 'p_color')", "length": "2", "authorized_parameter_values": "('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', 'springgreen', 'steelblue', 
'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')"}, "time": 1717599431.891778}

View File

@ -0,0 +1 @@
{"duration": 0.0630960464477539, "input_args": {"parameter": "'departure_airport'", "summed_attribute": "'nb_flights'", "criterion": "('airline', 'day', 'month', 'year')", "length": "3", "authorized_parameter_values": "['ATL', 'ORD', 'DFW', 'DEN', 'LAX', 'IAH', 'LAS', 'SFO', 'PHX', 'MCO', 'SEA', 'CLT', 'MSP', 'LGA', 'DTW', 'EWR', 'BOS', 'BWI', 'SLC', 'JFK']"}, "time": 1717674727.880703}

View File

@ -0,0 +1 @@
{"duration": 0.5494892597198486, "input_args": {"parameter": "'p_color'", "summed_attribute": "'lo_quantity'", "criterion": "('c_city',)", "length": "2", "authorized_parameter_values": "('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', 'springgreen', 'steelblue', 'tan', 'teal', 
'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')"}, "time": 1717683610.694644}

View File

@ -0,0 +1 @@
{"duration": 38.59127497673035, "input_args": {"parameter": "'p_color'", "summed_attribute": "'lo_quantity'", "criterion": "('c_city', 'p_category', 'p_brand', 'p_type', 'p_container', 's_city', 's_nation', 's_region')", "length": "2", "authorized_parameter_values": "('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 
'slateblue', 'slategray', 'slategrey', 'snow', 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')"}, "time": 1717680554.5963578}

View File

@ -0,0 +1,56 @@
# first line: 50
@memory.cache  # persistent memoization (joblib): identical args reuse the cached result
def find_orderings(parameter: str, summed_attribute: str, criterion: tuple[str, ...],
                   length: int,
                   authorized_parameter_values: list[str] =None
                   ) -> list[list[str]]:
    """Gather the list of every ordering returned by queries using given values
    of parameter, summed_attribute, and all given values of criterion.

    One GROUP BY query is issued per element of `criterion`; the orderings
    extracted from each result table are pooled, then filtered/truncated to
    the requested length.

    Args:
        parameter (str): The value of the parameter attribute in the query generator.
        summed_attribute (str): The attribute that you want to sum in order to sort the values.
        criterion (tuple[str]): The list of attributes that you want to group the query by.
        length (int): The length of orderings, hence the number of different
            values of parameter that you consider in the query.
        authorized_parameter_values (list[str], optional): Explicit list of
            parameter values to compare; when None, the query generator's own
            default list is kept.

    Returns:
        list[list]: The list of all found orderings.
            NOTE(review): the code actually returns a numpy array of the
            length-`length` orderings, not a plain list as annotated.
    """
    # instantiate the query generator
    qg = QUERY_PARAM_GB_FACTORY(parameter=parameter,
                                summed_attribute=summed_attribute,
                                criteria=None)
    if authorized_parameter_values is None:
        # NOTE(review): the commented-out `[:length]` suggests this branch once
        # trimmed the value list; as written the assignment is a no-op — confirm intent
        qg.authorized_parameter_values = qg.authorized_parameter_values#[:length]
    else:
        qg.authorized_parameter_values = authorized_parameter_values#[:length]
    # pool of every ordering found; in each per-criteria result,
    # the key is the value found in the criteria column
    orderings = list()
    for criteria in criterion:
        qg.criteria = criteria
        # if VERBOSE: print(repr(QG))
        table = query(str(qg))
        if VERBOSE:
            print(f"request result with criteria '{criteria}' :")
            tprint(table, limit=10)
        table_orders = odrk.get_all_orderings_from_table(table)
        # pprint(table_orders, compact=True, width=1000)
        # update the global list of all found orders
        orderings.extend(table_orders.values())
    # keep only orders that are of the specified length
    # that means removing too short ones, and slicing too long ones
    correct_length_orderings = np.array(
        [ordrng[:length] for ordrng in orderings if len(ordrng) >= length]
    )
    if VERBOSE:
        print(f"found {len(correct_length_orderings)} orderings :")
        print(correct_length_orderings)
        # tprint(correct_length_orderings)
    return correct_length_orderings

View File

@ -0,0 +1 @@
{"duration": 0.014148950576782227, "input_args": {"q": "\"\\n SELECT departure_airport, airline, SUM(nb_flights)\\n FROM fact_table\\n INNER JOIN airport_dim ON airport_dim.iata_code = fact_table.departure_airport\\n NATURAL JOIN hour_dim\\n INNER JOIN time_dim ON time_dim.day = fact_table.date\\n WHERE departure_airport IN ('ATL', 'ORD', 'DFW', 'DEN', 'LAX', 'IAH', 'LAS', 'SFO', 'PHX', 'MCO', 'SEA', 'CLT', 'MSP', 'LGA', 'DTW', 'EWR', 'BOS', 'BWI', 'SLC', 'JFK')\\n GROUP BY departure_airport, airline\\n ORDER BY SUM(nb_flights) DESC;\\n \""}, "time": 1717674727.832313}

View File

@ -0,0 +1 @@
{"duration": 12.216989040374756, "input_args": {"q": "\"\\n SELECT p_color, p_container, SUM(lo_quantity)\\n FROM lineorder\\n INNER JOIN part ON lo_partkey = p_partkey\\n\\n WHERE p_color IN ('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', 'springgreen', 'steelblue', 'tan', 
'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')\\n GROUP BY p_color, p_container\\n ORDER BY SUM(lo_quantity) DESC;\\n \""}, "time": 1717680541.325936}

View File

@ -0,0 +1 @@
{"duration": 15.925843238830566, "input_args": {"q": "\"\\n SELECT p_color, c_city, SUM(lo_quantity)\\n FROM lineorder\\n INNER JOIN customer ON lo_custkey = c_custkey\\nINNER JOIN part ON lo_partkey = p_partkey\\n\\n WHERE p_color IN ('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 
'snow', 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')\\n GROUP BY p_color, c_city\\n ORDER BY SUM(lo_quantity) DESC;\\n \""}, "time": 1717599419.778661}

View File

@ -0,0 +1 @@
{"duration": 12.698099851608276, "input_args": {"q": "\"\\n SELECT p_color, s_region, SUM(lo_quantity)\\n FROM lineorder\\n INNER JOIN part ON lo_partkey = p_partkey\\nINNER JOIN supplier ON lo_suppkey = s_suppkey\\n\\n WHERE p_color IN ('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 
'snow', 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')\\n GROUP BY p_color, s_region\\n ORDER BY SUM(lo_quantity) DESC;\\n \""}, "time": 1717599488.394772}

View File

@ -0,0 +1 @@
{"duration": 13.360551118850708, "input_args": {"q": "\"\\n SELECT p_color, s_city, SUM(lo_quantity)\\n FROM lineorder\\n INNER JOIN part ON lo_partkey = p_partkey\\nINNER JOIN supplier ON lo_suppkey = s_suppkey\\n\\n WHERE p_color IN ('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 
'snow', 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')\\n GROUP BY p_color, s_city\\n ORDER BY SUM(lo_quantity) DESC;\\n \""}, "time": 1717599475.656039}

View File

@ -0,0 +1 @@
{"duration": 0.02377486228942871, "input_args": {"q": "\"\\n SELECT departure_airport, day, SUM(nb_flights)\\n FROM fact_table\\n INNER JOIN airport_dim ON airport_dim.iata_code = fact_table.departure_airport\\n NATURAL JOIN hour_dim\\n INNER JOIN time_dim ON time_dim.day = fact_table.date\\n WHERE departure_airport IN ('ATL', 'ORD', 'DFW', 'DEN', 'LAX', 'IAH', 'LAS', 'SFO', 'PHX', 'MCO', 'SEA', 'CLT', 'MSP', 'LGA', 'DTW', 'EWR', 'BOS', 'BWI', 'SLC', 'JFK')\\n GROUP BY departure_airport, day\\n ORDER BY SUM(nb_flights) DESC;\\n \""}, "time": 1717674727.8571048}

View File

@ -0,0 +1 @@
{"duration": 0.00795602798461914, "input_args": {"q": "\"\\n SELECT departure_airport, month, SUM(nb_flights)\\n FROM fact_table\\n INNER JOIN airport_dim ON airport_dim.iata_code = fact_table.departure_airport\\n NATURAL JOIN hour_dim\\n INNER JOIN time_dim ON time_dim.day = fact_table.date\\n WHERE departure_airport IN ('ATL', 'ORD', 'DFW', 'DEN', 'LAX', 'IAH', 'LAS', 'SFO', 'PHX', 'MCO', 'SEA', 'CLT', 'MSP', 'LGA', 'DTW', 'EWR', 'BOS', 'BWI', 'SLC', 'JFK')\\n GROUP BY departure_airport, month\\n ORDER BY SUM(nb_flights) DESC;\\n \""}, "time": 1717674727.8699038}

View File

@ -0,0 +1 @@
{"duration": 0.00851297378540039, "input_args": {"q": "\"\\n SELECT departure_airport, year, SUM(nb_flights)\\n FROM fact_table\\n INNER JOIN airport_dim ON airport_dim.iata_code = fact_table.departure_airport\\n NATURAL JOIN hour_dim\\n INNER JOIN time_dim ON time_dim.day = fact_table.date\\n WHERE departure_airport IN ('ATL', 'ORD', 'DFW', 'DEN', 'LAX', 'IAH', 'LAS', 'SFO', 'PHX', 'MCO', 'SEA', 'CLT', 'MSP', 'LGA', 'DTW', 'EWR', 'BOS', 'BWI', 'SLC', 'JFK')\\n GROUP BY departure_airport, year\\n ORDER BY SUM(nb_flights) DESC;\\n \""}, "time": 1717674727.8793159}

View File

@ -0,0 +1 @@
{"duration": 12.400226831436157, "input_args": {"q": "\"\\n SELECT p_color, p_type, SUM(lo_quantity)\\n FROM lineorder\\n INNER JOIN part ON lo_partkey = p_partkey\\n\\n WHERE p_color IN ('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', 'springgreen', 'steelblue', 'tan', 
'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')\\n GROUP BY p_color, p_type\\n ORDER BY SUM(lo_quantity) DESC;\\n \""}, "time": 1717680529.093871}

View File

@ -0,0 +1 @@
{"duration": 12.51117491722107, "input_args": {"q": "\"\\n SELECT p_color, p_brand, SUM(lo_quantity)\\n FROM lineorder\\n INNER JOIN part ON lo_partkey = p_partkey\\n\\n WHERE p_color IN ('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', 'springgreen', 'steelblue', 'tan', 
'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')\\n GROUP BY p_color, p_brand\\n ORDER BY SUM(lo_quantity) DESC;\\n \""}, "time": 1717599462.2399411}

View File

@ -0,0 +1 @@
{"duration": 11.881813049316406, "input_args": {"q": "\"\\n SELECT p_color, p_color, SUM(lo_quantity)\\n FROM lineorder\\n INNER JOIN part ON lo_partkey = p_partkey\\n\\n WHERE p_color IN ('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', 'springgreen', 'steelblue', 'tan', 
'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')\\n GROUP BY p_color, p_color\\n ORDER BY SUM(lo_quantity) DESC;\\n \""}, "time": 1717599431.864533}

View File

@ -0,0 +1 @@
{"duration": 0.0241241455078125, "input_args": {"q": "\"\\n SELECT departure_airport, departure_hour, SUM(nb_flights)\\n FROM fact_table\\n INNER JOIN airport_dim ON airport_dim.iata_code = fact_table.departure_airport\\n NATURAL JOIN hour_dim\\n INNER JOIN time_dim ON time_dim.day = fact_table.date\\n WHERE departure_airport IN ('ATL', 'ORD', 'DFW', 'DEN', 'LAX', 'IAH', 'LAS', 'SFO', 'PHX', 'MCO', 'SEA', 'CLT', 'MSP', 'LGA', 'DTW', 'EWR', 'BOS', 'BWI', 'SLC', 'JFK')\\n GROUP BY departure_airport, departure_hour\\n ORDER BY SUM(nb_flights) DESC;\\n \""}, "time": 1717674748.1134489}

View File

@ -0,0 +1 @@
{"duration": 12.596222877502441, "input_args": {"q": "\"\\n SELECT p_color, p_category, SUM(lo_quantity)\\n FROM lineorder\\n INNER JOIN part ON lo_partkey = p_partkey\\n\\n WHERE p_color IN ('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 'snow', 'springgreen', 'steelblue', 'tan', 
'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')\\n GROUP BY p_color, p_category\\n ORDER BY SUM(lo_quantity) DESC;\\n \""}, "time": 1717599449.712944}

View File

@ -0,0 +1 @@
{"duration": 13.08634901046753, "input_args": {"q": "\"\\n SELECT p_color, s_nation, SUM(lo_quantity)\\n FROM lineorder\\n INNER JOIN part ON lo_partkey = p_partkey\\nINNER JOIN supplier ON lo_suppkey = s_suppkey\\n\\n WHERE p_color IN ('aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', 'blueviolet', 'brown', 'burlywood', 'cadetblue', 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', 'cornsilk', 'crimson', 'cyan', 'darkblue', 'darkcyan', 'darkgoldenrod', 'darkgray', 'darkgreen', 'darkgrey', 'darkkhaki', 'darkmagenta', 'darkolivegreen', 'darkorange', 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen', 'darkslateblue', 'darkslategray', 'darkslategrey', 'darkturquoise', 'darkviolet', 'deeppink', 'deepskyblue', 'dimgray', 'dimgrey', 'dodgerblue', 'firebrick', 'floralwhite', 'forestgreen', 'fuchsia', 'gainsboro', 'ghostwhite', 'gold', 'goldenrod', 'gray', 'green', 'greenyellow', 'grey', 'honeydew', 'hotpink', 'indianred', 'indigo', 'ivory', 'khaki', 'lavender', 'lavenderblush', 'lawngreen', 'lemonchiffon', 'lightblue', 'lightcoral', 'lightcyan', 'lightgoldenrodyellow', 'lightgray', 'lightgreen', 'lightgrey', 'lightpink', 'lightsalmon', 'lightseagreen', 'lightskyblue', 'lightslategray', 'lightslategrey', 'lightsteelblue', 'lightyellow', 'lime', 'limegreen', 'linen', 'magenta', 'maroon', 'mediumaquamarine', 'mediumblue', 'mediumorchid', 'mediumpurple', 'mediumseagreen', 'mediumslateblue', 'mediumspringgreen', 'mediumturquoise', 'mediumvioletred', 'midnightblue', 'mintcream', 'mistyrose', 'moccasin', 'navajowhite', 'navy', 'oldlace', 'olive', 'olivedrab', 'orange', 'orangered', 'orchid', 'palegoldenrod', 'palegreen', 'paleturquoise', 'palevioletred', 'papayawhip', 'peachpuff', 'peru', 'pink', 'plum', 'powderblue', 'purple', 'rebeccapurple', 'red', 'rosybrown', 'royalblue', 'saddlebrown', 'salmon', 'sandybrown', 'seagreen', 'seashell', 'sienna', 'silver', 'skyblue', 'slateblue', 'slategray', 'slategrey', 
'snow', 'springgreen', 'steelblue', 'tan', 'teal', 'thistle', 'tomato', 'turquoise', 'violet', 'wheat', 'white', 'whitesmoke', 'yellow', 'yellowgreen')\\n GROUP BY p_color, s_nation\\n ORDER BY SUM(lo_quantity) DESC;\\n \""}, "time": 1717680554.546965}

View File

@ -0,0 +1,8 @@
# first line: 29
@memory.cache # persistent memoization: joblib caches results on disk across runs
def query(q: str) -> list[tuple]:
    """Execute a given query and return the result in a python list[tuple].

    Args:
        q: The SQL query text to execute.

    Returns:
        Every row of the result set, as a list of tuples.
    """
    # NOTE(review): relies on module-level CUR (DB cursor) and VERBOSE flag,
    # both defined earlier in this file (outside this block).
    if VERBOSE: print(f'sending query : {q}')
    res = CUR.execute(str(q))
    if VERBOSE: print("got response", res)
    return res.fetchall()

425
src/concentration_test.py Normal file
View File

@ -0,0 +1,425 @@
import matplotlib.pyplot as plt
from matplotlib.colors import CSS4_COLORS
import numpy as np
from scipy.stats import norm as Norm, beta as Beta, t as Student
from tprint import tprint
import orderankings as odrk
from querying import find_orderings
from kemeny_young import kendall_tau_dist, rank_aggregation
from tqdm import tqdm
from collections import Counter, defaultdict
import joblib
from functools import partial
import random
# Random number generator for the whole program (seeded for reproducibility)
RNG = np.random.default_rng(1234)
# Verbosity toggle: the second assignment deliberately overrides the first;
# comment it out (or swap the two lines) to re-enable verbose logging.
VERBOSE = True
VERBOSE = False
################## DATA SETTINGS (parameters, hypothesis...) ###################
# """ comment this line when using the SSB dataset
# SSB dataset settings # {{{
PARAMETER = "p_color"
SUMMED_ATTRIBUTE = "lo_quantity"
# SUMMED_ATTRIBUTE = "lo_revenue"
# SUMMED_ATTRIBUTE = "lo_extendedprice"
LENGTH = 2
authorized_parameter_values = {
"p_size": tuple(map(int, range(50))),
"p_color": tuple(CSS4_COLORS.keys()),
}
AUTHORIZED_PARAMETER_VALUES = authorized_parameter_values[PARAMETER]
CRITERION = (
##### customer table
# "c_region",
"c_city",
# "c_nation",
##### part table
"p_category",
"p_brand",
# "p_mfgr",
# "p_color",
# "p_type",
# "p_container",
##### supplier table
"s_city",
# "s_nation",
# "s_region",
##### order date
# "D_DATE",
# "D_DATEKEY",
# "D_DATE",
# "D_DAYOFWEEK",
# "D_MONTH",
# "D_YEAR",
# "D_YEARMONTHNUM",
# "D_YEARMONTH",
# "D_DAYNUMINWEEK"
# "D_DAYNUMINMONTH",
# "D_DAYNUMINYEAR",
# "D_MONTHNUMINYEAR",
# "D_WEEKNUMINYEAR",
# "D_SELLINGSEASON",
# "D_LASTDAYINWEEKFL",
# "D_LASTDAYINMONTHFL",
# "D_HOLIDAYFL",
# "D_WEEKDAYFL",
)
HYPOTHESIS_ORDERING = ("bisque", "aquamarine")
HYPOTHESIS_ORDERING = ("bisque", "blue")
# HYPOTHESIS_ORDERING = [2, 32]
# HYPOTHESIS_ORDERING = [30, 18]
# HYPOTHESIS_ORDERING = [37, 49, 10]
# }}}
""" # flight_delay dataset settings {{{
PARAMETER = "departure_airport"
SUMMED_ATTRIBUTE = "nb_flights"
LENGTH = 3
CRITERION = (
# "airline",
"departure_hour", # simpson's paradox ?
# "day",
# "month",
# "year",
)
GLOBAL_ORDERING = ['ATL', 'ORD', 'DFW', 'DEN', 'LAX', 'IAH', 'LAS',
'SFO', 'PHX', 'MCO', 'SEA', 'CLT', 'MSP', 'LGA',
'DTW', 'EWR', 'BOS', 'BWI', 'SLC', 'JFK']
AUTHORIZED_PARAMETER_VALUES = GLOBAL_ORDERING
# Correct hypothesis for each length (so the loss converges to 0)
CORRECT_ORDERINGS = defaultdict(lambda: GLOBAL_ORDERING)
CORRECT_ORDERINGS[2] = ['ATL', 'DEN']
CORRECT_ORDERINGS[3] = ['ATL', 'DFW', 'ORD']
CORRECT_ORDERINGS[4] = ['ATL', 'DEN', 'DFW', 'ORD']
CORRECT_ORDERINGS[5] = ['ATL', 'ORD', 'DFW', 'DEN', 'LAX']
# now select the right one according to the LENGTH
CORRECT_ORDERING = CORRECT_ORDERINGS[LENGTH][:LENGTH]
# Use the correct ordering
HYPOTHESIS_ORDERING = CORRECT_ORDERING
print(HYPOTHESIS_ORDERING)
# HYPOTHESIS_ORDERING = ['ATL', 'ORD', 'DWF', 'DEN', 'LAX']
# HYPOTHESIS_ORDERING = ['ATL', 'ORD', 'DFW', 'LAX', 'DEN', 'IAH'][:LENGTH]
# HYPOTHESIS_ORDERING = ['ATL', 'ORD', 'DFW', 'DEN', 'LAS', 'LAX', 'IAH'][:LENGTH]
# HYPOTHESIS_ORDERING = ['ORD', 'ATL', 'DEN', 'DFW', 'LAX'] # interesting loss curve
assert len(HYPOTHESIS_ORDERING) == LENGTH
# }}}
# """
def orderings_average_loss(orderings: list[list[str]], truth: list[str]) -> float:# {{{
    """Average of the Kendall tau distances between ``truth`` and each ordering.

    Both the reference and the observed orderings are converted to rankings,
    then the ranking-level average loss is computed.
    """
    reference = odrk.rankings_from_orderings([truth])[0]
    observed = odrk.rankings_from_orderings(orderings)
    return rankings_average_loss(observed, reference)# }}}
def rankings_average_loss(rankings: list[list[int]], truth: list[int]) -> float:# {{{
    """Mean Kendall tau distance from ``truth`` to each ranking in ``rankings``."""
    total = 0.0
    for candidate in rankings:
        total += kendall_tau_dist(candidate, truth)
    # Dividing by the number of rankings (apparently the normalization that
    # works well here) gives the mean distance.
    return total / len(rankings)# }}}
def kmny_dist_loss(orderings: list[list[str]], truth: list[str]) -> int:# {{{
    """Kendall tau distance between ``truth`` and the Kemeny-Young
    aggregation of ``orderings``."""
    # Aggregate all orderings into one consensus ranking...
    _, consensus_rank = rank_aggregation(odrk.rankings_from_orderings(orderings))
    # ...map it back to an ordering, then measure how far it lies from truth.
    consensus = odrk.ordering_from_ranking(consensus_rank, truth)
    return kendall_tau_dist(odrk.ranking_from_ordering(consensus),
                            odrk.ranking_from_ordering(truth))# }}}
def get_loss_progression(): # {{{
    """Compute two loss progressions as more and more orderings are used.

    Fetches the grouped orderings, shuffles them, then for each prefix of
    size 1..n computes:
      - the average-distance loss of the prefix w.r.t. HYPOTHESIS_ORDERING,
      - the Kemeny-Young aggregation loss of the same prefix.

    Returns:
        (average_losses, kendal_aggregation_losses): two lists, one entry
        per prefix size.
    """
    grouped_orderings = find_orderings(parameter=PARAMETER,
                                       summed_attribute=SUMMED_ATTRIBUTE,
                                       criterion=CRITERION,
                                       length=LENGTH)
    # shuffle so that every call considers the orderings in a fresh order
    RNG.shuffle(grouped_orderings)
    average_losses = []
    kendal_aggregation_losses = []
    for nb_considered_orderings in range(1, len(grouped_orderings)+1):
        # loss as the average distance from truth to all considered orderings
        considered_orderings = grouped_orderings[:nb_considered_orderings]
        loss = orderings_average_loss(orderings=considered_orderings,
                                      truth=HYPOTHESIS_ORDERING)
        # loss as the distance between truth and the aggregation
        kdl_agg_loss = kmny_dist_loss(orderings=considered_orderings,
                                      truth=HYPOTHESIS_ORDERING)
        kendal_aggregation_losses.append(kdl_agg_loss)
        if VERBOSE:
            print(f"using {nb_considered_orderings} orderings")
            tprint(considered_orderings)
            print("truth :", HYPOTHESIS_ORDERING)
            print("loss =", loss)
        average_losses.append(loss)
    return average_losses, kendal_aggregation_losses
# }}}
def plot_loss_progression(): # {{{
    """Plot the progression of losses when using more and more of the values
    (see get_loss_progression).

    Draws two curves on the current pyplot figure:
      - orange: average-distance loss,
      - green: Kemeny-Young aggregation loss.

    NOTE(review): the N=20 runs are summed element-wise and never divided
    by N, so the plotted curves are cumulative sums rather than averages —
    confirm this is intended.
    """
    N = 20
    avg_loss_progression, kdl_agg_loss_progression = get_loss_progression()
    avg_loss_progression = np.array(avg_loss_progression)
    kdl_agg_loss_progression = np.array(kdl_agg_loss_progression)
    # accumulate N-1 further runs element-wise (all runs have equal length)
    for _ in tqdm(range(N-1), leave=False):
        avg_lp, kmny_lp = get_loss_progression()
        avg_loss_progression += avg_lp
        kdl_agg_loss_progression += kmny_lp
    # print(progression)
    if VERBOSE:
        print(avg_loss_progression)
        print(kdl_agg_loss_progression)
    plt.plot(avg_loss_progression, color="orange")
    plt.plot(kdl_agg_loss_progression, color="green")
# }}}
def get_mode_loss_progression(all_orderings: list[list[str]],
                              number_of_steps: int,
                              orders_added_each_step: int =1) -> np.ndarray:
    """Track whether the modal (most frequent) ordering matches the hypothesis
    as more sampled orderings are accumulated.

    Args:
        all_orderings: Population of orderings to sample from (with replacement).
        number_of_steps: Number of accumulation steps to perform.
        orders_added_each_step: Orderings drawn and added at each step.

    Returns:
        A 1-D numpy float array of 0/1 losses, each step's loss repeated
        ``orders_added_each_step`` times (so its length matches the total
        number of orderings consumed).
    """
    # all_rankings = odrk.rankings_from_orderings(all_orderings)
    # considered_orderings = list(RNG.choice(all_orderings, size=orders_added_each_step))
    considered_orderings = list(random.choices(all_orderings, k=orders_added_each_step))
    # count occurrences of each ordering (tuples, since lists are unhashable)
    orderings_count = Counter(map(tuple, considered_orderings))
    # loss progression when adding more and more orderings
    loss_history = np.zeros(number_of_steps)
    # # random permutation of the orderings
    # permuted_orderings = np.random.permutation(all_orderings)
    for idx in range(number_of_steps):
        # new_orders = RNG.choice(all_orderings, size=orders_added_each_step)
        new_orders = random.choices(all_orderings, k=orders_added_each_step)
        # new_orders = permuted_orderings[orders_added_each_step*idx:orders_added_each_step*(idx+1)]
        # considered_orderings.extend(new_orders)
        # update the counter of orderings occurrences
        orderings_count.update(Counter(map(tuple, new_orders)))
        # the most common (modal) ordering
        modal_ordering = orderings_count.most_common()[0][0]
        modal_ordering = np.array(modal_ordering)
        # if VERBOSE: print(modal_ordering)
        # the loss is 0 if the modal ordering equals the hypothesis, 1 otherwise
        loss = int(not np.array_equal(modal_ordering, HYPOTHESIS_ORDERING))
        # loss = int((modal_ordering == HYPOTHESIS_ORDERING).all())
        # loss = int(all(map(lambda x: x[0]==x[1],
        #                    zip(modal_ordering, HYPOTHESIS_ORDERING))))
        # add loss to the list of losses
        loss_history[idx] = loss
        if VERBOSE:
            # print(loss_history, HYPOTHESIS_ORDERING)
            print(orderings_count.most_common(1)[0])
    return np.repeat(loss_history, orders_added_each_step)
################################################################################
def plot_modal_losses():
    """Estimate and plot, as a function of the sample size, the probability
    that the modal (most frequent) ordering differs from HYPOTHESIS_ORDERING.

    Runs N simulations in parallel (see get_mode_loss_progression), then
    plots the mean loss, its standard deviation, and a Student confidence
    band on the current pyplot figure.
    """
    ###################
    # sampling settings
    N = 100 # number of repetitions of the experiment
    max_number_of_orders = 7500 # max sample size
    GRANULARITY = 12 # granularity of the sampling (orders by iteration)
    number_of_steps = max_number_of_orders // GRANULARITY
    all_orderings = find_orderings(
            parameter=PARAMETER,
            summed_attribute=SUMMED_ATTRIBUTE,
            criterion=CRITERION,
            length=LENGTH,
            authorized_parameter_values=AUTHORIZED_PARAMETER_VALUES)
    print(f"there are {all_orderings.size} orders in total :")
    tprint(all_orderings, limit=10)
    # make get_mode_loss_progression parallelizable
    gmlp = joblib.delayed(get_mode_loss_progression)
    ####
    # Aggregate multiple simulations
    # don't use the tqdm progress bar if there are some logs
    range_N = range(N) if VERBOSE else tqdm(range(N))
    # for my 8-core computer, n_jobs=7 is empirically the best value
    loss_history = joblib.Parallel(n_jobs=7)(
            gmlp(all_orderings,
                 number_of_steps,
                 orders_added_each_step=GRANULARITY)
            for _ in range_N
            )
    loss_history = np.array(loss_history)
    # the sum of losses for each number of steps
    losses = np.sum(loss_history, axis=0)
    if VERBOSE: print("losses :", losses, sep="\n")
    #####
    # average
    # since losses is the sum of losses, losses/N is the average
    mean = losses / N
    plt.plot(mean, color="green", label="loss average")
    #####
    # standard deviation
    # variance is (average of squares) - (square of the average)
    # since we only have 1 or 0, average of squares is just the average
    # so the variance is average - average**2
    # stddev is the square root of variance
    stddev = np.sqrt(mean - mean**2)
    plt.plot(stddev, color="grey", label="loss standard deviation")
    ############################################################################
    # CONFIDENCE INTERVALS
    X = np.arange(mean.size) # the x axis
    ######
    ## confidence interval
    ## assuming the experimental variance is the correct one
    #confidence = 0.95
    #alpha = 1 - confidence
    #eta = Norm.ppf(1 - alpha/2, loc=0, scale=1)
    #epsilon = eta * stddev / np.sqrt(N)
    #plt.fill_between(X, mean - epsilon, mean + epsilon,
    #                 color="blue", alpha=0.25,
    #                 label=f"{100*confidence}% confidence interval")
    #####
    # confidence interval
    # assuming each summed distribution is a normal distribution
    confidence = 0.999999
    delta = 1 - confidence
    # corrected sample variance
    # BUGFIX: the original wrote (1 / N-1), which parses as (1/N) - 1 and is
    # negative for every N >= 1, so np.sqrt produced NaN. The intended
    # Bessel-style factor is 1 / (N - 1).
    S = np.sqrt((1 / (N - 1)) * (mean - mean**2))
    # NOTE(review): S is currently unused — epsilon below is built from the
    # uncorrected stddev; confirm which estimator the interval should use.
    eta = Student(df=N-1).ppf(1 - delta/2)
    epsilon = eta * stddev / np.sqrt(N)
    plt.fill_between(X, mean - epsilon, mean + epsilon,
                     color="green", alpha=0.2,
                     label=f"{100*confidence}% confidence interval")
    # confidence = 0.95
    # delta = 1 - confidence
    # eta = Student(df=X-1).ppf(1 - delta/2)
    # epsilon = eta * stddev / np.sqrt(X)
    # plt.fill_between(X, mean - epsilon, mean + epsilon,
    #                  color="green", alpha=0.5,
    #                  label=f"{100*confidence}% confidence interval")
    ######
    ## beta distribution
    ## confidence = 0.95
    #delta = 1 - confidence
    #alpha = np.cumsum(1 - loss_history, axis=1).mean(axis=0)
    #beta = np.cumsum(loss_history, axis=1).mean(axis=0)
    #epsilon = Beta.ppf(1 - delta/2, alpha, beta)
    #plt.fill_between(X, mean - epsilon, mean + epsilon,
    #                 color="orange", alpha=0.30,
    #                 label=f"{100*confidence} β confidence interval")
    ######
    ## fluctuation interval
    #confidence = 0.1
    #alpha = 1-confidence
    #k = Norm.ppf(alpha/2, loc=0, scale=1)
    #fluctuation = k * stddev
    #plt.fill_between(X, mean - fluctuation, mean + fluctuation,
    #                 color="orange", alpha=0.25,
    #                 label=f"{100*confidence}% fluctuation interval")
    ######
    ## hoeffding
    #t = 0.9999999
    #plt.plot(X, 2 * np.exp(-2 * t ** 2 / X),
    #         color="red")
    ######
    ## y = 1/2
    #plt.plot([0, mean.size], [0.5, 0.5],
    #         color="orange", alpha=0.25)
if __name__ == '__main__':
    # Small hand-written set of rankings kept around for quick manual tests;
    # currently unused by the code below.
    rankings = np.array([[1, 3, 2, 4],
                         [3, 4, 2, 1],
                         [1, 2, 3, 4],
                         [1, 3, 2, 4],
                         [2, 3, 1, 4],
                         [1, 3, 2, 1],
                         [2, 3, 1, 4],
                         [2, 3, 1, 4]])
    # all_orderings = find_orderings(parameter=PARAMETER,
    #                                summed_attribute=SUMMED_ATTRIBUTE,
    #                                criterion=CRITERION,
    #                                length=LENGTH)
    # # print(all_orderings)
    # print(f"There are {len(all_orderings)} orderings in `all_orderings`")
    # for _ in range(20):
    #     dep = time()
    #     plot_modal_losses()
    #     print(round(time()-dep, 4))
    plt.style.use('dark_background')
    # HYPOTHESIS_ORDERING = ("bisque", "aquamarine")
    # plot_modal_losses()
    # overrides the module-level hypothesis before running the simulation
    HYPOTHESIS_ORDERING = ("bisque", "blue")
    plot_modal_losses()
    plt.legend()
    ax = plt.gca()
    # ax.set_ylim([0, 1])
    # plt.ion()
    plt.show()

323
src/data.py Normal file
View File

@ -0,0 +1,323 @@
data = [
(10, 3.2814),
(10, 1.1246),
(10, 1.2786),
(10, 1.4048),
(10, 1.321),
(10, 1.0877),
(10, 1.3789),
(10, 1.2656),
(10, 1.2232),
(10, 1.1576),
(10, 1.0716),
(10, 1.1329),
(10, 1.2229),
(10, 1.0674),
(10, 1.1904),
(10, 1.1503),
(10, 1.1237),
(10, 1.0695),
(10, 1.192),
(10, 1.1163),
(2, 4.985),
(2, 3.4106),
(2, 4.4639),
(2, 3.8917),
(2, 3.5325),
(2, 3.6275),
(2, 3.586),
(2, 3.7085),
(2, 3.5506),
(2, 3.882),
(2, 3.4114),
(2, 2.9221),
(2, 3.0728),
(2, 3.2228),
(2, 3.126),
(2, 3.018),
(2, 2.6121),
(2, 3.3835),
(2, 2.688),
(2, 2.7131),
(3, 4.9138),
(3, 3.6681),
(3, 4.228),
(3, 4.2168),
(3, 3.6797),
(3, 3.2504),
(3, 3.3086),
(3, 3.8523),
(3, 3.4246),
(3, 3.3924),
(3, 3.4794),
(3, 3.3593),
(3, 3.7011),
(3, 3.8801),
(3, 3.6497),
(3, 3.4457),
(3, 3.1876),
(3, 3.3091),
(3, 3.2624),
(3, 3.1918),
(4, 3.996),
(4, 2.3734),
(4, 2.3895),
(4, 2.027),
(4, 2.0217),
(4, 1.9908),
(4, 2.0311),
(4, 1.9258),
(4, 2.0102),
(4, 2.0338),
(4, 2.0078),
(4, 2.0199),
(4, 1.9693),
(4, 2.0876),
(4, 1.9746),
(4, 2.1291),
(4, 2.0353),
(4, 2.0223),
(4, 1.9693),
(4, 2.1176),
(5, 3.6458),
(5, 1.9484),
(5, 2.0161),
(5, 1.999),
(5, 1.9481),
(5, 2.0306),
(5, 2.0121),
(5, 2.0052),
(5, 1.9338),
(5, 1.9788),
(5, 1.8997),
(5, 2.0425),
(5, 2.009),
(5, 2.0407),
(5, 2.5651),
(5, 2.3494),
(5, 4.0412),
(5, 2.3624),
(5, 2.1484),
(5, 2.1279),
(6, 3.0398),
(6, 1.3934),
(6, 1.5696),
(6, 1.3557),
(6, 1.5808),
(6, 1.2172),
(6, 1.4345),
(6, 1.2293),
(6, 1.1803),
(6, 1.5682),
(6, 1.2226),
(6, 1.3786),
(6, 1.1973),
(6, 1.2538),
(6, 1.326),
(6, 1.285),
(6, 1.4086),
(6, 1.4677),
(6, 1.325),
(6, 1.7864),
(6, 2.8935),
(6, 1.4145),
(6, 1.2627),
(6, 1.2306),
(6, 1.4593),
(6, 1.4569),
(6, 1.4273),
(6, 1.2546),
(6, 1.8061),
(6, 1.7507),
(6, 1.8094),
(6, 1.6604),
(6, 1.1203),
(6, 1.5539),
(6, 1.1841),
(6, 1.3447),
(6, 1.318),
(6, 1.2145),
(6, 1.5093),
(6, 1.222),
(7, 2.8026),
(7, 1.2677),
(7, 1.3518),
(7, 1.2646),
(7, 1.3529),
(7, 1.298),
(7, 1.3879),
(7, 1.5377),
(7, 1.6141),
(7, 1.6608),
(7, 1.6938),
(7, 1.5475),
(7, 1.3327),
(7, 1.3387),
(7, 1.3543),
(7, 1.3318),
(7, 1.2613),
(7, 1.3656),
(7, 1.3646),
(7, 1.3082),
(7, 3.7757),
(7, 1.2824),
(7, 1.4717),
(7, 1.3426),
(7, 1.3604),
(7, 1.3191),
(7, 1.3851),
(7, 1.4107),
(7, 1.3291),
(7, 1.3861),
(7, 1.2749),
(7, 1.3441),
(7, 1.2875),
(7, 1.285),
(7, 1.4011),
(7, 1.285),
(7, 1.4398),
(7, 1.3175),
(7, 1.1406),
(7, 1.1148),
(7, 2.9924),
(7, 1.3008),
(7, 1.3184),
(7, 1.3205),
(7, 1.3085),
(7, 1.3275),
(7, 1.3117),
(7, 1.2819),
(7, 1.3389),
(7, 1.3741),
(7, 1.3308),
(7, 1.2763),
(7, 1.3069),
(7, 1.3578),
(7, 1.3264),
(7, 1.3716),
(7, 1.2968),
(7, 1.3645),
(7, 1.3726),
(7, 1.1437),
(7, 2.8074),
(7, 1.2116),
(7, 1.2206),
(7, 1.3141),
(7, 1.1898),
(7, 1.3442),
(7, 1.1675),
(7, 1.4256),
(7, 1.2796),
(7, 1.3477),
(7, 1.3515),
(7, 1.0426),
(7, 1.2668),
(7, 1.3067),
(7, 1.342),
(7, 1.2743),
(7, 1.3513),
(7, 1.6219),
(7, 1.6259),
(7, 1.6586),
(8, 2.7135),
(8, 1.0404),
(8, 1.2629),
(8, 1.0612),
(8, 1.1745),
(8, 1.1316),
(8, 0.9676),
(8, 1.1561),
(8, 0.9848),
(8, 1.1405),
(8, 1.1975),
(8, 1.0905),
(8, 1.3382),
(8, 1.2419),
(8, 1.221),
(8, 1.2209),
(8, 1.2595),
(8, 1.2315),
(8, 1.1985),
(8, 1.5726),
(8, 2.9819),
(8, 1.1447),
(8, 1.4281),
(8, 1.5031),
(8, 1.4433),
(8, 1.7052),
(8, 1.611),
(8, 1.3322),
(8, 1.2052),
(8, 1.3051),
(8, 1.0381),
(8, 1.1987),
(8, 1.1742),
(8, 1.2184),
(8, 0.9659),
(8, 1.0336),
(8, 1.2008),
(8, 1.23),
(8, 1.1227),
(8, 1.084),
(8, 3.4243),
(8, 1.5459),
(8, 1.705),
(8, 1.4039),
(8, 1.1903),
(8, 1.1655),
(8, 1.1943),
(8, 1.2169),
(8, 1.1924),
(8, 1.2306),
(8, 1.1635),
(8, 1.1598),
(8, 1.2742),
(8, 1.1646),
(8, 1.034),
(8, 1.2087),
(8, 1.1515),
(8, 1.145),
(8, 1.2855),
(8, 1.0425),
(8, 2.9917),
(8, 1.2165),
(8, 1.187),
(8, 1.1772),
(8, 1.2726),
(8, 1.1411),
(8, 1.2505),
(8, 1.2163),
(8, 1.2172),
(8, 1.1765),
(8, 1.2291),
(8, 1.2302),
(8, 1.195),
(8, 1.3805),
(8, 1.4443),
(8, 1.4463),
(8, 1.535),
(8, 1.5171),
(8, 1.2004),
(8, 1.2866),
(8, 2.9194),
(8, 1.1209),
(8, 1.1777),
(8, 1.1953),
(8, 1.3267),
(8, 1.2001),
(8, 1.2174),
(8, 1.1995),
(8, 1.294),
(8, 1.1856),
(8, 1.1948),
(8, 1.235),
(8, 1.1608),
(8, 1.2643),
(8, 1.3034),
(8, 1.5058),
(8, 1.4037),
(8, 1.6096),
(8, 1.4336),
(8, 1.3659),
]

25
src/data_plot.py Normal file
View File

@ -0,0 +1,25 @@
import numpy as np
from data import data
import matplotlib.pyplot as plt
# Convert the (x, y) pairs imported from data.py into a numpy array,
# then split into the two coordinate vectors.
data = np.array(data)
X, Y = data.transpose()
print(X, Y)
# Draw one boxplot of the y-values per distinct x value, positioned at x.
for x in set(X):
    plt.boxplot(Y[X==x], positions=[x])
# NOTE(review): hard-coded band — presumably a visual reference region
# over x in [1, 5]; confirm the values against the experiment.
plt.fill_between([1, 2, 3, 4, 5], [10, 11, 11, 12, 9], [0, 1, 1, 0, 2],
                 step="mid",
                 color="red", alpha=0.25)
# Interactive mode + input() keeps the window open until Enter is pressed.
plt.ion()
plt.show()
input()

25
src/file_manipulation.py Normal file
View File

@ -0,0 +1,25 @@
"""
Utility functions to manipulate files in the project.
"""
import random
def number_of_lines(file: str) -> int:
    """Return the number of lines in a file (given by filename).

    Args:
        file: Path of the file to count lines of.

    Returns:
        The number of lines, counted in binary mode.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the previous version leaked it: open() was never paired with close()).
    with open(file, 'rb') as f:
        return sum(1 for _ in f)
def sample_lines(filename: str, k: int =1):
    """Return a sample of k distinct lines of a file (given by filename).

    Lines are returned in file order (not in random-draw order), without
    their trailing newline.

    Args:
        filename: Path of the file to sample from.
        k: Number of distinct lines to pick; random.sample raises
            ValueError if k exceeds the number of lines in the file.
    """
    # A set makes the per-line membership test O(1) instead of O(k).
    selected_lines = set(random.sample(range(number_of_lines(filename)), k=k))
    sample: list[str] = [''] * k
    sample_hd_idx = 0
    with open(filename, encoding="utf8") as file:
        for num, line in enumerate(file):
            if num in selected_lines:
                # Strip the trailing \n only when present: the last line of a
                # file may lack one, and line[:-1] would eat a real character.
                sample[sample_hd_idx] = line.removesuffix("\n")
                sample_hd_idx += 1
                if sample_hd_idx == k:
                    break  # all requested lines found: stop reading early
    return sample# }}}
def random_line(filename: str):
    """Pick a single line of a file (given by filename), uniformly at random."""
    (line,) = sample_lines(filename, k=1)
    return line

92
src/kemeny_young.py Normal file
View File

@ -0,0 +1,92 @@
import numpy as np
from numba import jit, njit
from itertools import permutations
from tools import combinations_of_2
from tqdm import tqdm
from tprint import tprint
# Scalar numeric type alias: kendall_tau_dist may return an int or a float.
Number = int|float
# original, unoptimized version, but it's more readable
# def kendall_tau_dist(rank_a, rank_b) -> int:
# tau = 0
# nb_candidates = len(rank_a)
# # for each pair of candidates (not counting twice pairs with a different order)
# for i, j in combinations(range(nb_candidates), 2):
# tau += np.sign(rank_a[i] - rank_a[j]) == - np.sign(rank_b[i] - rank_b[j])
# return tau
def kendall_tau_dist(ranking_a: list[int], ranking_b: list[int]) -> Number:
    """The kendall τ distance between two rankings / permutations.

    Counts, over all index pairs, the pairs ranked one way by ranking_a and
    the other way by ranking_b (the discordant pairs), then rescales.

    NOTE(review): the returned value is 1 - d / (n*(n-1)), where d is the
    discordant-pair count over the n*(n-1)/2 pairs. The usual normalized
    Kendall distance is 2*d / (n*(n-1)); confirm this convention is the
    one the callers expect.
    """
    ranking_a = np.array(ranking_a)
    ranking_b = np.array(ranking_b)
    # comb = np.array([[i, j] for i in range(1, size) for j in range(1, size) if i<j])
    # all index pairs (i, j) with i < j, provided by tools.combinations_of_2
    comb = combinations_of_2(ranking_a.size)
    A = ranking_a[comb]
    B = ranking_b[comb]
    length = ranking_a.size
    # return __tau(A, B)
    return 1 - __tau(A, B) / (length * (length - 1))
@jit(cache=True)  # numba-compiled for speed; cached to avoid recompilation
def __tau(A: list[int], B: list[int]) -> int:
    """Count discordant pairs: rows where A orders the pair ascending exactly
    when B orders it descending, given two (n_pairs, 2) value arrays."""
    # tau = np.sum(np.sign(A[:,0] - A[:,1]) == - np.sign(B[:,0] - B[:,1]))
    return np.sum((A[:,0] < A[:,1]) == (B[:,0] > B[:,1]))
def rank_aggregation(rankings: list[list[int]]) -> tuple[float, tuple[int, ...]]:
    """Brute-force Kemeny-Young rank aggregation.

    Tries every permutation of the candidates and keeps the one whose total
    Kendall τ distance to the given rankings is minimal.

    Args:
        rankings: A list of the rankings (2D numpy array).

    Returns:
        tuple[float, tuple[int, ...]]: The minimal sum of distances to the
        rankings, and the ranking of minimal distance.

    Raises:
        ValueError: If no candidate ranking was examined (malformed input).
    """
    rankings = np.array(rankings)
    min_dist = np.inf  # float: np.inf is not an int, the old annotation lied
    best_ranking = None
    _, nb_candidates = rankings.shape
    for candidate_ranking in permutations(range(nb_candidates)):
        # total distance of this candidate to every input ranking
        dist = np.sum(np.apply_along_axis(
            lambda rkng: kendall_tau_dist(candidate_ranking, rkng),
            1, rankings))
        if dist < min_dist:
            min_dist = dist
            best_ranking = candidate_ranking
    # if no minimum has been found, there is a problem somewhere
    if best_ranking is None:
        raise ValueError("no best ranking found (empty rankings?)")
    return min_dist, best_ranking
if __name__ == '__main__':
    # small hand-written example: 5 rankings of 5 candidates
    ranks = np.array([[0, 1, 2, 3, 4],
                      [0, 1, 3, 2, 4],
                      [4, 1, 2, 0, 3],
                      [4, 1, 0, 2, 3],
                      [4, 1, 3, 2, 0]])
    # print(rank_aggregation(ranks))
    # print(kendall_tau_dist([1, 2, 3],
    #                        [3, 1, 2]))
    # stress test: aggregate random samples taken among all permutations
    # of 7 candidates, 10 times
    ranks = np.array(list(permutations(range(7))))
    for _ in tqdm(range(10)):
        selected_lines = np.random.randint(ranks.shape[0], size=30)
        selected = ranks[selected_lines, :]
        print(rank_aggregation(selected))
        # tprint(selected)
    # print(ranks)
    # print(kendalltau_dist(ranks[5], ranks[-1]))
    # print(np_kendalltau_dist(ranks[5], ranks[-1]))

123
src/orderankings.py Normal file
View File

@ -0,0 +1,123 @@
"""
Functions to deal with orderings, rankings, and permutations.
Orderings are lists of values where the order of these values is important.
It is assumed that values inside orderings are distinct.
Rankings are lists of integers indicating the indexes of some values.
Rankings are basically the index-wise representation of orderings.
A ranking is built from an ordering, and using a "reference ordering", which
you index to get back the values from the indexes.
Rankings are similar to mathematical "permutations".
"""
import numpy as np
from tprint import tprint
from kemeny_young import rank_aggregation
VERBOSE=False
# def inverse_permutation(permutation: list[int]) -> list[int]:
# """Return the inverse of a given permutation."""
# permutation = np.array(permutation)
# inverse = np.empty_like(permutation)
# inverse[permutation] = np.arange(permutation.size)
# return inverse
def inverse_permutation(permutation: list[int]) -> list[int]:
    """Return the inverse of a given permutation.

    Args:
        permutation (list[int]): A permutation to get the inverse of. It can
            be represented as a list[int] or a numpy array of integers.

    Returns:
        list[int]: A numpy array of integers representing the inverse
        permutation.
    """
    perm = np.array(permutation)
    # scatter the positions 0..n-1 into the slots named by the permutation:
    # if perm places i at perm[i], the inverse sends perm[i] back to i
    positions = np.arange(perm.size)
    inverse = np.empty_like(perm)
    inverse[perm] = positions
    return inverse
def get_orderings_from_table(table: np.ndarray, column_index: int = 0) -> list:
    """Extract an ordering from a table coming out of a sql query.

    This basically means extracting the values of the given column, keeping
    their order of first appearance but removing duplicates.

    Args:
        table: The table, coming from an sql query, containing ordered values.
        column_index: The index of the column in the table from which to
            extract the ordering.

    Returns:
        A numpy array of the distinct column values, in order of first
        appearance.
    """
    table = np.array(table)
    values = table[:, column_index]
    # np.unique gives the index of the first occurrence of each value;
    # sorting those indexes restores the order of first appearance
    # (the unique values themselves are not needed, hence the underscore)
    _, first_indexes = np.unique(values, return_index=True)
    return values[np.sort(first_indexes)]  # distinct ordered values
def get_all_orderings_from_table(table: list[tuple]) -> dict:
    """Group the parameter values of a (parameter, criteria, sum) table.

    Args:
        table: Rows of (parameter, criteria, summed value). The summed value
            is ignored; rows are assumed to already be in the wanted order.

    Returns:
        dict: Mapping from each criteria value to the list of its parameter
        values, in row order.
    """
    orders: dict = {}
    for parameter, criteria, _sum_value in table:
        # setdefault creates the list on the first row seen for a criteria
        orders.setdefault(criteria, []).append(parameter)
    return orders
def rankings_from_orderings(orderings: list[list[str]]) -> list[list[int]]:
    """Turn a list of orderings into the matching list of rankings.

    Orderings are list[str], where order is important.
    Rankings are list[int]: the permutation that would sort the matching
    ordering into alphabetical order.
    """
    rankings = np.argsort(np.array(orderings), axis=1)
    if VERBOSE:
        print("found rankings :")
        tprint(rankings)
    return rankings
def ranking_from_ordering(ordering: list[str]) -> list[int]:
    """Return the ranking matching a single ordering."""
    (ranking,) = rankings_from_orderings([ordering])
    return ranking
def ordering_from_ranking(ranking: list[int], values_to_order: list[str]) -> list[str]:
    """Get an ordering of values from a ranking of these values.

    This is basically the inverse function of *rankings_from_orderings*.

    Args:
        ranking (list[int]): The ranking (permutation) to convert into an
            ordering.
        values_to_order (list[str]): The values to put in the ordering.
            The order of these values doesn't matter.

    Returns:
        list[str]: The ordering of the values, matching the given ranking
        (permutation).
    """
    # sort the values alphabetically, then apply the inverse permutation of
    # *ranking* to put each value back at its ranked position
    sorted_values = np.sort(values_to_order)
    return sorted_values[inverse_permutation(ranking)]
# def ordering_from_ranking(ranking: list[int],
# reference_ordering: list[str],
# reference_ranking: list[int]):
# """Get an ordering of values from a ranking, using a reference ordering and
# ranking (the ranking must match the ordering)."""
# # make sure you are using numpy arrays
# ref_ordering = np.array(reference_ordering)
# ref_ranking = np.array(reference_ranking)
# # get back the best order from the best ranking
# ordering = ref_ordering[ref_ranking[[ranking]]][0]
# if VERBOSE: print("best ordering :", ordering)
# return ordering
def aggregate_rankings(rankings: list[list[int]]) -> tuple[int, ...]:
    """Calculate the aggregation of all given rankings, that is the ranking
    that is the nearest to all given rankings.

    The minimal distance computed by rank_aggregation is intentionally
    discarded here (hence the underscored name).
    """
    _min_dist, best_ranking = rank_aggregation(rankings)
    if VERBOSE: print("best ranking :", best_ranking)
    return best_ranking

65
src/orders_count.py Normal file
View File

@ -0,0 +1,65 @@
# raw ((a, b, c) order triple, count) rows -- presumably the output of a
# grouped sql query; the same triple can appear on several rows
data = [
    ((1, 21, 3), 11), ((1, 21, 8), 10), ((1, 3, 10), 13), ((1, 3, 10), 14),
    ((1, 30, 4), 10), ((10, 23, 34), 11), ((10, 39, 24), 10), ((10, 39, 24), 11),
    ((10, 39, 24), 11), ((10, 39, 24), 12), ((10, 39, 24), 12), ((11, 20, 12), 10),
    ((12, 19, 1), 12), ((12, 19, 1), 13), ((12, 19, 1), 9) , ((12, 27, 37), 13),
    ((12, 27, 37), 14), ((12, 41, 2), 12), ((12, 48, 35), 10), ((12, 48, 35), 10),
    ((14, 10, 18), 12), ((14, 19, 43), 11), ((14, 19, 43), 12), ((14, 23, 33), 10),
    ((14, 23, 33), 11), ((14, 23, 33), 13), ((15, 29, 39), 11), ((15, 4, 25), 10),
    ((15, 49, 34), 10), ((16, 33, 47), 11), ((16, 37, 47), 10), ((17, 14, 47), 11),
    ((17, 27, 20), 11), ((17, 27, 20), 12), ((17, 3, 20), 12), ((17, 3, 20), 12),
    ((17, 3, 20), 12), ((17, 3, 20), 13), ((18, 20, 42), 11), ((18, 31, 9), 12),
    ((18, 31, 9), 13), ((18, 31, 9), 13), ((19, 37, 17), 11), ((19, 37, 17), 12),
    ((19, 37, 17), 13), ((2, 18, 13), 10), ((2, 18, 13), 11), ((2, 47, 22), 10),
    ((2, 47, 22), 10), ((20, 22, 3), 12), ((20, 22, 3), 14), ((20, 26, 5), 10),
    ((20, 26, 5), 11), ((20, 26, 5), 11), ((20, 26, 5), 13), ((20, 31, 43), 10),
    ((20, 31, 43), 10), ((20, 46, 15), 11), ((21, 34, 38), 12), ((21, 42, 7), 10),
    ((21, 46, 25), 10), ((21, 46, 25), 11), ((21, 46, 25), 11), ((21, 46, 25), 12),
    ((21, 5, 20), 9) , ((22, 10, 2), 10), ((22, 20, 28), 10), ((22, 46, 34), 10),
    ((22, 46, 34), 12), ((22, 46, 34), 14), ((23, 11, 46), 11), ((23, 11, 46), 14),
    ((23, 11, 46), 9) , ((23, 22, 25), 11), ((23, 22, 25), 11), ((23, 44, 43), 11),
    ((23, 44, 43), 11), ((23, 49, 17), 10), ((24, 27, 2), 11), ((24, 29, 43), 13),
    ((24, 37, 11), 12), ((24, 37, 11), 12), ((24, 37, 11), 14), ((24, 37, 11), 15),
    ((25, 18, 26), 11), ((25, 34, 42), 11), ((25, 39, 4), 10), ((25, 39, 4), 12),
    ((25, 39, 4), 12), ((25, 39, 4), 13), ((26, 25, 18), 11), ((26, 31, 25), 11),
    ((26, 31, 25), 9) , ((26, 36, 40), 13), ((27, 11, 14), 11), ((27, 23, 17), 11),
    ((27, 34, 20), 11), ((28, 2, 1), 11), ((29, 19, 3), 11), ((29, 19, 3), 11),
    ((29, 2, 1), 11), ((29, 22, 30), 9) , ((29, 39, 10), 11), ((29, 39, 10), 12),
    ((29, 39, 10), 12), ((29, 44, 40), 10), ((29, 44, 40), 12), ((3, 10, 9), 11),
    ((3, 10, 9), 13), ((3, 12, 15), 12), ((3, 14, 10), 11), ((3, 14, 10), 13),
    ((3, 30, 20), 9) , ((3, 36, 12), 11), ((3, 40, 39), 10), ((3, 6, 38), 10),
    ((30, 35, 42), 11), ((30, 35, 42), 13), ((31, 15, 12), 12), ((31, 15, 12), 12),
    ((32, 14, 23), 11), ((32, 16, 49), 11), ((32, 7, 11), 10), ((32, 7, 11), 13),
    ((32, 7, 11), 14), ((33, 22, 3), 11), ((33, 22, 3), 12), ((33, 22, 3), 13),
    ((33, 25, 36), 10), ((33, 29, 40), 11), ((33, 39, 38), 10), ((33, 39, 38), 11),
    ((33, 39, 38), 14), ((33, 43, 23), 10), ((34, 7, 47), 11), ((35, 21, 26), 12),
    ((36, 10, 17), 12), ((36, 38, 2), 11), ((36, 38, 2), 12), ((37, 40, 5), 10),
    ((37, 49, 10), 11), ((37, 49, 10), 13), ((38, 14, 22), 11), ((38, 14, 22), 12),
    ((38, 32, 3), 10), ((38, 32, 3), 11), ((38, 32, 3), 11), ((39, 10, 44), 11),
    ((39, 12, 35), 11), ((39, 12, 35), 11), ((39, 12, 35), 13), ((39, 3, 1), 10),
    ((39, 30, 7), 10), ((39, 30, 7), 12), ((39, 30, 7), 12), ((4, 17, 28), 10),
    ((4, 46, 42), 12), ((40, 24, 12), 11), ((40, 26, 47), 11), ((41, 30, 43), 11),
    ((41, 30, 43), 12), ((41, 30, 7), 11), ((41, 30, 7), 12), ((41, 30, 7), 17),
    ((41, 46, 24), 12), ((41, 46, 24), 9) , ((41, 48, 44), 10), ((42, 14, 15), 11),
    ((42, 14, 15), 11), ((42, 14, 15), 11), ((42, 29, 22), 10), ((42, 29, 22), 10),
    ((42, 3, 37), 11), ((42, 8, 41), 11), ((43, 11, 48), 11), ((44, 40, 7), 10),
    ((45, 20, 12), 11), ((45, 20, 12), 13), ((45, 44, 10), 11), ((45, 46, 6), 10),
    ((47, 17, 30), 12), ((47, 18, 30), 10), ((47, 18, 30), 12), ((47, 18, 30), 14),
    ((47, 22, 41), 12), ((47, 22, 41), 13), ((47, 4, 33), 10), ((48, 12, 28), 11),
    ((5, 3, 12), 10), ((5, 3, 12), 10), ((5, 3, 12), 11), ((5, 35, 34), 13),
    ((6, 32, 5), 11), ((6, 32, 5), 11), ((7, 2, 14), 10), ((7, 2, 14), 10),
    ((7, 2, 14), 11), ((7, 2, 14), 14), ((7, 5, 29), 12), ((9, 29, 21), 13),
    ((9, 31, 28), 9)]

from collections import defaultdict

# aggregate the duplicated (order, count) rows into one total per order
orders_count = defaultdict(int)
for order, count in data:
    orders_count[order] += count

print(len(data), len(orders_count))
print(orders_count)
# sort the (order, count) pairs by count; the original sorted the dict KEYS
# by the second element of the key tuple, which compared order values
# instead of counts
print(sorted(orders_count.items(), key=lambda item: item[1]))

187
src/query_generator.py Normal file
View File

@ -0,0 +1,187 @@
"""
Objects to generate SQL requests.
The QueryWithParameter and QueryWithParameterGroupedByCriteria,
"""
from abc import ABC, abstractmethod
class QueryGenerator(ABC):
    """Abstract base for SQL query generators.

    Subclasses configure themselves in __init__ and render the full SQL
    text of the query in __str__.
    """
    @abstractmethod
    def __init__(self): ...
    @abstractmethod
    def __str__(self) -> str: ...
class QueryWithParameter(QueryGenerator):
    """Query generator parameterized by an attribute whose summed values
    define the ordering.

    Concrete subclasses (via their database mixin) provide
    DEFAULT_AUTHORIZED_PARAMETER_VALUES, used when no explicit authorized
    values are given. That default may be None, meaning "accept all values".
    """

    # DEFAULT_AUTHORIZED_PARAMETER_VALUES: tuple[str, ...] = ("foo", "bar")

    def __init__(self, parameter: str | None = None,
                 authorized_parameter_values: tuple[str, ...] | None = None,
                 summed_attribute: str | None = None):
        if parameter is None:
            raise ValueError("parameter must be given")
        self.parameter = str(parameter)
        if authorized_parameter_values is None:
            authorized_parameter_values = self.DEFAULT_AUTHORIZED_PARAMETER_VALUES
        self.authorized_parameter_values = authorized_parameter_values
        if summed_attribute is None:
            raise ValueError("summed_attribute must be given")
        self.summed_attribute = str(summed_attribute)

    @property
    def parameter(self) -> str:
        return self.__parameter

    @parameter.setter
    def parameter(self, value):
        self.__parameter = str(value)

    def __force_typing_on_authorized_parameter_values(self):
        # None means "no restriction" and must be kept as-is: the previous
        # unconditional tuple(map(str, ...)) crashed on None even though the
        # __str__ implementations explicitly test `is not None`
        if self.__authorized_parameter_values is not None:
            self.__authorized_parameter_values = tuple(
                map(str, self.__authorized_parameter_values))

    @property
    def authorized_parameter_values(self) -> tuple[str, ...] | None:
        return self.__authorized_parameter_values

    @authorized_parameter_values.setter
    def authorized_parameter_values(self, value):
        self.__authorized_parameter_values = value
        self.__force_typing_on_authorized_parameter_values()
class QueryWithParameterGroupedByCriteria(QueryWithParameter):
    """QueryWithParameter with an extra grouping criteria.

    The criteria may stay None at construction time and be set later;
    __str__ implementations raise if it is still unset.
    """

    def __init__(self, parameter: str | None = None,
                 authorized_parameter_values: tuple[str, ...] | None = None,
                 criteria: str | None = None,
                 summed_attribute: str | None = None
                 ):
        # the parent constructor already validates and normalizes everything
        # except criteria (the original duplicated that logic line for line)
        super().__init__(parameter=parameter,
                         authorized_parameter_values=authorized_parameter_values,
                         summed_attribute=summed_attribute)
        self.criteria = criteria

    @property
    def criteria(self) -> str | None:
        return self.__criteria

    @criteria.setter
    def criteria(self, value: str | None):
        # keep None ("not set yet") as-is, stringify anything else
        self.__criteria = None if value is None else str(value)

    def __repr__(self):
        return f"QueryGenerator(parameter={self.parameter}, authorized_parameter_values={self.authorized_parameter_values}, criteria={self.criteria}, summed_attribute={self.summed_attribute})"
class QueryFlight:
    """Mixin carrying the defaults of the flight_delay database.

    The default parameter values look like IATA codes of large US airports
    -- TODO confirm against the dataset.
    """
    DEFAULT_AUTHORIZED_PARAMETER_VALUES: tuple[str, ...] = (
        "ATL", "ORD", "DFW", "DEN", "LAX", "IAH", "LAS", "SFO", "PHX", "MCO",
        "SEA", "CLT", "MSP", "LGA", "DTW", "EWR", "BOS", "BWI", "SLC", "JFK")
class QuerySSB:
    """Mixin carrying the defaults of the SSB (Star Schema Benchmark) database."""
    # DEFAULT_AUTHORIZED_PARAMETER_VALUES: tuple[str, ...] = tuple(CSS4_COLORS.keys())
    # defaults to None to accept all values
    DEFAULT_AUTHORIZED_PARAMETER_VALUES = None
class QueryFlightWithParameterGroupedByCriteria(QueryWithParameterGroupedByCriteria,
                                                QueryFlight):
    """Grouped-by-criteria query against the flight_delay star schema.

    (A commented-out duplicate of the inherited constructor was removed.)
    """

    def __str__(self):
        if self.criteria is None:
            raise ValueError("attribute criteria has not been set.")
        res = f"""
SELECT {self.parameter}, {self.criteria}, SUM({self.summed_attribute})
FROM fact_table
INNER JOIN airport_dim ON airport_dim.iata_code = fact_table.departure_airport
NATURAL JOIN hour_dim
INNER JOIN time_dim ON time_dim.day = fact_table.date
"""
        # only restrict the parameter when a restriction is configured
        if self.authorized_parameter_values is not None:
            res += f"WHERE {self.parameter} IN {self.authorized_parameter_values}\n"
        res += f"""
GROUP BY {self.parameter}, {self.criteria}
ORDER BY SUM({self.summed_attribute}) DESC;
"""
        return res
class QueryFlightWithParameter(QueryWithParameter, QueryFlight):
    """Whole-table (no group by) query against the flight_delay schema."""

    def __str__(self):
        # single f-string: this query always restricts the parameter values
        sql = f"""
SELECT {self.parameter}, SUM({self.summed_attribute})
FROM fact_table
WHERE {self.parameter} IN {self.authorized_parameter_values}
GROUP BY {self.parameter}
ORDER BY SUM({self.summed_attribute}) DESC;
"""
        return sql
class QuerySSBWithParameterGroupedByCriteria(QueryWithParameterGroupedByCriteria,
                                             QuerySSB):
    """Grouped-by-criteria query against the SSB star schema."""

    def __str__(self):
        if self.criteria is None:
            raise ValueError("attribute criteria has not been set.")
        res = f"""
SELECT {self.parameter}, {self.criteria}, SUM({self.summed_attribute})
FROM lineorder
"""
        # join the dimension tables according to which columns are needed
        if self.criteria.startswith('c_') or self.parameter.startswith('c_'):
            res += "INNER JOIN customer ON lo_custkey = c_custkey\n"
        if self.criteria.startswith('p_') or self.parameter.startswith('p_'):
            res += "INNER JOIN part ON lo_partkey = p_partkey\n"
        if self.criteria.startswith('s_') or self.parameter.startswith('s_'):
            res += "INNER JOIN supplier ON lo_suppkey = s_suppkey\n"
        if self.criteria.startswith('D_') or self.parameter.startswith('D_'):
            res += "INNER JOIN date ON lo_orderdate = D_DATEKEY\n"
        if self.authorized_parameter_values is not None:
            # BUG FIX: this literal was missing its f-prefix, so the raw text
            # "{self.parameter} IN {self.authorized_parameter_values}" ended
            # up verbatim in the SQL (compare the flight variant above)
            res += f"WHERE {self.parameter} IN {self.authorized_parameter_values}\n"
        res += f"""
GROUP BY {self.parameter}, {self.criteria}
ORDER BY SUM({self.summed_attribute}) DESC;
"""
        return res
class QuerySSBWithParameter(QueryWithParameter,
                            QuerySSB):
    """Whole-table (no group by) query against the SSB schema."""

    def __str__(self):
        # assemble the query from its parts, skipping WHERE when no
        # restriction on the parameter values is configured
        parts = [f"""
SELECT {self.parameter}, SUM({self.summed_attribute})
FROM lineorder
"""]
        if self.authorized_parameter_values is not None:
            parts.append(f"WHERE {self.parameter} IN {self.authorized_parameter_values}\n")
        parts.append(f"""
GROUP BY {self.parameter}
ORDER BY SUM({self.summed_attribute}) DESC;
""")
        return "".join(parts)

248
src/querying.py Normal file
View File

@ -0,0 +1,248 @@
import sqlite3
import numpy as np
from tprint import tprint
from query_generator import *
import orderankings as odrk
import kemeny_young as km
from joblib import Memory
# persistent memoïzation: query results are cached on disk in ./cache
memory = Memory("cache")

# NOTE(review): the first assignment is dead -- the active database is the
# last one assigned; comment one of the two lines out to switch databases
DATABASE_NAME = "flight_delay"
DATABASE_NAME = "SSB"

################################################################################
# Connexion to sqlite database

odrk.VERBOSE = False
VERBOSE = True

# initialize database connection (module-level: opened once at import time)
DATABASE_FILE = f"../{DATABASE_NAME}_dataset/{DATABASE_NAME}.db"
if VERBOSE: print(f"connecting to {DATABASE_FILE}")
CON = sqlite3.connect(DATABASE_FILE)
CUR = CON.cursor()
@memory.cache # persistent memoïzation
def query(q: str) -> list[tuple]:
    """Execute a given query and return the result as a python list[tuple]."""
    if VERBOSE: print(f'sending query : {q}')
    response = CUR.execute(str(q))
    if VERBOSE: print("got response", response)
    return response.fetchall()
################################################################################
# Choice of the right query generator

# pick the query generator classes matching the configured database
if DATABASE_NAME == "flight_delay":
    QUERY_PARAM_GB_FACTORY = QueryFlightWithParameterGroupedByCriteria
    QUERY_PARAM_FACTORY = QueryFlightWithParameter
elif DATABASE_NAME == "SSB":
    QUERY_PARAM_GB_FACTORY = QuerySSBWithParameterGroupedByCriteria
    QUERY_PARAM_FACTORY = QuerySSBWithParameter
# NOTE(review): any other DATABASE_NAME leaves both factories undefined
################################################################################
# orderings extraction functions

@memory.cache # persistent memoïzation
def find_orderings(parameter: str, summed_attribute: str,
                   criterion: tuple[str, ...],
                   length: int,
                   authorized_parameter_values: list[str] | None = None
                   ) -> list[list[str]]:
    """Gather the list of every ordering returned by queries using given values
    of parameter, summed_attribute, and all given values of criterion.

    Args:
        parameter (str): The value of the parameter attribute in the query generator.
        summed_attribute (str): The attribute that you want to sum in order to sort the values.
        criterion (tuple[str]): The list of attributes that you want to group the query by.
        length (int): The length of orderings, hence the number of different
            values of parameter that you consider in the query.
        authorized_parameter_values: Restriction of the compared parameter
            values; None keeps the generator's default.

    Returns:
        list[list]: The list of all found orderings.
    """
    # instanciate the query generator
    qg = QUERY_PARAM_GB_FACTORY(parameter=parameter,
                                summed_attribute=summed_attribute,
                                criteria=None)
    if authorized_parameter_values is not None:
        # reduce the number of compared parameter values
        # (the original also re-assigned the property to itself in the
        # other branch, which was a no-op)
        qg.authorized_parameter_values = authorized_parameter_values
    # list of all found orderings, one per distinct criteria value
    orderings = list()
    for criteria in criterion:
        qg.criteria = criteria
        table = query(str(qg))
        if VERBOSE:
            print(f"request result with criteria '{criteria}' :")
            tprint(table, limit=10)
        table_orders = odrk.get_all_orderings_from_table(table)
        # update the global list of all found orders
        orderings.extend(table_orders.values())
    # keep only orders that are of the specified length
    # that means removing too short ones, and slicing too long ones
    correct_length_orderings = np.array(
        [ordrng[:length] for ordrng in orderings if len(ordrng) >= length]
    )
    if VERBOSE:
        print(f"found {len(correct_length_orderings)} orderings :")
        print(correct_length_orderings)
    return correct_length_orderings
@memory.cache # persistent memoïzation
def find_true_ordering_ranking(parameter: str,
                               summed_attribute: str,
                               length: int,
                               authorized_parameter_values: tuple[str, ...] | None = None
                               ) -> tuple[list[list[str]], list[list[int]]]:
    """Return the true (ordering, ranking), considering the data as a whole (no
    grouping by), and getting the true order (no rankings aggregation)."""
    # the generator's constructor falls back to its class default when
    # authorized_parameter_values is None, so it can be forwarded as-is
    # (the original duplicated the constructor call in an if/else)
    qg = QUERY_PARAM_FACTORY(parameter=parameter,
                             summed_attribute=summed_attribute,
                             authorized_parameter_values=authorized_parameter_values)
    table = query(str(qg))
    if VERBOSE: print(table)
    ordering = odrk.get_orderings_from_table(table)
    ranking = odrk.rankings_from_orderings([ordering])[0]
    return ordering, ranking
################################################################################
def flight_delay_main():
    """Demo on the flight_delay database: print the true ordering, then the
    ordering inferred by aggregating per-criteria rankings."""
    PARAMETER = "departure_airport"
    SUMMED_ATTRIBUTE = "nb_flights"
    LENGTH = 5
    # true ordering/ranking over the whole data (no group by)
    ordering, ranking = find_true_ordering_ranking(parameter=PARAMETER,
                                                   summed_attribute=SUMMED_ATTRIBUTE,
                                                   length=LENGTH)
    print(ordering, ranking)
    CRITERION = [
        # "airline",
        # "departure_hour",
        "day",
        # "month",
    ]
    # shuffle the criteria (presumably to vary query order) -- TODO confirm
    rng = np.random.default_rng()
    rng.shuffle(CRITERION)
    grouped_orderings = find_orderings(parameter=PARAMETER,
                                       summed_attribute=SUMMED_ATTRIBUTE,
                                       criterion=CRITERION,
                                       length=LENGTH)
    # grouped_orderings = grouped_orderings[:5]
    # tprint(grouped_orderings, limit=20)
    print(grouped_orderings)
    # inferred_ordering = odrk.get_orderings_from_table(inferred_orderings_table)
    # aggregate the per-criteria rankings into one inferred ranking
    grouped_rankings = odrk.rankings_from_orderings(grouped_orderings)
    _, inferred_ranking = km.rank_aggregation(grouped_rankings)
    inferred_ranking = np.array(inferred_ranking)
    inferred_order = odrk.ordering_from_ranking(inferred_ranking,
                                                grouped_orderings[0])
    print("inferred :")
    print(inferred_order, inferred_ranking)
    # print("distance =", km.kendall_tau_dist(ranking, inferred_ranking))
################################################################################
def SSB_main():
    """Demo on the SSB database: print the true ordering, then the ordering
    inferred by aggregating per-criteria rankings."""
    PARAMETER = "p_color"
    SUMMED_ATTRIBUTE = "lo_quantity"
    # SUMMED_ATTRIBUTE = "lo_revenue"
    # SUMMED_ATTRIBUTE = "lo_extendedprice"
    LENGTH = 2
    # dimension-table columns to group the queries by
    CRITERION = (
        ##### customer table
        "c_region",
        "c_city",
        "c_nation",
        ##### part table
        "p_category",
        "p_brand",
        "p_mfgr",
        "p_color",
        "p_type",
        "p_container",
        ##### supplier table
        "s_city",
        "s_nation",
        "s_region",
        ##### order date
        # "D_DATE",
        # "D_DATEKEY",
        # "D_DATE",
        # "D_DAYOFWEEK",
        # "D_MONTH",
        # "D_YEAR",
        # "D_YEARMONTHNUM",
        # "D_YEARMONTH",
        # "D_DAYNUMINWEEK"
        # "D_DAYNUMINMONTH",
        # "D_DAYNUMINYEAR",
        # "D_MONTHNUMINYEAR",
        "D_WEEKNUMINYEAR",
        # "D_SELLINGSEASON",
        # "D_LASTDAYINWEEKFL",
        # "D_LASTDAYINMONTHFL",
        # "D_HOLIDAYFL",
        # "D_WEEKDAYFL",
    )
    # restrict the parameter to this pair of values (the hypothesis whose
    # relative order is studied, presumably) -- TODO confirm intent
    HYPOTHESIS_ORDERING = ("aquamarine", "dark")
    ordering, ranking = find_true_ordering_ranking(parameter=PARAMETER,
                                                   summed_attribute=SUMMED_ATTRIBUTE,
                                                   length=LENGTH,
                                                   authorized_parameter_values=HYPOTHESIS_ORDERING)
    print(ordering, ranking)
    grouped_orderings = find_orderings(parameter=PARAMETER,
                                       summed_attribute=SUMMED_ATTRIBUTE,
                                       criterion=CRITERION,
                                       length=LENGTH
                                       )
    # grouped_orderings = grouped_orderings[:5]
    tprint(grouped_orderings, limit=20)
    # print(grouped_orderings)
    # inferred_ordering = odrk.get_orderings_from_table(inferred_orderings_table)
    # aggregate the per-criteria rankings into one inferred ranking
    grouped_rankings = odrk.rankings_from_orderings(grouped_orderings)
    _, inferred_ranking = km.rank_aggregation(grouped_rankings)
    inferred_ranking = np.array(inferred_ranking)
    inferred_order = odrk.ordering_from_ranking(inferred_ranking,
                                                grouped_orderings[0])
    print("inferred :")
    print(inferred_order, inferred_ranking)
    # print("distance =", km.kendall_tau_dist(ranking, inferred_ranking))
if __name__ == '__main__':
    # dispatch on the module-level database choice
    if DATABASE_NAME == "SSB":
        SSB_main()
    elif DATABASE_NAME == "flight_delay":
        flight_delay_main()

View File

@ -0,0 +1,57 @@
import numpy as np
import numba as nb
from itertools import permutations, combinations
from fastcache import lru_cache
from tqdm import tqdm
from tprint import tprint
def round_robin_tournament(rankings: list[list[int]]) -> list[list[int]]:
    """Build the pairwise duel matrix of the candidates.

    Args:
        rankings: 2D numpy array, one ranking per row.

    Returns:
        points: Square matrix where points[a][b] counts how many rankings
        place candidate a before candidate b (argsort position order).
    """
    nb_candidates = rankings.shape[1]
    points = np.zeros((nb_candidates, nb_candidates), dtype=int)
    for rkng in rankings:
        arg_rkng = np.argsort(rkng)
        # (a stray debug print of each ranking was removed here)
        for a, b in combinations(range(len(rkng)), 2):
            points[a][b] += arg_rkng[a] < arg_rkng[b]
            points[b][a] += arg_rkng[a] > arg_rkng[b]
    return points
def minimax_condorcet(rankings: list[list[int]]) -> list[list[int]]:
    """Minimax Condorcet aggregation -- UNFINISHED.

    Currently only builds the duel matrix and implicitly returns None;
    the annotated return type is aspirational.
    """
    tournament = round_robin_tournament(rankings)
    #TODO
if __name__ == '__main__':
    # 5-candidate example (unused: immediately overwritten below)
    ranks = np.array([[0, 1, 2, 3, 4],
                      [0, 1, 3, 2, 4],
                      [4, 1, 2, 0, 3],
                      [4, 1, 0, 2, 3],
                      [4, 1, 3, 2, 0]])
    # 3-candidate example actually printed
    ranks = np.array([[1, 3, 2],
                      [1, 2, 3],
                      [2, 1, 3]])
    print(round_robin_tournament(ranks))
    # print(borda([[1, 2, 3],
    #              [3, 1, 2]]))
    # def take(idx: int, iterator: iter) -> iter:
    #     for _ in range(idx):
    #         yield next(iterator)
    # ranks = np.array(list(take(20000, permutations(range(20)))))
    # for _ in tqdm(range(500)):
    #     selected_lines = np.random.randint(ranks.shape[0], size=20000)
    #     selected = ranks[selected_lines,:]
    #     if (borda(selected) != simple_borda(selected)).any():
    #         print(*borda(selected))
    #         print(*simple_borda(selected))
    #         # tprint(selected)
    # # print(ranks)
    # # print(kendalltau_dist(ranks[5], ranks[-1]))
    # # print(np_kendalltau_dist(ranks[5], ranks[-1]))

25
src/tools.py Normal file
View File

@ -0,0 +1,25 @@
import numpy as np
from numba import jit
from fastcache import lru_cache
# @lru_cache(maxsize=16)
def combinations_of_2(size: int):
    """Returns an array of size n*2, containing every pair of two integers
    smaller than size, but not listing twice the pairs with the same numbers
    (so 1, 2 and 2, 1 aren't both listed).

    NOTE(review): the docstring used to claim this function is cached (it is
    called many times with the same value when using aggregate_rankings),
    but the lru_cache decorator is commented out above, so each call
    currently recomputes the array.
    """
    # return np.array([[i, j] for i in range(0, size) for j in range(0, size) if i<j])
    return __combinations_of_2(size)
@jit
def __combinations_of_2(size: int):
    """Compiled helper: list every index pair (i, j) with i < j < size."""
    # return np.array(list(combinations(range(size), 2)))
    # return np.array(np.meshgrid(np.arange(size), np.arange(size))).T.reshape(-1, 2)
    # letting j start at i + 1 makes the i < j constraint structural
    pairs = [[i, j] for i in range(size) for j in range(i + 1, size)]
    return np.array(pairs)

45
src/tprint.py Normal file
View File

@ -0,0 +1,45 @@
"""
Defines the tprint function, that prints tables from sqlite.
"""
import numpy as np
def print_as_table(data, limit: int = 10):
    """Pretty printing for a sqlite output.

    Args:
        data: The rows to print (sequence of tuples, all the same arity).
        limit (int): The max number of lines to print.
            Can be set to float("inf") to print all the lines.
            Defaults to 10.
    """
    if len(data) == 0:
        print("┏━━━━━━━━━━━┓\n┃empty table┃\n┗━━━━━━━━━━━┛")
        return
    # widest repr of each column, so every cell can be left-justified
    column_width = np.max(np.vectorize(lambda x: len(repr(x)))(data), axis=0)
    # NOTE(review): the box-drawing characters inside the row and ellipsis
    # literals were lost in the sources ('' where a glyph belongs); they are
    # reconstructed here to match the ┏━┳┓ / ┗━┻┛ frame of header and footer
    print("┏━" + "━┳━".join(width * "━" for width in column_width) + "━┓")
    for num, line in enumerate(data):
        if num >= limit:  # was `>`, which printed limit + 1 rows
            print("┃ " + " ┃ ".join("⋮".center(width) for width in column_width) + " ┃")
            break  # was a return that left the table without a footer
        print("┃ "
              + " ┃ ".join(repr(elt).ljust(column_width[col_num])
                           for col_num, elt in enumerate(line))
              + " ┃")
    print("┗━" + "━┻━".join(width * "━" for width in column_width) + "━┛")
def tprint(*args, sep: str = "\n\n", limit: int = 10):
    """Pretty print outputs from sqlite.

    Args:
        *args: The tables to print.
        sep (str): The separation between two tables. Defaults to "\\n\\n"
        limit (int): The max number of lines to print for each table.
            Can be set to float("inf") to print all the lines.
            Defaults to 10.
    """
    for table in args:
        print_as_table(table, limit)
        print(sep, end="")