|
"""
An evolving tool for computing statistics over reference (recurrent-defect) clusters.
"""
| 4 | +import argparse |
| 5 | +import os |
| 6 | +import pandas as pd |
| 7 | +import pickle |
| 8 | +import sys |
| 9 | +import traceback |
| 10 | + |
| 11 | +import pickle |
| 12 | + |
| 13 | +from nltk.tokenize import sent_tokenize, word_tokenize |
| 14 | +from collections import Counter |
| 15 | +from operator import itemgetter |
| 16 | + |
# ---------------------------------------------
# small helper utilities
# ---------------------------------------------
| 20 | + |
| 21 | +# just for custom printing (trace purpose) |
class FeCounter(Counter):
    """Counter variant whose string form lists only the repeated keys.

    Used purely for trace/debug printing: keys with a count of 1 are
    dropped, the remaining keys are joined with single spaces, in
    insertion order.
    """

    def __str__(self):
        repeated = [str(token) for token, count in self.items() if count > 1]
        return " ".join(repeated)
| 25 | + |
# ---------------------------------------------
# command-line handling
# ---------------------------------------------
| 29 | + |
def get_args():
    """Build the command-line parser and return the parsed arguments.

    Positional: input_file (pickle path). Options: -v/--verbosity (int, 0),
    -t/--test (flag), -p/--pickle (output path for the bows, default None).
    """
    parser = argparse.ArgumentParser(description='analyse clusters')
    parser.add_argument("input_file",
                        help="A pickle input file, e.g. aircan-data-split-clean.pkl.")
    parser.add_argument("-v", '--verbosity', type=int, default=0,
                        help="increase output verbosity")
    parser.add_argument("-t", '--test', action='store_true', default=False,
                        help="for dealing with test")
    parser.add_argument("-p", '--pickle', type=str, default=None,
                        help="Pickle Bows")
    return parser.parse_args()
| 42 | + |
| 43 | +# --------------------------------------------- |
| 44 | +# main |
| 45 | +# --------------------------------------------- |
| 46 | + |
| 47 | + |
def main():
    """Entry point: load the pickled DataFrames and analyse the reference clusters.

    Exits with status 1 when the input file is missing or unreadable.
    """

    # parse args
    args = get_args()

    if not os.path.exists(args.input_file):
        print(f"Invalid input file: {args.input_file}", file=sys.stderr)
        sys.exit(1)

    # read data; this will load the data as 6 pandas DataFrames, which allow fast manipulations and (slower) iterations
    # more info on pandas here: https://pandas.pydata.org/
    try:
        with open(args.input_file, 'rb') as fin:
            [defect_df_train, defect_df_dev, defect_df_test, ata_df, mel_df, trax_df] = pickle.load(fin)
        # bug fix: the dev count was previously printed from defect_df_test
        print(f"Read # samples: {len(defect_df_train)} train, {len(defect_df_dev)} dev, {len(defect_df_test)} test.")
    except Exception:  # top-level boundary: report, show the traceback, abort
        print("Loading the pickle failed.", file=sys.stderr)

        if pd.__version__ != '1.1.0':
            print("""You can upgrade your version of pandas with the command
        'pip install 'pandas==1.1.0' --force-reinstall'.""", file=sys.stderr)

        print("""You can also recreate the pickle by following the instructions here:
        https://github.com/rali-udem/arpi_air_canada#data-preparation""", file=sys.stderr)
        print()
        traceback.print_exc()
        # bug fix: the original fell through after a failed load and later
        # crashed with a NameError on the undefined DataFrames
        sys.exit(1)

    # basic stats on each cluster
    check_ref_clusters(defect_df_test if args.test else defect_df_train, args.pickle)
| 77 | + |
| 78 | + |
| 79 | + |
| 80 | +# --------------------------------------------------------------- |
| 81 | +# felipe's function (to ramp up on pandas I never used seriously) |
| 82 | +# incidentally producing views of clusters |
| 83 | +# --------------------------------------------------------------- |
| 84 | + |
def check_ref_clusters(defect, save):
    """Print per-cluster statistics and collect per-ATA-signature bags of words.

    :param defect: pandas DataFrame of defects; uses columns 'recurrent',
                   'chapter', 'section', 'defect_description' and 'ac'
    :param save: path of a pickle file in which to dump the signature->bow
                 dict, or None to skip saving
    """

    # ATA-signature -> bow (counter)
    bows = {}

    # note: ATA signatures might be null, which might generate some noise (even bugs)
    # for now I leave it like this

    grouped_by_recurrent = defect.groupby('recurrent')
    for name, group in grouped_by_recurrent:

        if len(group) == 1:
            # ignore clusters with only one member (it does happen !)
            print(f"#WARNING: recurrent defect {name} has only one member (skipped)")
        else:

            # count the number of chapter-section signatures per cluster
            # (mind you: some clusters have numerous signatures, which defeats my understanding of TRAX)
            grouped_by_ata = group.groupby(['chapter', 'section'])
            print(f"---\n#INFO: Recurrent defect {name}, with {len(group)} member(s), and {len(grouped_by_ata)} ata-code(s)")
            if len(grouped_by_ata) > 1:
                # warn if more than one signature
                print(f"#WARNING: more than one chapter-section ({len(grouped_by_ata)})")

            # number of lines retained in the cluster
            nb = 0

            # let's keep track of words in a given cluster
            c = FeCounter()

            # iterate over signatures in the cluster
            for sname, sgroup in grouped_by_ata:
                code = f"{sname[0]}-{sname[1]}"  # fix: dropped the redundant format() wrapper
                print(f"+ ata-code: {code}")
                if sname[1] != 0:
                    # and print concerned lines provided section is not 0 and the description is filled
                    for index, row in sgroup.iterrows():
                        desc = row['defect_description']
                        if pd.notnull(desc):
                            c.update(word_tokenize(desc.lower()))
                            print(f"\t#line\t{index}\t{row['chapter']}-{row['section']}\t{row['ac']}\t{desc}")
                            nb += 1

                # NOTE(review): c accumulates across signatures, so later
                # signatures absorb earlier signatures' tokens -- confirm intended
                if code not in bows:
                    bows[code] = Counter(c)
                else:
                    bows[code].update(c)

            # cluster-wise journalization
            print(f"#trace: {nb} safe lines for defect {name}")
            print("#bow: ", c)

    # dataset-wise journalization
    print(f"#ata-signatures: {len(bows)}")
    for signature, bow in bows.items():
        b = bow.most_common(10)
        print(f"#bow({signature}) [{len(bow)}]: {b}")

    if save is not None:  # fix: 'not save is None' -> idiomatic 'is not None'
        # fix: context manager guarantees the handle is closed even if dump fails
        with open(save, 'wb') as outfile:
            pickle.dump(bows, outfile)
        print(f"Generated pickle: {save}")
| 150 | + |
| 151 | + |
| 152 | + |
# script entry point: run the analysis only when executed directly, not on import
if __name__ == '__main__':
    main()
0 commit comments