Pylint is giving me a recursion error: "<class 'RecursionError'>: maximum recursion depth exceeded in comparison". It points to a line where I simply do import numpy as np. When I run the code there is no problem; it works fine. Still, I am wondering what the issue is here.
PS: I am using the Pylint extension in VS Code.
Thanks a lot.
Edit:
I believe the problem is due to these two particular functions; when I remove them, the problem disappears. Both functions work fine when I run the code. I don't know if it matters, but I am importing these functions, plus several others, from a separate .py file.
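For reference, the imports at the top of the main script look roughly like this (feature_utils is just a placeholder for the name of that separate .py file):

import numpy as np
import pandas as pd
from feature_utils import med_std_dist, handover_thres_feats  # plus several other helpers

The two functions themselves are: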
def med_std_dist(df):
    # Last reading per site/region/timestamp, then keep only rows with FAIL_POWER >= 20
    df_subset = df[["DATETIME", "REGION", "O2_ID", "LATITUDE", "LONGITUDE", "FAIL_POWER"]]
    grped_df = df_subset.groupby(["O2_ID", "REGION", "DATETIME"], as_index=False).last()
    grped_df = grped_df[grped_df.FAIL_POWER >= 20]
    merged_df = pd.merge(df_subset, grped_df, how="right", on=["DATETIME", "REGION"],
                         suffixes=("_org", "_over_20"))
    merged_df = merged_df[merged_df.O2_ID_org != merged_df.O2_ID_over_20]
    merged_df["dist"] = get_distance_np(merged_df.LATITUDE_org.values, merged_df.LONGITUDE_org.values,
                                        merged_df.LATITUDE_over_20.values, merged_df.LONGITUDE_over_20.values)
    # Median/std of distance and neighbour fail power per site/timestamp/region
    merged_df = (merged_df[["DATETIME", "REGION", "O2_ID_org", "dist", "FAIL_POWER_over_20"]]
                 .groupby(["O2_ID_org", "DATETIME", "REGION"], as_index=False)
                 .agg(["median", "std"]).reset_index().droplevel(1, axis=1))
    merged_df.columns = ["O2_ID", "DATETIME", "REGION", "median_dist", "std_dist",
                         "median_neigh_fail", "std_neigh_fail"]
    return pd.merge(df, merged_df, on=["O2_ID", "DATETIME", "REGION"], how="left")
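For context, get_distance_np comes from the same helper file and just turns the latitude/longitude arrays into per-row distances with NumPy. A vectorized haversine sketch of that kind of helper (not the exact implementation) would be:

def get_distance_np(lat1, lon1, lat2, lon2):
    # Sketch only: vectorized haversine distance in km; the real helper may differ.
    lat1, lon1, lat2, lon2 = map(np.radians, (lat1, lon1, lat2, lon2))
    dlat = lat2 - lat1
    dlon = lon2 - lon1
    a = np.sin(dlat / 2.0) ** 2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon / 2.0) ** 2
    return 6371.0 * 2.0 * np.arcsin(np.sqrt(a))

The second function is: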
def handover_thres_feats(df_fails, df_hand):
    # Fail power of each neighbour, joined onto the handover table
    fail_subset = df_fails[["DATETIME", "O2_ID", "FAIL_POWER"]].rename(
        columns={"O2_ID": "O2_ID_neigh", "FAIL_POWER": "fail_check"})
    merged_subset = pd.merge(fail_subset, df_hand, how="right", on=["O2_ID_neigh"])
    merged_subset_1 = merged_subset[merged_subset.fail_check >= 15]
    # Aggregate stats over neighbours whose fail power is >= 15
    dist_df = pd.merge(df_fails[["DATETIME", "O2_ID", "LATITUDE", "LONGITUDE"]], merged_subset_1,
                       on=["DATETIME", "O2_ID"], how="left")
    dist_df["dist_to_neigh"] = get_distance_np(dist_df.LATITUDE.values, dist_df.LONGITUDE.values,
                                               dist_df.LATITUDE_neigh, dist_df.LONGITUDE_neigh)
    agg_dist_df = dist_df.groupby(["DATETIME", "O2_ID"], as_index=False)[
        ["fail_check", "dist_to_neigh", "relative_handover"]].agg(
        {"fail_check": ["median", "std"], "dist_to_neigh": ["median", "std"], "relative_handover": "sum"}).droplevel(1, axis=1)
    agg_dist_df.columns = ["DATETIME", "O2_ID", "median_neigh_fails", "std_neigh_fails",
                           "median_dist_to_neigh", "std_dist_to_neigh", "total_rel_handover"]
    merged_df = pd.merge(df_fails, agg_dist_df, how="left", on=["DATETIME", "O2_ID"]).rename(
        columns={"fail_check": "median_neigh_fails", "dist_to_neigh": "median_dist_to_neigh",
                 "relative_handover": "total_rel_handover"})
    # Features of the highest-fail-power neighbour (closest one on ties)
    dist_df_2 = pd.merge(df_fails[["DATETIME", "O2_ID", "LATITUDE", "LONGITUDE"]], merged_subset,
                         on=["DATETIME", "O2_ID"], how="left")
    dist_df_2["dist_to_neigh"] = get_distance_np(dist_df_2.LATITUDE.values, dist_df_2.LONGITUDE.values,
                                                 dist_df_2.LATITUDE_neigh, dist_df_2.LONGITUDE_neigh)
    dist_df_2 = dist_df_2[["DATETIME", "O2_ID", "fail_check", "relative_handover", "dist_to_neigh"]]
    max_neigh_df = dist_df_2.groupby(["DATETIME", "O2_ID"], as_index=False)[["fail_check"]].max()
    max_df = pd.merge(dist_df_2, max_neigh_df, on=["DATETIME", "O2_ID", "fail_check"], how="right")
    max_filtered = max_df.sort_values(["O2_ID", "DATETIME", "dist_to_neigh"]).drop_duplicates(
        subset=["O2_ID", "DATETIME"], keep="first").rename(
        columns={"fail_check": "max_fail_neigh", "dist_to_neigh": "dist_to_max_fail_neigh",
                 "relative_handover": "max_fail_handover"})
    return pd.merge(merged_df, max_filtered, how="left", on=["DATETIME", "O2_ID"]).drop(
        ["LATITUDE", "LONGITUDE"], axis=1)
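For completeness, the calls in the main script look roughly like this (simplified; df_fails is the failures DataFrame and df_hand the handover DataFrame):

# simplified call order, just to show how the two functions are used
df_fails = med_std_dist(df_fails)
df_fails = handover_thres_feats(df_fails, df_hand)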