diff --git a/.gitignore b/.gitignore
index bee8a64b79a99590d5303307144172cfe824fbf7..4bae5a6a8a529ec23892c9a096f5a1e61ab724a6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1 @@
-__pycache__
+__pycache__/
diff --git a/exploration.py b/exploration.py
index 9201285f0556b00bd1391ee44cbe990507aba719..b7248d8eb67a7c13c1da61998ddd8346f5495af3 100644
--- a/exploration.py
+++ b/exploration.py
@@ -703,6 +703,36 @@ for barn in bsum["barn"].drop_duplicates():
 
 
 
+#%% make bar plots per barn : mean feeding percentage per cow, annotated with the number of records per cow
+
+for barn in bsum["barn"].drop_duplicates():
+    # select the feeding-behaviour summaries of this barn from 1 September 2022 onwards
+    subset = bsum.loc[(bsum["behaviour"]=="feed") & \
+                      (bsum["date"] >= pd.to_datetime("2022-9-1")) & \
+                      (bsum["barn"] == barn),
+                      ["cowid","barn","perc"]]
+    subset["cowid2"] = subset["cowid"].astype(str)
+    fig,ax = plt.subplots(nrows=1,ncols=1,
+                          figsize = (20,6))
+    # order the cows by their mean feeding percentage and count the records per cow
+    order = subset[["cowid2","perc"]].groupby(by="cowid2").mean().sort_values(by = "perc").reset_index()
+    order2 = subset[["cowid2","perc"]].groupby(by="cowid2").count().reset_index()
+    order = pd.merge(order,order2, on = "cowid2")
+    order = order.rename(columns = {"perc_y" : "counts", "perc_x" : "mean"})
+    sns.barplot(data = subset,
+                x = 'cowid2', y = 'perc',
+                order = order.cowid2,
+                estimator = "mean",errorbar = "se",
+                palette = "flare", errcolor = ".5"
+                )
+    ax.set_title("barn " + str(barn))
+    
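+    # annotate each bar with the number of records behind the cow's mean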
+    position = 0.01*ax.get_ylim()[1]
+    for tick in range(len(order)):
+        ax.text(tick, position, order["counts"][tick], horizontalalignment='center',
+                size='x-small', color='w', weight='semibold')
+
+
+
 ##hallooo
 
 
diff --git a/preprocessing_all_behaviours.py b/preprocessing_all_behaviours.py
index 5147f9d39fdbd078559410ed21da52a4022e4a82..7b8be0683843a51d48c57c64a079e07d04a1a503 100644
--- a/preprocessing_all_behaviours.py
+++ b/preprocessing_all_behaviours.py
@@ -136,6 +136,10 @@ for dd in range(t_interval.days+1):
         data["gap"] = data["relsec"].diff()
         data.loc[data["gap"] < 0,"gap"] = np.nan # set to nan when different cowid
         
+        # TODO: write the data of all cows to csv with a datetime stamp,
+        #       after interpolation
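+        # alldata collects the per-cow dataframes of the current day and is
+        # written to a single csv per day after the cow loop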
+        alldata = pd.DataFrame([])
+        
         #-----------------------filter & interpolate per cow-----------------------
         # filter the (X,y) time series with a median filter, minimal dist travelled and interpolate
         # cows in the barn
@@ -219,6 +223,7 @@ for dd in range(t_interval.days+1):
                         del x1,y1,x2,y2,x3,y3,x4,y4
                         
                 df = df[["cowid","barn","date","t","gap","X","y","xnew","ynew","acc_x","acc_y","acc_z","dist","area","zone"]]
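+                # add this cow's filtered and interpolated data to the daily collection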
+                alldata = pd.concat([alldata,df])
                 #test = df.loc[(df["area"].isna()) & (df["dist"] < 180),:]   # check no area
                 #plt.scatter(test["xnew"],test["ynew"],marker = "o",c = "blue",s = 4)
                 del x, y
@@ -350,9 +355,22 @@ for dd in range(t_interval.days+1):
                 act_dist["dist"] = round(act_dist["dist"],2)
                 act_dist["perc_no_lying_nan"] = round(act_dist["perc_no_lying_nan"],2)
                 act_dist["perc_no_lying"] = round(act_dist["perc_no_lying"],2)
-                act_dist.to_csv(os.path.join(path_res,str(startdate+dd).replace("-","")+"_activity_cow_" + str(cow) + "_barn" + str(barn) + ".txt"), index=False)
-                bouts.to_csv(os.path.join(path_res, str(startdate+dd).replace("-","")+"_bouts_cow_" + str(cow) + "_barn" + str(barn) + ".txt"), index=False)
-                bsums.to_csv(os.path.join(path_res,str(startdate+dd).replace("-","")+ "_summary_cow_" + str(cow) + "_barn" + str(barn) + ".txt"), index=False)   
+
+                # append the per-cow results to cumulative files across days;
+                # write the header only when a file is created for the first time
+                act_file = os.path.join(path_res, "activity_cow_" + str(cow) + "_barn" + str(barn) + ".txt")
+                act_dist.to_csv(act_file, index=False, mode='a', header=not os.path.isfile(act_file))
+
+                bouts_file = os.path.join(path_res, "bouts_cow_" + str(cow) + "_barn" + str(barn) + ".txt")
+                bouts.to_csv(bouts_file, index=False, mode='a', header=not os.path.isfile(bouts_file))
+
+                sum_file = os.path.join(path_res, "summary_cow_" + str(cow) + "_barn" + str(barn) + ".txt")
+                bsums.to_csv(sum_file, index=False, mode='a', header=not os.path.isfile(sum_file))
                 
             else:
                 df = pd.DataFrame({"cowid":pd.Series(np.ones(86400)*cows["cowid"][i]),
@@ -371,6 +389,7 @@ for dd in range(t_interval.days+1):
                                    "area" : pd.Series(np.arange(0,86400,1)*np.nan),
                                    "zone" : pd.Series(np.arange(0,86400,1)*np.nan)
                                    })
+                alldata = pd.concat([alldata,df])
                 
                 # write data to csv per cow
                 df.to_csv(os.path.join(path_res,"barn"+str(round(barn)), "data_" + str(startdate+dd).replace("-","") + "_barn_" + str(round(barn)) + "_cow_"+ str(cows["cowid"][i])  + ".txt"), index=False)
@@ -380,7 +399,9 @@ for dd in range(t_interval.days+1):
 
             del df
             
+            
         # clear workspace and memory
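+        # write the combined data of all cows for this day to a single csv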
+        alldata.to_csv(os.path.join(path_res, "alldata_" + str(startdate+dd).replace("-","") + ".txt"), index=False)
         del data
     except:
         print("data preprocessing for " + str(startdate+dd).replace("-","") + " has failed ")