diff --git a/urban_areas/main2_urbanSAR.py b/urban_areas/main2_urbanSAR.py
index f5004e5b7a443934d74e60f6e12851196aad6eca..5f625ceec96e5b9ef76b4334789691919fa2566c 100644
--- a/urban_areas/main2_urbanSAR.py
+++ b/urban_areas/main2_urbanSAR.py
@@ -15,67 +15,55 @@ well as the average precipitation per date and the sum of the last x days. Moreo
 several outlier detection methods are done to find outliers in the dataset. The
 outliers, which are linked to the dates in the dataset, are both plotted and exported
 to the same CSV file as the mean VV backscatter. This CSV file is saved in the 
-following directory -> 'urban_areas/output/outlierDetection/___.CSV'
+following directory -> 'urban_areas/output/{file_name}/outlierDetection.CSV'
 
 PARAMETERS:
 For SAR_path, insert the name of the folder containing unzipped and stacked SAR 
-files. All SAR directories should be placed in 'data/SAR_data/...' whenever you
-download the data from the EO Browser website. The EO Learn API saves the SAR 
-images automatically to the correct directory. For the urban_raster, the name of 
-urban raster that should be loaded should be defined. In our case, all GHSL files 
-are saved to a folder called 'GHS_built_globe' inside the 'data' folder. Moreover,
-the name of the GHSL has been changed somewhat so it becomes easier to know which
-file should be imported (raster for Haiti -> "GHS_BUILT_LDSMT_GLOBE_R2018A_HAITI.tif")
+files. All SAR files should be placed in the data folder and loaded according to
+their corresponding path (e.g. "./SAR/CapHaitien_Jan2020Mar2022"). For the urban_raster, 
+the name of urban raster that should be loaded should be defined. In our case, 
+all GHSL files are saved to a folder called 'urban_areas' inside the 'data' folder. 
+Moreover, the name of the GHSL has been changed somewhat so it is easier to know 
+which file should be imported (raster for Haiti -> "GHS_BUILT_S_E2018_GLOBE_R2022A_HAITI.tif")
 
 A tuple of (xmin, ymin, xmax, and ymax) should be defined for the 'coordinates' 
-variable. This variable is used to mask the urban raster. It is also possible to 
-leave the coordinates variable empty (i.e. assign an empty tuple). In this case, 
-the exact extent of the SAR images will be taken. Be aware of the following: 
+variable (e.g. obtained from bboxfinder.com). This variable is used to mask the 
+urban raster. It is also possible to leave the coordinates variable empty 
+(i.e. assign an empty tuple). In this case, the exact extent of the SAR images 
+will be taken. Be aware of the following: 
 (1) If the bbox is larger than the SAR extent, all urban areas in the SAR extent 
 will be taken, (2) if the bbox is smaller than the SAR extent, only the urban areas 
 within the bbox will be taken (even though the SAR extent is larger). Advisable 
 to check the bbox extent first before running the whole script (this is done by
 only running STEP 1 of EXECUTE RASTER CLIPPING). 
 
-The "built_lowerbound" variable corresponds to the multi-temporal classification of 
-built-up presence. To select built-up, a value between 3 and 6 should be chosen
-(default is 3). There is also an option to show the extent of the boundingbox 
-(from coordinates) on the first SAR image. It visualises how the boundingbox has
-been defined relative to the SAR extent. Finally, the file name of the precipitation 
-csv should be defined. This file should be saved in the 'precipitation_data' folder 
-inside the main 'data' folder. Do not forget to add '.CSV' behind the file name. 
-_______________________________________________________________________________
+There is also an option to show the extent of the boundingbox (from coordinates) 
+on the first SAR image. It visualises how the boundingbox has been defined relative 
+to the SAR extent. Finally, the file name of the precipitation csv should be defined. 
+This file should be saved in the 'precipitation_data' folder inside the main 'data' 
+folder. Do not forget to add '.CSV' behind the file name.
 
-Multi-temporal classification of built-up presence;
-0 = no data
-1 = water surface
-2 = land no built-up in any epoch
-3 = built-up from 2000 to 2014 epochs
-4 = built-up from 1990 to 2000 epochs
-5 = built-up from 1975 to 1990 epochs
-6 = built-up up to 1975 epoch
 _______________________________________________________________________________
 
-Source; https://ghsl.jrc.ec.europa.eu/documents/GHSL_Data_Package_2019.pdf?t=1637939855 (Table 2)
+Source; https://ghsl.jrc.ec.europa.eu/documents/GHSL_Data_Package_2022.pdf?t=1655995832
 _______________________________________________________________________________
 """
 
 # =============================================================================
 # IMPORT FUNCTIONS
 # =============================================================================
-from urban_areas.py.s21UrbanClipping import createBbox, maskUrban, rasterToPolygon, clipSAR, exportToCSV
-from urban_areas.py.s22VisualiseData import visualiseData
-from urban_areas.py.s23OutlierDetection import visualOutlierDetection, statisticalOutlierDetection, visualiseStatisticalOutliers, exportOutlierDetection
+from urban_areas.py.s21UrbanClipping import createBbox, maskUrban, rasterToPolygon, clipSAR, exportToCSV
+from urban_areas.py.s22VisualiseData import visualiseData
+from urban_areas.py.s23OutlierDetection import visualOutlierDetection, statisticalOutlierDetection, visualiseStatisticalOutliers, exportOutlierDetection
 
 # =============================================================================
 # DEFINE INPUT PARAMETERS (for more info, see above)
 # =============================================================================
 # PARAMETERS FOR RASTER CLIPPING
 # Make 'coordinates' an empty tuple to take SAR image as extent 
-SAR_path = 'CapHaitien_Jan2020Jun2022'
-urban_raster = 'urban_areas/GHS_BUILT_S_E2018_GLOBE_R2022A_54009_10_V1_0_R7_C12.tif' 
-coordinates = (-72.190141,19.721568,-72.173747,19.739352)
-built_lowerbound = 3
+SAR_path = 'SAR/CapHaitien_try1'
+urban_raster = 'urban_areas/GHS_BUILT_S_E2018_GLOBE_R2022A_HAITI.tif' 
+coordinates = (-72.221598,19.733007,-72.192072,19.757579)
 
 # Show boundingbox in map? (y/n) (not exported to output)
 show_bbox = 'y'
@@ -95,10 +83,10 @@ bbox = createBbox(coordinates, SAR_path, show_bbox)
 temp_path_masked = maskUrban(urban_raster, bbox)
 
 # STEP 3: CONVERT MASKED RASTER TO POLYGON
-urban_polygon_masked = rasterToPolygon(temp_path_masked, SAR_path, built_lowerbound)
+gpd_urban = rasterToPolygon(temp_path_masked, SAR_path)
 
 # STEP 4: CLIP POLYGON WITH SAR IMAGES
-mean_values, dates = clipSAR(SAR_path, urban_polygon_masked)
+mean_values, dates = clipSAR(SAR_path, gpd_urban, coordinates)
 
 # STEP 5: EXPORT MEAN VALUES AS CSV FILE
 file_name, path_mean_vv = exportToCSV(mean_values, dates, SAR_path)
@@ -110,14 +98,14 @@ file_name, path_mean_vv = exportToCSV(mean_values, dates, SAR_path)
 df_total, days, mean_VV, sum_xdays, title_name = visualiseData(SAR_path, precipitation_csv, file_name, path_mean_vv)
 
 # STEP 2: VISUAL OUTLIER DETECTION
-visualOutlierDetection(title_name, df_total, days, mean_VV, sum_xdays)
+output_dir = visualOutlierDetection(title_name, df_total, days, mean_VV, sum_xdays)
 
 # STEP 3: STATISTICAL OUTLIER DETECTION
 df_outlier, LR_columns = statisticalOutlierDetection(df_total, mean_VV, sum_xdays)
-visualiseStatisticalOutliers(title_name, df_outlier, LR_columns, mean_VV, sum_xdays, days)
+visualiseStatisticalOutliers(output_dir, title_name, df_outlier, LR_columns, mean_VV, sum_xdays, days)
 
 # STEP 4: EXPORT OUTLIER DETECTION DF
-exportOutlierDetection(df_outlier, title_name)
+exportOutlierDetection(output_dir, df_outlier, title_name)
 
 
 print(f"----- {round((time.time() - start_time), 2)} seconds -----")
diff --git a/urban_areas/py/s21UrbanClipping.py b/urban_areas/py/s21UrbanClipping.py
index 6f4081f6eaec634b4b52e6deab4ab00930d869a5..a49176d0a8400260bb3faef6fd6bd91f64ef469c 100644
--- a/urban_areas/py/s21UrbanClipping.py
+++ b/urban_areas/py/s21UrbanClipping.py
@@ -85,45 +85,70 @@ def maskUrban(urban_raster, bbox):
     # Create path to open large urban raster file (.tif)
     urban_path = os.path.join('data', urban_raster)
     
-    ## Open urban raster for masking with bounding box
-    with rio.open(urban_path) as src:
-        # Save epsg code of urban raster
-        epsg_code = int(src.crs.data['init'][5:])
+    # Find CRS of raster
+    src = rio.open(urban_path)
+    esri_code = str(src.crs)
+    
+    # BBOX to CRS of raster
+    bbox_esri = bbox.to_crs(esri_code) 
+    coords = get_features(bbox_esri)
+
+    # Mask (crop) the raster with the bbox features
+    out_image, out_transform = mask(src, coords, crop=True)
+    
+    # Save meta data of raster and update it with new info
+    out_meta = src.meta
+    out_meta.update({"driver": "GTiff",
+                      "height": out_image.shape[1],
+                      "width": out_image.shape[2],
+                      "transform": out_transform,
+                      "crs": pycrs.parse.from_esri_code(esri_code[5:]).to_proj4()}) # update meta
+    
+    ## Write masked tif file to data folder
+    with rio.open(temp_path_masked, "w", **out_meta,) as dest:
+        dest.write(out_image)
         
-        # Assign epsg of raster to bbox
-        bbox = bbox.to_crs(epsg = epsg_code)
         
-        # Convert features of bbox
-        coordinates = get_features(bbox)
         
-        ## Apply mask to raster and features
-        out_image, out_transform = mask(src, coordinates, crop=True)
+    
+    
+    
+    ############################################################
+    # ## Open urban raster for masking with bounding box
+    # with rio.open(urban_path) as src:
+    #     # Save epsg code of urban raster
+    #     epsg_code = 4326
         
-        # Save meta data of raster and update it with new info
-        out_meta = src.meta
-        out_meta.update({"driver": "GTiff",
-                         "height": out_image.shape[1],
-                         "width": out_image.shape[2],
-                         "transform": out_transform,
-                         "crs": pycrs.parse.from_epsg_code(epsg_code).to_proj4()}) # update meta
+    #     # Assign epsg of raster to bbox
+    #     bbox = bbox.to_crs(epsg = epsg_code)
         
-    ## Write masked tif file to data folder
-    with rio.open(temp_path_masked, "w", **out_meta,) as dest:
-        dest.write(out_image)
+    #     # Convert features of bbox
+    #     coordinates = get_features(bbox)
+        
+    #     ## Apply mask to raster and features
+    #     out_image, out_transform = mask(src, coordinates, crop=True)
+        
+    #     # Save meta data of raster and update it with new info
+    #     out_meta = src.meta
+    #     out_meta.update({"driver": "GTiff",
+    #                      "height": out_image.shape[1],
+    #                      "width": out_image.shape[2],
+    #                      "transform": out_transform,
+    #                      "crs": pycrs.parse.from_epsg_code(epsg_code).to_proj4()}) # update meta
+        
+    # ## Write masked tif file to data folder
+    # with rio.open(temp_path_masked, "w", **out_meta,) as dest:
+    #     dest.write(out_image)
        
     return temp_path_masked
 
 # =============================================================================
 # STEP 3: CONVERT MASKED RASTER TO POLYGON
 # =============================================================================
-def rasterToPolygon(temp_path_masked, SAR_path, urban_grade=3):
+def rasterToPolygon(temp_path_masked, SAR_path):
     """ 
     Function that converts the masked urban raster to one polygon
     """
-    ## Check if urban grade is between 3 and 6, and if not, raise TypeError
-    if urban_grade <= 2 or urban_grade > 6:
-        raise TypeError("Variable 'urban_grade'; Choose value between 3 and 6!")
-    
     ## Get epsg code from SAR data
     path = os.path.join('data', SAR_path)
     file = os.listdir(path)[0]
@@ -148,11 +173,11 @@ def rasterToPolygon(temp_path_masked, SAR_path, urban_grade=3):
     gpd_polygon = gpd.GeoDataFrame.from_features(geoms)
 
     # Set to original CRS ('epsg:3857') and convert to new CRS of SAR data
-    gpd_polygon = gpd_polygon.set_crs(epsg = 3857)
+    gpd_polygon = gpd_polygon.set_crs("ESRI:54009")
     gpd_polygon = gpd_polygon.to_crs(epsg = epsg_code)
-
-    ## Select only values based on urban_grade
-    gpd_urban = gpd_polygon.loc[gpd_polygon['raster_val'] >= urban_grade].copy()
+        
+    ## Select only urban values (not classed as 0)
+    gpd_urban = gpd_polygon.loc[gpd_polygon['raster_val'] != 0].copy()
     
     # Assign column with a constant value
     gpd_urban['dissolve'] = 1
@@ -169,7 +194,7 @@ def rasterToPolygon(temp_path_masked, SAR_path, urban_grade=3):
 # =============================================================================
 # STEP 4: CLIP POLYGON WITH SAR IMAGES
 # =============================================================================
-def clipSAR(SAR_path, urban_polygon_masked):
+def clipSAR(SAR_path, gpd_urban, coordinates):
     """
     Function to clip the SAR data based on the polygon of the urban data. A list 
     of dates and means are returned. 
@@ -178,6 +203,12 @@ def clipSAR(SAR_path, urban_polygon_masked):
     list_of_dates = []
     list_of_means = []
     
+    # Create bbox where SAR clipped image should be clipped again
+    minx, miny, maxx, maxy = coordinates
+    geom = box(*[minx, miny, maxx, maxy])
+    gdf_bbox = gpd.GeoDataFrame({'id': 1, 'geometry': [geom]})
+    coords_bbox = get_features(gdf_bbox)
+    
     # Create path to load SAR data
     SAR_path = os.path.join('data', SAR_path)
     
@@ -188,9 +219,10 @@ def clipSAR(SAR_path, urban_polygon_masked):
         # Open SAR file with rioxarray and keep only first band (which is VV)
         SAR = rxr.open_rasterio(file_path, masked = True).squeeze()[0]
         
-        # Clip the SAR image with the urban polygon
+        # Clip the SAR image with the urban polygon and bbox
         # credit: https://www.earthdatascience.org/courses/use-data-open-source-python/intro-raster-data-python/raster-data-processing/crop-raster-data-with-shapefile-in-python/
-        SAR_clipped = SAR.rio.clip(urban_polygon_masked.geometry.apply(mapping))
+        SAR_clipped = SAR.rio.clip(gpd_urban.geometry.apply(mapping))
+        SAR_clipped = SAR_clipped.rio.clip(coords_bbox)
         
         # Calculate the means of each clipped SAR image
         means = float(np.mean(SAR_clipped))
@@ -237,4 +269,5 @@ def exportToCSV(mean_values, dates, SAR_path):
         df_mean_vv.to_csv(path_mean_vv, encoding='utf-8', index=False)
     else:
         print(f"\nThe following file already exists: \n{file_name}")
+        
     return file_name, path_mean_vv
diff --git a/urban_areas/py/s22VisualiseData.py b/urban_areas/py/s22VisualiseData.py
index 3fe5fbcb1bb951824357d4c8ef08dba2681b255f..9565f7e38d1c2c062b498ed90f74bed3ae22bfa8 100644
--- a/urban_areas/py/s22VisualiseData.py
+++ b/urban_areas/py/s22VisualiseData.py
@@ -24,14 +24,13 @@ def visualiseData(SAR_path, precipitation_csv, file_name, path_mean_vv):
     ## Combine DataFrames based on data
     df_total = pd.merge(df_precip, df_vv, on ='date')
     
-    # Define name for titles
+    # Create title name based on SAR_path
     count = SAR_path.count('/')
     if count != 0:
         title_name = re.search('/(.+)', SAR_path).group(count)
     else:
-        title_name = SAR_path
+        title_name = SAR_path
 
-    
     ##### LINEPLOT MEAN_VV AND AVERAGE PRECIPITATION
     ## Set parameters for the lineplot
     fig, ax = plt.subplots(figsize=(20, 5))
diff --git a/urban_areas/py/s23OutlierDetection.py b/urban_areas/py/s23OutlierDetection.py
index e81453c13450bbb77a06d6c60052c21864cf17b9..19484af42a3f42e98a8f8672c26561ce6eacb633 100644
--- a/urban_areas/py/s23OutlierDetection.py
+++ b/urban_areas/py/s23OutlierDetection.py
@@ -18,6 +18,13 @@ def visualOutlierDetection(title_name, df_total, days, mean_VV, sum_xdays):
     df_plot = df_total.copy()
     df_plot = df_plot[[mean_VV, sum_xdays]]
     
+    # Create output folder (if not exist) and specific folder within output to save files
+    if not os.path.exists(os.path.join('urban_areas', 'output')):
+        os.makedirs(os.path.join('urban_areas', 'output'))
+    output_dir = os.path.join('urban_areas', 'output', title_name)
+    if not os.path.exists(output_dir):
+        os.makedirs(output_dir)
+    
     ## METHOD 1: HISTOGRAM 
     fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2,figsize=(15, 8))
     ax1 = sns.histplot(data=df_plot, x=mean_VV, kde=True, color="darkred", ax=ax1)
@@ -27,7 +34,7 @@ def visualOutlierDetection(title_name, df_total, days, mean_VV, sum_xdays):
     ax2.set_title(f'Histogram of sum {days}-days precipitation (in mm)', 
                   fontdict={'fontsize': 15})
     plt.tight_layout()
-    plt.savefig(f"./urban_areas/output/Histogram_{title_name}.png", dpi=300)
+    plt.savefig(f"{output_dir}/Histogram_{title_name}.png", dpi=300)
     plt.show()
     
     ## METHOD 2: BOXPLOT
@@ -38,7 +45,7 @@ def visualOutlierDetection(title_name, df_total, days, mean_VV, sum_xdays):
     sns.swarmplot(data=df_plot[mean_VV], orient="h", color=".25", size=5)
     
     plt.tight_layout()
-    plt.savefig(f"./urban_areas/output/Boxplot{mean_VV}_{title_name}.png", dpi=300)
+    plt.savefig(f"{output_dir}/Boxplot{mean_VV}_{title_name}.png", dpi=300)
     plt.show()
     
     sns.set(rc = {'figure.figsize':(15,8)})    
@@ -48,7 +55,7 @@ def visualOutlierDetection(title_name, df_total, days, mean_VV, sum_xdays):
     sns.swarmplot(data=df_plot[sum_xdays], orient="h", color=".25", size=5)
     
     plt.tight_layout()
-    plt.savefig(f"./urban_areas/output/Boxplot{sum_xdays}_{title_name}.png", dpi=300)
+    plt.savefig(f"{output_dir}/Boxplot{sum_xdays}_{title_name}.png", dpi=300)
     plt.show()
     
     ## METHOD 3: SCATTERPLOT
@@ -58,10 +65,10 @@ def visualOutlierDetection(title_name, df_total, days, mean_VV, sum_xdays):
     sns.scatterplot(data=df_plot, x=sum_xdays, y=mean_VV)
     
     plt.tight_layout()
-    plt.savefig(f"./urban_areas/output/Scatterplot_{title_name}.png", dpi=300)
+    plt.savefig(f"{output_dir}/Scatterplot_{title_name}.png", dpi=300)
     plt.show()
 
-    return
+    return output_dir
 
 # METHOD 2: STATISTICALLY CHECK OUTLIERS
 def statisticalOutlierDetection(df_total, mean_VV, sum_xdays):
@@ -282,7 +289,7 @@ def mad_method(df, variable_name, threshold=3):
 # =============================================================================
 # VISUALISE AND EXPORT OUTLIERS
 # =============================================================================
-def visualiseStatisticalOutliers(title_name, df_outlier, LR_columns, mean_VV, sum_xdays, days):  
+def visualiseStatisticalOutliers(output_dir, title_name, df_outlier, LR_columns, mean_VV, sum_xdays, days):  
     # Get list of indices of both variables (which will be used to marker the outliers)
     indexLR_vv = df_outlier.loc[df_outlier[LR_columns[0]] != 0].index.to_list()
     indexLR_precip = df_outlier.loc[df_outlier[LR_columns[1]] != 0].index.to_list()
@@ -297,7 +304,7 @@ def visualiseStatisticalOutliers(title_name, df_outlier, LR_columns, mean_VV, su
                         palette="deep")
         plt.legend(loc='upper right')
         plt.tight_layout()
-        plt.savefig(f"./urban_areas/output/outliers{variables[idx]}_Scatterplot_{title_name}.png", dpi=300)
+        plt.savefig(f"{output_dir}/outliers{variables[idx]}_Scatterplot_{title_name}.png", dpi=300)
         plt.show()
 
     ##### VISUALISE RESULTS - LINEPLOT 
@@ -340,17 +347,13 @@ def visualiseStatisticalOutliers(title_name, df_outlier, LR_columns, mean_VV, su
     
     ## Plot final result
     plt.tight_layout()
-    plt.savefig(f"./urban_areas/output/outliers_Lineplot_{title_name}.png", dpi=300)
+    plt.savefig(f"{output_dir}/outliers_Lineplot_{title_name}.png", dpi=300)
     plt.show()
     
-def exportOutlierDetection(df_outlier, title_name):
-    # Create directory
-    dest_path = os.path.join('urban_areas', 'output', 'outlierDetection')
-    if not os.path.exists(dest_path):
-        os.makedirs(dest_path)
-    
+def exportOutlierDetection(output_dir, df_outlier, title_name):
+    # Create directory    
     ## Write DataFrame with outliers to output folder
-    csv_path = os.path.join(dest_path, f'outliers_{title_name}.csv')
+    csv_path = os.path.join(output_dir, f'outliers_{title_name}.csv')
     
     df_outlier.to_csv(csv_path, encoding='utf-8', index=False)