/*==============================================================================================
TITLE: Wetland-oriented Annual LULC classification
AUTHORS: Migone, L., Schivo, F., Verón, S., Banchero, S., and Grimson, R.
EMAIL: lmigone@unsam.edu.ar
LICENSE: GNU AGPL v3.0
DATE: April 2025

For a detailed explanation of the methodology, please refer to the associated paper:
PENDING PUBLICATION.

DESCRIPTION: This script performs an annual Land Use and Land Cover (LULC) classification,
with a particular focus on identifying three types of wetlands among the LULC classes.
The classification is conducted using a Random Forest classifier. The feature importance
table and a GeoTIFF file containing the annual LULC map are exported for further analysis.
==============================================================================================*/

//----------------------------------------------------------------------------------------------
// SECTION 0 - Imports
//----------------------------------------------------------------------------------------------

// Elements to incorporate as imports
var roi = null;         // Complete with Region of Interest feature collection
var trainSetRaw = null; // Complete with raw train set feature collection
var testSetRaw = null;  // Complete with raw test set feature collection
var TPI200 = null;      // Complete with TPI200 layer or similar. If omitted, lines 169, 185 and 188 must be commented out
var TPI500 = null;      // Complete with TPI500 layer or similar. If omitted, lines 170, 156 and 189 must be commented out
var sarXX = null;       // Complete with imports of S1 assets generated by S1-preproc-Wetland-LULC

/* Elements to incorporate to generate the complementary train set. To avoid errors, generate
   a feature collection for each class even if it contains 0 features. Make sure each feature
   collection has a property whose name and value match the class property used in the train
   and test sets. */
var ArtificialWetl = null;
var Infrastructure = null;
var Ponds = null;
var Crops = null;
var WetMeadows = null;
var WoodyVeg = null;
var Rangelands = null;
var BareSoil = null;
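/* Example (hypothetical): the import placeholders above could be filled in with user assets
   along the following lines. The asset IDs below are illustrative only and must be replaced
   with the user's own assets (e.g., the S1 composite produced by S1-preproc-Wetland-LULC).
var roi = ee.FeatureCollection('users/your_user/wetland_roi');
var trainSetRaw = ee.FeatureCollection('users/your_user/train_set_raw');
var testSetRaw = ee.FeatureCollection('users/your_user/test_set_raw');
var TPI200 = ee.Image('users/your_user/TPI200');
var TPI500 = ee.Image('users/your_user/TPI500');
var sar2017 = ee.Image('users/your_user/S1_composite_2017');
*/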
//----------------------------------------------------------------------------------------------
// SECTION 1 - General Settings
//----------------------------------------------------------------------------------------------

// Define classification period (year1 = starting year, year2 = finishing year)
var year1 = 2017; // Complete with year to classify
var year2 = year1 + 1;

// Select imported SAR data for year1
var sar = ""; // Assign the imported asset with SAR data for year1

// Elements to incorporate as imports
var trainSetIni = ''; // Complete with initial train set or import with this variable name

// Set cloud filter threshold
var cloudThreshold = 18;

// Random forest hyperparameters
var numTrees = 100;
var maxNodes = 60;

// Parameters for GLCM
var glcmKernelSize = 3;
var glcmKernel = ee.Kernel.square(glcmKernelSize);

// Set filenames and paths
var classYear = year1.toString();
var runningDate = '';   // Set up date to keep track of work
var outputFolder = '';  // Name of Google Drive folder where to export results
var classReference = '' + classYear + '_' + runningDate;      // Reference of LULC classification
var lulcClassFN = 'LULC-map_' + classReference;               // Set filename of LULC map
var FeatImpTableFN = 'FeatureImportance_' + classReference;   // Set filename of feature importance CSV

// Set CRS to export LULC classification
var targetCrs = 'EPSG:32721';

// Indicate the name of the property containing the classes
var ClassPropertyName = 'Clase';
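/* Example (hypothetical) of filled-in settings; all values below are illustrative only:
var sar = sar2017;                         // imported S1 asset for year1
var runningDate = '2025-04-15';
var outputFolder = 'Wetland_LULC_exports';
var classReference = 'StudyArea_' + classYear + '_' + runningDate;
*/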
//----------------------------------------------------------------------------------------------
// SECTION 2 - SENTINEL 2 PROCESSING
//----------------------------------------------------------------------------------------------

// Dates are set for southern hemisphere seasons
var startDate = year1 + '-06-21';
var endDate = year2 + '-06-20';

// Function to mask cloudy pixels using the Sentinel-2 QA band
function maskS2clouds(image) {
  var qa = image.select('QA60');
  // Bits 10 and 11 are clouds and cirrus, respectively.
  var cloudBitMask = 1 << 10;
  var cirrusBitMask = 1 << 11;
  // Both flags should be set to zero, indicating clear conditions.
  var mask = qa.bitwiseAnd(cloudBitMask).eq(0)
      .and(qa.bitwiseAnd(cirrusBitMask).eq(0));
  return image.updateMask(mask);
}

// Get Sentinel-2 imagery (harmonized Level-1C product)
var S2 = ee.ImageCollection("COPERNICUS/S2_HARMONIZED")
  .filterDate(startDate, endDate)
  .filterBounds(roi)
  .filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', cloudThreshold))
  .map(maskS2clouds);

// Define function to compute indices
function addIndex(image) {
  // Normalized Difference Vegetation Index (NDVI)
  var ndvi = image.normalizedDifference(["B8", "B4"]).rename("NDVI");
  var newImage = image.addBands(ndvi);
  // Normalized Difference Wetness Index (NDWI)
  var ndwi = image.normalizedDifference(["B8", "B11"]).rename("NDWI");
  newImage = newImage.addBands(ndwi);
  // Chlorophyll Vegetation Index (CVI)
  var cvi = image.expression("B8 * B4 / pow(B3, 2.0)",
      {B8: image.select("B8"), B4: image.select("B4"), B3: image.select("B3")}).rename("CVI");
  newImage = newImage.addBands(cvi);
  // Modified Normalized Difference Water Index 1 (MNDWI1)
  var mndwi1 = image.normalizedDifference(["B11", "B3"]).rename("MNDWI1");
  newImage = newImage.addBands(mndwi1);
  // Modified Normalized Difference Water Index 2 (MNDWI2)
  var mndwi2 = image.normalizedDifference(["B12", "B3"]).rename("MNDWI2");
  newImage = newImage.addBands(mndwi2);
  // Soil-Adjusted Vegetation Index (SAVI)
  var savi = image.expression('(B8 - B4)/(B5+B4+0.428)*1.428',
      {B8: image.select("B8"), B4: image.select("B4"), B5: image.select("B5")}).rename("SAVI");
  newImage = newImage.addBands(savi);
  return newImage;
}

function computeIndices(ImCollection) {
  return ImCollection.map(addIndex);
}

// Compute annual and seasonal indices
var S2IdxWinter = computeIndices(S2.filterDate(year1 + '-06-21', year1 + '-09-20')
  .select('B2', 'B3', 'B4', 'B5', 'B8', 'B11', 'B12'));
var S2IdxSpring = computeIndices(S2.filterDate(year1 + '-09-21', year1 + '-12-20')
  .select('B2', 'B3', 'B4', 'B5', 'B8', 'B11', 'B12'));
var S2IdxSummer = computeIndices(S2.filterDate(year1 + '-12-21', year2 + '-03-21')
  .select('B2', 'B3', 'B4', 'B5', 'B8', 'B11', 'B12'));
var S2IdxAutum = computeIndices(S2.filterDate(year2 + '-03-21', year2 + '-06-20')
  .select('B2', 'B3', 'B4', 'B5', 'B8', 'B11', 'B12'));
var S2IdxAnnual = computeIndices(S2.filterDate(startDate, endDate)
  .select('B2', 'B3', 'B4', 'B5', 'B8', 'B11', 'B12'));
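// Optional sanity check (not part of the original workflow): print how many Sentinel-2
// scenes remain per season after the cloud filter, to verify that cloudThreshold does not
// leave any season without observations.
print('Winter scenes:', S2.filterDate(year1 + '-06-21', year1 + '-09-20').size());
print('Spring scenes:', S2.filterDate(year1 + '-09-21', year1 + '-12-20').size());
print('Summer scenes:', S2.filterDate(year1 + '-12-21', year2 + '-03-21').size());
print('Autumn scenes:', S2.filterDate(year2 + '-03-21', year2 + '-06-20').size());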
// Compute seasonal mean of every band and index, then exclude bands 5 and 8.
var S2IdxWinterMean = S2IdxWinter.mean()
  .rename(['B2_Wi', 'B3_Wi', 'B4_Wi', 'B5_Wi', 'B8_Wi', 'B11_Wi', 'B12_Wi',
           'NDVI_Wi', 'NDWI_Wi', 'CVI_Wi', 'MNDWI1_Wi', 'MNDWI2_Wi', 'SAVI_Wi'])
  .select('B2_Wi', 'B3_Wi', 'B4_Wi', 'B11_Wi', 'B12_Wi',
          'NDVI_Wi', 'NDWI_Wi', 'CVI_Wi', 'MNDWI1_Wi', 'MNDWI2_Wi', 'SAVI_Wi');
var S2IdxSpringMean = S2IdxSpring.mean()
  .rename(['B2_Sp', 'B3_Sp', 'B4_Sp', 'B5_Sp', 'B8_Sp', 'B11_Sp', 'B12_Sp',
           'NDVI_Sp', 'NDWI_Sp', 'CVI_Sp', 'MNDWI1_Sp', 'MNDWI2_Sp', 'SAVI_Sp'])
  .select('B2_Sp', 'B3_Sp', 'B4_Sp', 'B11_Sp', 'B12_Sp',
          'NDVI_Sp', 'NDWI_Sp', 'CVI_Sp', 'MNDWI1_Sp', 'MNDWI2_Sp', 'SAVI_Sp');
var S2IdxSummerMean = S2IdxSummer.mean()
  .rename(['B2_Su', 'B3_Su', 'B4_Su', 'B5_Su', 'B8_Su', 'B11_Su', 'B12_Su',
           'NDVI_Su', 'NDWI_Su', 'CVI_Su', 'MNDWI1_Su', 'MNDWI2_Su', 'SAVI_Su'])
  .select('B2_Su', 'B3_Su', 'B4_Su', 'B11_Su', 'B12_Su',
          'NDVI_Su', 'NDWI_Su', 'CVI_Su', 'MNDWI1_Su', 'MNDWI2_Su', 'SAVI_Su');
var S2IdxAutumMean = S2IdxAutum.mean()
  .rename(['B2_Au', 'B3_Au', 'B4_Au', 'B5_Au', 'B8_Au', 'B11_Au', 'B12_Au',
           'NDVI_Au', 'NDWI_Au', 'CVI_Au', 'MNDWI1_Au', 'MNDWI2_Au', 'SAVI_Au'])
  .select('B2_Au', 'B3_Au', 'B4_Au', 'B11_Au', 'B12_Au',
          'NDVI_Au', 'NDWI_Au', 'CVI_Au', 'MNDWI1_Au', 'MNDWI2_Au', 'SAVI_Au');

// Compute annual mean of every band and index, then exclude bands 5 and 8.
var S2IdxAnnualMean = S2IdxAnnual.mean()
  .rename(['B2_An', 'B3_An', 'B4_An', 'B5_An', 'B8_An', 'B11_An', 'B12_An',
           'NDVI_An', 'NDWI_An', 'CVI_An', 'MNDWI1_An', 'MNDWI2_An', 'SAVI_An'])
  .select('B2_An', 'B3_An', 'B4_An', 'B11_An', 'B12_An',
          'NDVI_An', 'NDWI_An', 'CVI_An', 'MNDWI1_An', 'MNDWI2_An', 'SAVI_An');

// Compute annual standard deviation of indices
var S2IdxAnnualStd = S2IdxAnnual.reduce(ee.Reducer.stdDev())
  .select('NDVI_stdDev', 'NDWI_stdDev', 'CVI_stdDev', 'MNDWI1_stdDev', 'MNDWI2_stdDev', 'SAVI_stdDev')
  .rename('NDVI_An_stdDev', 'NDWI_An_stdDev', 'CVI_An_stdDev', 'MNDWI1_An_stdDev', 'MNDWI2_An_stdDev', 'SAVI_An_stdDev');

// Compute GLCM texture and keep "Sum average", "Variance" and "Contrast" of some indices
var ndviTexture = S2IdxSpringMean.select('NDVI_Sp').multiply(1000).toInt32()
  .glcmTexture({size: glcmKernelSize, kernel: glcmKernel})
  .select('NDVI_Sp_savg', 'NDVI_Sp_var', 'NDVI_Sp_contrast');
var ndwiTexture = S2IdxWinterMean.select('NDWI_Wi').multiply(1000).toInt32()
  .glcmTexture({size: glcmKernelSize, kernel: glcmKernel})
  .select('NDWI_Wi_savg', 'NDWI_Wi_var', 'NDWI_Wi_contrast');
var mndwi1Texture = S2IdxWinterMean.select('MNDWI1_Wi').multiply(1000).toInt32()
  .glcmTexture({size: glcmKernelSize, kernel: glcmKernel})
  .select('MNDWI1_Wi_savg', 'MNDWI1_Wi_var', 'MNDWI1_Wi_contrast');
var TPI200Texture = TPI200.select('b1').multiply(1000).toInt32()
  .glcmTexture({size: glcmKernelSize, kernel: glcmKernel})
  .select('b1_savg', 'b1_var', 'b1_contrast')
  .rename('TPI200_savg', 'TPI200_var', 'TPI200_contrast');
var TPI500Texture = TPI500.select('b1').multiply(1000).toInt32()
  .glcmTexture({size: glcmKernelSize, kernel: glcmKernel})
  .select('b1_savg', 'b1_var', 'b1_contrast')
  .rename('TPI500_savg', 'TPI500_var', 'TPI500_contrast');

// Compute 95th percentile of winter MNDWI1
var mndwi1WinterP95 = S2IdxWinter.select('MNDWI1')
  .reduce(ee.Reducer.percentile([95]))
  .rename('MNDWI1_Wi_p95');

// Merge all images to a single input image
var inputBands = S2IdxWinterMean
  .addBands(S2IdxSpringMean)
  .addBands(S2IdxSummerMean)
  .addBands(S2IdxAutumMean)
  .addBands(S2IdxAnnualMean)
  .addBands(S2IdxAnnualStd)
  .addBands(ndviTexture)
  .addBands(ndwiTexture)
  .addBands(mndwi1Texture)
  .addBands(TPI200Texture)
  .addBands(TPI500Texture)
  .addBands(mndwi1WinterP95)
  .addBands(TPI200.select('b1').rename('TPI200'))
  .addBands(TPI500.select('b1').rename('TPI500'))
  .addBands(sar);
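// Optional check (not part of the original workflow): inspect the assembled feature space.
// The band names printed here are the predictors used to train the Random Forest in Section 3.
print('Input feature bands:', inputBands.bandNames());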
//----------------------------------------------------------------------------------------------
// SECTION 3 - RANDOM FOREST CLASSIFICATION
//----------------------------------------------------------------------------------------------

// Incorporate complementary train points
var trainSetCompRaw = ArtificialWetl
  .merge(Infrastructure)
  .merge(Ponds)
  .merge(Crops)
  .merge(WetMeadows)
  .merge(WoodyVeg)
  .merge(Rangelands)
  .merge(BareSoil)
  .filterBounds(roi);

// Compute feature space of a dataset
var getAttributes = function(fc) {
  var mscYear = inputBands;
  return mscYear.reduceRegions({
    collection: ee.FeatureCollection(fc),
    reducer: ee.Reducer.first().forEach(mscYear.bandNames()),
    scale: 10,
    tileScale: 12
  });
};

var trainSetIni = getAttributes(trainSetRaw.filterBounds(roi));
var trainSetComp = getAttributes(trainSetCompRaw);
var trainSet = trainSetComp.merge(trainSetIni);
var testSet = getAttributes(testSetRaw);

var bandNames = inputBands.bandNames();

// Train a random forest classifier
var randomForestClassifier = ee.Classifier.smileRandomForest({numberOfTrees: numTrees, maxNodes: maxNodes})
  .train(trainSet, ClassPropertyName, bandNames);

// Classify the whole area
var lulcClassification = inputBands.classify(randomForestClassifier).rename('LULC_classification');

// Compute confusion matrix
var testSetClassification = testSet.classify(randomForestClassifier, 'Pred');
var OverallAccuracy = testSetClassification.errorMatrix(ClassPropertyName, 'Pred', [1,2,3,4,5,6,7,8]).accuracy();
var ConfMatrix = testSetClassification.errorMatrix(ClassPropertyName, 'Pred');
print(ConfMatrix, 'Confusion matrix of LULC classification of ' + classYear);
print(OverallAccuracy, 'Overall accuracy of LULC classification of ' + classYear);

// Get feature importance of random forest classifier
var FeaturesImportance = ee.Feature(null, ee.Dictionary(randomForestClassifier.explain()).get('importance'));
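// Optional, additional accuracy metrics (not part of the original outputs). These rely on
// standard ee.ConfusionMatrix methods; per-class accuracies are reported in the order of the
// class codes of the error matrix.
print(ConfMatrix.kappa(), 'Kappa coefficient of LULC classification of ' + classYear);
print(ConfMatrix.producersAccuracy(), "Producer's accuracy per class");
print(ConfMatrix.consumersAccuracy(), "Consumer's accuracy per class");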
//----------------------------------------------------------------------------------------------
// SECTION 4 - PLOT LULC CLASSIFICATION
//----------------------------------------------------------------------------------------------

// Set LULC classes color palette
var colorPalette = [
  'FFFFFF',
  '0202b5',
  'F0BB4A',
  '6eba7d',
  '71370C',
  'FBF795',
  'bfb48e',
  '02024d',
];

// Set LULC classes labels
var lulcLabels = [
  'Impervious surfaces',
  'Ponds',
  'Crops',
  'Low prairies',
  'Woody vegetation',
  'Herbaceous vegetation',
  'Bare Soil',
  'Open waters',
];

// Set panel position
var legend = ui.Panel({
  style: {
    position: 'bottom-left',
    padding: '8px 15px'
  }
});

// Set panel title
var legendTitle = ui.Label({
  value: 'REFERENCES',
  style: {
    fontWeight: 'bold',
    fontSize: '18px',
    margin: '0 0 4px 0',
    padding: '0'
  }
});
legend.add(legendTitle);

// Creates and styles one row of the legend.
var makeRow = function(color, label) {
  var colorBox = ui.Label({
    style: {
      backgroundColor: '#' + color,
      padding: '8px',
      margin: '0 0 4px 0'
    }
  });
  // Create the label filled with the description text.
  var description = ui.Label({
    value: label,
    style: {margin: '0 0 4px 6px'}
  });
  // Return the panel holding the color box and its description.
  return ui.Panel({
    widgets: [colorBox, description],
    layout: ui.Panel.Layout.Flow('horizontal')
  });
};

// Add colors and labels to panel
for (var i = 0; i < 8; i++) {
  legend.add(makeRow(colorPalette[i], lulcLabels[i]));
}

Map.add(legend);
Map.addLayer(lulcClassification, {min: 1, max: 8, palette: colorPalette}, 'LULC classification');

//----------------------------------------------------------------------------------------------
// SECTION 5 - EXPORT RESULTS
//----------------------------------------------------------------------------------------------

// Note: the 10 m pixel size is defined by crsTransform, so 'scale' is not set here
// (scale and crsTransform are mutually exclusive export parameters).
Export.image.toDrive({
  image: lulcClassification,
  description: lulcClassFN,
  folder: outputFolder,
  region: roi,
  crsTransform: [10, 0, 199980, 0, -10, 6300040],
  crs: targetCrs
});

var FeaturesImportanceTable = ee.FeatureCollection(FeaturesImportance);

Export.table.toDrive({
  collection: FeaturesImportanceTable,
  description: FeatImpTableFN,
  folder: outputFolder,
  fileFormat: "CSV"
});
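/* Optional (hypothetical) export of the confusion matrix to Drive, following the same pattern
   used above for the feature importance table; the 'matrix' property name is illustrative.
   Uncomment to enable.
Export.table.toDrive({
  collection: ee.FeatureCollection(ee.Feature(null, {matrix: ConfMatrix.array()})),
  description: 'ConfusionMatrix_' + classReference,
  folder: outputFolder,
  fileFormat: 'CSV'
});
*/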