Global Forest Change Ecoregion Statistics


The code below has been used to generate data at the ecoregion level for the Digital Observatory for Protected Areas (DOPA).
The scripts use the Hansen et al. (2013) Global Forest Change map, in conjunction with Google Earth Engine, to calculate forest cover, loss and gain for the years 2000 - 2012 for a given ecoregion or country.



Global Forest Change map


The GFC map is composed of several bands (layers). Each band is a separate image that represents a different piece of information. Here we will focus on four of them (a minimal sketch of how to access them follows the list):
treecover2000: percentage of tree cover in the pixel. In other words, each pixel has a value from 0 to 100, with 0 meaning no tree cover at all and 100 meaning complete canopy cover.
loss: the pixel has value 1 if loss ever occurred during the study period.
gain: the pixel has value 1 if gain ever occurred during the study period.
lossyear: the value of the pixel encodes the year in which the loss occurred, counted from 2000. In other words, a pixel with value n lost its forest in year 2000 + n, so a value of 5 means the deforestation event occurred in 2005 (0 means no loss was detected).
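
A minimal sketch of accessing these four bands with the Earth Engine Python API, assuming the same asset ID used in the scripts below:

import ee
ee.Initialize()

gfc = ee.Image('UMD/hansen/global_forest_change_2015')

treecover2000 = gfc.select('treecover2000')  # percent tree cover in 2000 (0-100)
loss = gfc.select('loss')                    # 1 where loss occurred during the period
gain = gfc.select('gain')                    # 1 where gain occurred during the period
lossyear = gfc.select('lossyear')            # 0 = no loss, n = loss in year 2000 + n

# convert the lossyear code to a calendar year, keeping only pixels where loss occurred
loss_calendar_year = lossyear.add(2000).updateMask(loss)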



Ecoregion statistics using GEE


  • Calculate values
var display = true;
var scale = 100;
// get the ecoregion feature collection
var ecoregions = ee.FeatureCollection('ft:1GzN7apqS42eIZNUoS4uZoXyspckUuPh4dzLe0wMI', 'geometry');
// filter the ecoregion feature collection by biome and realm
var filterBiome = ecoregions.filterMetadata('BIOME', 'equals', 12);
var filterRealm = filterBiome.filterMetadata('REALM', 'equals', 'PA');
// get the forest product
var gfcImage = ee.Image('UMD/hansen/global_forest_change_2015').clip(filterRealm);
var treecover2000 = gfcImage.select('treecover2000');
// apply a 30% tree-cover threshold and multiply by the area of each pixel, i.e. only pixels with more than 30% tree cover are counted
var treecover2000Area = treecover2000.gt(30).multiply(ee.Image.pixelArea());
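// note: treecover2000.gt(30) yields a 0/1 mask, so multiplying it by
// ee.Image.pixelArea() keeps the pixel area in square metres where tree
// cover exceeds 30% and 0 elsewhere; the divide(10000) below converts
// square metres to hectares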
// sum the thresholded area per ecoregion (TREECOVER 2000); result in hectares
var forest2000 = treecover2000Area.divide(10000).reduceRegions({
  'collection': filterRealm,
  'reducer': ee.Reducer.sum(),
  'scale': scale
});
var lossImage = gfcImage.select('loss');
// keep only loss within pixels that had more than 30% tree cover in 2000, and multiply by the pixel area
var lossArea = lossImage.multiply(treecover2000.gt(30)).multiply(ee.Image.pixelArea());
// sum the loss area per ecoregion (LOSS); result in hectares
var loss2012 = lossArea.divide(10000).reduceRegions({
  'collection': filterRealm,
  'reducer': ee.Reducer.sum(),
  'scale': scale
});
var gainImage = gfcImage.select('gain');
// calculate the gain area without applying a tree-cover threshold (following the GFW approach)
var gainArea = gainImage.multiply(ee.Image.pixelArea());
// sum the gain area per ecoregion (GAIN 2012); result in hectares
var gain2012 = gainArea.divide(10000).reduceRegions({
  'collection': filterRealm,
  'reducer': ee.Reducer.sum(),
  'scale': scale
});
// export the three tables to Google Drive as CSV files
Export.table.toDrive({
  collection: forest2000,
  description:'cover',
  fileFormat: 'CSV'
});
Export.table.toDrive({
  collection: gain2012,
  description:'gain',
  fileFormat: 'CSV'
});
Export.table.toDrive({
  collection: loss2012,
  description:'loss',
  fileFormat: 'CSV'
});
// visualize on the map
Map.addLayer(lossImage.mask(lossImage), {'palette': 'FF0000'}, 'Loss - red', display);
Map.addLayer(gainImage.mask(gainImage), {'palette': '0000FF'}, 'Gain - blue', display);
Map.centerObject(filterRealm);
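
Each Export.table.toDrive call writes one CSV to Google Drive (named after its description, e.g. cover.csv), with one row per ecoregion feature and the summed area in hectares appended by reduceRegions, typically in a column called 'sum' for a single-output reducer. A minimal sketch of inspecting such an export in Python (ECO_ID is a placeholder for whatever identifier column your ecoregion table actually carries):

import csv

# read one of the exported tables and print id + summed area (hectares)
with open('cover.csv') as f:
    for row in csv.DictReader(f):
        print(row.get('ECO_ID'), row.get('sum'))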
 


  • Merge tables using Python

#Functions to read the forest area, gain and loss CSV exports and write them out to a single table
#Requirements: numpy (os and operator are part of the standard library)
 
import numpy as np
import os
from operator import itemgetter
 
def read_me(file):
 
    """
    Read the csv file, find ecoregion ID and associated data (forest area, loss, gain)
    :param file: input file name to load
    :return: ecoregion id and data (area, loss, gain)
 
    """
 
    #initiate the data lists
    id = []
    ddata = []
 
    #open the csv file
    with open(file) as f:
        lines = f.readlines()
        for line in lines[1:]:
            data = line.split(',')
            #load the ecoregion id (hard-coded column position in the exported CSV)
            id_tmp = data[3]
            id.append(float(id_tmp))
            #load the summed area; fall back by one column if the expected
            #position does not hold a number
            try:
                data_tmp = float(data[25])
                ddata.append(data_tmp)
            except ValueError:
                data_tmp = float(data[24])
                ddata.append(data_tmp)
 
    return id, ddata
 
def do_all(folder):
 
    """
    Do for all ecoregions; forest area, forest gain, forest loss.
    Load all data for each ecoregion (and associated ID) and save as a single table.
    :param folder: Define folder with all the data files
    :return: Saved csv file.
 
    """
 
    #initiate the data lists
    id = []
    area = []
    loss = []
    gain = []
 
    unique_file_name = []
 
    #find the common file-name prefixes of the cover/loss/gain files (e.g. north am, europe)
    for _, _, files in os.walk(folder):
        for f in files:
            if "cover" in f:
                un = f.split("cover")[0][:-1]
            if "loss" in f:
                un = f.split("loss")[0][:-1]
            if "gain" in f:
                un = f.split("gain")[0][:-1]
            unique_file_name.append(un)
 
    for uni in set(unique_file_name):
        for _, _, files in os.walk(folder):
            #sort so the "cover" file for a prefix is processed before its
            #"loss" and "gain" files, because the loss/gain branches reuse
            #the ids read from the matching cover file
            for f in sorted(files):
 
                #extend id and forest area list with eco_id and forest area columns from csv
                if "cover" in f and f.startswith(uni):
                    myfile = os.path.join(folder, f)
                    print "Doing %s" %myfile
                    idd, ddata = read_me(myfile)
 
                    #find the unique ids
                    #some ids have multiple entries due to different geometries
                    unique_id = set(idd)
                    id.extend(unique_id)
 
                    #find the sum of all entries for a single id
                    for u in unique_id:
                        idx = np.where(np.array(idd)==u)[0]
                        area.extend([np.sum(itemgetter(*idx)(ddata))])
 
                #extend loss list with forest loss columns from csv
                if "loss" in f and f.startswith(uni):
                    myfile = os.path.join(folder, f)
                    print "Doing %s" % myfile
                    _, ddata = read_me(myfile)
                    for u in unique_id:
                        idx = np.where(np.array(idd)==u)[0]
                        loss.extend([np.sum(itemgetter(*idx)(ddata))])
 
                #extend gain list with forest gain columns from csv
                if "gain" in f and f.startswith(uni):
                    myfile = os.path.join(folder, f)
                    print "Doing %s" % myfile
                    _, ddata = read_me(myfile)
                    for u in unique_id:
                        idx = np.where(np.array(idd)==u)[0]
                        gain.extend([np.sum(itemgetter(*idx)(ddata))])
 
    #create the table with id, area, gain and loss
    big_table = np.ones([len(id),4])
    big_table[:,0] = id
    big_table[:, 1] = area
    big_table[:, 2] = gain
    big_table[:, 3] = loss
 
    #write data out to csv file
    outfile = os.path.join(folder, "ecoregion_forest_tot.csv")
    print("Writing out to %s" % outfile)
    np.savetxt(outfile, big_table, fmt="%.0f, %.12f, %.12f, %.12f", delimiter=',')
 
    return
 
#change the folder containing ecoregion csv files
folder = '/Users/lucabattistella/Downloads/ECOREGION_FOREST/'
do_all(folder)
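
The merged table has one row per ecoregion and four columns (id, forest area in 2000, gain, loss), all areas in hectares, and can be read straight back with numpy:

import numpy as np

# load the merged table written by do_all(); columns are
# ecoregion id, forest area 2000, gain, loss (hectares)
table = np.loadtxt('ecoregion_forest_tot.csv', delimiter=',')
eco_id, area2000, gain, loss = table.T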

Country statistics using GEE

var display = true;
var scale = 30;
 
 
var country = ee.FeatureCollection("users/lucabattistellageo/continent");
var filtercountry = country.filterMetadata('country__2', 'equals', 'Africa'); // keep only the African countries (filter on the continent attribute)
print (filtercountry);
 
var gfcImage = ee.Image('UMD/hansen/global_forest_change_2015').clip(filtercountry);
var treecover2000 = gfcImage.select('treecover2000');
var treecover2000Area = treecover2000.gt(30).multiply(ee.Image.pixelArea());
// result in hectares
var forest2000 = treecover2000Area.divide(10000).reduceRegions({
  'collection': filtercountry,
  'reducer': ee.Reducer.sum(),
  'scale': scale
});
var lossImage = gfcImage.select('loss');
var lossArea = lossImage.multiply(treecover2000.gt(30)).multiply(ee.Image.pixelArea());
var loss2015 = lossArea.divide(10000).reduceRegions({
  'collection': filtercountry,
  'reducer': ee.Reducer.sum(),
  'scale': scale
});
var gainImage = gfcImage.select('gain');
var gainArea = gainImage.multiply(ee.Image.pixelArea());
var gain2012 = gainArea.divide(10000).reduceRegions({
  'collection': filtercountry,
  'reducer': ee.Reducer.sum(),
  'scale': scale
});
 
Export.table.toDrive({
  collection: forest2000,
  description:'cover',
  fileFormat: 'CSV'
});
Export.table.toDrive({
  collection: gain2012,
  description:'gain',
  fileFormat: 'CSV'
});
Export.table.toDrive({
  collection: loss2015,
  description:'loss',
  fileFormat: 'CSV'
});
 
Map.addLayer(lossImage.mask(lossImage), {'palette': 'FF0000'}, 'Loss - red', display);
Map.addLayer(gainImage.mask(gainImage), {'palette': '0000FF'}, 'Gain - blue', display);
Map.centerObject(filtercountry);