I'm trying to compute the standard deviation of NDVI scores at a 10 km grid across the UK. I am stuck at the rescaling phase, because my code apparently generates too many pixels to process. In my attempt to map the new data I get an error:
ndvi_sd_10: Tile error: Too many input pixels per output pixel. Need 1007880, but only 10000 allowed.
My code so far:
var bounds =
/* color: #d63000 */
/* shown: false */
/* displayProperties: [
{
"type": "rectangle"
}
] */
ee.Geometry.Polygon(
[[[-8.688766322302136, 61.10228892054647],
[-8.688766322302136, 49.8399844115027],
[2.912796177697863, 49.8399844115027],
[2.912796177697863, 61.10228892054647]]], null, false);
function maskS2clouds(image) {
var qa = image.select('QA60');
// Bits 10 and 11 are clouds and cirrus, respectively.
var cloudBitMask = 1 << 10;
var cirrusBitMask = 1 << 11;
// Both flags should be set to zero, indicating clear conditions.
var mask = qa.bitwiseAnd(cloudBitMask).eq(0)
.and(qa.bitwiseAnd(cirrusBitMask).eq(0));
return image.updateMask(mask).divide(10000);
}
// Function to calculate and add an NDVI band
var addNDVI = function(image) {
return image.addBands(image.normalizedDifference(['B8', 'B4']).rename('ndvi'));
};
var S2 = ee.ImageCollection('COPERNICUS/S2_SR_HARMONIZED')
.filterDate('2025-08-04', '2025-08-04')
.filter(ee.Filter.calendarRange(6, 8,'month'))
.filterBounds(bounds)
// Pre-filter to get less cloudy granules.
.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE',20))
.map(maskS2clouds)
.map(addNDVI);
print(S2);
Map.setCenter(-2, 55, 5);
// ****** Aggregation to 10km ****** //
// Get the projection at required scale
var projection = ee.Image(S2.select('ndvi').first()).projection();
print('Native Resolution:', projection.nominalScale());
var projectionAt1k = projection.atScale(1000);
print('1k Resolution:', projectionAt1k.nominalScale());
var projectionAt10k = projection.atScale(10000);
print('10k Resolution:', projectionAt10k.nominalScale());
// start by projecting to 1km
var med_ndvi = S2.select('ndvi').median().reproject({
crs: 'EPSG:32631'
}); // for some reason computing the median reprojects to WGS84
print('med_ndvi:',med_ndvi);
var ndvi_sd_1k = med_ndvi
.reduceResolution({
reducer: ee.Reducer.stdDev(),
maxPixels: 10000
})
// Request the data at the scale and projection
// of reduced resolution
.reproject({
crs: projectionAt1k
});
// continue by reducing to 10km
var ndvi_sd_10k = ndvi_sd_1k.multiply(ndvi_sd_1k).reduceResolution({ // sd^2 is the variance, which can be added
reducer: ee.Reducer.sum(),
maxPixels: 1000
})
// Request the data at the scale and projection
// of reduced resolution
.reproject({
crs: projectionAt10k
}).sqrt(); // return to sd
print('ndvi SD at 10 km', ndvi_sd_10k)
Map.addLayer(ndvi_sd_10k, {}, 'ndvi_sd_10');
Any pointers?
Update
Following the suggestion below I have altered the line producing the median to use the correct projection, but I am unable to even plot this (code below). As I can map the image that is the first layer of the collection, it seems like something odd is still happening in that median computation...
var med_ndvi = S2.select('ndvi').median().reproject({
crs: projection
})//.setDefaultProjection(crs:projection);
print('med_ndvi:',med_ndvi);
Map.addLayer(med_ndvi, {}, 'med_ndvi');