Gap filling a monthly-reduced ImageCollection - JavaScript

I'm attempting to gap-fill 9 datasets on Earth Engine. So far I've been able to reduce the daily data to monthly means with help from this site, but I'm unable to apply a focal mean to the resulting collection to fill any nodata pixels. I'd appreciate some help with this.
var bess = ee.ImageCollection("SNU/ESL/BESS/Rad/v1"),
aod = ee.ImageCollection("MODIS/006/MCD19A2_GRANULES"),
ndsi = ee.ImageCollection("MODIS/006/MYD10A1"),
ndwi = ee.ImageCollection("MODIS/MYD09GA_006_NDWI"),
evi = ee.ImageCollection("MODIS/061/MYD13A2"),
lst = ee.ImageCollection("MODIS/061/MYD11A1"),
erafi = ee.ImageCollection("ECMWF/ERA5_LAND/HOURLY"),
ele = ee.Image("CGIAR/SRTM90_V4"),
var vari = bess;
// function to generate monthly mean
var monmei = ee.List.sequence(0, 16*12).map(function(n) { // .sequence: one element per month over the study period
var start = ee.Date('2021-01-01').advance(n, 'month'); // Starting date
var end = start.advance(1, 'month'); // Step by each iteration
return vari
.filterDate(start, end)
.select('RSDN_Daily')
.mean()
.set('system:time_start', start.millis());
});
print(monmei)
var fill = monmei.focal_mean(1, 'square', 'pixels', 20)
var full = fill.blend(monmei)
var filvari = full;
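
For what it's worth, here is a minimal sketch of one way the gap fill could be applied. focal_mean is an ee.Image method, so it can't be called on the ee.List returned by the mapping above; the list first has to be wrapped in an ee.ImageCollection, and the focal mean mapped over each monthly image. The band and kernel settings are taken from the snippet above; everything else (including whether empty months need special handling) is an assumption.
// Sketch (untested): wrap the list of monthly means in an ImageCollection,
// then fill nodata pixels in each image with its focal mean.
var monthly = ee.ImageCollection.fromImages(monmei);
var filled = monthly.map(function(img) {
  img = ee.Image(img);
  // Focal mean with a 1-pixel square kernel, iterated 20 times (as attempted above).
  var smoothed = img.focal_mean(1, 'square', 'pixels', 20);
  // Keep original values where they exist; use the focal mean only in the gaps.
  return ee.Image(smoothed.blend(img)
      .copyProperties(img, ['system:time_start']));
});
print(filled);
Note that months with no images in the source collection will produce empty images here, which may need to be filtered out before mapping.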

Related

I want to solve the "system:time_start" error when trying to output GLDAS monthly precipitation to CSV using Google Earth Engine

I am using Google Earth Engine to look at precipitation data for the Mekong River basin. The GLDAS data are provided at three-hour intervals. My goal is to extract the GLDAS data from 2000 to 2020 by summing the data by month.
I want to extract the total monthly precipitation in GLDAS using Google Earth Engine, but I cannot extract the CSV due to the error
Image.date: Image '120' has a 'system:time_start' property which is not a number: 2010-01-01T00:00:00
I think I can extract the CSV by converting "system:time_start"; how should I change it?
var studyArea = ee.Geometry.Rectangle(102, 8.5, 107, 15);
Map.centerObject(mekong, 9);
// Years to analyze
var years = ee.List.sequence(2000, 2019);
var months = ee.List.sequence(1, 12);
var early = ('2010-01-01');
var late = ('2011-01-01');
// Extract MOD11A1
var image = ee.ImageCollection('NASA/GLDAS/V021/NOAH/G025/T3H')
.filterDate(early, late).filterBounds(mekong);
//print(image);
// Convert to mm and store in modLSTc
var gldas_precipitation = image.select('Rainf_f_tavg');
var gldas_precipitation_mm = gldas_precipitation.map(function(img)
{return img.multiply(10080.0).copyProperties(img, ['system:time_start'])});
// Store monthly median data in the variable gldas_precipitation_mm_month ////////////////////////////////////////////
var gldas_precipitation_mm_month = ee.ImageCollection.fromImages(
years.map(function(y) {
return months.map(function(m) {
var monthly = gldas_precipitation_mm.filter(ee.Filter.calendarRange(y, y, 'year'))
.filter(ee.Filter.calendarRange(m, m, 'month'))
.sum()
.rename('precipitation_mm_month');
return monthly.set('year', y).set('system:time_start', ee.Date.fromYMD(y, 1, 1))
.set('month', y).set('system:time_start', ee.Date.fromYMD(y, m, 1));
});
}).flatten());
var gldas_precipitation_mm_month = gldas_precipitation_mm_month.filterBounds(mekong);
// TSL polygon ///////////////////////////////////////////////////////////////////////////////////
var empty = ee.Image().byte();
// Paint all the polygon edges with the same number and width, display
var outline = empty.paint({
featureCollection: mekong,
color: 1,
width: 2
});
Map.addLayer(outline, {palette: 'FF0000'}, 'TSL');
//output_csv_precipitation
//Create variables and extract data
var scale = gldas_precipitation_mm_month.mean().projection().nominalScale().multiply(0.05); print(scale);
var gldas = gldas_precipitation_mm_month.filter(ee.Filter.listContains('system:band_names', gldas_precipitation_mm.mean().bandNames().get(0)));
var ft = ee.FeatureCollection(ee.List([]));
//Function to extract values from image collection based on point file and export as a table
var fill = function(img, ini) {
var inift = ee.FeatureCollection(ini);
var ft2 = img.reduceRegions(mekong, ee.Reducer.mean(), scale);
var date = img.date().format("YYYY/MM/dd");
var ft3 = ft2.map(function(f){return f.set('month', date)});
return inift.merge(ft3);
};
// Iterates over the ImageCollection
var profile = ee.FeatureCollection(gldas_precipitation_mm_month.iterate(fill, ft));
print(profile,'profile');
The value of the property system:time_start must be a number (even though it would make sense for it to be a Date, the system design didn't end up that way). You must change calls like
.set('system:time_start', ee.Date.fromYMD(y, m, 1))
to
.set('system:time_start', ee.Date.fromYMD(y, m, 1).millis())
While looking, I see other possible problems here:
return monthly.set('year', y).set('system:time_start', ee.Date.fromYMD(y, 1, 1))
.set('month', y).set('system:time_start', ee.Date.fromYMD(y, m, 1));
This is setting the month property value to the y variable (not m), and it's setting system:time_start twice (so only the second value will be used). Probably this is not what you meant. I have not looked at what you're intending to do with the collection, so you'll have to figure that part out yourself.
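Putting both fixes together, the return statement inside the monthly mapping would look roughly like this (a sketch based on the points above, assuming the month property was meant to hold m):
return monthly.set('year', y)
    .set('month', m)  // m, not y
    .set('system:time_start', ee.Date.fromYMD(y, m, 1).millis());  // a number, set once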

Error "Array: Parameter 'values' is required"

I'm working on a platform called Google Earth Engine that allows compute-heavy analysis of satellite images in the cloud.
I have written JavaScript code that takes two images, uses one band from each, and is then supposed to create a scatter plot.
I have a problem with the scatter-plot part, which I struggle to understand.
Any time I run this part:
// Convert the band data to plot on the y-axis to arrays.
var x= ee.Array(imageNDVIcor.get('NDVI'));
var y = ee.Array(SARreproject.get('VH'));
// Make a band correlation chart.
var chart = ui.Chart.array.values(y, 0, x)
.setSeriesNames(['SAR vs NDVI'])
.setOptions({
title: 'NDVI vs SAR VH',
hAxis: {'title': 'SAR VH'},
vAxis: {'title': 'NDVI'},
pointSize: 3,
});
// Print the chart.
print(chart);
I get the following error:
Array: Parameter 'values' is required
I don't understand which values are missing, or how and when they got lost. I've put my full code below; any help understanding where the values disappeared would be appreciated.
//STEP 1:NDVI
/**
* Function to mask clouds using the Sentinel-2 QA band
* @param {ee.Image} image Sentinel-2 image
* @return {ee.Image} cloud masked Sentinel-2 image
*/
function maskS2clouds(image) {
var qa = image.select('QA60');
// Bits 10 and 11 are clouds and cirrus, respectively.
var cloudBitMask = 1 << 10;
var cirrusBitMask = 1 << 11;
// Both flags should be set to zero, indicating clear conditions.
var mask = qa.bitwiseAnd(cloudBitMask).eq(0)
.and(qa.bitwiseAnd(cirrusBitMask).eq(0));
return image.updateMask(mask).divide(10000)
.copyProperties(image, ['system:time_start']);
}
// Map the function over one year of data and take the median.
// Load Sentinel-2 TOA reflectance data.
var dataset = ee.ImageCollection('COPERNICUS/S2')
.filterDate('2019-01-01', '2019-11-12')
// Pre-filter to get less cloudy granules.
.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 20))
.select('B2','B3','B4','B8','QA60')
.filterBounds(geometry)
.map(maskS2clouds);
var clippedCol=dataset.map(function(im){
return im.clip(geometry);
});
// Get the number of images.
var count = dataset.size();
print('Count: ',count);
// print(clippedCol); // here I get the error message "collection query aborted after accumulation over 5000 elements"
// print(dataset,'dataset'); // the same error here
//function to calculate NDVI
var addNDVI = function(image) {
var ndvi = image.normalizedDifference(['B8', 'B4'])
.rename('NDVI')
.copyProperties(image,['system:time_start']);
return image.addBands(ndvi);
};
//NDVI to the clipped image collection
var withNDVI = clippedCol.map(addNDVI).select('NDVI');
var NDVIcolor = {
min: 0,
max:1,
palette: ['FFFFFF', 'CE7E45', 'DF923D', 'F1B555', 'FCD163', '99B718', '74A901',
'66A000', '529400', '3E8601', '207401', '056201', '004C00', '023B01',
'012E01', '011D01', '011301'],
};
//Filter according to number of pixels
var ndviWithCount = withNDVI.map(function(image){
var countpixels = ee.Number(image.reduceRegion({
reducer: ee.Reducer.count(),
geometry: geometry,
crs: 'EPSG:4326',
scale: 20,
}).get('NDVI'));
return image.set('count', countpixels);
});
print(ndviWithCount, 'ndviWithCount');
var max = ndviWithCount.reduceColumns(ee.Reducer.max(), ["count"]);
print('Number of pixels max:',max.get('max'));
//filter between a range
var filterNDVI = ndviWithCount.filter(ee.Filter.rangeContains(
'count', 98258, 98258));
print('Filtered NDVI:', filterNDVI);
var listOfImages =(filterNDVI.toList(filterNDVI.size()));
var listOfNumbers = [5]
for (var i in listOfNumbers) {
var image = ee.Image(listOfImages.get(listOfNumbers[i]));
var toexport=image.visualize(NDVIcolor).addBands(image);
// do what ever you need with image
Map.addLayer(image, NDVIcolor, i);
// Export.image.toDrive({
// image: toexport.toFloat(),
// description: i,
// scale:20,
// crs:'EPSG:4326',
// maxPixels:1310361348,
// region:geometry.geometry().bounds()
// });
}
Map.centerObject(geometry);
//STEP2: SAR
// Filter the collection for the VH product from the descending track
//var geometry=MITR;
var Sentinel1 = ee.ImageCollection('COPERNICUS/S1_GRD')
.filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VH'))
.filter(ee.Filter.eq('orbitProperties_pass', 'DESCENDING'))
.filter(ee.Filter.eq('instrumentMode', 'IW'))
.select('VH')
.filterDate('2019-01-01','2019-11-12')
.filterBounds(geometry);
var clippedVH= Sentinel1.map(function(im){
return im.clip(geometry);
});
var clippedVHsize=clippedVH.size();
print('SAR Size:',clippedVHsize);
print('SAR images data:',clippedVH)
var listOfImagesSAR =(clippedVH.toList(clippedVH.size()));
var listOfNumbersSAR = [3];
for (var i in listOfNumbersSAR) {
var image = ee.Image(listOfImagesSAR.get(listOfNumbersSAR[i]));
var toexport=image.visualize({min: -30, max: 1}).addBands(image);
// do what ever you need with image
Map.addLayer(image,{min: -30, max: 1}, i);
// Export.image.toDrive({
// image: toexport.toFloat(),
// description: i,
// scale:10,
// crs:'EPSG:4326',
// maxPixels:1310361348,
// region:geometry.geometry().bounds()
// });
}
//print(ui.Chart.image.series(filterNDVI, geometry, ee.Reducer.mean(), 20));
//(ui.Chart.image.series(clippedVH, geometry, ee.Reducer.mean(), 10));
//select the images for scatter plot
//select NDVI
var imageNDVIcor=ee.Image(listOfImages.get(5));
var imageSARcor=ee.Image(listOfImagesSAR.get(3));
// Get information about the projection.
var sar1Projection = imageSARcor.projection();
print('SAR projection:', sar1Projection);
var NDVIProjection = imageNDVIcor.projection();
print('NDVI projection:', NDVIProjection);
//resample SAR image to NDVI image
var SARreproject=imageSARcor.reduceResolution({reducer: ee.Reducer.mean()}).reproject({crs: NDVIProjection});
Map.addLayer(SARreproject,{min: -30, max: 1},'Reproject SAR');
// print(imageNDVIcor)
// print(imageSARcor)
//Map.addLayer(imageNDVIcor,NDVIcolor,'NDVI select');
//Map.addLayer(imageSARcor,{min: -30, max: 1},'SAR select');
// Convert the band data to plot on the y-axis to arrays.
var x= ee.Array(imageNDVIcor.get('NDVI'));
var y = ee.Array(SARreproject.get('VH'));
// Make a band correlation chart.
var chart = ui.Chart.array.values(y, 0, x)
.setSeriesNames(['SAR vs NDVI'])
.setOptions({
title: 'NDVI vs SAR VH',
hAxis: {'title': 'SAR VH'},
vAxis: {'title': 'NDVI'},
pointSize: 3,
});
// Print the chart.
print(chart);
I'm not sure exactly what you're trying to accomplish, but if you want to compare NDVI and VH values, here's one way to do it. Click on any point inside the geometry, and the time series of values at that point is plotted in the Console tab.
var geometry =
ee.Geometry.Polygon(
[[[-122.56145019531249, 37.93899992220671],
[-122.56145019531249, 37.37365054197817],
[-121.80888671874999, 37.37365054197817],
[-121.80888671874999, 37.93899992220671]]], null, false);
Map.centerObject(geometry, 10);
function maskS2clouds(image) {
var qa = image.select('QA60');
var cloudBitMask = 1 << 10;
var cirrusBitMask = 1 << 11;
var mask = qa.bitwiseAnd(cloudBitMask).eq(0).and(qa.bitwiseAnd(cirrusBitMask).eq(0));
return image.updateMask(mask).divide(10000).copyProperties(image, ['system:time_start']);
}
// Sentinel 2
var sentinel2 = ee.ImageCollection('COPERNICUS/S2')
.filterDate('2019-01-01', '2019-12-31')
.filter(ee.Filter.lt('CLOUDY_PIXEL_PERCENTAGE', 20))
.select('B2','B3','B4','B8','QA60').filterBounds(geometry).map(maskS2clouds);
// NDVI
var addNDVI = function(image) {
var ndvi = image.normalizedDifference(['B8', 'B4']).rename('NDVI').copyProperties(image,['system:time_start']);
return image.addBands(ndvi);
};
var sentinel2NDVI = sentinel2.map(addNDVI).select('NDVI');
// SAR
var sentinel1 = ee.ImageCollection('COPERNICUS/S1_GRD')
.filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VH'))
.filter(ee.Filter.eq('orbitProperties_pass', 'DESCENDING'))
.filter(ee.Filter.eq('instrumentMode', 'IW'))
.select('VH').filterDate('2019-01-01','2019-12-31').filterBounds(geometry);
var list1 = sentinel2NDVI.toList(sentinel2NDVI.size());
var list2 = sentinel1.toList(sentinel1.size());
var list = list1.cat(list2);
var combinedData = ee.ImageCollection(list);
// chart
var generateChart = function (coords) {
print('-----------------------');
var lat = coords.lat;
var lon = coords.long;
var point = ee.Geometry.Point(coords.lon, coords.lat);
var dot = ui.Map.Layer(point, {color: '000000'}, 'clicked location');
var chart = ui.Chart.image.series(combinedData, point, ee.Reducer.mean(), 30);
chart.setOptions({
title: 'VH vs NDVI',
hAxis: {'title': 'Time'},
vAxis: {'title': 'SAR VH'},
pointSize: 3,
});
print(chart);
};
Map.onClick(generateChart);
Map.style().set('cursor', 'crosshair');
Also, get the GEE link for the same from here.
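As for the original error: ee.Array(imageNDVIcor.get('NDVI')) fails because get('NDVI') reads an image property named NDVI, which doesn't exist, so the value handed to ee.Array is null ("Parameter 'values' is required"); it does not return the band's pixel values. Below is a minimal sketch of one way the band-vs-band scatter plot could be built instead. It reuses the variable names from the question; the region, scale and maxPixels choices are assumptions.
// Sketch: stack the two bands and pull their pixel values into lists.
var stacked = SARreproject.addBands(imageNDVIcor.select('NDVI'));
var samples = stacked.reduceRegion({
  reducer: ee.Reducer.toList(),   // one list of values per band
  geometry: geometry,
  scale: 20,
  maxPixels: 1e9
});
// Convert the lists to arrays and chart VH (x) against NDVI (y).
// Note: if the two bands mask different pixels, the lists may differ in length.
var x = ee.Array(samples.get('VH'));
var y = ee.Array(samples.get('NDVI'));
var chart = ui.Chart.array.values(y, 0, x)
    .setSeriesNames(['NDVI'])
    .setOptions({
      title: 'NDVI vs SAR VH',
      hAxis: {title: 'SAR VH'},
      vAxis: {title: 'NDVI'},
      pointSize: 3
    });
print(chart);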

Google Earth Engine - image gap filling in imagecollection

I need to combine the MODIS Terra and Aqua collections into a new collection in order to get the maximum NDSI value per day.
The problem is that, for some years, the datasets have gaps, and the join function keeps only the images common to the two datasets.
For example, in 2003 the Terra dataset has 358 images while the Aqua one has 365, so the combined dataset ends up with just 358 images.
I would like the combine function to keep the Aqua image values when the Terra ones are not available, in order to have 365 images at the end.
Any suggestions? Thanks!
here is the GEE link to the code
https://code.earthengine.google.com/b71873bff48fe5f3de883763f25c4938
here is the code
var MOD = ee.ImageCollection("MODIS/006/MOD10A1")
var MYD = ee.ImageCollection("MODIS/006/MYD10A1")
var ROI = geometry
// MONITORING DATE
var currdate = ee.Date.fromYMD(2004,1,1)
var firstdate = ee.Date.fromYMD(2003,1,1)
Map.centerObject(ROI,5);
var clip = function(img) {return img.clip(ROI);}
var MOD = MOD.filterDate(firstdate, currdate)
.map(clip)
var MYD = MYD.filterDate(firstdate, currdate)
.map(clip)
print(MOD,'MOD')
print(MYD,'MYD')
// Combine collections (MOD & MYD)
var Filter = ee.Filter.equals({
leftField: 'system:time_start',
rightField: 'system:time_start'
});
var simpleJoin = ee.Join.inner();
var innerJoin = ee.ImageCollection(simpleJoin.apply(MOD, MYD, Filter))
var comb = innerJoin.map(function(feature) {
return ee.Image.cat(feature.get('primary'), feature.get('secondary'));
})
print(comb,'comb');
// calculate maximum value from the two MODIS images
var max = function(img) {
var br = img.select('NDSI_Snow_Cover').max(img.select('NDSI_Snow_Cover_1'))
return br.rename('NDSI')
.copyProperties(img, ['system:time_start', 'system:time_end']);
}
var NDSImax = comb.map(max)
print(NDSImax,'NDSImax')
// Map.addLayer(NDSImax.first().clip(ROI).select('NDSI'),{min: -9,max: 100,palette: ['red', 'white', 'blue']}, 'NDSImax')
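One possible approach (a sketch only, not tested against the data above): instead of an inner join, merge the two collections and reduce to the daily maximum, so a day present in only one collection still contributes an image. The date variables and band name are the ones from the question; everything else is an assumption.
// Sketch: merge MOD and MYD, then take the per-day maximum of NDSI_Snow_Cover.
var merged = MOD.merge(MYD).select('NDSI_Snow_Cover');
var nDays = currdate.difference(firstdate, 'day');
var NDSImaxDaily = ee.ImageCollection(
  ee.List.sequence(0, nDays.subtract(1)).map(function(d) {
    var start = firstdate.advance(ee.Number(d), 'day');
    var end = start.advance(1, 'day');
    // Days with no image in either collection will yield an empty image here.
    return merged.filterDate(start, end)
        .max()
        .rename('NDSI')
        .set('system:time_start', start.millis());
  })
);
print(NDSImaxDaily, 'NDSImaxDaily');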

loadTable() function is p5.js only working as expected for 2 out of the 5 .csv files given whilst using same procedure each time

I'm developing in the Google Chrome browser using node.js and p5.js. I want to read 5 .csv files and place their values into arrays, which has worked successfully for 2 of the 5 files. To do this I used the loadTable() function to place the data into p5.Table objects, then looped through them to retrieve the values and place them into arrays. My for loops use getRowCount() as the bound, which returns the correct number of rows for the two working instances; for the 3 that don't work, however, the row count returned is zero, even though when I inspect the table objects they are clearly populated with the records. I can see that the "columns" and "rows" values shown when expanding those table objects do read 0, yet one layer further down the "columns" and "rows" values are stored correctly, so I assume the function is returning the first, incorrect pair. Please find attached links to screenshots, as I'm sure my explanation is confusing. The method I've used is very simple and almost identical each time, so I'm unsure where I could have made a mistake; any ideas would be great.
console log screenshot https://imgur.com/HmcZmw9
code screenshot https://imgur.com/Ouv6OLX
code screenshot https://imgur.com/cxEkWVc
code screenshot https://imgur.com/3sPGXgb
code screenshot https://imgur.com/LlRdEhw
The first line in the console log shows a successful occurrence where the data table is loaded in fine, the second line shows the array being populated correctly and the third line prints the correct row count.
The second data table shows an incorrect time where the table is populated with values but the rows and columns count is incorrect, but when expanding the object once, you can see the correct rows and columns count is there. Below shows the empty array and row count of 0.
This is my first post so apologies if I've asked in the wrong way. Happy to give more information. Thanks.
The following is the code used. Most of the operations take place in function setup():
// This file retrieves and formats the data from smartgrid readings
// Create p5 Table objects
let wind_energy_table = new p5.Table();
let solar_energy_table = new p5.Table();
let weather_10_table = new p5.Table();
let weather_60_table = new p5.Table();
let weather_forecast_table = new p5.Table();
// Create arrays to handle and access values easier
const wind_energy = [];
const solar_energy = [];
const weather_10 = [];
const weather_60 = [];
const weather_forecast = [];
// Populate tables with values, preload runs before setup
function preload() {
wind_energy_table = loadTable("/data/wind_energy.csv", "csv");
solar_energy_table = loadTable("/data/solar_energy.csv", "csv");
weather_10_table = loadTable("/data/weather_data_10.csv", "csv");
weather_60_table = loadTable("/data/weather_data_60.csv", "csv");
weather_forecast_table = loadTable("/data/weather_forecast.csv", "csv");
}
function setup() {
let we_date, we_time, we_p1, we_p2, we_p3, we_pt;
// pi indicates power of ith wind farm (1,2,3), pt = power exchange (Megawatts)
let se_date, se_time, se_ps;
// ps = power in watts of subgrid
let w10_date, w10_time, w10_airtemp, w10_wd, w10_ws, w10_sd, w10_ap, w10_precip;
// wd = wind direction (in decadegrees), ws = wind speed (in 0.1 m/s), sd = sunshine duration in last 10 minutes,
// air pressure (in 0.1 hPa), precip = amount of precipitation in last 10 minutes
// decadegrees = 10 degrees
// hPa = hectopascal i.e. 100 pascal
let w60_date, w60_time, w60_cc, w60_wd, w60_ws, w60_at, w60_ap, w60_sd, w60_precip;
// cc = cloud coverage (in 1/8), wd = wind direction (in decadegrees), ws = wind speed (in 0.1 m/s),
// at = air temperature (in 0.1 degrees), ap = air pressure (in 0.1 hPa), sd = sunshine duration in last hour,
// precip = precipitation in last hour
// cloud coverage 8/8 = whole sky covered
let wf_date, wf_time, wf_validDate, wf_temp, wf_dp, wf_ws, wf_gs, wf_ap, wf_precipProb, wf_cc, wf_si, wf_wd, wf_ah, wf_ad
// validDate = date the forecast is valid for, dp = dew point (in Celsius), ws = wind speed (in m/s), gs = gust speed (in m/s),
// ap = air pressure (in hPa), precipProb = probability of precipitation (in %), cc = cloud coverage, si = solar irradiance (in kJ/m^2),
// wd = wind direction (in degrees), ah = air humidity (in %), ad = air density (in J/(kg*K))
// temp (in degrees Celsius), J = Joule, K = Kelvin
// Populate arrays with arrays for each values by looping through the tables in csv files
for (let i = 0; i < wind_energy_table.getRowCount(); i++) {
we_date = wind_energy_table.getRow(i).arr[0];
we_time = wind_energy_table.getRow(i).arr[1];
we_p1 = wind_energy_table.getRow(i).arr[2];
we_p2 = wind_energy_table.getRow(i).arr[3];
we_p3 = wind_energy_table.getRow(i).arr[4];
we_pt = wind_energy_table.getRow(i).arr[5];
wind_energy[i] = [we_date, we_time, we_p1, we_p2, we_p3, we_pt];
}
console.log(wind_energy_table);
console.log(wind_energy);
console.log(wind_energy_table.getRowCount());
for (let j = 0; j < solar_energy_table.getRowCount(); j++) {
se_date = solar_energy_table.getRow(j).arr[0];
se_time = solar_energy_table.getRow(j).arr[1];
se_ps = solar_energy_table.getRow(j).arr[2];
solar_energy[j] = [se_date, se_time, se_ps];
}
console.log(solar_energy_table);
console.log(solar_energy);
console.log(solar_energy_table.getRowCount());
console.log(solar_energy_table.getRow(0));
for (let k = 0; k < weather_10_table.getRowCount(); k++) {
w10_date = weather_10_table.getRow(k).arr[0];
w10_time = weather_10_table.getRow(k).arr[1];
w10_airtemp = weather_10_table.getRow(k).arr[2];
w10_wd = weather_10_table.getRow(k).arr[3];
w10_ws = weather_10_table.getRow(k).arr[4];
w10_sd = weather_10_table.getRow(k).arr[5];
w10_ap = weather_10_table.getRow(k).arr[6];
w10_precip = weather_10_table.getRow(k).arr[7];
weather_10[k] = [w10_date, w10_time, w10_airtemp, w10_wd, w10_ws, w10_sd, w10_ap, w10_precip];
}
console.log(weather_10_table);
console.log(weather_10);
for (let l = 0; l < weather_60_table.getRowCount(); l++) {
w60_date = weather_60_table.getRow(l).arr[0];
w60_time = weather_60_table.getRow(l).arr[1];
w60_cc = weather_60_table.getRow(l).arr[2];
w60_wd = weather_60_table.getRow(l).arr[3];
w60_ws = weather_60_table.getRow(l).arr[4];
w60_at = weather_60_table.getRow(l).arr[5];
w60_ap = weather_60_table.getRow(l).arr[6];
w60_sd = weather_60_table.getRow(l).arr[7];
w60_precip = weather_60_table.getRow(l).arr[8];
weather_60[l] = [w60_date, w60_time, w60_cc, w60_wd, w60_ws, w60_at,
w60_ap, w60_sd, w60_precip
];
}
console.log(weather_60_table);
console.log(weather_60);
for (let m = 0; m < weather_forecast_table.getRowCount(); m++) {
wf_date = weather_forecast_table.getRow(m).arr[0];
wf_time = weather_forecast_table.getRow(m).arr[1];
wf_validDate = weather_forecast_table.getRow(m).arr[2];
wf_temp = weather_forecast_table.getRow(m).arr[3];
wf_dp = weather_forecast_table.getRow(m).arr[4];
wf_ws = weather_forecast_table.getRow(m).arr[5];
wf_gs = weather_forecast_table.getRow(m).arr[6];
wf_ap = weather_forecast_table.getRow(m).arr[7];
wf_precipProb = weather_forecast_table.getRow(m).arr[8];
wf_cc = weather_forecast_table.getRow(m).arr[9];
wf_si = weather_forecast_table.getRow(m).arr[10];
wf_wd = weather_forecast_table.getRow(m).arr[11];
wf_ah = weather_forecast_table.getRow(m).arr[12];
wf_ad = weather_forecast_table.getRow(m).arr[13];
weather_forecast[m] = [wf_date, wf_time, wf_validDate, wf_temp, wf_dp, wf_ws,
wf_gs, wf_ap, wf_precipProb, wf_cc, wf_si, wf_wd, wf_ah, wf_ad
];
}
console.log(weather_forecast_table);
console.log(weather_forecast);
}
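For what it's worth, one way to narrow this down is to attach success and error callbacks to loadTable() so you can see whether the failing files actually finished parsing, and to read cells through the documented p5.Table API (e.g. table.getString(row, column)) rather than the internal .arr field. The following is only a debugging sketch under the assumption that the failing files differ in delimiter, encoding, or path; it uses one of the file names from the question.
// Debugging sketch: loadTable accepts success and error callbacks
// in addition to the "csv" option.
function preload() {
  weather_10_table = loadTable(
    "/data/weather_data_10.csv",
    "csv",
    function (table) {
      // Runs once the file has been fetched and parsed.
      console.log("weather_data_10 parsed:",
                  table.getRowCount(), "rows,",
                  table.getColumnCount(), "columns");
    },
    function (err) {
      // Runs if the file could not be fetched or parsed.
      console.error("weather_data_10 failed to load:", err);
    }
  );
}
If the error callback fires, or the parsed column count is 1, the problem is likely in the file itself (path, delimiter, or encoding) rather than in the looping code.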

how to pass big data to google scatter chart

I am relatively new to JavaScript and Django, and I am struggling with passing big data to my Google chart.
I have a chart representing velocities for a given date and distance. In my Django views I create lists of distances, dates, and the corresponding velocities. I also generate a list of the sorted values occurring in the velocity list and a list of colors matching each velocity value.
I want a chart showing a velocity map with a color table applied, like this:
http://i.imgur.com/9Tyv8Rn.jpg
So I used a scatter chart with velocity series. The chart is dynamic; it's different for every item selected by the user.
JS to generate the rows and columns:
// Define data table rows:
var rows = [];
var rows_list = [];
var vl_max = vel_list.length;
for (i=0; i < vl_max; i+=1) {
var date_tmp = new Date(date_list[i].split(',')[0],date_list[i].split(',')[1]-1,date_list[i].split(',')[2]);
var date = [date_tmp];
var vel_tmp = vel_list[i];
var vtemp_max = vel_tmp.length;
var tooltip_dsname = dsname_list[i];
var tooltip_track = track_list[i];
for (j=0; j < vtemp_max; j+=1) {
var cell = [{v : date_tmp}];
for (k=0; k < vr_max; k+=1) {
var vel_full = vel_tmp[j];
var vel = vel_full.toFixed(1);
if (vel == vel_range[k]) {
// tooltip:
var dist = dist_list[j]/1000;
var yyyy = date_tmp.getFullYear().toString();
var mm = (date_tmp.getMonth()+1).toString(); // getMonth() is zero-based
var dd = date_tmp.getDate().toString();
var tooltip_date = yyyy + "-" + (mm[1]?mm:"0"+mm[0]) + "-" + (dd[1]?dd:"0"+dd[0]);
var tooltip = "<b>dataset: </b>"+tooltip_dsname+"<br><b>date: </b>"+tooltip_date+"<br><b>track: </b>"+tooltip_track+"<br><b>distance: </b>"+dist+" k"+mapunit+"<br><b> velocity: </b>"+vel_full.toFixed(2)+" m/d";
var color = color_list[k]
var style = "point { shape-type: square; fill-color: "+color+";}"
} else {
var dist = NaN;
var tooltip = "empty" ;
var style = "empty" ;
}
cell.push({v: dist},{v: tooltip},{v:style});
}
rows_list.push({c: cell});
}
};
Here is a JSFiddle for chart generation with smaller data:
http://jsfiddle.net/joannao89/t26ooyrt/2/
The problem is that while the chart works for smaller data, once I try to load it for a long distance and a wide date range, the browser keeps popping up this message: "A website is slowing down your browser, what would you like to do? {stop} {wait}"
I know this is probably a problem of too many rows. My website also generates 3 other charts like this, with the same data but in other X-Y axis combinations (for example time on the X-axis, velocity on the Y-axis and distance as the series), and those work perfectly fine. That's why I would like to pass the data to the chart in some faster way, but I have no clue how.
I already tried using setTimeout, but it didn't change much. I also tried doing a little less of the work on the JS side and more in the Django views, but that didn't help either.
So any suggestions about how to solve this will be very much appreciated!
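Not an authoritative answer, but one common way to speed this up is to stop building the table cell-by-cell in nested loops on the client: assemble all rows as one structure (ideally serialized in the Django view and passed to the template as JSON) and hand it to the DataTable in a single call. A minimal sketch, assuming a rows_list and a matching columns definition built as in the question, and a chart_div container element:
// Sketch: construct the DataTable from a literal object in one call,
// instead of pushing cells inside nested loops while the chart waits.
google.charts.load('current', {packages: ['corechart']});
google.charts.setOnLoadCallback(drawChart);
function drawChart() {
  var data = new google.visualization.DataTable({
    cols: columns,    // column definitions (assumed built elsewhere in the script)
    rows: rows_list   // all rows at once
  });
  var chart = new google.visualization.ScatterChart(
      document.getElementById('chart_div'));
  chart.draw(data, {tooltip: {isHtml: true}, legend: 'none'});
}
It may also help that the inner loop above pushes a {dist, tooltip, style} triple for every entry of vel_range even when the value doesn't match, so each row carries mostly "empty" cells; emitting only the matching series cell and leaving the others null would shrink the table considerably.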
