sobre gdal_translate e gdalwarp

Olá! Tenho algumas dúvidas sobre o uso do gdal no R. Depois de converter um arquivo HDF, com dados de temperatura de superfície do mar, para geotiff eu quero reprojetar para WGS84 ou SIRGAS2000 e cortar entre as lats de 22 a 29 S e longs de 40 a 50 W. o HDF para exemplo está em https://app.box.com/s/16cf7qv6af6gsz1v66staori2mtneu0r Estou seguindo basicamente https://scottishsnow.wordpress.com/2014/08/24/many-rastered-beast/ Bem, converti o HDF com: gdal_translate("A20080012008031.L3m_MO_SST_4","georef.tif",sd_index=1,a_ullr=c(0,4320,8640,0), a_srs="+proj=eqc +lat_ts=0 +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +a=6371007 +b=6371007 +units=m +no_defs") Sem a_ullr e a_srs quando utilizava o gdalwarp eu recebia a mensagem: "ERROR 1: Unable to compute a transformation between pixel/line and georeferenced coordinates" Copiei os valores de a_ullr e a_srs de GDALinfo("georef.tif") map <- raster("georef.tif") plot(map) Meu problema agora é reprojetar e cortar a imagem georef.tif: gdalwarp("georef.tif", "georef2.tif", s_srs="+proj=eqc +lat_ts=0 +lat_0=0 +lon_0=0 +x_0=0 +y_0=0 +a=6371007 +b=6371007 +units=m +no_defs", t_srs="+proj=longlat +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +no_defs") map2 <- raster("georef2.tif") plot(map2) Map2 não está em SIRGAS2000 e a opção de corte te=c(-50,-29,-40,-22) não funciona. Onde será que estou errando? Talvez em a_ullr e a_srs, não sei. Agradeço muito qualquer ajuda. Sds Antonio Olinto

Caros, Na semana passada escrevi para lista sobre como obter dados de temperatura do mar e clorofila em arquivos HDF. Estava com problemas para reprojetar um mapa. Depois de muita leitura escrvi um script para fazer o trabalho em lote para ambas variáveis utilizando arquivos nc, Compartilho-os abaixo e espero que sejam úteis para mais alguém. Sugestões para melhorias ou avisos de erros são bem vindos. Sds Antonio Olinto *SST===* # Antonio Olinto Avila-da-Silva, Instituto de Pesca, Brasil # script to process Aqua MODIS Sea Surface Temperature # Monthly means 9 km resolution # files downloaded from http://oceancolor.gsfc.nasa.gov/cgi/l3 # all .L3m_MO_SST_sst_9km.nc files must be in the working directory # the script will open each nc file, read lon, lat and sst data, # select data from a specific area and write them into # a single csv file named MODISA_sst.csv # Some reference pages # http://geog.uoregon.edu/GeogR/topics/netCDF-read-ncdf4.html # https://scottishsnow.wordpress.com/2014/08/24/many-rastered-beast/ # load libraries # ncdf4 needs libnetcdf-dev netcdf-bin in Linux # install.packages(c("ncdf4","reshape2")) library("ncdf4") library("reshape2") # set working directory setwd("/mnt/Dados02/MODIS/SST") # indicate the path to the files file.exists("MODISA_sst.csv") # caution new data will be appended to this file if it already exists # file.rename("MODISA_sst.csv","MODISA_sst.old") # file.remove("MODISA_sst.csv") # list and remove objects ls() rm(list = ls()) # set the study area lonmax<--40 lonmin<--50 latmax<--22 latmin<--29 # create a list of files and indicate its length (f <- list.files(".", pattern="*.L3m_MO_SST_sst_9km.nc",full.names=F)) (lf<-length(f)) # variable var<-"sst" for (i in 1:lf) { # progress indicator print(paste("Processing file",i,"from",length(f),sep=" ")) # open netCDF file data<-nc_open(f) # extract data lon<-ncvar_get(data,"lon") lat<-ncvar_get(data,"lat") value<-ncvar_get(data,var) unit<-ncatt_get(data,var,"units")$value # matrix to 
data.frame dimnames(value)<-list(lon=lon,lat=lat) dat.var<-melt(value,id="lon") # select data from the study area taking out missing data dat.varSAtmp<-subset(dat.var,lon<=lonmax & lon>=lonmin & lat<=latmax & lat>=latmin & value<45) # extract date information dateini<-ncatt_get(data,0,"time_coverage_start")$value dateend<-ncatt_get(data,0,"time_coverage_end")$value datemean<-mean(c(as.Date(dateend,"%Y-%m-%dT%H:%M:%OSZ"),as.Date(dateini,"%Y-%m-%dT%H:%M:%OSZ"))) year<-substring(datemean,0,4) month<-substring(datemean,6,7) # prepare final data set dat.varSA<-data.frame(rep(as.integer(year,nrow(dat.varSAtmp))),rep(as.integer(month,nrow(dat.varSAtmp))), dat.varSAtmp,rep(unit,nrow(dat.varSAtmp)),rep(var,nrow(dat.varSAtmp))) names(dat.varSA)<-c("year","month","lon","lat","value","unit","var") # save csv file fe<-file.exists("MODISA_sst.csv") write.table(dat.varSA,"MODISA_sst.csv",row.names=FALSE,col.names=!fe,sep=";",dec=",",append=fe) # close connection nc_close(data) # clean workspace rm(data,lon,lat,value,unit,dat.var,dat.varSAtmp,dateini,dateend,datemean,year,month,dat.varSA,fe) } rm(var,f,i,latmax,latmin,lf,lonmax,lonmin) *CHL_a=====* # Antonio Olinto Avila-da-Silva, Instituto de Pesca, Brasil # script to process Aqua MODIS Chlorophyll Concentration OCx Algorithm # Monthly means 9 km resolution # see http://oceancolor.gsfc.nasa.gov/WIKI/OCChlOCI.html # files downloaded from http://oceancolor.gsfc.nasa.gov/cgi/l3 # all .L3m_MO_CHL_chlor_a_9km.nc files must be in the working directory # the script will open each nc file, read lon, lat and chl data, # select data from a specific area and write them into # a single csv file named MODISA_chl.csv # Some reference pages # http://geog.uoregon.edu/GeogR/topics/netCDF-read-ncdf4.html # https://scottishsnow.wordpress.com/2014/08/24/many-rastered-beast/ # load libraries # ncdf4 needs libnetcdf-dev netcdf-bin in Linux # install.packages(c("ncdf4","reshape2")) library("ncdf4") library("reshape2") # set working directory 
setwd("/mnt/Dados02/MODIS/CHL") # indicate the path to the files file.exists("MODISA_chl.csv") # caution new data will be appended to this file if it already exists # file.rename("MODISA_chl.csv","MODISA_chl.old") # file.remove("MODISA_chl.csv") # list and remove objects ls() rm(list = ls()) # set the study area lonmax<--40 lonmin<--50 latmax<--22 latmin<--29 # create a list of files and indicate its length (f <- list.files(".", pattern="*.L3m_MO_CHL_chlor_a_9km.nc",full.names=F)) (lf<-length(f)) # variable var<-"chlor_a" for (i in 1:lf) { # progress indicator print(paste("Processing file",i,"from",length(f),sep=" ")) # open netCDF file data<-nc_open(f) # extract data lon<-ncvar_get(data,"lon") lat<-ncvar_get(data,"lat") value<-ncvar_get(data,var) unit<-ncatt_get(data,var,"units")$value # matrix to data.frame dimnames(value)<-list(lon=lon,lat=lat) dat.var<-melt(value,id="lon") # select data from the study area taking out missing data dat.varSAtmp<-na.omit(subset(dat.var,lon<=lonmax & lon>=lonmin & lat<=latmax & lat>=latmin)) # extract date information dateini<-ncatt_get(data,0,"time_coverage_start")$value dateend<-ncatt_get(data,0,"time_coverage_end")$value datemean<-mean(c(as.Date(dateend,"%Y-%m-%dT%H:%M:%OSZ"),as.Date(dateini,"%Y-%m-%dT%H:%M:%OSZ"))) year<-substring(datemean,0,4) month<-substring(datemean,6,7) # prepare final data set dat.varSA<-data.frame(rep(as.integer(year,nrow(dat.varSAtmp))),rep(as.integer(month,nrow(dat.varSAtmp))), dat.varSAtmp,rep(unit,nrow(dat.varSAtmp)),rep(var,nrow(dat.varSAtmp))) names(dat.varSA)<-c("year","month","lon","lat","value","unit","var") # save csv file fe<-file.exists("MODISA_chl.csv") write.table(dat.varSA,"MODISA_chl.csv",row.names=FALSE,col.names=!fe,sep=";",dec=",",append=fe) # close connection nc_close(data) # clean workspace rm(data,lon,lat,value,unit,dat.var,dat.varSAtmp,dateini,dateend,datemean,year,month,dat.varSA,fe) } rm(var,f,i,latmax,latmin,lf,lonmax,lonmin)
participantes (1)
-
Antonio Silva