In [1]:
# Set a CRAN mirror so non-interactive installs work without prompting
options(repos = "https://cloud.r-project.org")
# Bootstrap 'pacman' if it is missing. requireNamespace() checks
# availability without attaching the package; require() is discouraged
# for dependency checks because it attaches and merely returns FALSE
# on failure.
if (!requireNamespace("pacman", quietly = TRUE)) install.packages("pacman")
In [2]:
# Load (installing first if necessary) the mapping stack used below.
# NOTE(review): rgeos, maptools and rgdal were retired from CRAN in
# October 2023 — on a current R installation this section needs
# sf/terra-era replacements to run. Confirm the target environment.
pacman::p_load(rgeos,
maptools,
rgdal,
ggplot2,
ggthemes,
ggalt,
mapproj)
También es necesario descargar tres 'shapefiles' (countries.geo.json, plates.json y quakes.json) al directorio de trabajo antes de ejecutar las celdas siguientes.
In [3]:
# The world layer: read country polygons from GeoJSON (rgdal::readOGR)
world <- readOGR("countries.geo.json",
"OGRGeoJSON",
stringsAsFactors = FALSE)
# Zero-width buffer: a standard trick to repair invalid/self-intersecting
# polygon geometries before further processing (rgeos::gBuffer)
world <- gBuffer(world, byid = TRUE, width = 0)
# Shift longitudes so the map re-centers on the Pacific
# (maptools::nowrapRecenter)
world <- nowrapRecenter(world)
In [4]:
# The tectonic plates layer: read plate polygons and re-center them
# the same way as the world layer
plates <- readOGR("plates.json", "OGRGeoJSON",
stringsAsFactors = FALSE)
plates <- nowrapRecenter(plates)
In [5]:
# Shift point coordinates so the map can be re-centered on the Pacific
# plate: any negative first coordinate (assumed to be longitude in
# degrees) is moved from [-180, 0) into [180, 360) by adding 360.
#
# @param obj A SpatialPointsDataFrame (sp package).
# @return A new SpatialPointsDataFrame with the shifted coordinates and
#   the original attribute data. NOTE(review): like the original
#   implementation, the CRS of `obj` is not carried over — confirm no
#   caller relies on the result's proj4string.
recenter_points <- function(obj) {
  crds <- coordinates(obj)
  west <- crds[, 1] < 0
  # Vectorized subset-assignment; equivalent to the original redundant
  # all()/any()/ifelse() branching (all three branches reduce to
  # "add 360 where negative, leave the rest alone").
  crds[west, 1] <- crds[west, 1] + 360
  SpatialPointsDataFrame(crds, obj@data)
}
In [6]:
# Earthquake epicenters: read the quake points and shift their
# longitudes with the recenter_points() helper defined above
quakes <- readOGR("quakes.json", "OGRGeoJSON",
stringsAsFactors = FALSE)
quakes <- recenter_points(quakes)
In [7]:
# Build the plotting data: fortify() flattens the sp objects into
# plain data.frames that ggplot2 can draw
world_map <- fortify(world)
plates_map <- fortify(plates)
quakes_dat <- data.frame(quakes)
# Size variable for the plot: magnitude modulo 5 compresses the 5-8
# magnitude range into small values for the exponential size scale
# defined later. NOTE(review): %% 5 wraps a magnitude of exactly 5.0
# to 0 — confirm this is intended rather than mag - 5.
quakes_dat$trans <- quakes_dat$mag %% 5
In [8]:
# Initialize the map object with the base layer: world polygons drawn
# in light grey with thin white borders
gg <- ggplot() +
  geom_cartogram(
    data = world_map,
    map = world_map,
    aes(x = long, y = lat, map_id = id),
    color = "white",
    size = 0.15,
    fill = "#d8d8d6"
  )
gg # Show the result
In [9]:
# Overlay tectonic plate boundaries as thin black outlines, fully
# transparent fill so the world layer shows through
gg <- gg +
  geom_cartogram(
    data = plates_map,
    map = plates_map,
    aes(x = long, y = lat, map_id = id),
    color = "black",
    size = 0.1,
    fill = "#00000000",
    alpha = 0
  )
gg # Show the result
In [10]:
# Plot every quake as a hollow, semi-transparent orange circle whose
# size is driven by the transformed magnitude
gg <- gg +
  geom_point(
    data = quakes_dat,
    aes(x = coords.x1, y = coords.x2, size = trans),
    shape = 1,
    alpha = 1 / 3,
    color = "#d47e5d",
    fill = "#00000000"
  )
gg # Show the result
In [11]:
# Emphasize the major quakes (magnitude above 7.5) by re-drawing them
# with fully opaque black outlines
gg <- gg +
  geom_point(
    data = subset(quakes_dat, mag > 7.5),
    aes(x = coords.x1, y = coords.x2, size = trans),
    shape = 1,
    alpha = 1,
    color = "black",
    fill = "#00000000"
  )
gg # Show the result
In [12]:
# Label each major quake with its magnitude.
# NOTE(review): vjust = c(3.9, 3.9, 5) hard-codes vertical offsets for
# exactly three labels, but the quake feed is live data — if the number
# of rows with mag > 7.5 differs from 3 this vector will recycle or
# error. Verify against the downloaded data.
gg <- gg + geom_text(data = subset(quakes_dat, mag > 7.5),
aes(x = coords.x1, y = coords.x2,
label = sprintf("Mag %2.1f", mag)),
color = "black",
size = 3,
vjust = c(3.9, 3.9, 5),
fontface = "bold")
gg # Show the result
In [13]:
# Exponential size scale so larger magnitudes dominate visually.
# NOTE(review): labels = c(5:8) supplies exactly four legend labels,
# which assumes the scale produces four breaks — confirm on the output.
gg <- gg + scale_size(name = "Magnitud",
trans = "exp",
labels = c(5:8),
range = c(1, 20))
gg # Show the result
In [14]:
# Quick approximate lat/lon aspect-ratio projection
gg <- gg + coord_quickmap()
gg # Show the result
In [15]:
# Apply the blank map theme and tuck a horizontal legend into the
# top-left corner; the three theme() tweaks are merged into one call
# (ggplot2 merges successive theme() additions, so this is equivalent)
gg <- gg +
  theme_map() +
  theme(
    legend.position = c(0.05, 0.99),
    legend.direction = "horizontal",
    legend.key = element_rect(color = "#00000000")
  )
gg # Show the result
https://www.r-bloggers.com/cluster-analysis-on-earthquake-data-from-usgs/
In [16]:
# Packages for the USGS cluster-analysis section.
# NOTE(review): rgeos and rgdal were retired from CRAN in October 2023.
pacman::p_load(sp,
raster,
plotrix,
rgeos,
rgdal)
# scatterplot3d)
In [17]:
# Pull the USGS feed of all earthquakes recorded in the last 30 days.
# read.csv() is the idiomatic reader here: unlike read.table() it
# disables comment.char, so rows containing '#' cannot be misparsed,
# and header/sep defaults already match a CSV feed. TRUE is spelled
# out (T is an ordinary, reassignable binding).
URL <- "http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_month.csv"
Earthquake_30Days <- read.csv(URL)
In [18]:
#Download, unzip and load the polygon shapefile with the countries' borders
# (the zip and its extracted files are written into the current
# working directory)
download.file("http://thematicmapping.org/downloads/TM_WORLD_BORDERS_SIMPL-0.3.zip",destfile="TM_WORLD_BORDERS_SIMPL-0.3.zip")
unzip("TM_WORLD_BORDERS_SIMPL-0.3.zip",exdir=getwd())
polygons <- shapefile("TM_WORLD_BORDERS_SIMPL-0.3.shp")
In [19]:
dir.create(paste(getwd(),"/GeologicalData",sep=""))
In [20]:
#Faults
# Download and read the fault-lines layer; raster::shapefile failed on
# this file (see the commented line), hence rgdal::readOGR instead
download.file("http://legacy.jefferson.kctcs.edu/techcenter/gis%20data/World/Zip/FAULTS.zip",destfile="GeologicalData/FAULTS.zip")
unzip("GeologicalData/FAULTS.zip",exdir="GeologicalData")
# faults <- shapefile("GeologicalData/FAULTS.SHP") # Does not work!
faults <- readOGR("GeologicalData/FAULTS.SHP")
In [21]:
#Plates
# Plate boundaries as lines; note this rebinds the `plates` name used
# by the ggplot section above
download.file("http://legacy.jefferson.kctcs.edu/techcenter/gis%20data/World/Zip/PLAT_LIN.zip",destfile="GeologicalData/plates.zip")
unzip("GeologicalData/plates.zip",exdir="GeologicalData")
# plates <- shapefile("GeologicalData/PLAT_LIN.SHP")
plates <- readOGR("GeologicalData/PLAT_LIN.SHP")
In [22]:
#Volcano
# Volcano point locations.
# NOTE(review): the name `volcano` shadows the built-in
# datasets::volcano matrix for the rest of the session.
download.file("http://legacy.jefferson.kctcs.edu/techcenter/gis%20data/World/Zip/VOLCANO.zip",destfile="GeologicalData/VOLCANO.zip")
unzip("GeologicalData/VOLCANO.zip",exdir="GeologicalData")
# volcano <- shapefile("GeologicalData/VOLCANO.SHP")
volcano <- readOGR("GeologicalData/VOLCANO.SHP")
In [23]:
# Keep only rows classified as earthquakes (the feed also carries
# quarry blasts, explosions, etc.). %in% replaces the original
# paste()-as-character-coercion trick: it yields the same mask,
# including FALSE (never NA) for missing type values.
Earthquakes <- Earthquake_30Days[Earthquake_30Days$type %in% "earthquake", ]
# Promote the data.frame to a SpatialPointsDataFrame (sp) using the
# longitude/latitude columns as coordinates
coordinates(Earthquakes) = ~longitude + latitude
In [24]:
# Composite base-graphics map: plates (red), country borders, faults
# (grey lines), quakes (blue crosses) and volcanoes (dark-red stars).
# Uncomment the jpeg()/dev.off() pair to write the plot to a file.
# jpeg("Earthquake_Origin.jpg",4000,2000,res=300)
plot(plates, col = "red")
plot(polygons, add = T)
# title("Earthquakes in the last 30 days",cex.main=3)
lines(faults,col = "dark grey")
points(Earthquakes,
col = "blue",
cex = 0.5,
pch = "+")
points(volcano,
pch = "*",
cex = 0.7,
col = "dark red")
# Hard-coded legend anchor in map (lon/lat) coordinates
legend.pos <- list(x = 20.97727, y = -57.86364)
#legend(legend.pos,legend=c("Plates","Faults","Volcanoes","Earthquakes"),pch=c("-","-","*","+"),col=c("red","dark grey","dark red","blue"),bty="n",bg=c("white"),y.intersp=0.75,title="Days from Today",cex=0.8)
legend(legend.pos,
legend = c("Placas", "Fallas", "Volcanes", "Sismos"),
pch = c("-", "-", "*", "+"),
col = c("red", "dark grey", "dark red", "blue"),
bty = "n",
bg = c("white"),
y.intersp = 0.75,
title = "Últimos 30 días",
cex = 0.8)
# text(legend.pos$x, legend.pos$y+2, "Legend:")
# dev.off()
https://r-video-tutorial.blogspot.mx/2015/05/introductory-time-series-analysis-of-us.html
In [25]:
# Packages for the time-series section (xts for date-indexed series)
pacman::p_load(sp,
raster,
xts)
In [26]:
# Download one year of EPA AQS pre-generated air data, unzip it into
# the working directory, and read the extracted CSV.
#
# @param year Year of the archive to fetch (e.g. 2016).
# @param property Which pollutant/meteorological series to fetch; one
#   of the choices below (defaults to "ozone").
# @param type Aggregation level of the archive: "hourly", "daily" or
#   "annual" (defaults to "hourly").
# @return A data.frame read from the downloaded CSV.
download.EPA <- function(year,
                         property = c("ozone", "so2", "co", "no2",
                                      "pm25.frm", "pm25", "pm10",
                                      "wind", "temp", "pressure",
                                      "dewpoint", "hap", "voc", "lead"),
                         type = c("hourly", "daily", "annual")) {
  # match.arg() validates the choice and resolves the vector defaults.
  # The original if() chain failed outright when the defaults were left
  # unresolved (a length-14 condition in if()) and silently left PROP
  # undefined for an unknown property.
  property <- match.arg(property)
  type <- match.arg(type)
  # Property name -> AQS file-name code (replaces the 14-branch chain)
  codes <- c(ozone = "44201", so2 = "42401", co = "42101",
             no2 = "42602", pm25.frm = "88101", pm25 = "88502",
             pm10 = "81102", wind = "WIND", temp = "TEMP",
             pressure = "PRESS", dewpoint = "RH_DP", hap = "HAPS",
             voc = "VOCS", lead = "lead")
  PROP <- unname(codes[property])
  archive <- paste0(type, "_", PROP, "_", year, ".zip")
  # NOTE(review): the old aqsdr1.epa.gov host no longer serves these
  # archives; the pre-generated files moved to aqs.epa.gov/aqsweb/airdata
  # with the same file-name scheme.
  URL <- paste0("https://aqs.epa.gov/aqsweb/airdata/", archive)
  download.file(URL, destfile = archive)
  unzip(archive, exdir = getwd())
  read.csv(paste0(type, "_", PROP, "_", year, ".csv"))
}
In [27]:
# Fetch the 2016 daily ozone archive (large network download).
# NOTE(review): the name `data` shadows the base utils::data() function
# for the rest of the session.
data <- download.EPA(year = 2016,
property = "ozone",
type = "daily")
In [28]:
# Inspect the structure of the downloaded table
str(data)
In [29]:
# Zero-row template for the per-station summary table; the loop in the
# next cell fills in one row per monitoring station
locations <- data.frame(ID = numeric(),
LON = numeric(),
LAT = numeric(),
OZONE = numeric(),
AQI = numeric())
In [30]:
# Fill one summary row per monitoring station (keyed by address):
# station index, coordinates, mean ozone and mean AQI over the period.
# The unique address vector is hoisted out of the loop — the original
# recomputed unique(data$Address) three times per iteration.
addresses <- unique(data$Address)
for (idx in seq_along(addresses)) {
  dat <- data[data$Address == addresses[idx], ]
  # NOTE(review): assumes each address maps to exactly one
  # longitude/latitude pair; unique() returning more than one value
  # would break the single-row assignment. Confirm against the data.
  locations[idx, ] <- data.frame(idx,
                                 unique(dat$Longitude),
                                 unique(dat$Latitude),
                                 round(mean(dat$Arithmetic.Mean, na.rm = TRUE), 2),
                                 round(mean(dat$AQI, na.rm = TRUE), 0))
}
In [31]:
# Attach the station addresses to the summary table (same order as the
# loop above, both derive from unique(data$Address))
locations$ADDRESS <- unique(data$Address)
In [32]:
# Promote to a SpatialPointsDataFrame using LON/LAT as coordinates
coordinates(locations) = ~LON + LAT
In [33]:
# Declare the CRS as WGS84 lon/lat.
# NOTE(review): the "+init=epsg:4326" form is deprecated under PROJ >= 6;
# current code would use CRS("EPSG:4326").
projection(locations) = CRS("+init=epsg:4326")
In [34]:
# Two equivalent ways to select one station's records, excluding rows
# flagged with an exceptional event. Note the second, address-based
# selection immediately overwrites the first, ID-based one — only the
# "966 W 32ND" station is kept.
ID = 135
Ozone <- data[paste(data$Address) ==
unique(data$Address)[ID] & paste(data$Event.Type) =="None",]
ADDRESS = "966 W 32ND"
Ozone <- data[paste(data$Address) ==
ADDRESS & paste(data$Event.Type) == "None", ]
In [35]:
# Check how the local dates are stored before converting
str(Ozone$Date.Local)
In [36]:
# Parse the dates into Date objects
Ozone$DATE <- as.Date(Ozone$Date.Local)
In [37]:
# Build a date-indexed xts series of the daily ozone means and plot it
Ozone.TS <- xts(x = Ozone$Arithmetic.Mean,
order.by = Ozone$DATE)
plot(Ozone.TS,
main = "Ozone Data",
sub = "Year 2016")
In [38]:
# The date index of the series
index(Ozone.TS)
In [39]:
# xts supports ISO-8601 date-string subsetting. The original used 2013
# dates, but this series was built from the 2016 download, so all
# three selections returned empty series; use 2016 instead.
Ozone.TS['2016-05-06'] #Selection of a single day
Ozone.TS['2016-03'] #Selection of March data
Ozone.TS['2016-05/2016-07'] #Selection by time range
In [40]:
# Dates on which the daily ozone mean exceeded 0.03
index(Ozone.TS[coredata(Ozone.TS)>0.03,])
In [41]:
# Aggregate to weekly means and monthly maxima
apply.weekly(Ozone.TS,FUN=mean)
apply.monthly(Ozone.TS,FUN=max)
In [42]:
# Monthly standard error of the mean
apply.monthly(Ozone.TS,FUN=function(x) {sd(x)/sqrt(length(x))})
In [43]:
# Overlay the raw series with weekly/monthly/quarterly/yearly means
plot(Ozone.TS,
main = "Ozone Data",
sub = "Year 2016")
lines(apply.weekly(Ozone.TS, FUN = mean),
col = "red")
lines(apply.monthly(Ozone.TS, FUN = mean),
col = "blue")
lines(apply.quarterly(Ozone.TS, FUN = mean),
col = "green")
lines(apply.yearly(Ozone.TS, FUN = mean),
col = "pink")
In [44]:
# Three years of daily NO2 archives (large downloads)
NO2.2016.DATA <- download.EPA(year=2016,property="no2",type="daily")
NO2.2015.DATA <- download.EPA(year=2015,property="no2",type="daily")
NO2.2014.DATA <- download.EPA(year=2014,property="no2",type="daily")
In [45]:
# Select one station in each year, excluding exceptional-event rows
ADDRESS = "2 miles south of Ouray and south of the White and Green River confluence" #Copied and pasted from the interactive map
NO2.2016 <- NO2.2016.DATA[paste(NO2.2016.DATA$Address)==ADDRESS&paste(NO2.2016.DATA$Event.Type)=="None",]
NO2.2015 <- NO2.2015.DATA[paste(NO2.2015.DATA$Address)==ADDRESS&paste(NO2.2015.DATA$Event.Type)=="None",]
NO2.2014 <- NO2.2014.DATA[paste(NO2.2014.DATA$Address)==ADDRESS&paste(NO2.2014.DATA$Event.Type)=="None",]
# Stitch the three years into one regular ts at daily frequency.
# NOTE(review): frequency = 365 assumes no missing observation dates
# and ignores the 2016 leap day — gaps would silently misalign the
# seasonal cycle. Verify the row counts per year.
NO2.TS <- ts(c(NO2.2014$Arithmetic.Mean,
NO2.2015$Arithmetic.Mean,
NO2.2016$Arithmetic.Mean),
frequency = 365,
start = c(2014, 1))
In [46]:
# Classical seasonal decomposition by moving averages
dec <- decompose(NO2.TS)
plot(dec)
In [ ]:
# Loess-based decomposition; "periodic" is passed positionally as the
# s.window argument
STL <- stl(NO2.TS,"periodic")
plot(STL)
In [47]:
# Packages for the OpenAQ (ropenaq) air-quality section
pacman::p_load(ropenaq,
import,
countrycode,
knitr,
purrr,
viridis,
ggplot2,
dplyr)
In [48]:
# Bring only dplyr::filter into scope via the import package
import::from(dplyr, filter)
# List countries with OpenAQ measurements and locate India
countries <- aq_countries()
filter(countries, name == "India")
In [49]:
# Cities in India ("IN"), then the Hyderabad entry
in_cities <- aq_cities(country = "IN")
filter(in_cities, city == "Hyderabad")
In [50]:
# Monitoring locations in Hyderabad, rendered as a markdown table
aq_locations(city = "Hyderabad") %>%
knitr::kable()
In [51]:
# Only the locations reporting PM2.5
aq_locations(city = "Hyderabad", parameter = "pm25")
In [52]:
# find how many measurements there are
first_test <- aq_measurements(city = "Hyderabad",
date_from = "2016-01-01",
date_to = "2016-12-31",
parameter = "pm25")
# Total record count is reported in the response's "meta" attribute
count <- attr(first_test, "meta")$found
print(count)
In [ ]:
In [53]:
# Same country lookup, this time for Mexico
import::from(dplyr, filter)
countries <- aq_countries()
filter(countries, name == "Mexico")
In [54]:
# NOTE(review): this cell is an exact duplicate of the previous one
import::from(dplyr, filter)
countries <- aq_countries()
filter(countries, name == "Mexico")
In [55]:
# Mexican cities with OpenAQ data (rebinds the name used for India)
in_cities <- aq_cities(country = "MX")
In [56]:
in_cities
In [57]:
# The Mexico City entry
filter(in_cities, city == "DISTRITO FEDERAL")
In [58]:
# Monitoring locations in Mexico City as a markdown table
aq_locations(city = "DISTRITO FEDERAL") %>%
knitr::kable()
In [ ]:
# NOTE(review): there is no `cities` help topic in the packages loaded
# here — presumably ?aq_cities was intended; confirm.
?cities
In [ ]: