Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion DESCRIPTION
Original file line number Diff line number Diff line change
Expand Up @@ -71,5 +71,5 @@ Suggests:
mockery,
R.utils
License: GPL-2
RoxygenNote: 7.1.1
RoxygenNote: 7.3.1
Encoding: UTF-8
1 change: 1 addition & 0 deletions NAMESPACE
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ export(install.rtools)
export(install.swftools)
export(install.texmaker)
export(installr)
export(iplookup)
export(is.RStudio)
export(is.Rgui)
export(is.empty)
Expand Down
184 changes: 108 additions & 76 deletions R/geo_functions.R
Original file line number Diff line number Diff line change
Expand Up @@ -30,50 +30,50 @@
#' @title Measures the speed of downloading from different CRAN mirrors
#' @export
#' @description Estimates the speed of each CRAN mirror by measuring the time it takes to download the NEWS file.
#'
#'
#' @author Barry Rowlingson <b.rowlingson@@lancaster.ac.uk>
#'
#'
#' @param ms - the output of getCRANmirrors. Defaults to using all of the mirrors.
#' @param ... not in use
#'
#'
#'
#'
#' @details
#' It works by downloading the latest NEWS file (288 Kbytes at the moment, so not huge)
#' from each of the mirror sites in the CRAN mirrors list.
#' If you want to test it on a subset then call getCRANmirrors yourself and subset it somehow.
#'
#' It runs on the full CRAN list and while designing this package I've yet to find a
#' timeout or error so I'm not sure what will happen if download.file
#' fails. It returns a data frame like you get from getCRANmirrors but
#' with an extra 't' column giving the elapsed time to get the NEWS file.
#'
#'
#' CAVEATS: if your network has any local caching then these results
#' will be wrong, since your computer will probably be getting the
#' locally cached NEWS file and not the one on the server. Especially if
#' you run it twice. Oh, I should have put cacheOK=FALSE in the
#' download.file - but even that might get overruled somewhere. Also,
#' sites may have good days and bad days, good minutes and bad minutes,
#' your network may be congested on a short-term basis, etc etc.
#'
#'
#' There may also be a difference in reliability, which would not so easily be measured by an individual user.
#'
#'
#' Later that year, Barry also wrote Cranography. See: \url{https://www.maths.lancs.ac.uk/~rowlings/R/Cranography/}.
#'
#'
#' @return a data.frame with details on mirror sites and the time it took to download their NEWS file.
#'
#'
#'
#'
#' @seealso \link{freegeoip}, \link{myip}, \link{cranometer}
#'
#'
#' @examples
#' \dontrun{
#' # this can take some time
#' x <- cranometer()
#'
#' time_order <- order(x$t)
#'
#'
#' # a quick overview of the fastest mirrors
#' head(x[time_order,c(1:4, 9)], 20)
#'
#'
#' # a dotchart of the fastest mirrors
#' with(x[rev(time_order),],
#' dotchart(t, labels =Name,
Expand All @@ -85,7 +85,7 @@
#'require(plyr)
#'ss <- !(x$Name == "0-Cloud")
#'gvis_df <- ddply(x[ss,], .(CountryCode), function(xx) {
#' ss <- which.min(xx$t)
#' ss <- which.min(xx$t)
#' if(length(ss) == 0) ss <- 1
#' data.frame(time = xx$t[ss], name = xx$Name[ss] )
#'})
Expand All @@ -97,32 +97,32 @@
#' numvar="time",
#' hovervar = "name",
#' options=list(
#'                 colors='[0xA5EF63,
#' 0xFFB581, 0xFF8747]')
#' )
#'# Display chart
#'plot(Geo)
#' }
cranometer <- function(ms = getCRANmirrors(all = FALSE, local.only = FALSE), ...) {
  # Times the download of the NEWS file from each CRAN mirror in `ms` and
  # returns `ms` with an extra numeric column `t` holding the elapsed seconds
  # (NA when the download produced no file).
  #
  # NOTE(fix): a diff/merge artifact had duplicated this entire body; the
  # first copy returned early, leaving an unreachable second copy. The
  # duplicate has been removed -- behavior is that of the single body.
  dest <- tempfile()

  nms <- dim(ms)[1]
  ms$t <- rep(NA, nms)
  for (i in seq_len(nms)) {
    m <- ms[i, ]
    url <- paste(m$URL, "/src/base/NEWS", sep = "")
    # try() guards against download failures; system.time() captures the
    # elapsed wall-clock time of the transfer.
    t <- try(system.time(download.file(url, dest), gcFirst = TRUE))
    if (file.exists(dest)) {
      file.remove(dest)
      ms$t[i] <- t["elapsed"]
    } else {
      # No file was written, so no timing is available for this mirror.
      ms$t[i] <- NA
    }
  }

  ms$t <- as.numeric(ms$t)

  return(ms)
}

## ----
Expand All @@ -136,11 +136,11 @@ cranometer <- function(ms = getCRANmirrors(all = FALSE, local.only = FALSE),...)
# # if we have no columns, it probably means that GNsearch couldn't find that city name, in which case, we shorten the name of the city and search again.
# if(ncol(tmp_geo) == 0) {
# tmp_city <- paste(tail(strsplit(x[i,"City"], " ")[[1]], -1), collapse = " ")
# 		tmp_geo <- with(x[i,], GNsearch(name=tmp_city, country= CountryCode))[1,]
# }
# if(ncol(tmp_geo) == 0) tmp_geo <- NA # if we still can't find anything, we should turn this to NA so that we would still add a row (though an empy one) to the data.frame
#
#
# 	geonames_df <- rbind(geonames_df,tmp_geo)
# }
# LatLong <- with(geonames_df, paste(lat, ":", lng, sep = ""))
# gvis_df <- data.frame(LatLong, time = x$t, name = x$Name)
Expand All @@ -153,26 +153,26 @@ cranometer <- function(ms = getCRANmirrors(all = FALSE, local.only = FALSE),...)

#' @title Geolocate IP addresses in R
#' @export
#' @description
#' This R function uses the free freegeoip.net geocoding service to resolve an IP address (or a vector of them) into country, region, city, zip, latitude, longitude, area and metro codes.
#'
#'
#' The function require rjson.
#'
#'
#' @author Heuristic Andrew (see source for details)
#'
#'
#' @param ip a character vector of ips (default is the output from \link{myip})
#' @param format format of the output. Either "list" (default) or "data.frame"
#' @param ... not in use
#'
#'
#' @return a list or data.frame with details on your geo location based on the freegeoip.net service.
#'
#'
#' @source \url{https://heuristically.wordpress.com/2013/05/20/geolocate-ip-addresses-in-r/}.
#'
#'
#' @seealso \link{freegeoip}, \link{myip}, \link{cranometer}
#' @examples
#' \dontrun{
#' freegeoip()
#'
#'
#' ## http://www.students.ncl.ac.uk/keith.newman/r/maps-in-r
#' # install.packages("maps")
#' # install.packages("mapdata")
Expand All @@ -184,29 +184,29 @@ cranometer <- function(ms = getCRANmirrors(all = FALSE, local.only = FALSE),...)
#' my_lati <- myip_details$latitude
#' my_long <- myip_details$longitude
#' points(my_lati,my_long,col=2,pch=18, cex = 1)
#' # lines(c(my_lati,0) ,c(my_long, 50), col = 2)
#' }
freegeoip <- function(ip = myip(), format = ifelse(length(ip)==1,'list','dataframe'),...)
{
  # Defunct: freegeoip.net no longer exists, so this function fails fast
  # with a pointer to its successor service.
  #
  # NOTE(fix): a diff/merge artifact had duplicated this entire body
  # (including the stop() call); the duplicate copy has been removed.
  stop("The website of this service has moved to https://ipstack.com/. If you want to update this function to work with it, please send a pull request on https://github.com/talgalili/installr")
  # ---- legacy implementation, unreachable, kept for reference ----
  if (1 == length(ip))
  {
    # a single IP address
    require2("rjson")
    url <- paste(c("http://freegeoip.net/json/", ip), collapse='')
    ret <- rjson::fromJSON(readLines(url, warn=FALSE))
    if (format == 'dataframe')
      ret <- data.frame(t(unlist(ret)))
    return(ret)
  } else {
    # a vector of IPs: resolve each one and row-bind the results
    ret <- data.frame()
    for (i in seq_along(ip))
    {
      r <- freegeoip(ip[i], format="dataframe")
      ret <- rbind(ret, r)
    }
    return(ret)
  }
}


Expand All @@ -215,21 +215,53 @@ freegeoip <- function(ip = myip(), format = ifelse(length(ip)==1,'list','datafra

#' @title What is my IP
#' @export
#' @description
#' Retrieving your public IP via \url{https://api.ipify.org}.
#' (old solution used: http://api.exip.org/
#' based on http://stackoverflow.com/questions/3097589/getting-my-public-ip-via-api)
#' @param ... not in use
#' @return your current ip (character string)
#' @source \url{https://api.ipify.org}
#' @seealso \link{freegeoip}, \link{myip}, \link{cranometer}
#'
#'
#' @examples
#' \dontrun{
#' myip() # "37.132.25.15"
#' }
myip <- function(...) {
  # Return the machine's public IP address as a character string by querying
  # the ipify web service once.
  #
  # NOTE(fix): a diff artifact duplicated the readLines() call, which issued
  # two HTTP requests and silently discarded the first result; the duplicate
  # has been removed.
  readLines("https://api.ipify.org", warn = FALSE)
}



#' @title Geolocate IP address using IP2Location.io in R
#' @export
#' @description
#' This R function uses the IP2Location.io geolocation API to look up for an IP address for a set of enriched information. This includes country, region, city, latitude and longtitude, zipcode and ASN. For more information about the API, kindly visit the documentation at \url{https://www.ip2location.io/ip2location-documentation}.
#'
#' The function require rjson.
#'
#' @author IP2Location
#' @param ip IPv4 or IPv6 address
#' @param key Your IP2Location.io API Key
#' @return API result from IP2Location.io API. Visit the Response Format section in \url{https://www.ip2location.io/ip2location-documentation} to learn more on the fields available in the result.
#' @source \url{https://api.ip2location.io}
#' @seealso \link{freegeoip}, \link{myip}, \link{cranometer}
#'
#' @examples
#' \dontrun{
#' iplookup("8.8.8.8", "YOUR_API_KEY")
#' }
iplookup <- function(ip, key) {
  # Look up `ip` via the IP2Location.io REST API and return the parsed JSON
  # response as an R list (country, region, city, lat/long, zip, ASN, ...).
  #
  # ip  - IPv4 or IPv6 address to resolve (character scalar).
  # key - IP2Location.io API key (character scalar).
  require2("rjson")
  base_url <- "https://api.ip2location.io/?"
  params <- list(
    ip = ip,
    key = key
  )
  # Build "ip=...&key=..." query string. NOTE(fix): paste0() has no `sep`
  # argument -- the original `sep = ""` was silently concatenated as data
  # (harmless only because it was empty); it has been dropped.
  query <- paste0(names(params), "=", utils::URLencode(unlist(params)),
                  collapse = "&")
  url <- paste0(base_url, query)
  rjson::fromJSON(readLines(url, warn = FALSE))
}


Expand Down
35 changes: 35 additions & 0 deletions man/iplookup.Rd

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.