From a83d81c4b12dbefc7ad34cb8f5d44450098277ff Mon Sep 17 00:00:00 2001 From: ip2location Date: Tue, 20 Feb 2024 08:50:51 +0800 Subject: [PATCH] Add IP2Location.io API --- DESCRIPTION | 2 +- NAMESPACE | 1 + R/geo_functions.R | 184 +++++++++++++++++++++++++++------------------- man/iplookup.Rd | 35 +++++++++ 4 files changed, 145 insertions(+), 77 deletions(-) create mode 100644 man/iplookup.Rd diff --git a/DESCRIPTION b/DESCRIPTION index 97ba9e1..add8b50 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -71,5 +71,5 @@ Suggests: mockery, R.utils License: GPL-2 -RoxygenNote: 7.1.1 +RoxygenNote: 7.3.1 Encoding: UTF-8 diff --git a/NAMESPACE b/NAMESPACE index 0f8ff11..a0adeed 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -67,6 +67,7 @@ export(install.rtools) export(install.swftools) export(install.texmaker) export(installr) +export(iplookup) export(is.RStudio) export(is.Rgui) export(is.empty) diff --git a/R/geo_functions.R b/R/geo_functions.R index b4018c5..ab27229 100644 --- a/R/geo_functions.R +++ b/R/geo_functions.R @@ -30,23 +30,23 @@ #' @title Measures the speed of downloading from different CRAN mirrors #' @export #' @description Estimates the speed of each CRAN mirror by measuring the time it takes to download the NEWS file. -#' +#' #' @author Barry Rowlingson -#' +#' #' @param ms - the output of getCRANmirrors. Defaults to using all of the mirrors. #' @param ... not in use -#' -#' +#' +#' #' @details -#' It works by downloading the latest NEWS file (288 Kbytes at the moment, so not huge) -#' from each of the mirror sites in the CRAN mirrors list. +#' It works by downloading the latest NEWS file (288 Kbytes at the moment, so not huge) +#' from each of the mirror sites in the CRAN mirrors list. #' If you want to test it on a subset then call getCRANmirrors yourself and subset it somehow. 
-#' -#' It runs on the full CRAN list and while designing this package I've yet to find a +#' +#' It runs on the full CRAN list and while designing this package I've yet to find a #' timeout or error so I'm not sure what will happen if download.file #' fails. It returns a data frame like you get from getCRANmirrors but #' with an extra 't' column giving the elapsed time to get the NEWS file. -#' +#' #' CAVEATS: if your network has any local caching then these results #' will be wrong, since your computer will probably be getting the #' locally cached NEWS file and not the one on the server. Especially if @@ -54,26 +54,26 @@ #' download.file - but even that might get overruled somewhere. Also, #' sites may have good days and bad days, good minutes and bad minutes, #' your network may be congested on a short-term basis, etc etc. -#' +#' #' There may also be a difference in reliability, which would not so easily be measured by an individual user. -#' +#' #' Later that year, Barry also wrote Cranography. See: \url{https://www.maths.lancs.ac.uk/~rowlings/R/Cranography/}. -#' +#' #' @return a data.frame with details on mirror sites and the time it took to download their NEWS file. 
-#' -#' +#' +#' #' @seealso \link{freegeoip}, \link{myip}, \link{cranometer} -#' +#' #' @examples #' \dontrun{ #' # this can take some time -#' x <- cranometer() -#' +#' x <- cranometer() +#' #' time_order <- order(x$t) -#' +#' #' # a quick overview of the fastest mirrors #' head(x[time_order,c(1:4, 9)], 20) -#' +#' #' # a dotchart of the fastest mirrors #' with(x[rev(time_order),], #' dotchart(t, labels =Name, @@ -85,7 +85,7 @@ #'require(plyr) #'ss <- !(x$Name == "0-Cloud") #'gvis_df <- ddply(x[ss,], .(CountryCode), function(xx) { -#' ss <- which.min(xx$t) +#' ss <- which.min(xx$t) #' if(length(ss) == 0) ss <- 1 #' data.frame(time = xx$t[ss], name = xx$Name[ss] ) #'}) @@ -97,32 +97,32 @@ #' numvar="time", #' hovervar = "name", #' options=list( -#' colors='[0xA5EF63, +#' colors='[0xA5EF63, #' 0xFFB581, 0xFF8747]') #' ) #'# Display chart -#'plot(Geo) +#'plot(Geo) #' } cranometer <- function(ms = getCRANmirrors(all = FALSE, local.only = FALSE),...){ - dest = tempfile() - - nms = dim(ms)[1] - ms$t = rep(NA,nms) - for(i in 1:nms){ - m = ms[i,] - url = paste(m$URL,"/src/base/NEWS",sep="") - t = try(system.time(download.file(url,dest),gcFirst=TRUE)) - if(file.exists(dest)){ - file.remove(dest) - ms$t[i]=t['elapsed'] - }else{ - ms$t[i]=NA - } - } - - ms$t <- as.numeric(ms$t) - - return(ms) + dest = tempfile() + + nms = dim(ms)[1] + ms$t = rep(NA,nms) + for(i in 1:nms){ + m = ms[i,] + url = paste(m$URL,"/src/base/NEWS",sep="") + t = try(system.time(download.file(url,dest),gcFirst=TRUE)) + if(file.exists(dest)){ + file.remove(dest) + ms$t[i]=t['elapsed'] + }else{ + ms$t[i]=NA + } + } + + ms$t <- as.numeric(ms$t) + + return(ms) } ## ---- @@ -136,11 +136,11 @@ cranometer <- function(ms = getCRANmirrors(all = FALSE, local.only = FALSE),...) # # if we have no columns, it probably means that GNsearch couldn't find that city name, in which case, we shorten the name of the city and search again. 
# if(ncol(tmp_geo) == 0) { # tmp_city <- paste(tail(strsplit(x[i,"City"], " ")[[1]], -1), collapse = " ") -# tmp_geo <- with(x[i,], GNsearch(name=tmp_city, country= CountryCode))[1,] +# tmp_geo <- with(x[i,], GNsearch(name=tmp_city, country= CountryCode))[1,] # } # if(ncol(tmp_geo) == 0) tmp_geo <- NA # if we still can't find anything, we should turn this to NA so that we would still add a row (though an empy one) to the data.frame -# -# geonames_df <- rbind(geonames_df,tmp_geo) +# +# geonames_df <- rbind(geonames_df,tmp_geo) # } # LatLong <- with(geonames_df, paste(lat, ":", lng, sep = "")) # gvis_df <- data.frame(LatLong, time = x$t, name = x$Name) @@ -153,26 +153,26 @@ cranometer <- function(ms = getCRANmirrors(all = FALSE, local.only = FALSE),...) #' @title Geolocate IP addresses in R #' @export -#' @description +#' @description #' This R function uses the free freegeoip.net geocoding service to resolve an IP address (or a vector of them) into country, region, city, zip, latitude, longitude, area and metro codes. -#' +#' #' The function require rjson. -#' +#' #' @author Heuristic Andrew (see source for details) -#' +#' #' @param ip a character vector of ips (default is the output from \link{myip}) -#' @param format format of the output. Either "list" (default) or "data.frame" +#' @param format format of the output. Either "list" (default) or "data.frame" #' @param ... not in use -#' +#' #' @return a list or data.frame with details on your geo location based on the freegeoip.net service. -#' +#' #' @source \url{https://heuristically.wordpress.com/2013/05/20/geolocate-ip-addresses-in-r/}. -#' +#' #' @seealso \link{freegeoip}, \link{myip}, \link{cranometer} #' @examples #' \dontrun{ #' freegeoip() -#' +#' #' ## http://www.students.ncl.ac.uk/keith.newman/r/maps-in-r #' # install.packages("maps") #' # install.packages("mapdata") @@ -184,29 +184,29 @@ cranometer <- function(ms = getCRANmirrors(all = FALSE, local.only = FALSE),...) 
#' my_lati <- myip_details$latitude #' my_long <- myip_details$longitude #' points(my_lati,my_long,col=2,pch=18, cex = 1) -#' # lines(c(my_lati,0) ,c(my_long, 50), col = 2)#' +#' # lines(c(my_lati,0) ,c(my_long, 50), col = 2)#' #' } freegeoip <- function(ip = myip(), format = ifelse(length(ip)==1,'list','dataframe'),...) { - stop("The website of this service has moved to https://ipstack.com/. If you want to update this function to work with it, please send a pull request on https://github.com/talgalili/installr") - if (1 == length(ip)) - { - # a single IP address - require2("rjson") - url <- paste(c("http://freegeoip.net/json/", ip), collapse='') - ret <- rjson::fromJSON(readLines(url, warn=FALSE)) - if (format == 'dataframe') - ret <- data.frame(t(unlist(ret))) - return(ret) - } else { - ret <- data.frame() - for (i in 1:length(ip)) - { - r <- freegeoip(ip[i], format="dataframe") - ret <- rbind(ret, r) - } - return(ret) - } + stop("The website of this service has moved to https://ipstack.com/. If you want to update this function to work with it, please send a pull request on https://github.com/talgalili/installr") + if (1 == length(ip)) + { + # a single IP address + require2("rjson") + url <- paste(c("http://freegeoip.net/json/", ip), collapse='') + ret <- rjson::fromJSON(readLines(url, warn=FALSE)) + if (format == 'dataframe') + ret <- data.frame(t(unlist(ret))) + return(ret) + } else { + ret <- data.frame() + for (i in 1:length(ip)) + { + r <- freegeoip(ip[i], format="dataframe") + ret <- rbind(ret, r) + } + return(ret) + } } @@ -215,7 +215,7 @@ freegeoip <- function(ip = myip(), format = ifelse(length(ip)==1,'list','datafra #' @title What is my IP #' @export -#' @description +#' @description #' Retrieving your public IP via \url{https://api.ipify.org}. 
 #' (old solution used: http://api.exip.org/
 #' based on http://stackoverflow.com/questions/3097589/getting-my-public-ip-via-api)
@@ -223,13 +223,45 @@ freegeoip <- function(ip = myip(), format = ifelse(length(ip)==1,'list','datafra
 #' @return your current ip (character string)
 #' @source \url{https://api.ipify.org}
 #' @seealso \link{freegeoip}, \link{myip}, \link{cranometer}
-#' 
+#'
 #' @examples
 #' \dontrun{
 #' myip() # "37.132.25.15"
 #' }
 myip <- function(...) {
-  readLines("https://api.ipify.org", warn = FALSE)
+  readLines("https://api.ipify.org", warn = FALSE)
+}
+
+
+
+#' @title Geolocate IP address using IP2Location.io in R
+#' @export
+#' @description
+#' This R function uses the IP2Location.io geolocation API to look up an IP address for a set of enriched information. This includes country, region, city, latitude and longitude, zipcode and ASN. For more information about the API, kindly visit the documentation at \url{https://www.ip2location.io/ip2location-documentation}.
+#'
+#' The function requires rjson.
+#'
+#' @author IP2Location
+#' @param ip IPv4 or IPv6 address
+#' @param key Your IP2Location.io API Key
+#' @return API result from IP2Location.io API. Visit the Response Format section in \url{https://www.ip2location.io/ip2location-documentation} to learn more on the fields available in the result.
+#' @source \url{https://api.ip2location.io}
+#' @seealso \link{freegeoip}, \link{myip}, \link{cranometer}
+#'
+#' @examples
+#' \dontrun{
+#' iplookup("8.8.8.8", "YOUR_API_KEY")
+#' }
+iplookup <- function(ip, key) {
+  require2("rjson")
+  base_url <- "https://api.ip2location.io/?"
+  params <- list(
+    ip = ip,
+    key = key
+  )
+  url <- paste0(base_url, paste0(names(params), "=", URLencode(unlist(params)), collapse = "&"), sep = "")
+  ret <- rjson::fromJSON(readLines(url, warn=FALSE))
+  return(ret)
 }
diff --git a/man/iplookup.Rd b/man/iplookup.Rd
new file mode 100644
index 0000000..3ab3c24
--- /dev/null
+++ b/man/iplookup.Rd
@@ -0,0 +1,35 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/geo_functions.R
+\name{iplookup}
+\alias{iplookup}
+\title{Geolocate IP address using IP2Location.io in R}
+\source{
+\url{https://api.ip2location.io}
+}
+\usage{
+iplookup(ip, key)
+}
+\arguments{
+\item{ip}{IPv4 or IPv6 address}
+
+\item{key}{Your IP2Location.io API Key}
+}
+\value{
+API result from IP2Location.io API. Visit the Response Format section in \url{https://www.ip2location.io/ip2location-documentation} to learn more on the fields available in the result.
+}
+\description{
+This R function uses the IP2Location.io geolocation API to look up an IP address for a set of enriched information. This includes country, region, city, latitude and longitude, zipcode and ASN. For more information about the API, kindly visit the documentation at \url{https://www.ip2location.io/ip2location-documentation}.
+
+The function requires rjson.
+}
+\examples{
+\dontrun{
+iplookup("8.8.8.8", "YOUR_API_KEY")
+}
+}
+\seealso{
+\link{freegeoip}, \link{myip}, \link{cranometer}
+}
+\author{
+IP2Location
+}