Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 0 additions & 17 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,20 +1,3 @@
<<<<<<< HEAD
<<<<<<< HEAD
=======
.DS_Store
>>>>>>> a6f59f203de28cf40b2ba12cb8e33b68b6052274
.Rproj.user
.Rhistory
.RData
=======
.Rhistory
.RData
*.swp
*.csv
*.png
*.rst
*.rdb
*.rdx
*.o
*.dll
>>>>>>> 600f742dfa647ab4bb65c5f3b29bd6222ae6a4ae
1 change: 0 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,3 @@ teamcode
========

The github repository for TEAM. This will be the high level repository to share code amongst the TEAM team. These should be functions or scripts that you may feel others will need to use at some point. Make sure these are adequately commented. Functions for comments should describe the arguments (in and return) as well as the key objectives.
>>>>>>> a6f59f203de28cf40b2ba12cb8e33b68b6052274
85 changes: 85 additions & 0 deletions cameratrapping/TV_editor_events.r
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
# TV_editor_events.r
#
# Groups camera-trap photos into "events" and writes the event ids into the
# tv_photo table so the Taxonomic Editor app can assign them to experts.

# NOTE(review): clearing the workspace inside a shared script is discouraged;
# kept for compatibility with how this script has been run so far.
rm(list = ls())
# library() errors on a missing package; require() only returns FALSE, which
# would let the script continue and fail later with a confusing error.
library(lubridate)
library(ggplot2)
library(RPostgreSQL)  # Need to explore RJDBC to connect with vertica

# SQL to update tv_photo table with events_all column. This column will store
# all the events. event_id is what is used by the Taxonomic Editor app and can
# be assigned to experts.

# Load R object created directly from DB, or replace with a query to the database.
load("ct_data2014-04-02.gzip")
# Small dataset
# NOTE(review): `animals` is only defined by the commented-out line below —
# uncomment it (or pass cam_trap_data) before running, otherwise the
# f.order.data() call fails with "object 'animals' not found".
#animals = cam_trap_data[which(cam_trap_data$Photo.Type == 'Animal' & cam_trap_data$Site.Name == 'Volcán Barva'),]


# Order data by: Sampling Period, Sampling Unit Name and Photo Taken Time.
## Temp code to generate data for one site
order_data <- f.order.data(animals)  # Small dataset for testing
#order_data <- f.order.data(cam_trap_data)
# Separate into events.
# CAUTION: f.separate.events() removes records that are NOT images (e.g. Sampling Date records)
data1 <- f.separate.events(order_data, 5)  # the entire grp column is what makes it unique
# Save data1 so we don't need to rerun the event separation.
#save(data1, file = "data1.gzip", compress = "gzip")
# Create a small subset for eyeballing the grouping.
view_data <- data.frame(data1$Site.Name, data1$Sampling.Period,
                        data1$Sampling.Unit.Name, data1$Photo.Taken.Time,
                        data1$grp)

# Connect to the devel database.
# NOTE(review): empty password — presumably relies on local auth config
# (e.g. .pgpass); confirm before running elsewhere.
drv <- dbDriver("PostgreSQL")
con <- dbConnect(drv, user = "teamuser", password = "",
                 dbname = "team_2.0_devel", port = "5444",
                 host = "data.team.sdsc.edu")

# Will need to explore whether we want to include already reviewed images/events.

# Write each photo's event group into tv_photo.events_all.
# seq_len() is safe when data1 has zero rows (1:nrow(data1) would give c(1, 0)).
for (i in seq_len(nrow(data1))) {
  update_sql <- paste("UPDATE tv_photo SET events_all=", shQuote(data1$grp[i]),
                      " where id=", data1$ID[i])
  res <- dbSendQuery(con, update_sql)
  dbClearResult(res)  # free the result set; the UPDATE returns nothing we need
}

# Start from loading ct_groups or run from the beginning
#load("ct_groups")

# Create a new column combining Family, Genus, Species.
data1$sp_all <- paste(data1$Family, data1$Genus, data1$Species)
# Create a new column to store the random sample of events that will be used in
# the taxonomic editor.
data1[, 'event'] <- NA
sites <- unique(data1$Site.Name)

# For every site x species combination, keep at most 50 events (a random
# sample without replacement when there are more) for the taxonomic app.
# Chunks are collected in a list and bound once at the end instead of growing
# the data frame with rbind() inside the loop (which is O(n^2)).
event_chunks <- list()
for (i in seq_along(sites)) {
  site_index <- which(data1$Site.Name == sites[i])
  site_data <- data1[site_index, ]  # invariant over j; hoisted out of inner loop
  temp_sp_unique <- unique(paste(data1$Family[site_index],
                                 data1$Genus[site_index],
                                 data1$Species[site_index]))
  for (j in seq_along(temp_sp_unique)) {
    # Data for this TEAM site restricted to one species.
    sp_by_site <- site_data[which(temp_sp_unique[j] == site_data$sp_all), ]
    # Unique events for this site/species; sample down to 50 if there are more.
    unique_events <- unique(sp_by_site$grp)
    num_events <- length(unique_events)
    if (num_events > 50) {
      # Random sample without replacement.
      final_event <- unique_events[sample.int(num_events, 50, replace = FALSE)]
    } else {
      final_event <- unique_events
    }
    event_chunks[[length(event_chunks) + 1]] <- data.frame(final_event = final_event)
  }
}
# Dataframe holding the event_id's to be used in the taxonomic app.
final_event_df <- if (length(event_chunks) > 0) {
  do.call(rbind, event_chunks)
} else {
  data.frame(final_event = character(0))
}
# SQL to insert the final events into the tv_photo table. This will add the event
# to the event_id column as selected in the final_event_df dataframe.
# seq_len() is safe when final_event_df has zero rows.
for (i in seq_len(nrow(final_event_df))) {
  update_sql <- paste("UPDATE tv_photo SET event_id=",
                      shQuote(final_event_df$final_event[i]), " where events_all=",
                      shQuote(final_event_df$final_event[i]))
  res <- dbSendQuery(con, update_sql)
  dbClearResult(res)  # free the result set for each UPDATE
}
Sys.time()  # print completion time when run interactively
Binary file added cameratrapping/ct_groups
Binary file not shown.
130 changes: 130 additions & 0 deletions climate/climate_quality_control.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
# climate_quality_control.R
# Prepare the combined climate dataset for the step-change QC tests below.

# NOTE(review): workspace clearing in scripts is discouraged; kept as-is.
rm(list = ls())
load("cl_data_new2014-04-24.gzip")  # defines cl_data_all


# Add columns to store pass/fail flags from the step tests (filled in below).
# A single vectorized assignment replaces the 10 copy-pasted lines.
step_cols <- c("StepRH1", "StepRH2", "StepRH3", "StepSR1", "StepSR2",
               "StepT1", "StepT2", "StepT3", "StepT4", "StepT5")
cl_data_all[step_cols] <- NA

# Coerce sensor readings to numeric so step differences can be computed.
# NOTE(review): if any of these columns are factors, as.numeric() returns the
# level codes, not the parsed values — confirm the columns are character/numeric.
numeric_cols <- c("Sensor1_RelativeHumidity", "Sensor2_RelativeHumidity",
                  "Sensor3_RelativeHumidity", "Sensor1_AvgTemp",
                  "Sensor2_AvgTemp", "Sensor3_AvgTemp",
                  "Sensor1_TotalSolarRadiation", "Sensor2_TotalSolarRadiation")
for (col in numeric_cols) {
  cl_data_all[[col]] <- as.numeric(cl_data_all[[col]])
}

# Step-change quality-control tests.
# For each observation, compare each sensor reading against the reading
# 30 min / 1 h / 2 h / 3 h / 6 h / 12 h later; a Step* flag value of 1 means
# the test FAILED (the change met the threshold), 0 means it passed, and NA
# means the comparison record was missing.
#
# NOTE(review): station_data is a modified COPY of site_data — the Step* flags
# written below are never assigned back into cl_data_all nor saved, so the
# results are discarded when the loops finish. Confirm whether a write-back
# (or a save()) is missing here.

# Get a unique list of sites and loop through them
unique_sites <- unique(cl_data_all$TEAMSiteName)
# Loop bounds are pinned to the first site/station while testing; the intended
# bounds are preserved in the trailing comments.
for (i in 1:1) { #length(unique(cl_data_all$TEAMSiteName)) {
site_data = cl_data_all[which(cl_data_all$TEAMSiteName == unique_sites[i]),]
unique_stations <- unique(paste(site_data$SamplingUnitName))
# get a unique list of stations..handle sites that have more than one station
for (j in 1:1) { #length(unique_stations)) {
# if more than 1 do something
# NOTE(review): station_data is not filtered by unique_stations[j] here, so
# all stations of the site are currently processed together.
station_data = site_data
# Perform tests on a particular climate station data.
for (k in 1:nrow(station_data)) {
# Find records 30 minutes, 1 hour, 2 hour, 3 hour, 6 hour and 12 hours ahead.
new_date = station_data$Observation[k] + 60*30
new_date_hour = station_data$Observation[k] + 60*60
new_date_2hour = station_data$Observation[k] + 2*60*60
new_date_3hour = station_data$Observation[k] + 3*60*60
new_date_6hour = station_data$Observation[k] + 6*60*60
new_date_12hour = station_data$Observation[k] + 12*60*60
# Create thirty min index and check to make sure it exists.
# NOTE(review): when no matching record exists the index is set to the string
# 'a'; indexing an unnamed numeric column by 'a' yields NA, so the ifelse()
# below produces NA and the flag stays NA. This works but is fragile —
# consider skipping the comparison explicitly instead.
thirty_min_index <-which(station_data$Observation == new_date)
if (length(thirty_min_index) == 0) {
thirty_min_index <- 'a'
}
sixty_min_index <- which(station_data$Observation == new_date_hour)
if (length(sixty_min_index) == 0) {
sixty_min_index <- 'a'
}
two_hour_index <- which(station_data$Observation == new_date_2hour)
if (length(two_hour_index) == 0) {
two_hour_index <- 'a'
}
three_hour_index <- which(station_data$Observation == new_date_3hour)
if (length(three_hour_index) == 0) {
three_hour_index <- 'a'
}
six_hour_index <- which(station_data$Observation == new_date_6hour)
if (length(six_hour_index) == 0) {
six_hour_index <- 'a'
}
twelve_hour_index <- which(station_data$Observation == new_date_12hour)
if (length(twelve_hour_index) == 0) {
twelve_hour_index <- 'a'
}


# Sensor 1
# RH - a value of 1 is equal to fail (30-minute rise greater than 45).
# NOTE(review): is.numeric() checks the column's type, not whether the element
# is missing — an NA element of a numeric column still passes this guard.
if(is.numeric(station_data$Sensor1_RelativeHumidity[k]) & is.numeric(station_data$Sensor1_RelativeHumidity[thirty_min_index])) {
station_data$StepRH1[k] <- ifelse (station_data$Sensor1_RelativeHumidity[thirty_min_index] -
station_data$Sensor1_RelativeHumidity[k] > 45,1,0)
}
# Solar radiation: flag 1 when the 1-hour increase is in [0, 555).
# NOTE(review): units unconfirmed ("What are our units?" in the original).
if(is.numeric(station_data$Sensor1_TotalSolarRadiation[k]) & is.numeric(station_data$Sensor1_TotalSolarRadiation[sixty_min_index])) {
station_data$StepSR1[k] <- ifelse (station_data$Sensor1_TotalSolarRadiation[sixty_min_index] -
station_data$Sensor1_TotalSolarRadiation[k] >= 0 & station_data$Sensor1_TotalSolarRadiation[sixty_min_index] -
station_data$Sensor1_TotalSolarRadiation[k] < 555,1,0)
}
# Temperature step tests: flag 1 when the rise over the window meets the threshold.
# 1 hour: rise >= 4
if(is.numeric(station_data$Sensor1_AvgTemp[k]) & is.numeric(station_data$Sensor1_AvgTemp[sixty_min_index])) {
station_data$StepT1[k] <- ifelse (station_data$Sensor1_AvgTemp[sixty_min_index] -
station_data$Sensor1_AvgTemp[k] >= 4,1,0)
}
# 2 hour: rise >= 7
if(is.numeric(station_data$Sensor1_AvgTemp[k]) & is.numeric(station_data$Sensor1_AvgTemp[two_hour_index])) {
station_data$StepT2[k] <- ifelse (station_data$Sensor1_AvgTemp[two_hour_index] -
station_data$Sensor1_AvgTemp[k] >= 7,1,0)
}
# 3 hour: rise >= 9
if(is.numeric(station_data$Sensor1_AvgTemp[k]) & is.numeric(station_data$Sensor1_AvgTemp[three_hour_index])) {
station_data$StepT3[k] <- ifelse (station_data$Sensor1_AvgTemp[three_hour_index] -
station_data$Sensor1_AvgTemp[k] >= 9,1,0)
}
# 6 hour: rise >= 15
if(is.numeric(station_data$Sensor1_AvgTemp[k]) & is.numeric(station_data$Sensor1_AvgTemp[six_hour_index])) {
station_data$StepT4[k] <- ifelse (station_data$Sensor1_AvgTemp[six_hour_index] -
station_data$Sensor1_AvgTemp[k] >= 15,1,0)
}
# 12 hour: rise >= 25
if(is.numeric(station_data$Sensor1_AvgTemp[k]) & is.numeric(station_data$Sensor1_AvgTemp[twelve_hour_index])) {
station_data$StepT5[k] <- ifelse (station_data$Sensor1_AvgTemp[twelve_hour_index] -
station_data$Sensor1_AvgTemp[k] >= 25,1,0)
}
###############
# Sensor 2
# RH (same 30-minute test as sensor 1)
if(is.numeric(station_data$Sensor2_RelativeHumidity[k]) & is.numeric(station_data$Sensor2_RelativeHumidity[thirty_min_index])) {
station_data$StepRH2[k] <- ifelse (station_data$Sensor2_RelativeHumidity[thirty_min_index] -
station_data$Sensor2_RelativeHumidity[k] > 45,1,0)
}
# Sensor 3
# RH (same 30-minute test as sensor 1)
if(is.numeric(station_data$Sensor3_RelativeHumidity[k]) & is.numeric(station_data$Sensor3_RelativeHumidity[thirty_min_index])) {
station_data$StepRH3[k] <- ifelse (station_data$Sensor3_RelativeHumidity[thirty_min_index] -
station_data$Sensor3_RelativeHumidity[k] > 45,1,0)
}
}

}
}
68 changes: 68 additions & 0 deletions climate/new_climate_dataset.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
# new_climate_dataset.R
# Fetch climate protocol 3.0 and 2.0 data from the production database so they
# can be merged into one dataset below.
rm(list = ls())
library(RPostgreSQL) # Need to explore RJDBC to connect with vertica
library(data.table)
# NOTE(review): empty password — presumably relies on local auth config; verify.
drv <- dbDriver("PostgreSQL")
con <- dbConnect(drv,user="teamuser",password="",
dbname="team_2.0_production",port="5444",
host="data.team.sdsc.edu")

# Climate 3.0 data; fetch(n = -1) retrieves all remaining rows.
cl_3_data <- dbSendQuery(con,"SELECT * FROM dqa_climate3")
data_3 <- fetch(cl_3_data, n = -1)
#data_3["protocol"] <- 3 # Mark records as climate protocol 3

#** Get Climate 2.0 data from climate_samples table
cl_2_data <- dbSendQuery(con,"SELECT * FROM dqa_climate2_analysis")
data_2 <- fetch(cl_2_data, n = -1)
# Get data_2 (climate 2.0) to match the structure of data_3 (climate 3.0).

# Combine the separate date and time fields into a single timestamp string.
data_2$collected_at <- paste(data_2$collected_at, data_2$collected_time)
data_2$Observation <- data_2$collected_at
# Rename climate 2.0 columns to their climate 3.0 equivalents.
# setnames() (data.table) works on data.frames and errors if the old name is
# missing, which catches schema drift early.
setnames(data_2, "Id", "ReferenceID")
setnames(data_2, "TotalSolarRadiation", "Sensor1_TotalSolarRadiation")
setnames(data_2, "Precipitation", "Rainfall")
setnames(data_2, "DryTemperature", "Sensor1_AvgTemp")
setnames(data_2, "RelativeHumidity", "Sensor1_RelativeHumidity")
setnames(data_2, "Site Name", "TEAMSiteName")
# Columns that exist only in the climate 3.0 schema: create them all as NA.
# A single vectorized assignment replaces the 18 copy-pasted lines.
new_na_cols <- c("RecordID", "MinimumBatteryVoltage", "Sensor1_TempStdDeviation",
                 "Sensor2_AvgTemp", "Sensor2_TempStdDeviation",
                 "Sensor2_RelativeHumidity", "Sensor3_AvgTemp",
                 "Sensor3_TempStdDeviation", "Sensor3_RelativeHumidity",
                 "Sensor1_AvgSolarRadiation", "Sensor1_SolarRadiationStdDeviation",
                 "Sensor2_AvgSolarRadiation", "Sensor2_SolarRadiationStdDeviation",
                 "Sensor2_TotalSolarRadiation", "SerialNumber", "ProgramName",
                 "OperatingSystem", "Tachometer_RPM")
data_2[new_na_cols] <- NA
# Drop the separate date/time columns.
data_2$ObservationDate <- NULL
data_2$ObservationTime <- NULL
# Reorder data_2 into the exact column layout of data_3.
# BUG FIX: the original built this with data.frame(cbind(col1, col2, ...)),
# and cbind() of mixed-type vectors produces a character matrix — silently
# coercing every numeric column to character. Selecting the columns by name
# preserves each column's type.
climate3_cols <- c('ReferenceID', 'Observation', 'RecordID', 'MinimumBatteryVoltage',
                   'Sensor1_AvgTemp', 'Sensor1_TempStdDeviation',
                   'Sensor1_RelativeHumidity', 'Sensor2_AvgTemp',
                   'Sensor2_TempStdDeviation', 'Sensor2_RelativeHumidity',
                   'Sensor3_AvgTemp', 'Sensor3_TempStdDeviation',
                   'Sensor3_RelativeHumidity', 'Sensor1_AvgSolarRadiation',
                   'Sensor1_SolarRadiationStdDeviation', 'Sensor1_TotalSolarRadiation',
                   'Sensor2_AvgSolarRadiation', 'Sensor2_SolarRadiationStdDeviation',
                   'Sensor2_TotalSolarRadiation', 'Rainfall', 'SerialNumber',
                   'ProgramName', 'OperatingSystem', 'Tachometer_RPM',
                   'ProtocolVersion', 'SamplingUnitName', 'Latitude',
                   'Longitude', 'TEAMSiteName')
# Fail fast if any expected column is missing (cbind would have silently
# dropped a NULL column and shifted everything over).
stopifnot(all(climate3_cols %in% names(data_2)))
data_2_new <- data_2[, climate3_cols]
####
# Combine the climate 2.0 and 3.0 datasets and save with today's date in the name.
cl_data_all <- rbind(data_3, data_2_new)
sysdate <- Sys.Date()
filename <- paste0("cl_data_new", sysdate, ".gzip")
save(cl_data_all, file = filename, compress = "gzip")
13 changes: 13 additions & 0 deletions teamcode-forkedrepo.Rproj
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
Version: 1.0

RestoreWorkspace: Default
SaveWorkspace: Default
AlwaysSaveHistory: Default

EnableCodeIndexing: Yes
UseSpacesForTab: Yes
NumSpacesForTab: 2
Encoding: UTF-8

RnwWeave: Sweave
LaTeX: pdfLaTeX
Loading