Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 15 additions & 7 deletions app/models/github/issue.rb
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,6 @@ def self.extract_from_url(url)
def update_from_github(options={})
# read synced_at before updating
if_modified_since = options[:force] ? nil : self.synced_at.try(:httpdate)
update_attribute(:synced_at, Time.now)

# add access token
params = {
Expand All @@ -209,7 +208,12 @@ def update_from_github(options={})
)

# trigger an update on the underlying object
self.class.update_attributes_from_github_data(api_response.data, obj: self)
if api_response.success?
update_attribute(:synced_at, Time.now)
self.class.update_attributes_from_github_data(api_response.data, obj: self)
else
Rails.logger.error "Github::Issue(#{id}) Update Failed: #{api_response.status} - #{api_response.data.inspect}"
end
end

# Sync comments with GitHub. Deletes comments that no longer exist.
Expand Down Expand Up @@ -281,23 +285,27 @@ def github_api_path
end

def self.update_attributes_from_github_array(github_data, options={})
# IMPROVEMENT: Guard against non-array data
return [] unless github_data.is_a?(Array)

# bulk load all issues
issue_hash = where("remote_id in (?)", github_data.map { |r| r['id'] })
issue_hash = where("remote_id in (?)", github_data.map { |r| r['id'] if r.is_a?(Hash) }.compact)
.inject({}) do |hash,obj|
hash[obj.remote_id.to_i] = obj
hash
end


# bulk update
repos = Github::Repository.update_attributes_from_github_array(github_data.map { |issue| issue['repo'] }.uniq.compact)
repos = Github::Repository.update_attributes_from_github_array(github_data.map { |issue| issue['repo'] if issue.is_a?(Hash) }.uniq.compact)
repos_hash = repos.inject({}) { |hash,obj| hash[obj.remote_id.to_i] = obj; hash }

# bulk update linked accounts to save lots of lookups
linked_accounts = LinkedAccount::Github.update_attributes_from_github_array(github_data.map { |issue| issue['user'] }.uniq.compact)
linked_accounts_hash = linked_accounts.inject({}) { |hash,obj| hash[obj.remote_id.to_i] = obj; hash }
linked_accounts = LinkedAccount::Github.update_attributes_from_github_array(github_data.map { |issue| issue['user'] if issue.is_a?(Hash) }.uniq.compact)
linked_accounts_hash = linked_accounts.inject({}) { |hash,obj| hash[obj.uid.to_i] = obj; hash }

github_data.map do |issue_data|
next unless issue_data.is_a?(Hash)
update_attributes_from_github_data(
issue_data,
options.merge(
Expand All @@ -306,7 +314,7 @@ def self.update_attributes_from_github_array(github_data, options={})
tracker: (issue_data.has_key?('repo') ? repos_hash[issue_data['repo']['id'].to_i] : options[:tracker])
)
)
end
end.compact
end

def self.update_attributes_from_github_data(github_data, options={})
Expand Down
39 changes: 25 additions & 14 deletions app/models/github/repository.rb
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,6 @@ def remote_sync(options={})
def update_from_github(options={})
# read synced_at before updating
if_modified_since = options[:force] ? nil : self.synced_at.try(:httpdate)
update_attribute(:synced_at, Time.now)

# add access token
params = {
Expand All @@ -139,7 +138,10 @@ def update_from_github(options={})
headers: { 'If-Modified-Since' => if_modified_since }
)
# trigger an update on the underlying object if modified
self.class.update_attributes_from_github_data(api_response.data, obj: self) if api_response.modified?
if api_response.modified?
update_attribute(:synced_at, Time.now)
self.class.update_attributes_from_github_data(api_response.data, obj: self)
end
end

# Update the repo languages from GitHub
Expand Down Expand Up @@ -188,29 +190,38 @@ def find_or_create_issues_from_github(options={})
# TODO: optimization: github api returns open_issues_count which we could compare to issues.where(state=open)

if options[:state]
# IMPROVEMENT: Add a 10-minute buffer to account for clock drift/indexing delays
since_timestamp = options[:since]
since_timestamp -= 10.minutes if since_timestamp

# make a call to github API
api_response = Github::API.call(
url: File.join(self.github_api_path, "/issues"),
params: {
access_token: options[:person].try(:github_account).try(:oauth_token),
since: options[:since].try(:iso8601),
since: since_timestamp.try(:iso8601),
state: options[:state],
per_page: 100,
page: options[:page] || 1
}.reject { |k,v| v.nil? },

# NOTE: this doesn't have any effect... and we don't want to store etags, so no API optimization here.
#headers: {
# 'If-Modified-Since' => options[:since].try(:httpdate)
#}.reject { |k,v| v.nil? }
}.reject { |k,v| v.nil? }
)

# Edge case: tracker is deleted, just raise for now
if !api_response.success? && api_response.status == 404
raise "Github::Repository(#{id}) not found at GitHub(#{url}). Maybe it was deleted?"
# IMPROVEMENT: Robust error logging and status check
unless api_response.success?
Rails.logger.error "Github::Repository(#{id}) Issue Sync Failed: #{api_response.status} - #{api_response.data.inspect}"
if api_response.status == 404
raise "Github::Repository(#{id}) not found at GitHub(#{url}). Maybe it was deleted?"
end
return false # Stop processing this branch
end

# process these 100 issues
Github::Issue.update_attributes_from_github_array(api_response.data)
# IMPROVEMENT: Type validation before processing
if api_response.data.is_a?(Array)
Github::Issue.update_attributes_from_github_array(api_response.data)
else
Rails.logger.error "Github::Repository(#{id}) Expected Array from API, got: #{api_response.data.class}"
return false
end

# if a page param wasn't passed in, load the rest of the pages
if !options[:page] && api_response.link[:last]
Expand Down