Skip to content

Commit

Permalink
Added: unit test
Browse files Browse the repository at this point in the history
  • Loading branch information
ileodo committed Feb 2, 2021
1 parent 1e5e7c2 commit 4f6dcac
Show file tree
Hide file tree
Showing 4 changed files with 175 additions and 88 deletions.
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
*.alfredworkflow
build/
coverage/
out/
tmp/
2 changes: 1 addition & 1 deletion app.rb
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ def client
host: ENV['GITHUB_API_HOST'] || 'api.github.com',
access_token: ENV['GITHUB_ACCESS_TOKEN'],
me_account: ENV['GITHUB_ME_ACCOUNT'] || '@me',
pr_all_involve_me: ENV['PR_ALL_INVOLVE_ME'],
pr_all_involve_me: ENV['PR_ALL_INVOLVE_ME'].nil?,
cache_dir: ENV['alfred_workflow_cache'],
cache_ttl_sec_repo: (ENV['CACHE_TTL_SEC_REPO'] || (24 * 60 * 60)).to_i,
cache_ttl_sec_org: (ENV['CACHE_TTL_SEC_ORG'] || (24 * 60 * 60)).to_i,
Expand Down
129 changes: 57 additions & 72 deletions lib/data_source/client/github.rb
Original file line number Diff line number Diff line change
Expand Up @@ -42,74 +42,72 @@ def user_repos
direction: 'desc',
per_page: 100
}
with_cache(:user_repos) do

page_count = get_total_page_for_request('/user/repos', params)
if page_count != nil
all_user_repos = Array.new
write = true
(1..page_count).step(1) do |n|
params[:page] = n
part_res = request('/user/repos', params)
if not part_res.is_a?(Net::HTTPSuccess)
write = false
break
else
all_user_repos = all_user_repos+ deserialize_body(part_res.body)
end
end
{write: write, cache_content: all_user_repos}
else
res = request('/user/repos', params)
if not res.is_a?(Net::HTTPSuccess)
{ write: false, cache_content: nil }
else
{ write: true, cache_content: deserialize_body(res.body) }
end
end

responses = with_cache(:user_repos) do
all_user_repos = merge_multipage_results('/user/repos', params, 100)
all_user_repos
end

all_user_repos = Array.new
responses.each do |response|
all_user_repos = all_user_repos + response
end
all_user_repos
end

def user_pulls
if @pr_all_involve_me.nil?
modifiers = org_modifiers('is:pr', "user:#@me_account", 'state:open', "involves:#@me_account")
else
if @pr_all_involve_me
modifiers = ['is:pr', 'state:open', "involves:#@me_account"]
else
modifiers = org_modifiers('is:pr', "user:#@me_account", 'state:open', "involves:#@me_account")
end
params = search_params('', modifiers).merge(
per_page: 100
)
response = with_cache(:user_pulls) do
page_count = get_total_page_for_request('/search/issues', params)
if page_count != nil
all_user_pulls = Array.new
write = true
(1..page_count).step(1) do |n|
params[:page] = n
part_res = request('/search/issues', params)
if not part_res.is_a?(Net::HTTPSuccess)
write = false
break
else
all_user_pulls = all_user_pulls+ deserialize_body(part_res.body)
end
end
{write: write, cache_content: all_user_pulls}
responses = with_cache(:user_pulls) do
all_user_pulls = merge_multipage_results('/search/issues', params, 100)
all_user_pulls
end
all_user_pulls = Array.new
responses.each do |response|
all_user_pulls = all_user_pulls + response[:items]
end
all_user_pulls
end

private

def merge_multipage_results(path, params, per_page)
params[:per_page] = per_page
params[:page] = 1
res = request(path, params)
raise res[:message] unless res.is_a?(Net::HTTPSuccess)

result = Array.new
result.append deserialize_body(res.body)


page_count = 1
if res.key?("Link")
res["Link"].split(",").map do |result|
page_num, rel = result.match(/&page=(\d+)>; .*?"(\w+)"/i).captures
page_count = page_num.to_i if rel == "last"
end
end

(2..page_count).step(1) do |n|
params[:page] = n
params[:per_page] = per_page
part_res = request(path, params)
if not part_res.is_a?(Net::HTTPSuccess)
result = nil
break
else
res = request('/search/issues', params)
if not res.is_a?(Net::HTTPSuccess)
{ write: false, cache_content: nil }
else
{ write: true, cache_content: deserialize_body(res.body) }
end
result.append deserialize_body(part_res.body)
end
end
response[:items]
result
end

private

def search_params(query, modifiers)
{ q: "#{query} #{modifiers.join(' ')}" }
Expand All @@ -119,9 +117,9 @@ def org_modifiers(*initial)
orgs = with_cache(:user_orgs) do
res = request('/user/orgs')
if not res.is_a?(Net::HTTPSuccess)
{ write: false, cache_content: nil }
nil
else
{ write: true, cache_content: deserialize_body(res.body) }
deserialize_body(res.body)
end
end
orgs.inject(initial) do |memo, org|
Expand All @@ -144,8 +142,8 @@ def with_cache(filename, &block)

return cache if cache
ret = block.call
write_cache(filename, JSON.dump(ret[:cache_content])) if ret[:write]
ret[:cache_content]
write_cache(filename, ret)
ret
end

def read_cache(filename)
Expand All @@ -162,12 +160,13 @@ def read_cache(filename)
nil
end


# Persist +value+ to the on-disk cache as JSON and return +value+ unchanged.
#
# filename - Symbol cache key; mapped to the on-disk file name via
#            @cache_name_hash (assumed to contain an entry for every key —
#            TODO confirm against callers).
# value    - any JSON-serializable object.
#
# No-op (still returns +value+) when no cache directory is configured.
def write_cache(filename, value)
  return value unless @cache_dir

  FileUtils.mkdir_p(@cache_dir) unless File.directory?(@cache_dir)
  path = File.join(@cache_dir, @cache_name_hash[filename])
  # Write exactly once, always in JSON form. (The prior version wrote the
  # file twice — first the raw value, then its JSON serialization.)
  File.open(path, 'w') { |f| f.write(JSON.dump(value)) }
  value
end

Expand All @@ -178,20 +177,6 @@ def build_request(uri)
request
end

# Return the total number of pages for a paginated GitHub API request,
# derived from the response's `Link` header.
#
# path   - String API path, e.g. '/user/repos'.
# params - Hash of query parameters forwarded to +request+.
#
# Returns an Integer page count when the `Link` header carries a
# rel="last" entry, or nil when the header is absent or has no "last"
# relation (single-page result).
#
# Raises the response message when the request is not an HTTP success.
def get_total_page_for_request(path, params = {})
  res = request(path, params)
  raise res[:message] unless res.is_a?(Net::HTTPSuccess)

  link = res.header['Link']
  return nil if link.nil?

  # Link header format: <url&page=N>; rel="next", <url&page=M>; rel="last"
  link.split(',').each do |entry|
    match = entry.match(/&page=(\d+)>; .*?"(\w+)"/i)
    next unless match

    page_num, rel = match.captures
    return page_num.to_i if rel == 'last'
  end
  # Previously this path leaked the Array produced by `.map` (truthy!) to
  # the caller's `page_count != nil` check; return nil explicitly instead.
  nil
end

def handle_response(response)
res = deserialize_body(response.body)
raise res[:message] unless response.is_a?(Net::HTTPSuccess)
Expand Down
Loading

0 comments on commit 4f6dcac

Please sign in to comment.