GitHub pagination #1113
New file, adding `CodeCorps.GitHub.EagerAPI`:

```elixir
defmodule CodeCorps.GitHub.EagerAPI do
  @moduledoc """
  Eagerly loads a resource from the GitHub API by fetching all of its pages in
  parallel.
  """

  def get_all(url, headers, options) do
    HTTPoison.start
    {:ok, response} = HTTPoison.get(url, headers, options)

    first_page = Poison.decode!(response.body)

    case response.headers |> retrieve_total_pages do
      1 -> first_page
      total ->
        first_page ++ get_remaining_pages(total, url, headers, options) |> List.flatten
    end
  end

  defp get_remaining_pages(total, url, headers, options) do
    2..total
    |> Enum.to_list
    |> Enum.map(&Task.async(fn ->
      params = options[:params] ++ [page: &1]
      HTTPoison.get(url, headers, options ++ [params: params])
    end))
    |> Enum.map(&Task.await(&1, 10_000))
    |> Enum.map(&handle_page_response/1)
  end

  defp handle_page_response({:ok, %{body: body}}), do: Poison.decode!(body)

  def retrieve_total_pages(headers) do
    case headers |> List.keyfind("Link", 0, nil) do
      nil -> 1
      {"Link", value} -> value |> extract_total_pages
    end
  end

  defp extract_total_pages(links_string) do
    # We use a regex to parse the pagination info from the GitHub API response
    # headers.
    #
    # The headers render pages in the following format:
    #
    # ```
    # {"Link", '<https://api.github.com/search/code?q=addClass+user%3Amozilla&page=15>; rel="next",
    #   <https://api.github.com/search/code?q=addClass+user%3Amozilla&page=34>; rel="last",
    #   <https://api.github.com/search/code?q=addClass+user%3Amozilla&page=1>; rel="first",
    #   <https://api.github.com/search/code?q=addClass+user%3Amozilla&page=13>; rel="prev"'}
    # ```
    #
    # If the response has no "Link" header, then we have received all the
    # records from the only possible page.
    #
    # If the response has a "Link" header, its value will contain at least the
    # "last" relation.
    links_string
    |> String.split(", ")
    |> Enum.map(fn link ->
      rel = get_rel(link)
      page = get_page(link)
      {rel, page}
    end)
    |> Enum.into(%{})
    |> Map.get("last")
  end

  defp get_rel(link) do
    # Extracts the relation name from `rel="..."`
    Regex.run(~r{rel="([a-z]+)"}, link) |> List.last()
  end

  defp get_page(link) do
    # Matches the following variations:
    # ```
    # ?page={match}>
    # ?page={match}&...
    # &page={match}>
    # &page={match}&...
    # ```
    Regex.run(~r{[&/?]page=([^>&]+)}, link) |> List.last |> String.to_integer
  end
end
```

> **Contributor (author), on `get_all/3`:** With having a separate gateway, our […] This isn't absolutely needed in this PR, but could be an approach.
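To make the `Link` parsing above concrete, here is a minimal sketch of `retrieve_total_pages/1` in action, reusing the sample URLs from the module documentation; the header list is fabricated for illustration:

```elixir
# Header list shaped like `response.headers` from HTTPoison.
headers = [
  {"Content-Type", "application/json; charset=utf-8"},
  {"Link",
   ~s(<https://api.github.com/search/code?q=addClass+user%3Amozilla&page=2>; rel="next", ) <>
     ~s(<https://api.github.com/search/code?q=addClass+user%3Amozilla&page=34>; rel="last")}
]

# The page number of the "last" relation is the total number of pages.
CodeCorps.GitHub.EagerAPI.retrieve_total_pages(headers)
#=> 34

# Without a "Link" header there is only one page to fetch.
CodeCorps.GitHub.EagerAPI.retrieve_total_pages([{"Content-Type", "application/json"}])
#=> 1
```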
New file, adding `CodeCorps.GitHub.API.Repository`:

```elixir
defmodule CodeCorps.GitHub.API.Repository do
  @moduledoc ~S"""
  Functions for working with issues on GitHub.
  """

  alias CodeCorps.{
    GitHub,
    GitHub.API,
    GithubAppInstallation,
    GithubRepo
  }

  @spec issues(GithubRepo.t) :: {:ok, list(map)} | {:error, GitHub.api_error_struct}
  def issues(%GithubRepo{github_app_installation: %GithubAppInstallation{} = installation} = github_repo) do
    with {:ok, access_token} <- API.Installation.get_access_token(installation),
         issues <- fetch_issues(github_repo, access_token)
    do
      {:ok, issues}
    else
      {:error, error} -> {:error, error}
    end
  end

  defp fetch_issues(%GithubRepo{github_app_installation: %GithubAppInstallation{github_account_login: owner}, name: repo}, access_token) do
    per_page = 100
    path = "repos/#{owner}/#{repo}/issues"
    params = [per_page: per_page, state: "all"]
    opts = [access_token: access_token, params: params]
    GitHub.get_all(path, %{}, opts)
  end
end
```
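A hedged sketch of how `issues/1` might be exercised from IEx. The struct fields shown are only the ones this module pattern-matches on; the login and repo name are made-up values, and a real call still needs an installation that `API.Installation.get_access_token/1` can exchange for a valid token:

```elixir
# Illustrative only: field values are assumptions, not fixtures from the app.
installation = %CodeCorps.GithubAppInstallation{github_account_login: "code-corps"}

github_repo = %CodeCorps.GithubRepo{
  name: "code-corps-api",
  github_app_installation: installation
}

case CodeCorps.GitHub.API.Repository.issues(github_repo) do
  {:ok, issues} -> IO.puts("Fetched #{length(issues)} issues across all pages")
  {:error, error} -> IO.inspect(error, label: "GitHub API error")
end
```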
Changes to `CodeCorps.Validators.TimeValidator`, allowing the new time to equal the previously recorded time:

```diff
@@ -11,7 +11,10 @@ defmodule CodeCorps.Validators.TimeValidator do
   def validate_time_after(%{data: data} = changeset, field) do
     previous_time = Map.get(data, field)
     current_time = Changeset.get_change(changeset, field)
-    case current_time |> Timex.after?(previous_time) do
+    is_after = current_time |> Timex.after?(previous_time)
+    is_equal = current_time |> Timex.equal?(previous_time)
+    after_or_equal = is_after || is_equal
+    case after_or_equal do
       true -> changeset
       false -> Changeset.add_error(changeset, field, "cannot be before the last recorded time")
     end
```

> **Contributor:** Nothing is testing for this change yet.
>
> **Contributor (author):** I'm OK with creating an issue and doing it separately.
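Since the equal-time branch is the new behaviour and is not yet covered, here is a hedged sketch of what such a test could look like; the `:modified_at` field name and the schemaless changeset setup are assumptions, not taken from the existing suite:

```elixir
defmodule CodeCorps.Validators.TimeValidatorSketchTest do
  use ExUnit.Case, async: true

  alias CodeCorps.Validators.TimeValidator
  alias Ecto.Changeset

  # Assumed field name and type, used only for this schemaless sketch.
  @types %{modified_at: :utc_datetime}

  test "keeps the changeset valid when the new time equals the previous time" do
    time = DateTime.utc_now()

    changeset =
      {%{modified_at: time}, @types}
      |> Changeset.change()
      # `force_change/3` is used because an identical value would otherwise
      # not be registered as a change at all.
      |> Changeset.force_change(:modified_at, time)
      |> TimeValidator.validate_time_after(:modified_at)

    assert changeset.valid?
  end
end
```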
Review discussion on testing the pagination code:

> This should likely be modified to call into a more central place so we can mock the `%HTTPoison.Response{}` (headers included). Otherwise it will be hard to test pagination.

> I'm thinking we have an `API.Gateway` which simply has a request method that calls `HTTPoison`. Response parsing can be done in this module. We then mock the gateway in tests.
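A rough sketch of the `API.Gateway` idea floated above; the module name and function shape are illustrative only and are not part of this PR:

```elixir
defmodule CodeCorps.GitHub.API.Gateway do
  @moduledoc """
  Illustrative sketch only: a single choke point that owns the HTTPoison call,
  so tests can swap it out (for example via application config) and hand back
  canned `%HTTPoison.Response{}` structs, headers included.
  """

  def request(method, url, body \\ "", headers \\ [], options \\ []) do
    HTTPoison.request(method, url, body, headers, options)
  end
end
```

Callers such as `EagerAPI.get_all/3` would then go through `Gateway.request/5` instead of hitting `HTTPoison` directly, so pagination tests could stub the gateway with responses that do and do not carry a `Link` header.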