Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions config/config.exs
Original file line number Diff line number Diff line change
@@ -1,9 +1,13 @@
import Config

config :hexdocs,
scheme: "http",
port: "4002",
hexpm_url: "http://localhost:4000",
hexpm_secret: "2cd6d09334d4b00a2be4d532342b799b",
# OAuth client credentials for hexpm integration
oauth_client_id: "hexdocs",
oauth_client_secret: "dev_secret_for_testing",
typesense_url: "http://localhost:8108",
typesense_api_key: "hexdocs",
typesense_collection: "hexdocs",
Expand Down Expand Up @@ -38,6 +42,12 @@ config :hexdocs, :docs_private_bucket, name: "hexdocs-private-staging"

config :hexdocs, :docs_public_bucket, name: "hexdocs-public-staging"

config :ex_aws,
http_client: ExAws.Request.Req,
json_codec: JSON

config :sentry, client: Sentry.FinchClient

config :logger, :console, format: "[$level] $metadata$message\n"

import_config "#{Mix.env()}.exs"
4 changes: 1 addition & 3 deletions config/prod.exs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import Config

config :hexdocs,
scheme: "https",
hexpm_impl: Hexdocs.Hexpm.Impl,
store_impl: Hexdocs.Store.Impl,
cdn_impl: Hexdocs.CDN.Fastly,
Expand All @@ -14,9 +15,6 @@ config :hexdocs, :docs_private_bucket, implementation: Hexdocs.Store.GS

config :hexdocs, :docs_public_bucket, implementation: Hexdocs.Store.GS

config :ex_aws,
json_codec: Jason

config :sentry,
enable_source_code_context: true,
root_source_code_paths: [File.cwd!()],
Expand Down
2 changes: 2 additions & 0 deletions config/runtime.exs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ if config_env() == :prod do
port: System.fetch_env!("HEXDOCS_PORT"),
hexpm_url: System.fetch_env!("HEXDOCS_HEXPM_URL"),
hexpm_secret: System.fetch_env!("HEXDOCS_HEXPM_SECRET"),
oauth_client_id: System.fetch_env!("HEXDOCS_OAUTH_CLIENT_ID"),
oauth_client_secret: System.fetch_env!("HEXDOCS_OAUTH_CLIENT_SECRET"),
typesense_url: System.fetch_env!("HEXDOCS_TYPESENSE_URL"),
typesense_api_key: System.fetch_env!("HEXDOCS_TYPESENSE_API_KEY"),
typesense_collection: System.fetch_env!("HEXDOCS_TYPESENSE_COLLECTION"),
Expand Down
14 changes: 6 additions & 8 deletions lib/hexdocs/cdn/fastly.ex
Original file line number Diff line number Diff line change
Expand Up @@ -29,20 +29,18 @@ defmodule Hexdocs.CDN.Fastly do
url = @fastly_url <> url

headers = [
"fastly-key": auth(),
accept: "application/json",
"content-type": "application/json"
{"fastly-key", auth()},
{"accept", "application/json"},
{"content-type", "application/json"}
]

body = JSON.encode!(body)

Hexdocs.HTTP.retry("fastly", url, fn -> :hackney.post(url, headers, body, []) end)
|> read_body()
Hexdocs.HTTP.retry("fastly", url, fn -> Hexdocs.HTTP.post(url, headers, body) end)
|> decode_body()
end

defp read_body({:ok, status, headers, client}) do
{:ok, body} = :hackney.body(client)

defp decode_body({:ok, status, headers, body}) do
body =
case JSON.decode(body) do
{:ok, map} -> map
Expand Down
14 changes: 12 additions & 2 deletions lib/hexdocs/hexpm/impl.ex
Original file line number Diff line number Diff line change
Expand Up @@ -56,10 +56,20 @@ defmodule Hexdocs.Hexpm.Impl do
Application.get_env(:hexdocs, :hexpm_url) <> path
end

defp headers(key_or_token) do
  # Builds request headers for hexpm, accepting either credential style:
  # OAuth access tokens are JWTs whose base64url-encoded header always
  # begins with "eyJ" (the encoding of `{"`), while legacy API keys are
  # hex strings and can never contain "y" or "J" — so the prefix check
  # is unambiguous. Bearer tokens get the "Bearer " scheme prefix;
  # legacy keys are sent bare, as before.
  authorization =
    case key_or_token do
      "eyJ" <> _rest -> "Bearer #{key_or_token}"
      _legacy_key -> key_or_token
    end

  [
    {"accept", "application/json"},
    {"authorization", authorization}
  ]
end
end
89 changes: 61 additions & 28 deletions lib/hexdocs/http.ex
Original file line number Diff line number Diff line change
Expand Up @@ -5,61 +5,94 @@ defmodule Hexdocs.HTTP do
require Logger

def head(url, headers) do
  # Issues a HEAD request and returns {:ok, status, headers} | {:error, reason}.
  # Req's built-in retry is disabled; retrying is the caller's concern
  # (see retry/3 in this module).
  result = Req.head(url, headers: headers, retry: false, decode_body: false)

  case result do
    {:ok, %{status: status, headers: resp_headers}} ->
      {:ok, status, normalize_headers(resp_headers)}

    {:error, _reason} = error ->
      error
  end
end

def get(url, headers, _opts \\ []) do
  # GET returning {:ok, status, headers, body} | {:error, reason}.
  # `_opts` is retained only for arity compatibility with the previous
  # hackney-based implementation; it is ignored here.
  result = Req.get(url, headers: headers, retry: false, decode_body: false)

  case result do
    {:ok, %{status: status, headers: resp_headers, body: body}} ->
      {:ok, status, normalize_headers(resp_headers), body}

    {:error, _reason} = error ->
      error
  end
end

def get_stream(url, headers) do
  # GET with a lazily-streamed body: {:ok, status, headers, stream} | {:error, reason}.
  # `into: :self` makes Req deliver body chunks as messages to the calling
  # process's mailbox; stream_body/1 wraps that mailbox in a Stream.
  # NOTE(review): the returned stream must be consumed in the same process
  # that called get_stream/2, since the chunk messages arrive here —
  # confirm all callers do so.
  options = [headers: headers, retry: false, decode_body: false, into: :self]

  case Req.get(url, options) do
    {:ok, response} ->
      {:ok, response.status, normalize_headers(response.headers), stream_body(response.body)}

    {:error, _reason} = error ->
      error
  end
end

def put(url, headers, body) do
  # PUT returning {:ok, status, headers, body} | {:error, reason}.
  # Keeps the 10s receive timeout the hackney version used
  # (recv_timeout: 10_000), since uploads can be large.
  options = [
    headers: headers,
    body: body,
    retry: false,
    decode_body: false,
    receive_timeout: 10_000
  ]

  case Req.put(url, options) do
    {:ok, %{status: status, headers: resp_headers, body: resp_body}} ->
      {:ok, status, normalize_headers(resp_headers), resp_body}

    {:error, _reason} = error ->
      error
  end
end

def post(url, headers, body, _opts \\ []) do
  # POST returning {:ok, status, headers, body} | {:error, reason}.
  # `_opts` is retained only for arity compatibility with the previous
  # hackney-based implementation; it is ignored here.
  result = Req.post(url, headers: headers, body: body, retry: false, decode_body: false)

  case result do
    {:ok, %{status: status, headers: resp_headers, body: resp_body}} ->
      {:ok, status, normalize_headers(resp_headers), resp_body}

    {:error, _reason} = error ->
      error
  end
end

def delete(url, headers, _opts \\ []) do
  # DELETE returning {:ok, status, headers, body} | {:error, reason}.
  # `_opts` is retained only for arity compatibility with the previous
  # hackney-based implementation; it is ignored here.
  result = Req.delete(url, headers: headers, retry: false, decode_body: false)

  case result do
    {:ok, %{status: status, headers: resp_headers, body: resp_body}} ->
      {:ok, status, normalize_headers(resp_headers), resp_body}

    {:error, _reason} = error ->
      error
  end
end

defp normalize_headers(headers) do
  # Req returns each header's values as a list (one entry per occurrence);
  # the hackney-era callers of this module expect a single comma-joined
  # string per header name, so flatten each value list back down.
  for {name, values} <- headers, do: {name, Enum.join(values, ", ")}
end

defp stream_body(ref) do
  # Wraps the body-chunk messages that Req (with `into: :self`) delivers to
  # this process's mailbox into a lazy Stream emitting
  # {:ok, chunk} | {:error, reason}.
  # NOTE(review): assumes the raw messages arrive as {ref, {:data, chunk}},
  # {ref, :done} and {ref, {:error, reason}} — confirm against the Req
  # version in use (Req.parse_message/2 is the documented way to decode
  # these, and the exact shapes are Finch-internal).
  start_fun = fn -> :cont end
  after_fun = fn _acc -> :ok end

  next_fun = fn
    :cont ->
      receive do
        {^ref, {:data, data}} ->
          {[{:ok, data}], :cont}

        {^ref, :done} ->
          {:halt, :ok}

        # Surface transport errors immediately instead of leaving the error
        # message unmatched and stalling the consumer until the 30s timeout.
        {^ref, {:error, reason}} ->
          {[{:error, reason}], :stop}
      after
        30_000 -> {[{:error, :timeout}], :stop}
      end

    # A previous emission already reported an error; end the stream.
    :stop ->
      {:halt, :ok}
  end

  Stream.resource(start_fun, next_fun, after_fun)
end

def retry(service, url, fun) do
Expand Down
Loading