Line | Hits | Source |
---|---|---|
defmodule Avy do
  @moduledoc """
  Root namespace for Avy: holds the contexts implementing the
  domain and business logic.

  Contexts own data management as well, whether the data lives in
  the database, behind an external API, or elsewhere.
  """
end
Line | Hits | Source |
---|---|---|
defmodule Avy.Application do
  # OTP application entry point.
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    # Children are started in order; the endpoint comes last so that
    # telemetry, the repo and pubsub are up before requests are served.
    children = [
      AvyWeb.Telemetry,
      Avy.Repo,
      {DNSCluster, query: Application.get_env(:avy, :dns_cluster_query) || :ignore},
      {Phoenix.PubSub, name: Avy.PubSub},
      # Finch HTTP client, used for sending emails
      {Finch, name: Avy.Finch},
      # Add custom workers here, e.g. {Avy.Worker, arg}
      AvyWeb.Endpoint
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    Supervisor.start_link(children, strategy: :one_for_one, name: Avy.Supervisor)
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  @impl true
  def config_change(changed, _new, removed) do
    AvyWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end
Line | Hits | Source |
---|---|---|
defmodule Avy.Data.Content do
  # Ecto schema for a "content": a contiguous data holding for one channel,
  # with its FDSN source identifier, quality code and the raw timespans
  # actually covered (maps with "start"/"end"/"sample_rate" keys — see the
  # example structure documented in Avy.Repo).
  use Ecto.Schema
  import Ecto.Changeset

  # Rows live in the "data" Postgres schema, separate from "metadata".
  @schema_prefix :data
  schema "contents" do
    field :source_identifier, :string
    field :starttime, :utc_datetime
    field :endtime, :utc_datetime
    field :quality, :string
    field :time_ordered, :boolean, default: false
    field :timespans, {:array, :map}
    belongs_to :channel, Avy.Metadata.Channel

    timestamps(type: :utc_datetime)
  end

  @doc false
  # :timespans is cast but not required — presumably a content row may exist
  # before its spans are computed; confirm with callers.
  def changeset(content, attrs) do
    content
    |> cast(attrs, [:source_identifier, :starttime, :endtime, :quality, :time_ordered, :timespans])
    |> validate_required([:source_identifier, :starttime, :endtime, :quality, :time_ordered])
  end
end
Line | Hits | Source |
---|---|---|
defmodule Avy.FdsnRequestParams do
  @moduledoc """
  Defines a structure representing all the parameters in an FDSN availability request.
  """
  alias Avy.FdsnSourceIdentifier

  defstruct nodata: 204,
            quality: [:D, :M, :Q, :R],
            # valid options: :samplerate, :quality, :overlap
            merge: [],
            orderby: :nslc_time_quality_samplerate,
            # limit results to the specified count (0 = no limit)
            limit: 0,
            includerestricted: false,
            # can be geocsv, json or request
            format: :text,
            source_identifiers: [],
            # Specific to query method
            mergegaps: 1.0,
            show: []

  @typedoc """
  Represents the normal parameters as defined by the FDSN
  """
  # BUGFIX: the spec previously used `Integer` and `Float`, which in a
  # typespec denote the literal module atoms, not the numeric types.
  @type t() :: %Avy.FdsnRequestParams{
          nodata: integer() | String.t(),
          quality: list(atom()),
          merge: list(atom()),
          orderby: atom(),
          limit: integer(),
          includerestricted: boolean(),
          format: atom(),
          source_identifiers: list(FdsnSourceIdentifier.t()),
          mergegaps: float(),
          show: list(atom())
        }
end
Line | Hits | Source |
---|---|---|
0 | # defimpl String.Chars, for: Avy.FdsnSourceIdentifier do | |
1 | # def to_string(s) do | |
2 | # "#{s.net}_#{s.sta}_#{s.loc}_#{Enum.join(Avy.FdsnSourceIdentifier.split_channel(s), "_")}" | |
3 | # end | |
4 | # end | |
5 | ||
defmodule Avy.FdsnSourceIdentifier do
  @moduledoc """
  This module defines a source identifier type and all logic for validation and manipulation.
  """
  require Avy.Metadata.{Network, Station, Channel}
  require Logger

  # Defaults select everything: full wildcards, from the Unix epoch up to a
  # far-future date (year 2150).
  defstruct net: "*",
            sta: "*",
            loc: "*",
            cha: "*",
            start: DateTime.from_unix!(1),
            end: DateTime.from_unix!(5_682_956_400)

  @typedoc """
  An FdsnSourceIdentifier describes an NSLC and time boundaries.
  """
  @type t() :: %Avy.FdsnSourceIdentifier{
          net: String.t(),
          sta: String.t(),
          loc: String.t(),
          cha: String.t(),
          start: DateTime.t(),
          end: DateTime.t()
          # Add restricted: true
        }

  @doc """
  Splits securely the channel code in 3 parts, even if the channel code is just one or two characters.

  ## Examples

      iex> Avy.FdsnSourceIdentifier.split_channel(%Avy.FdsnSourceIdentifier{cha: "*"})
      ["*", "*", "*"]
      iex> Avy.FdsnSourceIdentifier.split_channel(%Avy.FdsnSourceIdentifier{cha: "HN*"})
      ["H", "N", "*"]
      iex> Avy.FdsnSourceIdentifier.split_channel(%Avy.FdsnSourceIdentifier{cha: "H*"})
      ["H", "*", "*"]
      iex> Avy.FdsnSourceIdentifier.split_channel(%Avy.FdsnSourceIdentifier{cha: "*H"})
      ["*", "*", "H"]
      iex> Avy.FdsnSourceIdentifier.split_channel(%Avy.FdsnSourceIdentifier{cha: "*H*"})
      ["*", "H", "*"]

  """
  @spec split_channel(t()) :: list
  def split_channel(s) do
    case String.length(s.cha) do
      1 ->
        [s.cha, "*", "*"]

      2 ->
        case String.graphemes(s.cha) do
          ["*", x] -> ["*", "*", x]
          [x, "*"] -> [x, "*", "*"]
          # BUGFIX: a two-character code without wildcard (e.g. "HN") used to
          # raise CaseClauseError. Treat it as band + instrument, any
          # orientation — TODO confirm intended FDSN semantics.
          [x, y] -> [x, y, "*"]
        end

      3 ->
        String.graphemes(s.cha)
    end
  end

  # @doc """
  # Test if NSLC match between 2 streams.
  # The second stream can have wildcards.

  # ## Examples

  # iex> Avy.FdsnSourceIdentifier.match_nslc?(%Avy.FdsnSourceIdentifier{net: "FR"}, %Avy.FdsnSourceIdentifier{net: "FR"})
  # true

  # """
  # @spec match_nslc?(t(), t()) :: boolean()
  # def match_nslc?(s1, s2) do
  #   Logger.debug(to_string(s1))
  #   Regex.match?(fdsn_regex(s2), to_string(s1))
  # end

  # @spec fdsn_regex(t()) :: Regex.t()
  # defp fdsn_regex(%FdsnSourceIdentifier{} = s) do
  #   nslc =
  #     [
  #       fdsn_regex_nslc(s.net),
  #       fdsn_regex_nslc(s.sta),
  #       fdsn_regex_nslc(s.loc),
  #       fdsn_regex_nslc(split_channel(s) |> Enum.join("_"))
  #     ]
  #     |> Enum.join("_")

  #   Regex.compile!("^FDSN:#{nslc}")
  # end

  # # From FDSN wildcard spec to a normal regex
  # @spec fdsn_regex_nslc(String.t()) :: String.t()
  # defp fdsn_regex_nslc(s) when is_binary(s) do
  #   String.replace(s, "*", ".*")
  #   |> String.replace("?", ".?")
  #   |> String.replace("%", ".*")
  # end

  @doc """
  Builds a list of FDSN source identifiers.
  The date is parsed. In case of failures, returns {:error, message}.
  On success, returns a list of FdsnSourceIdentifier.

  ## Examples

      iex> Avy.FdsnSourceIdentifier.build_list("FR,RA","*","*","H?Z")
      {:ok,
       [
         %Avy.FdsnSourceIdentifier{
           net: "FR",
           sta: "*",
           loc: "*",
           cha: "H?Z",
           start: ~U[1000-01-01 00:00:00Z],
           end: ~U[3000-01-01 00:00:00Z]
         },
         %Avy.FdsnSourceIdentifier{
           net: "RA",
           sta: "*",
           loc: "*",
           cha: "H?Z",
           start: ~U[1000-01-01 00:00:00Z],
           end: ~U[3000-01-01 00:00:00Z]
         }
       ]}
      iex> Avy.FdsnSourceIdentifier.build_list()
      {:error, "Requesting all nslc is not allowed. Provide at least one of net, sta, loc, cha"}
      iex> Avy.FdsnSourceIdentifier.build_list("*", "*", "--", "*")
      {:ok,
       [
         %Avy.FdsnSourceIdentifier{
           net: "*",
           sta: "*",
           loc: "",
           cha: "*",
           start: ~U[1000-01-01 00:00:00Z],
           end: ~U[3000-01-01 00:00:00Z]
         }
       ]}
      iex> Avy.FdsnSourceIdentifier.build_list("*", "*", "  ", "*")
      {:ok,
       [
         %Avy.FdsnSourceIdentifier{
           net: "*",
           sta: "*",
           loc: "",
           cha: "*",
           start: ~U[1000-01-01 00:00:00Z],
           end: ~U[3000-01-01 00:00:00Z]
         }
       ]}
      iex> Avy.FdsnSourceIdentifier.build_list("*", "*", "  ", "*", "2024-01-01", "2025-01-01")
      {:ok,
       [
         %Avy.FdsnSourceIdentifier{
           net: "*",
           sta: "*",
           loc: "",
           cha: "*",
           start: ~U[2024-01-01 00:00:00Z],
           end: ~U[2025-01-01 00:00:00Z]
         }
       ]}
  """
  @spec build_list(String.t(), String.t(), String.t(), String.t(), String.t(), String.t()) ::
          {:ok, list(t())} | {:error, String.t()}
  def build_list(
        net \\ "*",
        sta \\ "*",
        loc \\ "*",
        cha \\ "*",
        starttime \\ "1000-01-01",
        endtime \\ "3000-01-01"
      ) do
    Logger.debug("Building a list for #{net}_#{sta}_#{loc}_#{cha} #{starttime} -> #{endtime}")

    if net == "*" and sta == "*" and loc == "*" and cha == "*" do
      {:error, "Requesting all nslc is not allowed. Provide at least one of net, sta, loc, cha"}
    else
      # The `0` in the patterns below requires the parsed offset to be UTC.
      with {:ok, p_starttime, 0} <- parse_date(starttime),
           {:ok, p_endtime, 0} <- parse_date(endtime),
           fixed_loc <- fix_location(loc) do
        {
          :ok,
          expand_multi_values(%Avy.FdsnSourceIdentifier{
            net: net,
            sta: sta,
            loc: fixed_loc,
            cha: cha,
            start: p_starttime,
            end: p_endtime
          })
        }
      else
        {:error, _} ->
          {:error, "Error parsing dates in #{starttime} or #{endtime}"}

        # BUGFIX: a successfully parsed date with a non-zero UTC offset used to
        # fall through with no matching else clause, raising WithClauseError.
        {:ok, _datetime, _offset} ->
          {:error, "Error parsing dates in #{starttime} or #{endtime}"}
      end
    end
  end

  # Location specified in the request can be double space or double dash, this has to be translated to empty location
  defp fix_location(loc) do
    case loc do
      "  " -> ""
      "--" -> ""
      l -> l
    end
  end

  @doc """
  If the stream has multiple values on :net, :sta, :loc or :cha, then the function splits the stream and returns a list of stream.
  If there is nothing to expand, it returns a list with only the stream given in parameter.

  ## Parameters
    - stream: a %Avy.FdsnSourceIdentifier{} structure

  ## Examples

      iex> Avy.FdsnSourceIdentifier.expand_multi_values(%{net: "FR,RA", sta: "CIEL,ABCD", loc: "00,01", cha: "HNZ,HNE"})
      [
        %{cha: "HNZ", loc: "00", net: "FR", sta: "CIEL"},
        %{cha: "HNZ", loc: "00", net: "RA", sta: "CIEL"},
        %{cha: "HNZ", loc: "00", net: "FR", sta: "ABCD"},
        %{cha: "HNZ", loc: "00", net: "RA", sta: "ABCD"},
        %{cha: "HNZ", loc: "01", net: "FR", sta: "CIEL"},
        %{cha: "HNZ", loc: "01", net: "RA", sta: "CIEL"},
        %{cha: "HNZ", loc: "01", net: "FR", sta: "ABCD"},
        %{cha: "HNZ", loc: "01", net: "RA", sta: "ABCD"},
        %{cha: "HNE", loc: "00", net: "FR", sta: "CIEL"},
        %{cha: "HNE", loc: "00", net: "RA", sta: "CIEL"},
        %{cha: "HNE", loc: "00", net: "FR", sta: "ABCD"},
        %{cha: "HNE", loc: "00", net: "RA", sta: "ABCD"},
        %{cha: "HNE", loc: "01", net: "FR", sta: "CIEL"},
        %{cha: "HNE", loc: "01", net: "RA", sta: "CIEL"},
        %{cha: "HNE", loc: "01", net: "FR", sta: "ABCD"},
        %{cha: "HNE", loc: "01", net: "RA", sta: "ABCD"}
      ]
  """
  @spec expand_multi_values(t()) :: list(t())
  def expand_multi_values(stream) do
    # Expand each comma-separated key in turn; flat_map yields the product.
    [stream]
    |> Enum.flat_map(&expand_multi_values(&1, :cha))
    |> Enum.flat_map(&expand_multi_values(&1, :loc))
    |> Enum.flat_map(&expand_multi_values(&1, :sta))
    |> Enum.flat_map(&expand_multi_values(&1, :net))
  end

  defp expand_multi_values(stream, key) do
    Logger.debug("Stream to expand: #{inspect(stream)} on #{key}")

    case Map.fetch(stream, key) do
      :error ->
        :error

      {:ok, val} ->
        val
        |> String.split(",")
        |> Enum.map(fn v -> Map.replace(stream, key, v) end)
    end
  end

  @doc """
  Parse a date from all accepted formats in the FDSN norm.
  - just a date
  - a date with time separated by T
  All dates are assumed to be UTC
  Return a tupple {:ok, DateTime} or {:error, "error message"}

  ## Examples

      iex> Avy.FdsnSourceIdentifier.parse_date("2023-01-01")
      {:ok, ~U[2023-01-01 00:00:00Z], 0}

      iex> Avy.FdsnSourceIdentifier.parse_date("2023-01-01T23:59:59.999")
      {:ok, ~U[2023-01-01 23:59:59.999Z], 0}

      iex> Avy.FdsnSourceIdentifier.parse_date(~U[2023-01-01 23:59:59.999Z])
      {:ok, ~U[2023-01-01 23:59:59.999Z], 0}

      iex> Avy.FdsnSourceIdentifier.parse_date("2023-02-31T23:59:59.999")
      {:error, :invalid_date}

  """
  @spec parse_date(DateTime.t() | String.t()) :: {:ok, DateTime.t(), integer} | {:error, atom()}
  def parse_date(%DateTime{} = d) do
    {:ok, d, 0}
  end

  def parse_date(date_string) do
    re_date = ~r/^[0-9]{4}-[0-9]{2}-[0-9]{2}$/
    re_datetime = ~r/^[0-9]{4}-[0-9]{2}-[0-9]{2}T/

    cond do
      # Bare date: complete it to midnight UTC before parsing.
      Regex.match?(re_date, date_string) ->
        DateTime.from_iso8601(date_string <> "T00:00:00Z")

      # Date + time without zone: assume UTC.
      Regex.match?(re_datetime, date_string) ->
        DateTime.from_iso8601(date_string <> "Z")

      "currentutcday" == date_string ->
        Logger.debug("Set time at midnight")
        {:ok, at_midnight} = Date.utc_today() |> DateTime.new(~T[00:00:00])
        {:ok, at_midnight, 0}

      # Anything else: let the ISO-8601 parser decide.
      true ->
        DateTime.from_iso8601(date_string)
    end
  end
end
Line | Hits | Source |
---|---|---|
defmodule Avy.Mailer do
  # Swoosh mailer; delivery configuration is read from the :avy OTP app env.
  use Swoosh.Mailer, otp_app: :avy
end
Line | Hits | Source |
---|---|---|
defmodule Avy.Metadata.Channel do
  use Ecto.Schema
  import Ecto.Changeset

  # Seismic channel, stored in the "metadata" Postgres schema.
  # The FDSN channel code is stored split into its three parts
  # (band / instrument / orientation) to allow per-part wildcard queries
  # (see Avy.Repo.get_contents/2).
  @schema_prefix :metadata
  schema "channels" do
    field :band_code, :string
    field :instrument_code, :string
    field :orientation_code, :string
    field :location, :string
    field :starttime, :utc_datetime
    field :endtime, :utc_datetime
    field :restricted, :boolean, default: false
    field :samplerate, :decimal
    belongs_to :station, Avy.Metadata.Station
    has_many :contents, Avy.Data.Content

    timestamps(type: :utc_datetime)
  end

  @doc false
  # BUGFIX: this changeset used to cast/require :code, which is not a field
  # of this schema (Ecto.Changeset.cast/3 raises on unknown fields). Cast the
  # real columns instead; :location stays optional (it may be empty).
  def changeset(channel, attrs) do
    channel
    |> cast(attrs, [
      :band_code,
      :instrument_code,
      :orientation_code,
      :location,
      :starttime,
      :endtime,
      :restricted,
      :samplerate
    ])
    |> validate_required([
      :band_code,
      :instrument_code,
      :orientation_code,
      :starttime,
      :endtime,
      :restricted,
      :samplerate
    ])
  end

  @doc """
  Returns the source identifier string (net_sta_loc_band_instrument_orientation)
  for the given channel, preloading its station and network.
  """
  def source_identifier(c) do
    c = Avy.Repo.preload(c, station: [:network])

    "#{c.station.network.code}_#{c.station.code}_#{c.location}_#{c.band_code}_#{c.instrument_code}_#{c.orientation_code}"
  end
end
Line | Hits | Source |
---|---|---|
defmodule Avy.Metadata.Network do
  use Ecto.Schema
  import Ecto.Changeset

  # Seismic network, stored in the "metadata" Postgres schema.
  @schema_prefix :metadata
  schema "networks" do
    field :code, :string
    field :starttime, :utc_datetime
    field :endtime, :utc_datetime
    has_many :stations, Avy.Metadata.Station

    timestamps(type: :utc_datetime)
  end

  # Every castable field is also mandatory.
  @required_fields [:code, :starttime, :endtime]

  @doc false
  def changeset(network, attrs) do
    network
    |> cast(attrs, @required_fields)
    |> validate_required(@required_fields)
  end
end
Line | Hits | Source |
---|---|---|
defmodule Avy.Metadata.Station do
  use Ecto.Schema
  import Ecto.Changeset

  # Seismic station, stored in the "metadata" Postgres schema.
  @schema_prefix :metadata
  schema "stations" do
    field :code, :string
    field :starttime, :utc_datetime
    field :endtime, :utc_datetime
    field :restricted, :boolean, default: false
    belongs_to :network, Avy.Metadata.Network
    has_many :channels, Avy.Metadata.Channel

    timestamps(type: :utc_datetime)
  end

  # Every castable field is also mandatory.
  @required_fields [:code, :starttime, :endtime, :restricted]

  @doc false
  def changeset(station, attrs) do
    station
    |> cast(attrs, @required_fields)
    |> validate_required(@required_fields)
  end
end
Line | Hits | Source |
---|---|---|
defmodule Avy.Repo do
  use Ecto.Repo,
    otp_app: :avy,
    adapter: Ecto.Adapters.Postgres

  import Ecto.Query

  require Logger

  alias Avy.FdsnSourceIdentifier
  alias Avy.Metadata.{Channel, Station, Network}
  alias Avy.Data.Content

  ### The returned structure:
  ### [
  ###   %Avy.Metadata.Network{
  ###     __meta__: #Ecto.Schema.Metadata<:loaded, :metadata, "networks">,
  ###     id: 1,
  ###     code: "FR",
  ###     starttime: nil,
  ###     endtime: nil,
  ###     stations: [
  ###       %Avy.Metadata.Station{
  ###         __meta__: #Ecto.Schema.Metadata<:loaded, :metadata, "stations">,
  ###         id: 1,
  ###         code: "CIEL",
  ###         starttime: nil,
  ###         endtime: nil,
  ###         restricted: false,
  ###         network_id: 1,
  ###         network: #Ecto.Association.NotLoaded<association :network is not loaded>,
  ###         channels: [
  ###           %Avy.Metadata.Channel{
  ###             __meta__: #Ecto.Schema.Metadata<:loaded, :metadata, "channels">,
  ###             id: 1,
  ###             code: "HHN",
  ###             starttime: ~U[2025-01-01 00:00:00Z],
  ###             endtime: ~U[2025-01-02 00:00:01Z],
  ###             restricted: false,
  ###             samplerate: nil,
  ###             station_id: 1,
  ###             station: #Ecto.Association.NotLoaded<association :station is not loaded>,
  ###             contents: [
  ###               %Avy.Data.Content{
  ###                 __meta__: #Ecto.Schema.Metadata<:loaded, :data, "contents">,
  ###                 id: 1,
  ###                 source_identifier: "FDSN:FR_CIEL_00_H_H_N",
  ###                 starttime: ~U[2025-01-01 00:00:00Z],
  ###                 endtime: ~U[2025-01-02 00:00:01Z],
  ###                 quality: "4",
  ###                 time_ordered: true,
  ###                 timespans: [
  ###                   %{
  ###                     "end" => 1735776000910000000,
  ###                     "sample_rate" => 200,
  ###                     "start" => 1735689600385000000
  ###                   }
  ###                 ],
  ###                 channel_id: 1,
  ###                 channel: #Ecto.Association.NotLoaded<association :channel is not loaded>,
  ###               }
  ###             ],
  ###           }
  ###         ],
  ###       }
  ###     ],
  ###   }
  ### ]

  # Returns the networks (with stations/channels/contents preloaded) whose
  # channels match the given source identifier and overlap its time window.
  @spec get_contents(%FdsnSourceIdentifier{}, boolean) :: list(map)
  def get_contents(sid, include_restricted \\ true) do
    Logger.debug("Getting all channels for #{inspect(sid, pretty: true)}")

    # Channel codes are stored split into band/instrument/orientation columns.
    [band_code, instrument_code, orientation_code] = FdsnSourceIdentifier.split_channel(sid)

    # Base join: networks -> stations -> channels, with named bindings that
    # the incremental `from` refinements below reuse.
    query =
      from n in Network,
        as: :net,
        join: s in Station,
        as: :sta,
        on: s.network_id == n.id,
        join: c in Channel,
        as: :cha,
        on: c.station_id == s.id

    # Optionally exclude restricted channels (station-level restriction is
    # not checked here — presumably handled elsewhere; confirm).
    query =
      if include_restricted do
        query
      else
        from([net: n, sta: s, cha: c] in query,
          where: c.restricted != true
        )
      end

    # NSLC filters: FDSN wildcards translated to SQL LIKE patterns.
    query =
      from([net: n, sta: s, cha: c] in query,
        where: like(n.code, ^fdsn_wildcard_to_sql(sid.net))
      )

    query =
      from([net: n, sta: s, cha: c] in query,
        where: like(s.code, ^fdsn_wildcard_to_sql(sid.sta))
      )

    query =
      from([net: n, sta: s, cha: c] in query,
        where: like(c.location, ^fdsn_wildcard_to_sql(sid.loc)),
        where: like(c.band_code, ^fdsn_wildcard_to_sql(band_code)),
        where: like(c.instrument_code, ^fdsn_wildcard_to_sql(instrument_code)),
        where: like(c.orientation_code, ^fdsn_wildcard_to_sql(orientation_code))
      )

    # Time-window overlap: channel must start before the requested end ...
    query = from [net: n, sta: s, cha: c] in query, where: c.starttime <= ^sid.end

    # ... and end after the requested start (nil endtime = still open).
    query =
      from [net: n, sta: s, cha: c] in query,
        where: c.endtime > ^sid.start or is_nil(c.endtime)

    # Join to contents so only channels that actually hold data remain,
    # ordered by content start time.
    query =
      from [net: n, sta: s, cha: c] in query,
        join: content in Content,
        as: :content,
        on: content.channel_id == c.id,
        order_by: content.starttime,
        preload: [stations: {s, :channels}]

    selected_channels = Avy.Repo.all(query)

    # Now we can load the contents and benefit from parallel queries
    Avy.Repo.preload(selected_channels, stations: [channels: :contents])
  end

  #
  # From FDSN wildcard spec to SQL wildcards
  @spec fdsn_wildcard_to_sql(String.t()) :: String.t()
  defp fdsn_wildcard_to_sql(s) do
    String.replace(s, "*", "%")
    |> String.replace("?", "_")
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb do
  @moduledoc """
  The entrypoint for defining your web interface, such
  as controllers, components, channels, and so on.

  This can be used in your application as:

      use AvyWeb, :controller
      use AvyWeb, :html

  The definitions below will be executed for every controller,
  component, etc, so keep them short and clean, focused
  on imports, uses and aliases.

  Do NOT define functions inside the quoted expressions
  below. Instead, define additional modules and import
  those modules here.
  """

  # Paths served as static assets; includes the FDSN WADL description
  # and a static documentation page.
  def static_paths,
    do: ~w(assets fonts images favicon.ico robots.txt application.wadl documentation.html)

  def router do
    quote do
      use Phoenix.Router, helpers: false

      # Import common connection and controller functions to use in pipelines
      import Plug.Conn
      import Phoenix.Controller
    end
  end

  def channel do
    quote do
      use Phoenix.Channel
    end
  end

  def controller do
    quote do
      # Custom formats (:geocsv, :request) are rendered by the
      # AvyWeb.AvyGEOCSV / AvyWeb.AvyREQUEST view modules.
      use Phoenix.Controller,
        formats: [:text, :json, :geocsv, :request, :html],
        layouts: [html: AvyWeb.Layouts]

      import Plug.Conn

      unquote(verified_routes())
    end
  end

  def verified_routes do
    quote do
      use Phoenix.VerifiedRoutes,
        endpoint: AvyWeb.Endpoint,
        router: AvyWeb.Router,
        statics: AvyWeb.static_paths()
    end
  end

  @doc """
  When used, dispatch to the appropriate controller/live_view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.AvyGEOCSV do
  # GeoCSV 2.0 rendering for the availability endpoints. Headers are built
  # by *removing* column tokens from the full header strings, mirroring how
  # format_datasource/3 drops the corresponding values.
  require Plug.Conn
  require Logger

  @extent_header "#dataset: GeoCSV 2.0
#delimiter: |
#field_unit: unitless|unitless|unitless|unitless|unitless|hertz|ISO_8601|ISO_8601|ISO_8601|unitless|unitless
#field_type: string|string|string|string|string|float|datetime|datetime|datetime|integer|string
Network|Station|Location|Channel|Quality|SampleRate|Earliest|Latest|Updated|TimeSpans|Restriction"

  def extent(assigns) do
    [
      format_header(@extent_header, assigns)
      | Enum.map(assigns.availabilities, fn d ->
          format_datasource(d, assigns.fdsn_parameters, :extent)
        end)
    ]
    |> Enum.join("\n")
  end

  @query_header "#dataset: GeoCSV 2.0
#delimiter: |
#field_unit: unitless|unitless|unitless|unitless|unitless|hertz|ISO_8601|ISO_8601|ISO_8601
#field_type: string|string|string|string|string|float|datetime|datetime|datetime
Network|Station|Location|Channel|Quality|SampleRate|Earliest|Latest|Updated"
  def query(assigns) do
    [
      format_header(@query_header, assigns)
      | Enum.map(assigns.availabilities, fn d ->
          format_datasource(d, assigns.fdsn_parameters, :query)
        end)
    ]
    |> Enum.join("\n")
  end

  # Strips from the header the columns that the merge/show options remove.
  defp format_header(headers, %{fdsn_parameters: fdsn_params}) do
    headers =
      if :samplerate in fdsn_params.merge do
        String.replace(headers, "hertz|", "")
        |> String.replace("float|", "")
        |> String.replace("SampleRate|", "")
      else
        headers
      end

    headers =
      if :quality in fdsn_params.merge do
        # global: false — remove a single "unitless|"/"string|" token; the
        # tokens are identical so removing the first keeps the counts right.
        String.replace(headers, "unitless|", "", global: false)
        |> String.replace("string|", "", global: false)
        |> String.replace("Quality|", "")
      else
        headers
      end

    if :latestupdate in fdsn_params.show do
      headers
    else
      String.replace(headers, "|ISO_8601", "")
      |> String.replace("|datetime", "")
      |> String.replace("|Updated", "")
    end
  end

  # Network|Station|Location|Channel|Quality|SampleRate|Earliest|Latest|Updated|TimeSpans|Restriction
  defp format_datasource(d, fdsn_params, method) do
    # :extent rows carry two extra trailing columns.
    attr_list =
      if method == :extent do
        [d.timespancount, d.restriction]
      else
        []
      end

    attr_list =
      if :latestupdate in fdsn_params.show do
        [
          DateTime.to_iso8601(d.earliest),
          DateTime.to_iso8601(d.latest),
          DateTime.to_iso8601(d.updated) | attr_list
        ]
      else
        [
          DateTime.to_iso8601(d.earliest),
          DateTime.to_iso8601(d.latest)
          | attr_list
        ]
      end

    attr_list =
      if :samplerate in fdsn_params.merge do
        attr_list
      else
        [d.samplerate | attr_list]
      end

    attr_list =
      if :quality in fdsn_params.merge do
        attr_list
      else
        [d.quality | attr_list]
      end

    [d.network, d.station, d.location, d.channel | attr_list]
    |> Enum.join("|")
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.AvyJSON do
  # JSON rendering for the availability endpoints.
  require Plug.Conn
  require Logger

  def version(_) do
    %{version: "avy v2025.024, commit #{System.get_env("GIT_COMMIT_SHA", "unspecified")}"}
  end

  # Both actions emit the same envelope around the computed datasources.
  def extent(assigns), do: payload(assigns)

  def query(assigns), do: payload(assigns)

  defp payload(assigns) do
    %{
      created: DateTime.utc_now(),
      version: 1.0,
      datasources: assigns.availabilities
    }
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.AvyREQUEST do
  require Plug.Conn
  require Logger

  @doc """
  Request output format
  """
  def extent(assigns), do: request(assigns)
  def query(assigns), do: request(assigns)

  # One line per datasource.
  defp request(assigns) do
    Enum.map_join(assigns.availabilities, "\n", &format_datasource/1)
  end

  # "NET STA LOC CHA EARLIEST LATEST"
  defp format_datasource(d) do
    Enum.join(
      [
        d.network,
        d.station,
        d.location,
        d.channel,
        DateTime.to_iso8601(d.earliest),
        DateTime.to_iso8601(d.latest)
      ],
      " "
    )
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.AvyTEXT do
  # Plain-text rendering for the availability endpoints. Columns removed by
  # the merge/show options are stripped from the header in format_header/2
  # and from the rows in format_datasource/3.
  require Plug.Conn
  require Logger

  # CLEANUP: @query_header used to be re-defined (identically) a second time
  # just before query/1; the redundant definition was removed.
  @extent_header "#Network Station Location Channel Quality SampleRate Earliest Latest Updated TimeSpans Restriction"
  @query_header "#Network Station Location Channel Quality SampleRate Earliest Latest Updated"

  @doc """
  Text output
  """
  def version(_) do
    "avy v2025.024, commit #{System.get_env("GIT_COMMIT_SHA", "unspecified")}"
  end

  def extent(assigns) do
    headers = format_header(@extent_header, assigns)

    [
      headers
      | Enum.map(assigns.availabilities, fn d ->
          format_datasource(d, assigns.fdsn_parameters, :extent)
        end)
    ]
    |> Enum.join("\n")
  end

  def query(assigns) do
    headers = format_header(@query_header, assigns)

    [
      headers
      | Enum.map(assigns.availabilities, fn d ->
          format_datasource(d, assigns.fdsn_parameters, :query)
        end)
    ]
    |> Enum.join("\n")
  end

  # Strips from the header the columns that the merge/show options remove.
  defp format_header(headers, %{fdsn_parameters: fdsn_params} = _) do
    headers =
      if :samplerate in fdsn_params.merge do
        String.replace(headers, "SampleRate ", "")
      else
        headers
      end

    headers =
      if :quality in fdsn_params.merge do
        String.replace(headers, "Quality ", "")
      else
        headers
      end

    if :latestupdate in fdsn_params.show do
      headers
    else
      String.replace(headers, " Updated", "")
    end
  end

  defp format_datasource(d, fdsn_params, method) do
    # :extent rows carry two extra trailing columns.
    attr_list =
      if method == :extent do
        [d.timespancount, d.restriction]
      else
        []
      end

    attr_list =
      if :latestupdate in fdsn_params.show do
        [
          DateTime.to_iso8601(d.earliest),
          DateTime.to_iso8601(d.latest),
          DateTime.to_iso8601(d.updated) | attr_list
        ]
      else
        [
          DateTime.to_iso8601(d.earliest),
          DateTime.to_iso8601(d.latest)
          | attr_list
        ]
      end

    attr_list =
      if :samplerate in fdsn_params.merge do
        attr_list
      else
        [d.samplerate | attr_list]
      end

    attr_list =
      if :quality in fdsn_params.merge do
        attr_list
      else
        [d.quality | attr_list]
      end

    [d.network, d.station, d.location, d.channel | attr_list]
    |> Enum.join(" ")
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.AvyController do
  @moduledoc """
  Manages a user request.

  Handles the FDSN availability actions (/extent and /query): contents are
  fetched concurrently for every requested source identifier, converted into
  datasource maps, optionally merged, and handed to the views.
  """
  alias Avy.Repo
  require Logger

  use AvyWeb, :controller

  # The FDSN specification only allows mergegaps and show on the /query method.
  plug :forbidden_params_extent, ["mergegaps"] when action in [:extent]
  plug :forbidden_params_extent, ["show"] when action in [:extent]
  plug :format

  @doc "Renders the service version."
  def version(conn, _params) do
    render(conn, :version)
  end

  @doc """
  /extent action: one summary entry per (network, station, location, channel,
  quality) group. Replies with the request's `nodata` status code and an
  empty body when no datasource matches.
  """
  def extent(conn, _params) do
    datasources =
      Task.async_stream(conn.assigns.fdsn_parameters.source_identifiers, Repo, :get_contents, [
        conn.assigns.fdsn_parameters.includerestricted
      ])
      |> Enum.flat_map(fn {:ok, av} ->
        contents_to_datasources(av, :extent)
      end)

    Logger.debug("Computed datasources: #{inspect(datasources, pretty: true)}")

    if datasources == [] do
      conn
      |> send_resp(conn.assigns.fdsn_parameters.nodata, "")
      |> halt
    else
      conn
      |> assign(
        :availabilities,
        merge_datasources(datasources, conn.assigns.fdsn_parameters.merge)
      )
      |> render(:extent)
    end
  end

  @doc """
  /query action: like /extent but reports individual timespans, merging
  contiguous ones whose gap is below `mergegaps`.
  """
  def query(conn, _params) do
    datasources =
      Task.async_stream(conn.assigns.fdsn_parameters.source_identifiers, Repo, :get_contents, [
        conn.assigns.fdsn_parameters.includerestricted
      ])
      |> Enum.flat_map(fn {:ok, av} ->
        # mergegaps is expressed in seconds; timespans are unix nanoseconds
        # (see DateTime.from_unix!/2 below), hence the 10**9 factor.
        contents_to_datasources(av, conn.assigns.fdsn_parameters.mergegaps * 10 ** 9, :query)
      end)

    Logger.debug("Computed datasources: #{inspect(datasources, pretty: true)}")

    if datasources == [] do
      conn
      |> send_resp(conn.assigns.fdsn_parameters.nodata, "")
      |> halt
    else
      conn
      |> assign(
        :availabilities,
        merge_datasources(datasources, conn.assigns.fdsn_parameters.merge)
      )
      |> render(:query)
    end
  end

  #### Private functions for query requests
  #
  # Convert the structure returned by the Repo in a usable list of datasources, fit for the views
  #
  defp contents_to_datasources(lst, mergegap, :query) do
    Enum.map(lst, fn n ->
      Enum.map(n.stations, fn s ->
        Enum.map(s.channels, fn ch ->
          Enum.group_by(ch.contents, & &1.quality)
          |> Enum.map(fn {q, contents} ->
            # Each content has many timespans
            # Create one map per timespan.
            # Make a flat list of all timespans
            Enum.flat_map(contents, & &1.timespans)
            # Try to merge contiguous timespans
            |> merge_contiguous_timespans(mergegap)
            # Now create the structure as a list of availabilities
            |> Enum.map(fn x ->
              %{
                network: n.code,
                station: s.code,
                location: ch.location,
                channel: ch.band_code <> ch.instrument_code <> ch.orientation_code,
                quality: q,
                samplerate: ch.samplerate,
                earliest: DateTime.from_unix!(x["start"], :nanosecond),
                latest: DateTime.from_unix!(x["end"], :nanosecond),
                # NOTE(review): Date as comparator on DateTime values compares
                # calendar dates only (time of day ignored) -- confirm intended.
                updated:
                  Enum.max_by(ch.contents, & &1.updated_at, Date) |> Map.fetch!(:updated_at),
                restriction: ch.restricted
              }
            end)
          end)
        end)
      end)
    end)
    |> List.flatten()
  end

  # Merge adjacent timespans whose gap is below mergegap (nanoseconds).
  # NOTE(review): assumes timespans arrive ordered by "start" -- confirm
  # the Repo guarantees this ordering.
  defp merge_contiguous_timespans(timespans, mergegap) do
    Enum.reduce(
      timespans,
      Enum.take(timespans, 1),
      fn timespan, acc ->
        [previous | tail] = acc

        if timespan["start"] - previous["end"] < mergegap do
          [%{"start" => previous["start"], "end" => timespan["end"]} | tail]
        else
          [timespan | acc]
        end
      end
    )
    |> Enum.reverse()
  end

  #### Private functions for extent requests

  # Plug function: In /extent action, mergegaps and show are not allowed
  defp forbidden_params_extent(conn, [opt]) do
    Logger.debug(
      "Check for forbidden params for query with #{inspect(conn.params, pretty: true)}"
    )

    case Map.fetch(conn.params, opt) do
      {:ok, _} ->
        send_resp(conn, 400, "Option #{opt} is only supported in the /query method.")
        |> halt

      :error ->
        conn
    end
  end

  # Convert the structure returned by the Repo in a usable list of datasources, fit for the views
  defp contents_to_datasources(lst, :extent) do
    Enum.map(lst, fn n ->
      Enum.map(n.stations, fn s ->
        Enum.map(s.channels, fn ch ->
          Enum.group_by(ch.contents, & &1.quality)
          |> Enum.map(fn {q, contents} ->
            %{
              network: n.code,
              station: s.code,
              location: ch.location,
              channel: ch.band_code <> ch.instrument_code <> ch.orientation_code,
              quality: q,
              samplerate: ch.samplerate,
              # NOTE(review): Date as comparator on DateTime values compares
              # calendar dates only -- min/max within one day is arbitrary.
              earliest: Enum.min_by(contents, & &1.starttime, Date) |> Map.fetch!(:starttime),
              latest: Enum.max_by(contents, & &1.endtime, Date) |> Map.fetch!(:endtime),
              timespancount:
                Enum.reduce(contents, 0, fn content, acc ->
                  acc + length(content.timespans)
                end),
              updated: Enum.max_by(ch.contents, & &1.updated_at, Date) |> Map.fetch!(:updated_at),
              restriction: ch.restricted
            }
          end)
        end)
      end)
    end)
    |> List.flatten()
  end

  # Collapse a group of availabilities (same merge key) into one summary map:
  # identifying fields from the first entry, time bounds and counters
  # aggregated over the whole group.
  @spec availabilities_to_map(list) :: map
  defp availabilities_to_map(avs) do
    first = List.first(avs)

    %{
      network: first.network,
      station: first.station,
      location: first.location,
      channel: first.channel,
      quality: first.quality,
      samplerate: first.samplerate,
      restriction: first.restriction,
      earliest: Enum.min_by(avs, & &1.earliest, Date) |> Map.fetch!(:earliest),
      latest: Enum.max_by(avs, & &1.latest, Date) |> Map.fetch!(:latest),
      timespancount:
        Enum.reduce(avs, 0, fn av, acc ->
          acc + av.timespancount
        end),
      updated: Enum.max_by(avs, & &1.updated, Date) |> Map.fetch!(:updated)
    }
  end

  # Group the datasources by every field NOT listed in merge, then collapse
  # each group and drop the merged fields from the result.
  @spec merge_datasources(list, list) :: list
  defp merge_datasources(availabilities, []), do: availabilities

  defp merge_datasources(availabilities, merge) do
    Logger.info(
      "Merging datasources #{inspect(availabilities, pretty: true)} on #{inspect(merge)}"
    )

    # NOTE(review): no fallback clause -- a merge list containing neither
    # :quality nor :samplerate (e.g. [:overlap] alone) would raise
    # CondClauseError. Confirm upstream validation prevents that.
    group_by_keys =
      cond do
        :quality in merge and :samplerate in merge ->
          fn a ->
            "#{a.network}_#{a.station}_#{a.location}_#{a.channel}_#{a.restriction}"
          end

        :samplerate in merge ->
          fn a ->
            "#{a.network}_#{a.station}_#{a.location}_#{a.channel}_#{a.restriction}_#{a.quality}"
          end

        :quality in merge ->
          fn a ->
            "#{a.network}_#{a.station}_#{a.location}_#{a.channel}_#{a.restriction}_#{a.samplerate}"
          end
      end

    availabilities
    |> Enum.group_by(group_by_keys)
    |> Enum.map(fn {_, avs} -> availabilities_to_map(avs) |> Map.drop(merge) end)
  end

  # The API uses the "format" parameter on order to set the output format.
  # Translate this to the _format param for Phoenix magic to take place.
  # Also force the response content type header to text
  defp format(conn, _) do
    conn =
      case Map.fetch(conn.params, "format") do
        {:ok, format} -> Phoenix.Controller.put_format(conn, format)
        _ -> Phoenix.Controller.put_format(conn, "text")
      end

    conn =
      case Map.fetch(conn.params, "format") do
        {:ok, "json"} -> Plug.Conn.put_resp_content_type(conn, "application/json")
        {:ok, "geocsv"} -> Plug.Conn.put_resp_content_type(conn, "text/csv")
        _ -> Plug.Conn.put_resp_content_type(conn, "text/plain")
      end

    Logger.debug(inspect(conn.params))
    conn
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.ErrorJSON do
  @moduledoc """
  This module is invoked by your endpoint in case of errors on JSON requests.

  See config/config.exs.
  """

  # To customize a particular status code, add a clause such as:
  #
  #     def render("500.json", _assigns) do
  #       %{errors: %{detail: "Internal Server Error"}}
  #     end
  #
  # By default the detail text is derived from the template name,
  # e.g. "404.json" renders as "Not Found".
  def render(template, _assigns) do
    detail = Phoenix.Controller.status_message_from_template(template)
    %{errors: %{detail: detail}}
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.Endpoint do
  use Phoenix.Endpoint, otp_app: :avy

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  @session_options [
    store: :cookie,
    key: "_avy_key",
    signing_salt: "xyXumY7h",
    same_site: "Lax"
  ]

  # LiveView socket; both transports receive the signed session.
  socket "/live", Phoenix.LiveView.Socket,
    websocket: [connect_info: [session: @session_options]],
    longpoll: [connect_info: [session: @session_options]]

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  # "application.wadl" gets an explicit text/xml content-type override.
  plug Plug.Static,
    at: "/",
    from: :avy,
    gzip: true,
    content_types: %{"application.wadl" => "text/xml"},
    only: AvyWeb.static_paths()

  # Answer to /_health request
  plug AvyWeb.Plug.HealthCheck

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    plug Phoenix.CodeReloader
    plug Phoenix.Ecto.CheckRepoStatus, otp_app: :avy
  end

  plug Phoenix.LiveDashboard.RequestLogger,
    param_key: "request_logger",
    cookie_key: "request_logger"

  # Assign a unique id to each request and emit [:phoenix, :endpoint] telemetry.
  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options
  # Redirect "/" to the static documentation page before routing.
  plug AvyWeb.Plugs.RedirectDoc
  plug AvyWeb.Router
end
Line | Hits | Source |
---|---|---|
0 | require Logger | |
1 | ||
defmodule AvyWeb.Plugs.BodyParser.ParseError do
  @moduledoc """
  Exception type for malformed POST request bodies
  (companion to `AvyWeb.Plugs.BodyParser`).
  """
  defexception message: "Malformed body"
end
5 | ||
defmodule AvyWeb.Plugs.BodyParser do
  @moduledoc """
  This modules parses the body of a HTTP request to generate a %Avy.FdsnRequestParams{} structure.

  The max number of lines in a post request can be parametrised using the environment variable AVY_POST_MAX_LINES.
  """
  alias Avy.{FdsnRequestParams, FdsnSourceIdentifier}
  @behaviour Plug.Parsers

  def init(opts), do: opts

  @doc """
  Parse the body of the request in order to return a %Avy.FdsnRequestParams{}
  This function implements the behavior of Plug.Parsers.
  Note: The parser will halt as soon as a line is not parseable.
  """
  def parse(%Plug.Conn{method: "POST"} = conn, _, _, _, _) do
    # NOTE(review): assumes :post_max_lines is configured -- a nil value
    # would make the length comparison below raise. Confirm config default.
    max_lines = Application.get_env(:avy, :post_max_lines)
    {:ok, body, conn} = Plug.Conn.read_body(conn, [])
    body_list = String.split(body, "\n", trim: true)
    Logger.debug("Parsing body #{body}")

    if length(body_list) > max_lines do
      Logger.warning("Body has more than #{max_lines} lines. (#{length(body_list)})")
      {:error, :too_large, conn}
    else
      case Enum.reduce_while(body_list, %FdsnRequestParams{}, &parse_line/2) do
        {:error, m} ->
          Logger.warning(m)
          # Hack, but acceptable here: hand back an empty params struct while
          # the conn has already been halted with a 400 response.
          {:ok, %FdsnRequestParams{}, conn |> Plug.Conn.send_resp(400, m) |> Plug.Conn.halt()}

        datareq ->
          Logger.debug("Body parsed: #{inspect(datareq)}")
          {:ok, datareq, Plug.Conn.assign(conn, :fdsn_parameters, datareq)}
      end
    end
  end

  # Parse a body line and add the result to a %Avy.FdsnRequestParams{}
  defp parse_line(line, datareq) do
    Logger.debug("Parsing line #{line}")
    param_line = ~r/^(?<key>.*)=(?<val>.*)$/

    case Regex.named_captures(param_line, line) do
      nil ->
        # This should be a stream description line. Parse it and add the result to datareq
        case parse_stream(line) do
          {:error, msg} ->
            {:halt, {:error, msg}}

          {:ok, streams} ->
            Logger.debug("Adding #{inspect(streams)} to the source identifiers")

            req = %{
              datareq
              | source_identifiers: List.flatten([streams | datareq.source_identifiers])
            }

            Logger.debug("Request is now: #{inspect(req)}")
            {:cont, req}
        end

      %{"key" => k, "val" => v} ->
        # String.to_existing_atom/1 instead of String.to_atom/1: the key comes
        # from the request body, and minting new atoms from attacker-chosen
        # input can exhaust the atom table. Unknown keys are simply ignored,
        # which matches what Map.replace/3 already did for keys absent from
        # the struct.
        req =
          try do
            Map.replace(datareq, String.to_existing_atom(k), v)
          rescue
            ArgumentError -> datareq
          end

        {:cont, req}
    end
  end

  # A stream line is "NET STA LOC CHA STARTTIME ENDTIME", blank-separated.
  @spec parse_stream(String.t()) :: {:ok, list(FdsnSourceIdentifier.t())} | {:error, String.t()}
  defp parse_stream(line) do
    case String.trim(line) |> String.split(~r/[[:space:]]+/) do
      [n, s, l, c, startt, endt] -> FdsnSourceIdentifier.build_list(n, s, l, c, startt, endt)
      _ -> {:error, "Malformed line in body: #{inspect(line)}"}
    end
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.Plugs.FdsnParameters do
  @moduledoc """
  This is a module Plug that will check all parameters of the get request against FDSN specification.
  It will add in the Plug.Conn a new attribute :fdsn_parameters of type FdsnRequestParams that is expected by FdsnController
  """

  import Plug.Conn
  alias Avy.FdsnSourceIdentifier
  require Logger

  def init(default), do: default

  # This should only be called on GET requests
  # For POST request, look at the AvyWeb.Plugs.BodyParser
  def call(%Plug.Conn{method: "POST"} = conn, _opts), do: conn

  def call(%Plug.Conn{method: "GET"} = conn, _opts) do
    # Normalize the parameters once and reuse the result (previously
    # shorten_params/1 was computed twice per request).
    short_p = shorten_params(conn.params)
    start_p = Map.get(short_p, :start, "1900-01-01")
    end_p = Map.get(short_p, :end, "2150-01-31")
    net = Map.get(short_p, :net, "*")
    sta = Map.get(short_p, :sta, "*")
    loc = Map.get(short_p, :loc, "*")
    cha = Map.get(short_p, :cha, "*")
    fdsn_p = struct(Avy.FdsnRequestParams, short_p)

    case FdsnSourceIdentifier.build_list(net, sta, loc, cha, start_p, end_p) do
      {:error, msg} ->
        conn |> send_resp(400, msg) |> halt

      {:ok, sids} ->
        fdsn_p = Map.replace!(fdsn_p, :source_identifiers, sids)
        Logger.info("FDSN request: #{inspect(fdsn_p)}")
        assign(conn, :fdsn_parameters, fdsn_p)
    end
  end

  # Atomize the parameter keys and map the long FDSN names to their short
  # aliases, in a single pass. String.to_atom/1 is acceptable here because
  # AvyWeb.Plugs.OnlyValidParams whitelists the keys earlier in the pipeline.
  @spec shorten_params(map) :: map
  defp shorten_params(params) do
    short_keys = %{
      starttime: :start,
      endtime: :end,
      network: :net,
      station: :sta,
      location: :loc,
      channel: :cha
    }

    # inspect/1 is required: Logger messages must be chardata, and passing
    # the map directly would raise once debug-level logging is enabled.
    Logger.debug(inspect(params))

    Map.new(params, fn {k, v} ->
      key = String.to_atom(k)
      {Map.get(short_keys, key, key), v}
    end)
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.Plug.HealthCheck do
  @moduledoc """
  Liveness endpoint: answers "/_health" with an empty 200 response and
  passes every other request through untouched.
  """
  import Plug.Conn

  def init(opts), do: opts

  def call(%Plug.Conn{request_path: path} = conn, _opts) do
    if path == "/_health" do
      conn
      |> send_resp(200, "")
      |> halt()
    else
      conn
    end
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.Plugs.NonEmptyRequest do
  @moduledoc """
  This plug checks if the request is empty.
  This depends on the HTTP method.
  When GET is used, check that the conn.params has at least one element.
  When POST is used, the assigns is already in :fdsn_parameters. Check it has at least one element.
  """
  import Plug.Conn
  require Logger

  def init(opts), do: opts

  # GET with no query parameters at all is rejected outright.
  def call(%Plug.Conn{method: "GET"} = conn, _) when map_size(conn.params) == 0 do
    conn |> send_resp(400, "Empty request") |> halt()
  end

  def call(%Plug.Conn{method: "GET"} = conn, _), do: conn

  # For POST, the body parser must already have assigned :fdsn_parameters.
  def call(%Plug.Conn{method: "POST"} = conn, _) do
    if Map.has_key?(conn.assigns, :fdsn_parameters) do
      conn
    else
      Logger.info("No key fdsn_parameters in conn assigns #{inspect(conn)}")
      conn |> send_resp(400, "Empty request") |> halt()
    end
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.Plugs.ConsistentSourceIdentifier do
  @moduledoc """
  Validates the source identifiers in `conn.assigns.fdsn_parameters`:
  there must be at least one, none may carry a parse error, and each
  identifier's end time must be after its start time.
  """
  import Plug.Conn
  require Logger

  def init(opts), do: opts
  # At least one source identifier
  # No error in them

  @spec call(Plug.Conn.t(), list) :: Plug.Conn.t()
  def call(%Plug.Conn{} = conn, _) do
    conn = non_empty(conn)

    # Stop when non_empty/1 already rejected the request: running
    # consistent_start_end/1 on a halted conn would access `.end` on the
    # {:error, message} tuples and crash (halt/1 only stops the plug
    # pipeline, not this local pipe).
    if conn.halted do
      conn
    else
      consistent_start_end(conn)
    end
  end

  # Rejects the request when no source identifier was provided, or when any
  # of them failed to parse ({:error, message} tuples in the list).
  defp non_empty(%Plug.Conn{} = conn) do
    Logger.debug("Valid source identifiers in #{inspect(conn.assigns[:fdsn_parameters])}")

    case Map.fetch(conn.assigns[:fdsn_parameters], :source_identifiers) do
      :error ->
        conn
        |> send_resp(400, "You must provide at least one valid source identifier (got nothing)")
        |> halt

      {:ok, []} ->
        conn
        |> send_resp(400, "You must provide at least one valid source identifier (none found)")
        |> halt

      {:ok, sids} ->
        # Collect the parse errors in request order (the previous reduce
        # accumulated them reversed).
        all_errors = for {:error, m} <- sids, do: m

        if all_errors == [] do
          conn
        else
          conn
          |> send_resp(400, Enum.join(["" | all_errors], "\n"))
          |> halt
        end
    end
  end

  # Each identifier's end must be strictly after its start.
  defp consistent_start_end(conn) do
    case Map.fetch(conn.assigns, :fdsn_parameters) do
      {:ok, %Avy.FdsnRequestParams{source_identifiers: sid}} ->
        Logger.debug(inspect(sid))

        if Enum.all?(sid, fn s -> DateTime.after?(s.end, s.start) end) do
          conn
        else
          conn |> send_resp(400, "endtime is set before starttime in one of the queries") |> halt
        end

      :error ->
        Logger.debug("No key fdsn_params in conn. #{inspect(conn)}")

        conn
        |> send_resp(
          500,
          "Something is wrong in the code. This plug has to be called after FdsnRequestParams"
        )
        |> halt
    end
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.Plugs.OnlyValidParams do
  @moduledoc """
  This plug will make sure that the request does only present valid parsable parameters.
  """
  import Plug.Conn
  require Logger

  # Whitelist of the query parameters accepted by the API, long and short
  # forms, plus Phoenix's internal "_format".
  # Fix: the long form was misspelled "localtion", so the spec-mandated
  # "location" parameter was rejected as invalid.
  @valid_parameters [
    "network",
    "net",
    "station",
    "sta",
    "location",
    "loc",
    "channel",
    "cha",
    "starttime",
    "start",
    "endtime",
    "end",
    "quality",
    "merge",
    "orderby",
    "limit",
    "includerestricted",
    "format",
    "_format",
    "nodata",
    "mergegaps",
    "show"
  ]

  def init(opts), do: opts

  # POST parameters are handled by the body parser, not here.
  def call(%Plug.Conn{method: "POST"} = conn, _), do: conn

  def call(%Plug.Conn{method: "GET"} = conn, _) do
    Logger.debug("Checking if all parameters are valid")

    case all_valid?(conn.params) do
      :ok ->
        conn

      {:error, msg} ->
        Logger.info("User request has invalid parameter: #{msg}")
        conn |> send_resp(400, msg) |> halt
    end
  end

  # :ok when every parameter is whitelisted; otherwise {:error, message}
  # naming the first offending parameter.
  defp all_valid?(params) do
    Logger.debug("All valid ? #{inspect(params)}")

    Enum.reduce_while(params, :ok, fn {k, v}, _ ->
      if is_valid?(k) do
        {:cont, :ok}
      else
        Logger.debug("#{k} Not valid")
        {:halt, {:error, "invalid parameter '#{k}=#{v}'"}}
      end
    end)
  end

  defp is_valid?(p) do
    Logger.debug("Checking if #{inspect(p)} is valid")
    # `in` membership test instead of Enum.find(...) != nil.
    p in @valid_parameters
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.Plugs.ParseParameters do
  @moduledoc """
  Plug module that parses all the parameters and validates the values.
  """
  use Plug.Builder
  require Logger

  # Accepted values for each constrained parameter.
  @nodata_values [204, 404]
  @quality_values [:M, :Q, :D, :R]
  @merge_values [:samplerate, :quality, :overlap, nil]
  @format_values [:text, :geocsv, :json, :request]
  @orderby_values [
    :nslc_time_quality_samplerate,
    :latestupdate,
    :latestupdate_desc,
    :timespancount,
    :timespancount_desc
  ]
  @show_values [:latestupdate]

  # Pipeline order matters: numeric casts first, then membership
  # validation, then booleans, then the /extent "show" default.
  plug :cast_numeric_param, [:mergegaps, "Elixir.Float"]
  plug :cast_numeric_param, [:limit, "Elixir.Integer"]
  plug :cast_numeric_param, [:nodata, "Elixir.Integer"]
  plug :validate_param_list, [:nodata, @nodata_values]
  plug :validate_param_list, [:format, @format_values]
  plug :validate_multiparams_list, [:merge, @merge_values]
  plug :validate_param_list, [:orderby, @orderby_values]
  plug :validate_multiparams_list, [:show, @show_values]
  plug :validate_multiparams_list, [:quality, @quality_values]
  plug :validate_boolean, :includerestricted
  plug :set_default_show_for_extent

  # Weirdness of the specification : it is not allowed to use show option for extent,
  # but the behaviour is always to set the latest_update
  # Fix this by putting the value of show when method is extent
  defp set_default_show_for_extent(conn, _) do
    assign(
      conn,
      :fdsn_parameters,
      if conn.request_path == "/extent" do
        %{conn.assigns.fdsn_parameters | show: [:latestupdate]}
      else
        conn.assigns.fdsn_parameters
      end
    )
  end

  # Casts the named parameter to a boolean: the literal strings
  # "TRUE"/"FALSE" are converted, already-boolean values pass through,
  # anything else is a 400.
  defp validate_boolean(conn, p) do
    params = conn.assigns[:fdsn_parameters]

    case Map.fetch!(params, p) do
      "FALSE" -> assign(conn, :fdsn_parameters, Map.replace!(params, p, false))
      "TRUE" -> assign(conn, :fdsn_parameters, Map.replace!(params, p, true))
      true -> conn
      false -> conn
      x -> conn |> send_resp(400, "#{p} should be TRUE or FALSE, not #{x}") |> halt
    end
  end

  # Validates a comma-separated (or list) parameter: every element must
  # belong to `values`; the normalized atom list replaces the raw value.
  defp validate_multiparams_list(conn, [p, values]) do
    params = conn.assigns[:fdsn_parameters]
    pp = Map.fetch!(params, p) |> make_atom_list()

    Logger.debug("Test if #{inspect(pp)} is a sublist of #{inspect(values)}")

    if Enum.all?(pp, fn p -> p in values end) do
      params = Map.replace!(params, p, pp)
      assign(conn, :fdsn_parameters, params)
    else
      conn
      |> send_resp(400, "#{p} must be in #{Enum.join(values, ", ")}")
      |> halt
    end
  end

  # Normalize a parameter value (atom, comma-separated string, or list of
  # strings) into a list of atoms.
  # NOTE(security): String.to_atom/1 on request-derived strings can grow the
  # atom table; the values are validated against fixed lists right after, but
  # invalid atoms are still created -- consider to_existing_atom. TODO confirm.
  defp make_atom_list(p) when is_atom(p) do
    p
    |> Atom.to_string()
    |> String.split(",")
    |> Enum.map(&String.to_atom/1)
  end

  defp make_atom_list(p) when is_binary(p) do
    p
    |> String.split(",")
    |> Enum.map(&String.to_atom/1)
  end

  defp make_atom_list(p) when is_list(p) do
    if Enum.all?(p, &is_binary/1) do
      p |> Enum.map(&String.to_atom/1)
    else
      p
    end
  end

  # Transform binary string in atom and check validity
  defp validate_param_list(conn, [p, values]) do
    Logger.debug("Validating #{p} against #{inspect(values)}")
    params = conn.assigns[:fdsn_parameters]
    value = Map.fetch!(params, p)
    Logger.debug("Value #{p} = #{value}")

    value =
      cond do
        is_atom(value) -> value
        is_binary(value) -> String.to_atom(value)
        true -> value
      end

    if value in values do
      params = Map.replace!(params, p, value)
      assign(conn, :fdsn_parameters, params)
    else
      Logger.debug("#{p} has invalid value #{value}")

      conn
      |> send_resp(400, "#{p} must be in #{Enum.join(values, ", ")}")
      |> halt
    end
  end

  # Try to cast all the numerical parameters.
  # Also validate the values of textual parameters
  # NOTE(review): the error message below always says "float" even when the
  # target type is Integer (limit, nodata) -- worth rewording.
  defp cast_numeric_param(conn, [p, type]) do
    Logger.debug("Casting #{p} to #{type}")
    params = conn.assigns[:fdsn_parameters]
    pp = Map.fetch!(params, p)

    if is_number(pp) do
      conn
    else
      case apply(String.to_existing_atom(type), :parse, [pp]) do
        :error ->
          conn |> send_resp(400, "#{p} should be a float (ie. 1.2)") |> halt

        {x, _} ->
          params = Map.replace(params, p, x)
          assign(conn, :fdsn_parameters, params)
      end
    end
  end
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.Plugs.RedirectDoc do
  @moduledoc """
  Redirects / to /documentation.html
  """

  import Plug.Conn
  import Phoenix.Controller

  @redirect_to "/documentation.html"
  @redirect_from "/"

  def init(default), do: default

  # Requests for the site root are redirected to the documentation page;
  # every other path passes through untouched.
  def call(%Plug.Conn{request_path: @redirect_from} = conn, _) do
    conn
    |> redirect(to: @redirect_to)
    |> halt
  end

  def call(conn, _), do: conn
end
Line | Hits | Source |
---|---|---|
defmodule AvyWeb.Router do
  use AvyWeb, :router

  # Pipeline shared by the FDSN availability endpoints (/extent, /query):
  # parse POST bodies, then validate and normalize the request parameters.
  pipeline :fdsn do
    plug :accepts, ["json", "text"]

    plug Plug.Parsers,
      parsers: [AvyWeb.Plugs.BodyParser],
      pass: ["application/txt"]

    plug AvyWeb.Plugs.OnlyValidParams
    plug AvyWeb.Plugs.NonEmptyRequest
    plug AvyWeb.Plugs.FdsnParameters
    plug AvyWeb.Plugs.ParseParameters
    plug AvyWeb.Plugs.ConsistentSourceIdentifier
  end

  # Version endpoint is outside the :fdsn pipeline (no parameter checks).
  get "/version", AvyWeb.AvyController, :version

  scope "/", AvyWeb do
    pipe_through :fdsn
    get "/extent", AvyController, :extent
    post "/extent", AvyController, :extent
    get "/query", AvyController, :query
    post "/query", AvyController, :query
  end

  # Enable LiveDashboard and Swoosh mailbox preview in development
  if Application.compile_env(:avy, :dev_routes) do
    # If you want to use the LiveDashboard in production, you should put
    # it behind authentication and allow only admins to access it.
    # If your application does not have an admins-only section yet,
    # you can use Plug.BasicAuth to set up some basic authentication
    # as long as you are also using SSL (which you should anyway).
    import Phoenix.LiveDashboard.Router

    scope "/dev" do
      pipe_through [:fetch_session, :protect_from_forgery]

      live_dashboard "/dashboard", metrics: AvyWeb.Telemetry
      forward "/mailbox", Plug.Swoosh.MailboxPreview
    end
  end
end