| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy do | |
| 1 | @moduledoc """ | |
| 2 | Avy keeps the contexts that define your domain | |
| 3 | and business logic. | |
| 4 | ||
| 5 | Contexts are also responsible for managing your data, regardless | |
| 6 | if it comes from the database, an external API or others. | |
| 7 | """ | |
| 8 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy.ApiSpec do | |
| 1 | alias OpenApiSpex.Parameter | |
| 2 | alias OpenApiSpex.RequestBody | |
| 3 | ||
| 4 | alias OpenApiSpex.{ | |
| 5 | Components, | |
| 6 | Info, | |
| 7 | OpenApi, | |
| 8 | Paths, | |
| 9 | Server, | |
| 10 | Response, | |
| 11 | MediaType, | |
| 12 | Schema, | |
| 13 | RequestBody, | |
| 14 | Example, | |
| 15 | Reference | |
| 16 | } | |
| 17 | ||
| 18 | alias AvyWeb.{Endpoint, Router} | |
| 19 | @behaviour OpenApi | |
| 20 | ||
| 21 | # References are not supported for RequestBody. Let's define it here | |
| 22 | # https://github.com/open-api-spex/open_api_spex/issues/692 | |
| 23 | @request_body_spec %RequestBody{ | |
| 24 | description: """ | |
| 25 | all parameters must be submitted as part of the POST body. The quality, | |
| 26 | minimumlength and longestonly parameters should be specified as key=value pairs on separate lines | |
| 27 | and the simple-time and channel constraints parameters repeated as many times as necessary following | |
| 28 | this pattern: | |
| 29 | ```` | |
| 30 | quality=M | |
| 31 | NET STA LOC CHA STARTTIME ENDTIME | |
| 32 | NET STA LOC CHA STARTTIME ENDTIME | |
| 33 | NET STA LOC CHA STARTTIME ENDTIME | |
| 34 | ```` | |
| 35 | All rules for parameters apply equally whether specified using the GET or POST methods with the | |
| 36 | exception of blank location IDs, which must be specified as "--" in the POST body due to spaces being | |
| 37 | used as the field separator | |
| 38 | """, | |
| 39 | required: true, | |
| 40 | content: %{ | |
| 41 | "text/plain" => %MediaType{ | |
| 42 | schema: %Schema{type: :string}, | |
| 43 | example: """ | |
| 44 | quality=B | |
| 45 | FR CIEL 00 HHZ 2025-11-20T12:00:00 2025-11-20T12:01:00 | |
| 46 | RA UNIO 00 HNZ 2025-11-20T12:00:00 2025-11-20T12:01:00 | |
| 47 | """ | |
| 48 | } | |
| 49 | } | |
| 50 | } | |
| 51 | @responses_spec [ | |
| 52 | ok: %Reference{"$ref": "#/components/responses/success"}, | |
| 53 | no_content: %Reference{"$ref": "#/components/responses/nodata"}, | |
| 54 | not_found: %Reference{"$ref": "#/components/responses/nodata"}, | |
| 55 | bad_request: %Reference{"$ref": "#/components/responses/bad_request"}, | |
| 56 | request_entity_too_large: %Reference{"$ref": "#/components/responses/too_much_data"} | |
| 57 | ] | |
| 58 | @parameters_spec [ | |
| 59 | %Reference{"$ref": "#/components/parameters/network"}, | |
| 60 | %Reference{"$ref": "#/components/parameters/station"}, | |
| 61 | %Reference{"$ref": "#/components/parameters/location"}, | |
| 62 | %Reference{"$ref": "#/components/parameters/channel"}, | |
| 63 | %Reference{"$ref": "#/components/parameters/starttime"}, | |
| 64 | %Reference{"$ref": "#/components/parameters/endtime"}, | |
| 65 | %Reference{"$ref": "#/components/parameters/nodata"}, | |
| 66 | %Reference{"$ref": "#/components/parameters/format"}, | |
| 67 | %Reference{"$ref": "#/components/parameters/quality"}, | |
| 68 | %Reference{"$ref": "#/components/parameters/merge"}, | |
| 69 | %Reference{"$ref": "#/components/parameters/orderby"}, | |
| 70 | %Reference{"$ref": "#/components/parameters/includerestricted"} | |
| 71 | ] | |
| 72 | 0 | def request_body_spec, do: @request_body_spec |
| 73 | 0 | def responses_spec, do: @responses_spec |
| 74 | 0 | def parameters_spec, do: @parameters_spec |
| 75 | ||
| 76 | def query_parameters_spec, | |
| 77 | do: | |
| 78 | 0 | @parameters_spec ++ |
| 79 | [ | |
| 80 | %Reference{"$ref": "#/components/parameters/mergegaps"}, | |
| 81 | %Reference{"$ref": "#/components/parameters/show"}, | |
| 82 | %Reference{"$ref": "#/components/parameters/showgaps"} | |
| 83 | ] | |
| 84 | ||
| 85 | @impl OpenApi | |
| 86 | def spec do | |
| 87 | %OpenApi{ | |
| 88 | servers: [ | |
| 89 | openapi_server_from_endpoint(Endpoint) | |
| 90 | ], | |
| 91 | info: %Info{ | |
| 92 | title: "FDSNWS availability - OpenAPI 3.0", | |
| 93 | version: "1.0" | |
| 94 | }, | |
| 95 | # Populate the paths from a phoenix router | |
| 96 | paths: Paths.from_router(Router), | |
| 97 | components: %Components{ | |
| 98 | responses: %{ | |
| 99 | success: %Response{ | |
| 100 | description: "successful request", | |
| 101 | content: %{ | |
| 102 | "text/plain" => %MediaType{schema: %Schema{type: :string}}, | |
| 103 | "application/json" => %MediaType{schema: %Schema{type: :string}}, | |
| 104 | "text/csv" => %MediaType{schema: %Schema{type: :string}} | |
| 105 | } | |
| 106 | }, | |
| 107 | nodata: %Response{ | |
| 108 | description: "no data" | |
| 109 | }, | |
| 110 | bad_request: %Response{ | |
| 111 | description: "Invalid parameters in query string." | |
| 112 | }, | |
| 113 | too_much_data: %Response{ | |
| 114 | description: | |
| 115 | "Too much data requested. Try to split your request with narrower criteria" | |
| 116 | } | |
| 117 | }, | |
| 118 | requestBodies: %{query: @request_body_spec}, | |
| 119 | parameters: %{ | |
| 120 | network: %Parameter{ | |
| 121 | name: "network", | |
| 122 | in: :query, | |
| 123 | required: false, | |
| 124 | explode: false, | |
| 125 | description: """ | |
| 126 | Select one or more network codes. Can be SEED network codes or data center defined codes. | |
| 127 | Multiple codes are comma-separated. Shortcut `net` can also be used. | |
| 128 | Wildcards are supported (`_` means any character, once; `%` means any character 0 or several times.) | |
| 129 | """, | |
| 130 | schema: %Schema{ | |
| 131 | type: :array, | |
| 132 | items: %Schema{type: :string, example: "FR"} | |
| 133 | }, | |
| 134 | examples: %{ | |
| 135 | FR: %Example{value: "FR", description: "Select the FR network code"}, | |
| 136 | "FR,RA": %Example{ | |
| 137 | value: ["FR", "RA"], | |
| 138 | description: "Select the FR and RA network codes" | |
| 139 | }, | |
| 140 | "X%": %Example{ | |
| 141 | value: "X%", | |
| 142 | description: "Select all network codes starting with X" | |
| 143 | } | |
| 144 | } | |
| 145 | }, | |
| 146 | station: %Parameter{ | |
| 147 | name: "station", | |
| 148 | in: :query, | |
| 149 | required: false, | |
| 150 | explode: false, | |
| 151 | description: """ | |
| 152 | Select one or more SEED station codes. | |
| 153 | Multiple codes are comma-separated. | |
| 154 | Shortcut `sta` can also be used. | |
| 155 | Wildcards are supported (`_` means any character, once; `%` means any character 0 or several times.) | |
| 156 | """, | |
| 157 | schema: %Schema{ | |
| 158 | type: :array, | |
| 159 | items: %Schema{type: :string, example: "CIEL"} | |
| 160 | }, | |
| 161 | examples: %{ | |
| 162 | CIEL: %Example{value: "CIEL", description: "Select the CIEL station code"}, | |
| 163 | "CIEL,ILLK": %Example{ | |
| 164 | value: ["CIEL", "ILLK"], | |
| 165 | description: "Select two station codes" | |
| 166 | }, | |
| 167 | "Z%": %Example{ | |
| 168 | value: "Z%", | |
| 169 | description: "Select all station codes starting with Z" | |
| 170 | } | |
| 171 | } | |
| 172 | }, | |
| 173 | location: %Parameter{ | |
| 174 | name: "location", | |
| 175 | in: :query, | |
| 176 | required: false, | |
| 177 | explode: false, | |
| 178 | description: """ | |
| 179 | Select one or more SEED location codes. | |
| 180 | Empty location code can be specified using `--`. | |
| 181 | Multiple codes are comma-separated. | |
| 182 | Shortcut `loc` can also be used. | |
| 183 | Wildcards are supported (`_` means any character, once; `%` means any character 0 or several times.) | |
| 184 | """, | |
| 185 | schema: %Schema{ | |
| 186 | type: :array, | |
| 187 | items: %Schema{type: :string, example: "00"} | |
| 188 | }, | |
| 189 | examples: %{ | |
| 190 | "00": %Example{value: "00", description: "Select the 00 location code"}, | |
| 191 | "00,--": %Example{ | |
| 192 | value: ["00", "--"], | |
| 193 | description: "Select location codes 00 and empty" | |
| 194 | } | |
| 195 | } | |
| 196 | }, | |
| 197 | channel: %Parameter{ | |
| 198 | name: "channel", | |
| 199 | in: :query, | |
| 200 | required: false, | |
| 201 | explode: false, | |
| 202 | description: """ | |
| 203 | Select one or more SEED channel codes. | |
| 204 | Multiple codes are comma-separated. | |
| 205 | Shortcut `cha` can also be used. | |
| 206 | Wildcards are supported (`_` means any character, once; `%` means any character 0 or several times.) | |
| 207 | """, | |
| 208 | schema: %Schema{ | |
| 209 | type: :array, | |
| 210 | items: %Schema{type: :string, example: "HHZ"} | |
| 211 | }, | |
| 212 | examples: %{ | |
| 213 | HHZ: %Example{value: "HHZ", description: "Select the HHZ channel code"}, | |
| 214 | "HHZ,BHZ": %Example{value: ["HHZ", "BHZ"], description: "Select two channel codes"}, | |
| 215 | "%Z": %Example{description: "Select all channel ending with Z"} | |
| 216 | } | |
| 217 | }, | |
| 218 | starttime: %Parameter{ | |
| 219 | name: "starttime", | |
| 220 | in: :query, | |
| 221 | required: false, | |
| 222 | explode: false, | |
| 223 | description: """ | |
| 224 | Limit results to time series samples on or after the specified start time. If not specified, timezone is assumed as UTC. Shortcut `start` | |
| 225 | """, | |
| 226 | schema: %Schema{ | |
| 227 | type: :string, | |
| 228 | format: "date-time" | |
| 229 | }, | |
| 230 | examples: %{ | |
| 231 | "2025-11-20T12:38:04.000000Z": %Example{ | |
| 232 | value: "2025-11-20T12:38:04.000000Z", | |
| 233 | description: "microseconds and timezone" | |
| 234 | }, | |
| 235 | "2025-11-20T12:38:04Z": %Example{ | |
| 236 | value: "2025-11-20T12:38:04Z", | |
| 237 | description: "seconds and timezone" | |
| 238 | }, | |
| 239 | "2025-11-20T12:38:04": %Example{ | |
| 240 | value: "2025-11-20T12:38:04", | |
| 241 | description: "seconds without timezone (UTC is assumed)" | |
| 242 | }, | |
| 243 | "2025-11-20": %Example{ | |
| 244 | value: "2025-11-20", | |
| 245 | description: "Without time, 00:00:00.000000Z is assumed" | |
| 246 | } | |
| 247 | } | |
| 248 | }, | |
| 249 | endtime: %Parameter{ | |
| 250 | name: "endtime", | |
| 251 | in: :query, | |
| 252 | required: false, | |
| 253 | explode: false, | |
| 254 | description: | |
| 255 | "Limit results to time series samples on or after the specified end time. If not specified, timezone is assumed as UTC. Shortcut `end`", | |
| 256 | schema: %Schema{ | |
| 257 | type: :string, | |
| 258 | format: "date-time" | |
| 259 | }, | |
| 260 | examples: %{ | |
| 261 | "2025-11-20T12:38:14.000000Z": %Example{ | |
| 262 | value: "2025-11-20T12:38:14.000000Z", | |
| 263 | description: "microseconds and timezone" | |
| 264 | }, | |
| 265 | "2025-11-20T12:38:14Z": %Example{ | |
| 266 | value: "2025-11-20T12:38:14Z", | |
| 267 | description: "seconds and timezone" | |
| 268 | }, | |
| 269 | "2025-11-20T12:38:14": %Example{ | |
| 270 | value: "2025-11-20T12:38:14", | |
| 271 | description: "seconds without timezone (UTC is assumed)" | |
| 272 | }, | |
| 273 | "2025-11-21": %Example{ | |
| 274 | value: "2025-11-21", | |
| 275 | description: "Without time, 00:00:00.000000Z is assumed" | |
| 276 | } | |
| 277 | } | |
| 278 | }, | |
| 279 | format: %Parameter{ | |
| 280 | name: "format", | |
| 281 | in: :query, | |
| 282 | required: false, | |
| 283 | description: "Specify format of result, the default value is text.", | |
| 284 | schema: %Schema{ | |
| 285 | type: :string, | |
| 286 | default: "text", | |
| 287 | enum: ["text", "geocsv", "json", "request"] | |
| 288 | } | |
| 289 | }, | |
| 290 | nodata: %Parameter{ | |
| 291 | name: "nodata", | |
| 292 | in: :query, | |
| 293 | required: false, | |
| 294 | description: "Specify the return code for no data. Valid values are 204 or 404", | |
| 295 | schema: %Schema{ | |
| 296 | type: :integer, | |
| 297 | default: 204, | |
| 298 | enum: [204, 404] | |
| 299 | } | |
| 300 | }, | |
| 301 | quality: %Parameter{ | |
| 302 | name: "quality", | |
| 303 | in: :query, | |
| 304 | required: false, | |
| 305 | explode: false, | |
| 306 | description: """ | |
| 307 | Select a specific SEED quality indicator. | |
| 308 | * R -> Raw | |
| 309 | * Q -> Quality controlled | |
| 310 | * M -> Modified | |
| 311 | * D -> Data, unspecified | |
| 312 | """, | |
| 313 | schema: %Schema{ | |
| 314 | type: :array, | |
| 315 | items: %Schema{type: :string, example: "M", enum: ["R", "Q", "M", "D"]}, | |
| 316 | default: "M" | |
| 317 | } | |
| 318 | }, | |
| 319 | merge: %Parameter{ | |
| 320 | name: "merge", | |
| 321 | in: :query, | |
| 322 | required: false, | |
| 323 | explode: false, | |
| 324 | description: """ | |
| 325 | If set to one or more of the following values, time spans are merged as described. Multiple values may be specified as a | |
| 326 | comma-delimited list, e.g. ``merge=samplerate,quality``. | |
| 327 | * `samplerate`: time spans from data with differing sample rates will be grouped together. If specified this field will be | |
| 328 | omitted from the result. | |
| 329 | * `quality`: time spans from data with differing quality codes will be grouped together. If specified this field will be omitted | |
| 330 | from the result. | |
| 331 | * `overlap`: time spans from data that overlap will be merged together. This option does not apply to the extent method | |
| 332 | """, | |
| 333 | schema: %Schema{ | |
| 334 | type: :array, | |
| 335 | items: %Schema{ | |
| 336 | type: :string, | |
| 337 | example: "samplerate", | |
| 338 | enum: ["quality", "samplerate", "overlap"] | |
| 339 | }, | |
| 340 | default: "" | |
| 341 | } | |
| 342 | }, | |
| 343 | includerestricted: %Parameter{ | |
| 344 | name: "includerestricted", | |
| 345 | in: :query, | |
| 346 | required: false, | |
| 347 | description: | |
| 348 | "If true, all data are reported. If false, only data that can be openly accessed.", | |
| 349 | schema: %Schema{ | |
| 350 | type: :string, | |
| 351 | default: "TRUE", | |
| 352 | enum: ["TRUE", "FALSE"] | |
| 353 | } | |
| 354 | }, | |
| 355 | orderby: %Parameter{ | |
| 356 | name: "orderby", | |
| 357 | in: :query, | |
| 358 | required: false, | |
| 359 | description: """ | |
| 360 | Sort results by one of the following values in the order specified: | |
| 361 | * `nslc_time_quality_samplerate`: network, station, location, channel, time-range, quality, sample-rate (default) | |
| 362 | * `latestupdate`: update-date (past to present), network, station, location, channel, time-range, quality, sample-rate | |
| 363 | * `latestupdate_desc`: update-date (present to past), network, station, location, channel, time-range, quality, sample-rate | |
| 364 | * `timespancount`: number of timespans (small to large), network, station, location, channel, time-range, quality, sample-rate | |
| 365 | * `timespancount_desc`: number of timespans (large to small), network, station, location, channel, time-range, quality, sample-rate | |
| 366 | """, | |
| 367 | schema: %Schema{ | |
| 368 | type: :string, | |
| 369 | example: "latestupdate_desc", | |
| 370 | enum: [ | |
| 371 | "nslc_time_quality_samplerate", | |
| 372 | "latestupdate", | |
| 373 | "latestupdate_desc", | |
| 374 | "timespancount", | |
| 375 | "timespancount_desc" | |
| 376 | ], | |
| 377 | default: "latestupdate" | |
| 378 | } | |
| 379 | }, | |
| 380 | mergegaps: %Parameter{ | |
| 381 | name: "mergegaps", | |
| 382 | in: :query, | |
| 383 | required: false, | |
| 384 | description: | |
| 385 | "Merge time spans that are separated by the specified tolerance in seconds. Only positive values are allowed.", | |
| 386 | schema: %Schema{ | |
| 387 | type: :number, | |
| 388 | minimum: 0, | |
| 389 | example: 10.5, | |
| 390 | default: 0.0 | |
| 391 | } | |
| 392 | }, | |
| 393 | show: %Parameter{ | |
| 394 | name: "show", | |
| 395 | in: :query, | |
| 396 | required: false, | |
| 397 | description: """ | |
| 398 | If set to `latestupdate`, the latest times at which data contributing to the returned time spans were loaded into the repository are included in the result. | |
| 399 | This option applies to all formats except `request`. | |
| 400 | """, | |
| 401 | schema: %Schema{ | |
| 402 | type: :string, | |
| 403 | example: "latestupdate", | |
| 404 | enum: ["latestupdate", ""], | |
| 405 | default: "" | |
| 406 | } | |
| 407 | }, | |
| 408 | showgaps: %Parameter{ | |
| 409 | name: "showgaps", | |
| 410 | in: :query, | |
| 411 | required: false, | |
| 412 | description: """ | |
| 413 | If present, instead of showing timespans of available data, the result shows timespans of missing data. | |
| 414 | This parameter is not in the FDSN specification and has been added for convenience. | |
| 415 | """, | |
| 416 | schema: %Schema{ | |
| 417 | type: :boolean, | |
| 418 | nullable: true | |
| 419 | } | |
| 420 | } | |
| 421 | } | |
| 422 | } | |
| 423 | } | |
| 424 | # Discover request/response schemas from path specs | |
| 425 | 0 | |> OpenApiSpex.resolve_schema_modules() |
| 426 | end | |
| 427 | ||
| 428 | defp openapi_server_from_endpoint(endpoint) do | |
| 429 | 0 | uri = endpoint.struct_url() |
| 430 | 0 | path = Application.get_env(:avy, :url_prefix) |
| 431 | 0 | uri = %{uri | path: path} |
| 432 | ||
| 433 | 0 | %Server{ |
| 434 | url: URI.to_string(uri) | |
| 435 | } | |
| 436 | end | |
| 437 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy.Application do | |
| 1 | # See https://hexdocs.pm/elixir/Application.html | |
| 2 | # for more information on OTP Applications | |
| 3 | @moduledoc false | |
| 4 | ||
| 5 | use Application | |
| 6 | ||
| 7 | @impl true | |
| 8 | def start(_type, _args) do | |
| 9 | 1 | :logger.add_handler(:my_sentry_handler, Sentry.LoggerHandler, %{ |
| 10 | config: %{metadata: [:file, :line]} | |
| 11 | }) | |
| 12 | ||
| 13 | 1 | children = [ |
| 14 | AvyWeb.Telemetry, | |
| 15 | Avy.Repo, | |
| 16 | 1 | {DNSCluster, query: Application.get_env(:avy, :dns_cluster_query) || :ignore}, |
| 17 | {Phoenix.PubSub, name: Avy.PubSub}, | |
| 18 | # Start the Finch HTTP client for sending emails | |
| 19 | {Finch, name: Avy.Finch}, | |
| 20 | # Start a worker by calling: Avy.Worker.start_link(arg) | |
| 21 | # {Avy.Worker, arg}, | |
| 22 | # Start to serve requests, typically the last entry | |
| 23 | AvyWeb.Endpoint | |
| 24 | ] | |
| 25 | ||
| 26 | # See https://hexdocs.pm/elixir/Supervisor.html | |
| 27 | # for other strategies and supported options | |
| 28 | 1 | opts = [strategy: :one_for_one, name: Avy.Supervisor] |
| 29 | 1 | Supervisor.start_link(children, opts) |
| 30 | end | |
| 31 | ||
| 32 | # Tell Phoenix to update the endpoint configuration | |
| 33 | # whenever the application is updated. | |
| 34 | @impl true | |
| 35 | def config_change(changed, _new, removed) do | |
| 36 | 0 | AvyWeb.Endpoint.config_change(changed, removed) |
| 37 | :ok | |
| 38 | end | |
| 39 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy.Data.Content do | |
| 1 | use Ecto.Schema | |
| 2 | ||
| 3 | 2722 | schema "inventory_content" do |
| 4 | field :start, :utc_datetime, source: :start | |
| 5 | field :end, :utc_datetime, source: :end | |
| 6 | field :quality, :integer | |
| 7 | field :time_ordered, :boolean, default: false | |
| 8 | field :time_spans, {:array, :map}, source: :time_spans | |
| 9 | field :modified, :utc_datetime, source: :modified | |
| 10 | ||
| 11 | many_to_many :epochs, Avy.Metadata.Epoch, | |
| 12 | join_through: "inventory_content_epochs", | |
| 13 | preload_order: [asc: :starttime] | |
| 14 | end | |
| 15 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy.Data.ContentEpoch do | |
| 1 | use Ecto.Schema | |
| 2 | alias Avy.Data.Content | |
| 3 | alias Avy.Metadata.Epoch | |
| 4 | ||
| 5 | # Important: Set primary_key to false for junction tables | |
| 6 | @primary_key false | |
| 7 | 0 | schema "inventory_content_epochs" do |
| 8 | belongs_to :content, Content, primary_key: true | |
| 9 | belongs_to :epoch, Epoch, primary_key: true | |
| 10 | end | |
| 11 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy.Filter do | |
| 1 | alias FdsnPlugs.SourceIdentifier | |
| 2 | require Logger | |
| 3 | ||
| 4 | @moduledoc """ | |
| 5 | A filter is a structure containing all filter parameters given in an FDSN webservice request. | |
| 6 | ||
| 7 | """ | |
| 8 | defstruct net: "*", | |
| 9 | sta: "*", | |
| 10 | loc: "*", | |
| 11 | cha: "*", | |
| 12 | start: DateTime.from_unix!(1), | |
| 13 | end: DateTime.from_unix!(5_682_956_400), | |
| 14 | quality: [:R, :D, :M, :Q], | |
| 15 | includerestricted: false, | |
| 16 | epochids: [] | |
| 17 | ||
| 18 | @type t() :: %Avy.Filter{ | |
| 19 | net: String.t(), | |
| 20 | sta: String.t(), | |
| 21 | loc: String.t(), | |
| 22 | cha: String.t(), | |
| 23 | start: DateTime.t(), | |
| 24 | end: DateTime.t(), | |
| 25 | quality: String.t(), | |
| 26 | includerestricted: boolean, | |
| 27 | epochids: list | |
| 28 | } | |
| 29 | ||
| 30 | @spec split_channel(t()) :: map() | |
| 31 | def split_channel(f) do | |
| 32 | 36 | case SourceIdentifier.split_channel(%SourceIdentifier{cha: f.cha}) do |
| 33 | {:ok, [b, i, o]} -> | |
| 34 | 36 | %{band_code: b, instrument_code: i, orientation_code: o} |
| 35 | ||
| 36 | {:error, m} -> | |
| 37 | 0 | Logger.error(inspect(m)) |
| 38 | 0 | %{band_code: "*", instrument_code: "*", orientation_code: "*"} |
| 39 | ||
| 40 | _ -> | |
| 41 | 0 | %{} |
| 42 | end | |
| 43 | end | |
| 44 | ||
| 45 | @spec from_source_identifier(SourceIdentifier.t(), boolean, list) :: t() | |
| 46 | def from_source_identifier(sid, includerestricted \\ false, quality \\ [:R, :D, :M, :Q]) do | |
| 47 | 36 | %Avy.Filter{ |
| 48 | 36 | net: sid.net, |
| 49 | 36 | sta: sid.sta, |
| 50 | 36 | loc: sid.loc, |
| 51 | 36 | cha: sid.cha, |
| 52 | 36 | start: sid.start, |
| 53 | 36 | end: sid.end, |
| 54 | includerestricted: includerestricted, | |
| 55 | quality: quality | |
| 56 | } | |
| 57 | end | |
| 58 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy.Mailer do | |
| 1 | use Swoosh.Mailer, otp_app: :avy | |
| 2 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy.Metadata.Epoch do | |
| 1 | use Ecto.Schema | |
| 2 | ||
| 3 | 4582 | schema "inventory_epoch" do |
| 4 | field :band_code, :string | |
| 5 | field :instrument_code, :string | |
| 6 | field :orientation_code, :string | |
| 7 | field :location_code, :string, source: :location_code | |
| 8 | field :start_date, :utc_datetime, source: :start_date | |
| 9 | field :end_date, :utc_datetime, source: :end_date | |
| 10 | field :policy, :string, default: "O" | |
| 11 | field :sample_rate, :decimal, source: :sample_rate | |
| 12 | field :modified, :utc_datetime, source: :modified | |
| 13 | belongs_to :station, Avy.Metadata.Station | |
| 14 | ||
| 15 | many_to_many :contents, Avy.Data.Content, | |
| 16 | join_through: "inventory_content_epochs", | |
| 17 | preload_order: [asc: :starttime] | |
| 18 | end | |
| 19 | ||
| 20 | def source_identifier(c) do | |
| 21 | 1 | c = Avy.Repo.preload(c, station: [:network]) |
| 22 | ||
| 23 | 1 | "#{c.station.network.code}_#{c.station.code}_#{c.location_code}_#{c.band_code}_#{c.instrument_code}_#{c.orientation_code}" |
| 24 | end | |
| 25 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy.Metadata.Network do | |
| 1 | use Ecto.Schema | |
| 2 | ||
| 3 | 3017 | schema "inventory_network" do |
| 4 | field :code, :string | |
| 5 | field :start_date, :utc_datetime, source: :start_date | |
| 6 | field :end_date, :utc_datetime, source: :end_date | |
| 7 | has_many :stations, Avy.Metadata.Station | |
| 8 | end | |
| 9 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy.Metadata.Station do | |
| 1 | use Ecto.Schema | |
| 2 | ||
| 3 | 3031 | schema "inventory_station" do |
| 4 | field :code, :string | |
| 5 | field :start_date, :utc_datetime, source: :start_date | |
| 6 | field :end_date, :utc_datetime, source: :end_date | |
| 7 | belongs_to :network, Avy.Metadata.Network | |
| 8 | ||
| 9 | has_many :epochs, Avy.Metadata.Epoch, | |
| 10 | preload_order: [:band_code, :instrument_code, :orientation_code, :starttime] | |
| 11 | end | |
| 12 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule Avy.Repo do | |
| 1 | use Ecto.Repo, | |
| 2 | otp_app: :avy, | |
| 3 | adapter: Ecto.Adapters.Postgres | |
| 4 | ||
| 5 | import Ecto.Query | |
| 6 | ||
| 7 | require Logger | |
| 8 | ||
| 9 | alias FdsnPlugs.PublicationVersion | |
| 10 | alias Avy.Metadata.{Epoch, Station, Network} | |
| 11 | alias Avy.Data.Content | |
| 12 | ||
| 13 | # Create a map of all keyword parameters. | |
| 14 | # Especially, splits the channel code from filter into instrument, band, orientation codes. | |
| 15 | @spec make_keywords_params(Avy.Filter.t()) :: map() | |
| 16 | defp make_keywords_params(filter) do | |
| 17 | Avy.Filter.split_channel(filter) | |
| 18 | 36 | |> Map.merge(%{ |
| 19 | 36 | net: filter.net, |
| 20 | 36 | sta: filter.sta, |
| 21 | 36 | loc: filter.loc, |
| 22 | 36 | includerestricted: filter.includerestricted |
| 23 | }) | |
| 24 | end | |
| 25 | ||
| 26 | # NOTE: the call to split_channels seems to bother dialyzer. I was not able to fix this. | |
| 27 | @doc """ | |
| 28 | Get all contents from database related to the Avy.Filter | |
| 29 | """ | |
| 30 | @spec get_contents(Avy.Filter.t()) :: [map()] | |
| 31 | def get_contents(filter) do | |
| 32 | 36 | query = |
| 33 | from(c in Content) | |
| 34 | 36 | |> join(:inner, [c], ec in "inventory_content_epochs", |
| 35 | on: c.id == ec.content_id, | |
| 36 | as: :content_epochs | |
| 37 | ) | |
| 38 | 36 | |> join(:inner, [c, ec], e in Epoch, on: ec.epoch_id == e.id, as: :epochs) |
| 39 | 36 | |> join(:inner, [c, ec, e], s in Station, on: e.station_id == s.id, as: :stations) |
| 40 | 36 | |> join(:inner, [c, ec, e, s], n in Network, on: s.network_id == n.id, as: :networks) |
| 41 | |> where(^conditional_where_clause(make_keywords_params(filter))) | |
| 42 | |> where( | |
| 43 | [c, ec, e, s, n], | |
| 44 | 36 | c.quality in ^PublicationVersion.qualities_to_pubversions(filter.quality) |
| 45 | ) | |
| 46 | 36 | |> where([c, ec, e, s, n], c.start <= ^filter.end) |
| 47 | 36 | |> where([c, ec, e, s, n], c.end >= ^filter.start) |
| 48 | ||
| 49 | 36 | query = |
| 50 | 36 | from [c, ec, e, s, n] in query, |
| 51 | order_by: [ | |
| 52 | asc: e.band_code, | |
| 53 | asc: e.instrument_code, | |
| 54 | asc: e.orientation_code, | |
| 55 | asc: e.start_date, | |
| 56 | asc: c.start | |
| 57 | ], | |
| 58 | select: %{ | |
| 59 | network: n.code, | |
| 60 | station: s.code, | |
| 61 | location: e.location_code, | |
| 62 | channel: fragment("?||?||?", e.band_code, e.instrument_code, e.orientation_code), | |
| 63 | quality: c.quality, | |
| 64 | samplerate: e.sample_rate, | |
| 65 | 36 | earliest: fragment("GREATEST(?, ?) AT TIME ZONE 'utc'", c.start, ^filter.start), |
| 66 | 36 | latest: fragment("LEAST(?,?) AT TIME ZONE 'utc'", c.end, ^filter.end), |
| 67 | timespans: c.time_spans, | |
| 68 | restriction: e.policy, | |
| 69 | updated: c.modified | |
| 70 | } | |
| 71 | ||
| 72 | Avy.Repo.all(query) | |
| 73 | 92 | |> Enum.reject(fn t -> is_nil(t) end) |
| 74 | 36 | |> Enum.map(&naive_to_utc(&1)) |
| 75 | end | |
| 76 | ||
| 77 | # Define all dynamic where clauses. | |
| 78 | # If a parameter is a wildcard, we do not need to add the where clause. | |
| 79 | @spec conditional_where_clause(map()) :: any() | |
| 80 | def conditional_where_clause(keyword_params) do | |
| 81 | 36 | Enum.reduce(keyword_params, dynamic(true), fn |
| 82 | {_k, "*"}, dynamic -> | |
| 83 | # User wants all, no where clause | |
| 84 | 163 | dynamic |
| 85 | ||
| 86 | {:includerestricted, true}, dynamic -> | |
| 87 | 2 | dynamic |
| 88 | ||
| 89 | {:includerestricted, false}, dynamic -> | |
| 90 | 34 | dynamic([epochs: e], ^dynamic and e.policy == "O") |
| 91 | ||
| 92 | {:instrument_code, value}, dynamic -> | |
| 93 | 4 | dynamic([epochs: e], ^dynamic and like(e.instrument_code, ^fdsn_wildcard_to_sql(value))) |
| 94 | ||
| 95 | {:band_code, value}, dynamic -> | |
| 96 | 4 | dynamic([epochs: e], ^dynamic and like(e.band_code, ^fdsn_wildcard_to_sql(value))) |
| 97 | ||
| 98 | {:orientation_code, value}, dynamic -> | |
| 99 | 4 | dynamic([epochs: e], ^dynamic and like(e.orientation_code, ^fdsn_wildcard_to_sql(value))) |
| 100 | ||
| 101 | {:loc, value}, dynamic -> | |
| 102 | 1 | dynamic([epochs: e], ^dynamic and like(e.location_code, ^fdsn_wildcard_to_sql(value))) |
| 103 | ||
| 104 | {:sta, value}, dynamic -> | |
| 105 | 36 | dynamic([stations: s], ^dynamic and like(s.code, ^fdsn_wildcard_to_sql(value))) |
| 106 | ||
| 107 | {:net, value}, dynamic -> | |
| 108 | 4 | dynamic([networks: n], ^dynamic and like(n.code, ^fdsn_wildcard_to_sql(value))) |
| 109 | end) | |
| 110 | end | |
| 111 | ||
| 112 | # | |
| 113 | # From FDSN wildcard spec to SQL wildcards | |
| 114 | @spec fdsn_wildcard_to_sql(String.t()) :: String.t() | |
| 115 | defp fdsn_wildcard_to_sql(s) do | |
| 116 | String.replace(s, "*", "%") | |
| 117 | 53 | |> String.replace("?", "_") |
| 118 | end | |
| 119 | ||
| 120 | @spec naive_to_utc(map) :: map | |
| 121 | defp naive_to_utc(q) do | |
| 122 | 92 | if Map.has_key?(q.earliest, :time_zone) do |
| 123 | 0 | q |
| 124 | else | |
| 125 | %{ | |
| 126 | q | |
| 127 | 92 | | earliest: DateTime.from_naive!(q.earliest, "Etc/UTC"), |
| 128 | 92 | latest: DateTime.from_naive!(q.latest, "Etc/UTC") |
| 129 | } | |
| 130 | end | |
| 131 | end | |
| 132 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule AvyWeb do | |
| 1 | @moduledoc """ | |
| 2 | The entrypoint for defining your web interface, such | |
| 3 | as controllers, components, channels, and so on. | |
| 4 | ||
| 5 | This can be used in your application as: | |
| 6 | ||
| 7 | use AvyWeb, :controller | |
| 8 | use AvyWeb, :html | |
| 9 | ||
| 10 | The definitions below will be executed for every controller, | |
| 11 | component, etc, so keep them short and clean, focused | |
| 12 | on imports, uses and aliases. | |
| 13 | ||
| 14 | Do NOT define functions inside the quoted expressions | |
| 15 | below. Instead, define additional modules and import | |
| 16 | those modules here. | |
| 17 | """ | |
| 18 | ||
| 19 | 6 | def static_paths, |
| 20 | do: ~w(assets fonts images favicon.ico robots.txt application.wadl) | |
| 21 | ||
| 22 | def router do | |
| 23 | quote do | |
| 24 | use Phoenix.Router, helpers: false | |
| 25 | ||
| 26 | # Import common connection and controller functions to use in pipelines | |
| 27 | import Plug.Conn | |
| 28 | import Phoenix.Controller | |
| 29 | end | |
| 30 | end | |
| 31 | ||
| 32 | def channel do | |
| 33 | quote do | |
| 34 | use Phoenix.Channel | |
| 35 | end | |
| 36 | end | |
| 37 | ||
| 38 | def controller do | |
| 39 | quote do | |
| 40 | use Phoenix.Controller, | |
| 41 | formats: [:text, :json, :geocsv, :request, :html], | |
| 42 | layouts: [html: AvyWeb.Layouts] | |
| 43 | ||
| 44 | import Plug.Conn | |
| 45 | ||
| 46 | unquote(verified_routes()) | |
| 47 | end | |
| 48 | end | |
| 49 | ||
| 50 | def verified_routes do | |
| 51 | quote do | |
| 52 | use Phoenix.VerifiedRoutes, | |
| 53 | endpoint: AvyWeb.Endpoint, | |
| 54 | router: AvyWeb.Router, | |
| 55 | statics: AvyWeb.static_paths() | |
| 56 | end | |
| 57 | end | |
| 58 | ||
| 59 | @doc """ | |
| 60 | When used, dispatch to the appropriate controller/live_view/etc. | |
| 61 | """ | |
| 62 | defmacro __using__(which) when is_atom(which) do | |
| 63 | 6 | apply(__MODULE__, which, []) |
| 64 | end | |
| 65 | end |
| Line | Hits | Source |
|---|---|---|
| 0 | defmodule AvyWeb.AvyGEOCSV do | |
| 1 | require Plug.Conn | |
| 2 | require Logger | |
| 3 | ||
| 4 | @extent_header "#dataset: GeoCSV 2.0 | |
| 5 | #delimiter: | | |
| 6 | #field_unit: unitless|unitless|unitless|unitless|unitless|hertz|ISO_8601|ISO_8601|ISO_8601|unitless|unitless | |
| 7 | #field_type: string|string|string|string|string|float|datetime|datetime|datetime|integer|string | |
| 8 | Network|Station|Location|Channel|Quality|SampleRate|Earliest|Latest|Updated|TimeSpans|Restriction" | |
| 9 | ||
| 10 | def extent(assigns) do | |
| 11 | [ | |
| 12 | format_header(@extent_header, assigns) | |
| 13 | 2 | | Enum.map(assigns.availabilities, fn d -> |
| 14 | 4 | format_datasource(d, assigns.fdsn_parameters, :extent) |
| 15 | end) | |
| 16 | ] | |
| 17 | 2 | |> Enum.join("\n") |
| 18 | end | |
| 19 | ||
| 20 | @query_header "#dataset: GeoCSV 2.0 | |
| 21 | #delimiter: | | |
| 22 | #field_unit: unitless|unitless|unitless|unitless|unitless|hertz|ISO_8601|ISO_8601|ISO_8601 | |
| 23 | #field_type: string|string|string|string|string|float|datetime|datetime|datetime | |
| 24 | Network|Station|Location|Channel|Quality|SampleRate|Earliest|Latest|Updated" | |
| 25 | def query(assigns) do | |
| 26 | [ | |
| 27 | format_header(@query_header, assigns) | |
| 28 | 4 | | Enum.map(assigns.availabilities, fn d -> |
| 29 | 6 | Enum.map(d.timespans, fn ts -> |
| 30 | 22 | %{d | timespans: [ts], earliest: List.first(ts), latest: List.last(ts)} |
| 31 | end) | |
| 32 | 6 | |> Enum.map(fn splitd -> |
| 33 | 22 | format_datasource( |
| 34 | splitd, | |
| 35 | 22 | assigns.fdsn_parameters, |
| 36 | :query | |
| 37 | ) | |
| 38 | end) | |
| 39 | end) | |
| 40 | ] | |
| 41 | |> List.flatten() | |
| 42 | 4 | |> Enum.join("\n") |
| 43 | end | |
| 44 | ||
| 45 | defp format_header(headers, %{fdsn_parameters: fdsn_params}) do | |
| 46 | 6 | headers = |
| 47 | 6 | if :samplerate in fdsn_params.merge do |
| 48 | String.replace(headers, "hertz|", "") | |
| 49 | |> String.replace("float|", "") | |
| 50 | 3 | |> String.replace("SampleRate|", "") |
| 51 | else | |
| 52 | 3 | headers |
| 53 | end | |
| 54 | ||
| 55 | 6 | headers = |
| 56 | 6 | if :quality in fdsn_params.merge do |
| 57 | String.replace(headers, "unitless|", "", global: false) | |
| 58 | |> String.replace("string|", "", global: false) | |
| 59 | 2 | |> String.replace("Quality|", "") |
| 60 | else | |
| 61 | 4 | headers |
| 62 | end | |
| 63 | ||
| 64 | 6 | if :latestupdate in fdsn_params.show do |
| 65 | 2 | headers |
| 66 | else | |
| 67 | String.replace(headers, "|ISO_8601", "", global: false) | |
| 68 | |> String.replace("|datetime", "", global: false) | |
| 69 | 4 | |> String.replace("|Updated", "") |
| 70 | end | |
| 71 | end | |
| 72 | ||
| 73 | # Network|Station|Location|Channel|Quality|SampleRate|Earliest|Latest|Updated|TimeSpans|Restriction | |
| 74 | defp format_datasource(d, fdsn_params, method) do | |
| 75 | 26 | attr_list = |
| 76 | 26 | if method == :extent do |
| 77 | 4 | [d.timespancount, d.restriction] |
| 78 | else | |
| 79 | [] | |
| 80 | end | |
| 81 | ||
| 82 | 26 | attr_list = |
| 83 | 26 | if :latestupdate in fdsn_params.show do |
| 84 | [ | |
| 85 | 4 | DateTime.to_iso8601(d.earliest), |
| 86 | 4 | DateTime.to_iso8601(d.latest), |
| 87 | 4 | DateTime.to_iso8601(d.updated) | attr_list |
| 88 | ] | |
| 89 | else | |
| 90 | [ | |
| 91 | 22 | DateTime.to_iso8601(d.earliest), |
| 92 | 22 | DateTime.to_iso8601(d.latest) |
| 93 | | attr_list | |
| 94 | ] | |
| 95 | end | |
| 96 | ||
| 97 | 26 | attr_list = |
| 98 | 26 | if :samplerate in fdsn_params.merge do |
| 99 | 14 | attr_list |
| 100 | else | |
| 101 | 12 | [d.samplerate | attr_list] |
| 102 | end | |
| 103 | ||
| 104 | 26 | attr_list = |
| 105 | 26 | if :quality in fdsn_params.merge do |
| 106 | 12 | attr_list |
| 107 | else | |
| 108 | 14 | [d.quality | attr_list] |
| 109 | end | |
| 110 | ||
| 111 | 26 | [d.network, d.station, d.location, d.channel | attr_list] |
| 112 | 26 | |> Enum.join("|") |
| 113 | end | |
| 114 | end |
| Line | Hits | Source |
|---|---|---|
defmodule AvyWeb.AvyJSON do
  @moduledoc """
  JSON renderers for the availability service: `version`, the FDSN
  `extent`/`query` documents, and a Grafana-oriented `statetimeline` format.
  """
  require Plug.Conn
  require Logger

  # Service/version identification, read from the application spec and env.
  def version(_) do
    %{
      application: Application.spec(:avy)[:app],
      version: Application.spec(:avy)[:vsn],
      commit: System.get_env("SENTRY_RELEASE", "unspecified")
    }
  end

  # FDSN availability "extent" JSON document.
  def extent(assigns) do
    %{
      created: DateTime.utc_now(),
      version: 1.0,
      datasources: assigns.availabilities
    }
  end

  # FDSN availability "query" JSON document.
  def query(assigns) do
    %{
      created: DateTime.utc_now(),
      version: 1.0,
      datasources: assigns.availabilities
    }
  end

  @doc """
  This view is meant to output a json format for grafana "state timeline" graphs.
  It is very different from the FDSN output and therefore needs a lot of manipulations.

  The final structure is a list of maps with the key "Timestamps" as a date,
  and all FDSN source identifiers in the response as keys, with default value "".

  At each timestamp, we want to put for each FDSN source identifier a quality code or
  an empty string if there is no data.

      [
        %{
          "Timestamps" => ~U[2026-01-01 10:00:00Z],
          "FR_CIEL_00_HNZ" => "D"
        }
      ]

  Returns `[]` when there are no availabilities.
  """
  # Guard clause: with no availabilities there is no timeline to build
  # (the body below would crash on `List.first/1` returning nil).
  def statetimeline(%{availabilities: []}), do: []

  def statetimeline(assigns) do
    # First get all FDSN identifiers in the availabilities and build the default map of identifiers.
    fdsn_identifiers =
      assigns.availabilities
      |> Enum.reduce([], fn a, acc ->
        [availability_to_fdsn_source_identifier(a) | acc]
      end)
      |> Enum.sort()
      |> Enum.dedup()
      |> Map.from_keys("")

    # Then build a list of timestamps
    # All elements in the timespans list must result in a new entry in the list
    # In the same time, we add the correct FDSN identifier and quality in the map.
    # The list needs to be sorted by DateTime
    timeline_list =
      assigns.availabilities
      |> Enum.reduce([], fn a, acc ->
        [
          elem(
            Enum.reduce(
              a.timespans,
              {availability_to_fdsn_source_identifier(a), a.quality, []},
              fn [ts_start, ts_end], {fdsnid, qual, acc} ->
                # Each timespan contributes two events: quality at its start,
                # back to "" (no data) at its end.
                {fdsnid, qual,
                 [
                   %{fdsnid => "", "Timestamps" => ts_end},
                   %{fdsnid => qual, "Timestamps" => ts_start}
                 ] ++ acc}
              end
            ),
            2
          )
        ] ++ acc
      end)
      |> List.flatten()
      |> Enum.sort_by(&Map.fetch!(&1, "Timestamps"), DateTime)

    Logger.debug(inspect(timeline_list, pretty: true))

    # Then we need to fill the struct with all other missing fdsn identifiers.
    # For each timeline, the quality value must be the same as in the previous
    # timeline. Therefore, we need to know which it was.
    initial_tl = Map.merge(fdsn_identifiers, List.first(timeline_list))

    Enum.reduce(timeline_list, {initial_tl, [initial_tl]}, fn tl, {prev_tl, acc} ->
      full_tl = Map.merge(prev_tl, tl)

      # Events sharing a timestamp are collapsed into one timeline entry.
      if Map.fetch!(tl, "Timestamps") == Map.fetch!(prev_tl, "Timestamps") do
        {full_tl, List.replace_at(acc, 0, full_tl)}
      else
        {full_tl, [full_tl | acc]}
      end
    end)
    |> elem(1)
    |> Enum.reverse()
  end

  defp availability_to_fdsn_source_identifier(a) do
    "#{a.network}_#{a.station}_#{a.location}_#{a.channel}"
  end
end
| Line | Hits | Source |
|---|---|---|
defmodule AvyWeb.AvyREQUEST do
  @moduledoc """
  Renderer for the FDSN "request" text format: one
  `NET STA LOC CHA START END` line per datasource (or per timespan
  for the query action).
  """
  require Plug.Conn
  require Logger

  @doc """
  Request output format
  """
  def extent(assigns) do
    Enum.map_join(assigns.availabilities, "\n", &format_datasource/1)
  end

  # Expand each datasource into one record per timespan, then format each.
  def query(assigns) do
    assigns.availabilities
    |> Enum.flat_map(fn ds ->
      Enum.map(ds.timespans, fn ts ->
        %{ds | earliest: List.first(ts), latest: List.last(ts)}
      end)
    end)
    |> Enum.map_join("\n", &format_datasource/1)
  end

  # Single space-separated request line for one datasource.
  defp format_datasource(ds) do
    fields = [
      ds.network,
      ds.station,
      ds.location,
      ds.channel,
      DateTime.to_iso8601(ds.earliest),
      DateTime.to_iso8601(ds.latest)
    ]

    Enum.join(fields, " ")
  end
end
| Line | Hits | Source |
|---|---|---|
defmodule AvyWeb.AvyTEXT do
  @moduledoc """
  Text format renderer
  """
  require Plug.Conn
  require Logger

  # Column titles for /extent; the date columns are padded so the header
  # lines up with the fixed-width values emitted by format_datasource/3.
  @extent_header [
    "Network",
    "Station",
    "Location",
    "Channel",
    "Quality",
    "SampleRate",
    String.pad_trailing("Earliest", 27),
    String.pad_trailing("Latest", 27),
    String.pad_trailing("Updated", 20),
    "TimeSpans",
    "Restriction"
  ]

  @doc """
  Text output
  """

  def version(_) do
    "#{Application.spec(:avy)[:vsn]}"
  end

  # One output line per datasource, preceded by the (filtered) header.
  def extent(assigns) do
    headers = format_header(@extent_header, assigns)

    [
      headers
      | Enum.map(assigns.availabilities, fn d ->
          format_datasource(d, assigns.fdsn_parameters, :extent)
        end)
    ]
    |> Enum.join("\n")
  end

  @query_header [
    "Network",
    "Station",
    "Location",
    "Channel",
    "Quality",
    "SampleRate",
    String.pad_trailing("Earliest", 27),
    String.pad_trailing("Latest", 27),
    "Updated"
  ]
  # One output line per timespan: each datasource is split into one record
  # per [start, end] pair before formatting.
  def query(assigns) do
    headers = format_header(@query_header, assigns)

    [
      headers
      | Enum.map(assigns.availabilities, fn d ->
          Enum.map(d.timespans, fn ts ->
            %{d | timespans: [ts], earliest: List.first(ts), latest: List.last(ts)}
          end)
          |> Enum.map(fn splitd ->
            format_datasource(
              splitd,
              assigns.fdsn_parameters,
              :query
            )
          end)
        end)
    ]
    |> List.flatten()
    |> Enum.join("\n")
  end

  # Removes header columns that format_datasource/3 will also omit,
  # based on the user's merge/show options.
  defp format_header(headers, %{fdsn_parameters: fdsn_params} = _) do
    headers =
      if :samplerate in fdsn_params.merge do
        Enum.reject(headers, &String.starts_with?(&1, "SampleRate"))
      else
        headers
      end

    headers =
      if :quality in fdsn_params.merge do
        Enum.reject(headers, &String.starts_with?(&1, "Quality"))
      else
        headers
      end

    headers =
      if :latestupdate in fdsn_params.show do
        headers
      else
        Enum.reject(headers, &String.starts_with?(&1, "Updated"))
      end

    "#" <> Enum.join(headers, " ")
  end

  # Builds the attribute list right-to-left by prepending, so the final
  # column order matches the header lists above. Fixed-width padding keeps
  # the columns aligned with the header.
  defp format_datasource(d, fdsn_params, method) do
    attr_list =
      if method == :extent do
        [d.timespancount |> Integer.to_string() |> String.pad_trailing(9), d.restriction]
      else
        []
      end

    attr_list =
      if :latestupdate in fdsn_params.show do
        [
          DateTime.to_iso8601(d.earliest),
          DateTime.to_iso8601(d.latest),
          DateTime.to_iso8601(d.updated) | attr_list
        ]
      else
        [
          DateTime.to_iso8601(d.earliest),
          DateTime.to_iso8601(d.latest)
          | attr_list
        ]
      end

    attr_list =
      if :samplerate in fdsn_params.merge do
        attr_list
      else
        [
          # :xsd keeps a canonical decimal rendering (e.g. "100.0").
          d.samplerate
          |> Decimal.to_string(:xsd)
          |> String.pad_trailing(10)
          | attr_list
        ]
      end

    attr_list =
      if :quality in fdsn_params.merge do
        attr_list
      else
        [String.pad_trailing(d.quality, 7) | attr_list]
      end

    [
      String.pad_trailing(d.network, 8),
      String.pad_trailing(d.station, 7),
      String.pad_trailing(d.location, 8),
      String.pad_trailing(d.channel, 7) | attr_list
    ]
    |> Enum.join(" ")
  end
end
| Line | Hits | Source |
|---|---|---|
defmodule AvyWeb.AvyController do
  @moduledoc """
  Manages a user request.

  Exposes the FDSN availability actions (`version`, `extent`, `query`),
  a Grafana-oriented `statetimeline` variant, and their OpenApi operations.
  """
  alias Avy.Repo
  require Logger

  use AvyWeb, :controller
  alias FdsnPlugs.PublicationVersion
  use OpenApiSpex.ControllerSpecs

  # Reject /extent-only forbidden options, normalize the output format,
  # then let AvyWeb.Plugs.Filters build the DB filters from the request.
  plug :forbidden_params_extent, ["mergegaps"] when action in [:extent]
  plug :forbidden_params_extent, ["show"] when action in [:extent]
  plug :format
  plug AvyWeb.Plugs.Filters

  ### Define the OpenApi spec
  operation :version,
    summary: "Get the version number of the service"

  operation :extent,
    summary: "Query the datacenter for available waveform data.",
    description: "Returns only the earliest and latest data.",
    parameters: Avy.ApiSpec.parameters_spec(),
    responses: Avy.ApiSpec.responses_spec()

  operation :extent_post,
    summary: "Query the datacenter for available waveform data.",
    description: "Returns only the earliest and latest data.",
    request_body: Avy.ApiSpec.request_body_spec(),
    responses: Avy.ApiSpec.responses_spec()

  operation :query,
    summary: "Query the datacenter for available waveform data.",
    description: "Returns details of all segments of data",
    parameters: Avy.ApiSpec.query_parameters_spec(),
    responses: Avy.ApiSpec.responses_spec()

  operation :query_post,
    summary: "Query the datacenter for available waveform data",
    request_body: Avy.ApiSpec.request_body_spec(),
    responses: Avy.ApiSpec.responses_spec()

  ### End of OpenApi definition

  def version(conn, _params) do
    render(conn, :version)
  end

  # It seems that I need to define this placeholder function in order to be able to define a specific
  # operation for POST extent in OpenApi
  # NOTE(review): this delegates POST /extent to query/2, not extent/2 —
  # coverage shows it is never exercised; confirm against the router whether
  # the query rendering path is really intended for POST extent requests.
  def extent_post(%Plug.Conn{method: "POST"} = conn, params), do: query(conn, params)

  def extent(conn, _params) do
    case manage_request(conn, :extent) do
      {:ok, []} ->
        # No matching data: reply with the user-requested "nodata" status code.
        conn
        |> send_resp(conn.assigns.fdsn_parameters.nodata, "")
        |> halt

      {:ok, ds} ->
        # /extent never shows individual timespans, only earliest/latest bounds.
        datasources = Enum.map(ds, &Map.drop(&1, [:timespans]))

        conn
        |> assign(:availabilities, datasources)
        |> render(:extent)

      {:too_much_data, msg} ->
        conn |> send_resp(413, msg) |> halt()
    end
  end

  @doc """
  This controller exists to reformat the output suitable for a grafana state timeline

  All the formatting logic takes place in the render function AvyJson.statetimeline/1
  """
  def statetimeline(conn, opts) do
    conn
    |> put_format("json")
    |> put_resp_content_type("application/json")
    |> query(opts)
  end

  def query(conn, _params) do
    case manage_request(conn, :query) do
      {:ok, []} ->
        conn
        |> send_resp(conn.assigns.fdsn_parameters.nodata, "")
        |> halt

      {:ok, ds} ->
        # /query shows timespans but not the aggregated timespan count.
        datasources = Enum.map(ds, &Map.drop(&1, [:timespancount]))

        Logger.info("Fetched #{length(datasources)} traces")

        conn
        |> assign(:availabilities, datasources)
        |> render()

      {:too_much_data, msg} ->
        conn |> send_resp(413, msg) |> halt()
    end
  end

  @doc """
  This function is called on a /extent request.
  It will fetch all the contents corresponding to the filters that were set up by
  AvyWeb.Plugs.Filters, in the assigns of the connection plug (`conn.assigns.filters`).
  For each filter, the database request is done asynchronously in parallel.
  Deactivate the timeout, as the DB pool has its own already, and some requests can get quite long.

  For each entry, we call the post_process function in order to build a list of datasources and merge the contents of the same properties.

  Finally, the datasources are put in the conn.assigns in order for the renderer to access them in the `:availabilities` keyword.
  """
  @spec manage_request(Plug.Conn.t(), atom) :: {:ok, list} | {:too_much_data, String.t()}
  def manage_request(conn, method) do
    datasources =
      Task.async_stream(conn.assigns.filters, Repo, :get_contents, [], timeout: :infinity)
      |> Enum.flat_map(fn {:ok, f} -> f end)

    # TODO Improve evaluation of too much data
    if length(datasources) > Application.get_env(:avy, :max_sids) do
      {:too_much_data,
       "Request has #{length(datasources)} epochs. Try to restrict the source identifiers selection and/or select smaller timespans."}
    else
      datasources =
        datasources
        |> post_process(
          method,
          conn.assigns.fdsn_parameters.merge,
          # mergegaps scaled by 10^9 — presumably seconds to nanoseconds,
          # matching the unix-nanosecond timespan bounds below. TODO confirm.
          conn.assigns.fdsn_parameters.mergegaps * 10 ** 9
        )
        |> Enum.sort_by(&{&1.network, &1.station, &1.location, &1.channel, &1.earliest})
        |> orderby(conn.assigns.fdsn_parameters.orderby, method)
        |> to_gaps(Map.has_key?(conn.params, "showgaps"))

      {:ok, datasources}
    end
  end

  @doc """

  This function will convert a list of ordered datasources to
  show gaps.

  First, group all datasources by the full keys.
  Then for each list:
  - keep the previous end date. (Start first iteration at second element)
  - on the current element:
    - set the start time at previous end date
    - set then end time at current start.
    - put the result at the head of the list we are building
  - we obtain a reversed list, so at the end we need to reverse it.
  """

  @spec to_gaps(list(), boolean) :: list()
  def to_gaps(datasources, false), do: datasources

  def to_gaps(datasources, true) do
    Logger.info("Computing gaps")

    datasources
    |> Enum.group_by(group_by_keys([], :query))
    |> Enum.map(fn {_, contents} ->
      Enum.map(contents, fn content ->
        Logger.debug(inspect(content, pretty: true))
        [[_, init_end] | tail] = content.timespans

        # Each gap runs from the previous timespan's end to the next one's start.
        %{prev_end: _, list: gaps} =
          Enum.reduce(tail, %{prev_end: init_end, list: []}, fn [tss, tse], acc ->
            %{
              prev_end: tse,
              list: [[acc.prev_end, tss] | acc.list]
            }
          end)

        Logger.debug(inspect(gaps))
        %{content | timespans: Enum.reverse(gaps)}
      end)
    end)
    |> List.flatten()
    |> Enum.reject(fn a -> a.timespans == [] end)
  end

  #### Private functions for query requests

  # post_process/4 contains all the logic to handle the contents retrieved in the Repo.
  # Convert the structure returned by the Repo in a usable list of datasources, fit for the views
  # Post process the structure returned by the Repo call in order to:
  # - Group all elements by their discriminating properties (including restriction status)
  # - For each group,
  #   - clean the unwanted timespans in contents
  #   - merge consecutive timespans either by using the mergegap provided in the request, or by the samplerate
  # With the result, prepare a structure suitable for the views:
  # - compute the number of timespans
  # - convert publication version to quality code
  #
  @spec post_process(list, atom, list, integer) :: list
  defp post_process(lst, method, merge_properties, mergegap) do
    lst
    # Group all the contents by a key, depending on the user request
    |> Enum.group_by(group_by_keys(merge_properties, method))
    # With the grouped contents, do the post processing.
    |> Task.async_stream(fn {k, contents} ->
      Logger.info("Processing #{length(contents)} contents for #{inspect(k)}")
      contents = filter_outofbound_timespans(contents, List.first(contents).earliest)

      if length(contents) > 0 do
        ref_content = List.first(contents)

        ref_samplerate =
          Enum.min_by(contents, & &1.samplerate) |> Map.fetch!(:samplerate) |> Decimal.to_float()

        Logger.debug("Min samplerate: #{ref_samplerate}")

        # When we merge contiguous timespans based on the samplerate
        mergegap =
          if mergegap <= 0 do
            # When no mergegaps are indicated
            10 ** 9 / ref_samplerate
          else
            mergegap
          end

        Logger.debug("Merging on gaps lower than #{mergegap}")

        merged_timespans =
          Enum.flat_map(contents, & &1.timespans)
          |> merge_contiguous_timespans(mergegap)
          |> Enum.map(fn x ->
            [
              Map.fetch!(x, "start") |> DateTime.from_unix!(:nanosecond),
              Map.fetch!(x, "end") |> DateTime.from_unix!(:nanosecond)
            ]
          end)
          |> snip_timespans(ref_content.earliest, List.last(contents).latest)

        %{
          network: ref_content.network,
          station: ref_content.station,
          location: ref_content.location,
          channel: ref_content.channel,
          quality: PublicationVersion.pubversion_to_quality!(ref_content.quality),
          samplerate: ref_content.samplerate,
          earliest: contents |> List.first() |> Map.fetch!(:earliest),
          latest: contents |> List.last() |> Map.fetch!(:latest),
          updated: Enum.max_by(contents, & &1.updated, Date) |> Map.fetch!(:updated),
          restriction:
            case ref_content.restriction do
              "O" -> "OPEN"
              _ -> "RESTRICTED"
            end,
          timespancount: Enum.reduce(contents, 0, fn c, acc -> acc + length(c.timespans) end),
          timespans: merged_timespans
        }
      else
        nil
      end
    end)
    |> Enum.into([], fn {:ok, res} -> res end)
    |> Enum.reject(&is_nil(&1))
  end

  #
  # After grouping all contents by epochs,
  # In the timespans, there can be some contents that are outside of the request boundaries.
  # This function will remove contents that are completely out of bound
  # And if a content is left empty, we remove it from the list.
  defp filter_outofbound_timespans(contents, lowerbound) do
    contents
    |> Enum.map(fn content ->
      lower_bound = DateTime.to_unix(lowerbound, :nanosecond)

      %{
        content
        | timespans:
            Enum.reject(content.timespans, fn ts ->
              # Returns true (reject) only when the span ends before the
              # lower bound; otherwise the `if` yields nil, which is falsy.
              if ts["end"] <= lower_bound do
                Logger.debug(
                  "Rejecting #{DateTime.from_unix!(ts["end"], :nanosecond)} < #{content.earliest}"
                )

                true
              end
            end)
      }
    end)
    |> Enum.reject(fn content ->
      if content.timespans == [] do
        Logger.debug("Rejecting empty timespans in #{inspect(content)}")
        true
      end
    end)
  end

  # Clamps the first timespan's start to `earliest`, drops timespans that
  # start after `latest`, and clamps the last remaining end to `latest`.
  defp snip_timespans(timespans, earliest, latest) do
    Logger.debug("Snip #{inspect(timespans, pretty: true)}")
    Logger.debug("Over #{earliest} and #{latest}")

    [[ts_start, ts_end] | tail] = timespans

    snipped_timespans_at_start =
      if DateTime.before?(ts_start, earliest) do
        # TODO Add a test that reaches this branch
        [[earliest, ts_end] | tail]
      else
        timespans
      end

    [[ts_start, ts_end] | tail] =
      Enum.reverse(snipped_timespans_at_start)
      |> Enum.drop_while(fn [ts, _] -> DateTime.after?(ts, latest) end)

    if DateTime.after?(ts_end, latest) do
      [[ts_start, latest] | tail] |> Enum.reverse()
    else
      snipped_timespans_at_start
    end
  end

  # Folds the (ordered) timespans, merging a span into the previous one
  # when the gap between them is <= mergegap (unix nanoseconds).
  defp merge_contiguous_timespans(timespans, mergegap) do
    Enum.reduce(
      timespans,
      Enum.take(timespans, 1),
      fn timespan, acc ->
        [previous | tail] = acc

        if timespan["start"] - previous["end"] <= mergegap do
          Logger.debug(
            "Merging timespan #{previous["start"] |> DateTime.from_unix!(:nanosecond)} -> #{previous["end"] |> DateTime.from_unix!(:nanosecond)}"
          )

          Logger.debug(
            "and #{timespan["start"] |> DateTime.from_unix!(:nanosecond)} -> #{timespan["end"] |> DateTime.from_unix!(:nanosecond)}"
          )

          Logger.debug("with gap #{timespan["start"] - previous["end"]} <= #{mergegap}")

          [
            %{
              "start" => previous["start"],
              "end" => timespan["end"],
              "sample_rate" => previous["sample_rate"]
            }
            | tail
          ]
        else
          [timespan | acc]
        end
      end
    )
    |> Enum.reverse()
  end

  # Plug function: In /extent action, mergegaps and show are not allowed
  defp forbidden_params_extent(conn, [opt]) do
    Logger.debug(
      "Check for forbidden params for query whith #{inspect(conn.params, pretty: true)}"
    )

    case Map.fetch(conn.params, opt) do
      {:ok, _} ->
        send_resp(conn, 400, "Options #{opt} is only supported in the /query method.")
        |> halt

      :error ->
        conn
    end
  end

  # Returns the grouping function used by Enum.group_by/2: properties the
  # user asked to merge on are excluded from the grouping key.
  @spec group_by_keys(list(atom), :query | :extent) :: function
  defp group_by_keys(merge_properties, :extent) do
    cond do
      :quality in merge_properties and :samplerate in merge_properties ->
        fn a ->
          {a.network, a.station, a.location, a.channel, a.restriction}
        end

      :samplerate in merge_properties ->
        fn a ->
          {a.network, a.station, a.location, a.channel, a.quality, a.restriction}
        end

      :quality in merge_properties ->
        fn a ->
          {a.network, a.station, a.location, a.channel, a.samplerate, a.restriction}
        end

      # Default behaviour
      true ->
        fn a ->
          {a.network, a.station, a.location, a.channel, a.quality, a.samplerate, a.restriction}
        end
    end
  end

  defp group_by_keys(merge_properties, :query) do
    cond do
      :quality in merge_properties and :samplerate in merge_properties ->
        fn a ->
          {a.network, a.station, a.location, a.channel}
        end

      :samplerate in merge_properties ->
        fn a ->
          {a.network, a.station, a.location, a.channel, a.quality}
        end

      :quality in merge_properties ->
        fn a ->
          {a.network, a.station, a.location, a.channel, a.samplerate}
        end

      # Default behaviour
      true ->
        fn a ->
          {a.network, a.station, a.location, a.channel, a.quality, a.samplerate}
        end
    end
  end

  # The API uses the "format" parameter on order to set the output format.
  # Translate this to the _format param for Phoenix magic to take place.
  # Also force the response content type header to text
  defp format(conn, _) do
    with {:ok, _} <- Map.fetch(conn.assigns, :fdsn_parameters),
         {:ok, format} <- Map.fetch(conn.assigns.fdsn_parameters, :format) do
      case format do
        :json ->
          conn
          |> Phoenix.Controller.put_format("json")
          |> Plug.Conn.put_resp_content_type("application/json")

        :geocsv ->
          conn
          |> Phoenix.Controller.put_format("geocsv")
          |> Plug.Conn.put_resp_content_type("text/csv")

        :request ->
          conn
          |> Phoenix.Controller.put_format("request")
          |> Plug.Conn.put_resp_content_type("text/plain")

        _ ->
          conn
          |> Phoenix.Controller.put_format("text")
          |> Plug.Conn.put_resp_content_type("text/plain")
      end
    else
      # No parsed FDSN parameters: fall back to plain text.
      _ ->
        conn
        |> Phoenix.Controller.put_format("text")
        |> Plug.Conn.put_resp_content_type("text/plain")
    end
  end

  # Optional re-ordering after the canonical NSLC/earliest sort; unknown
  # values leave the order untouched.
  defp orderby(ds, orderby, :extent) do
    Logger.debug("Reorder datasources by #{orderby}")

    case orderby do
      :latestupdate -> Enum.sort_by(ds, & &1.updated, {:asc, DateTime})
      :latestupdate_desc -> Enum.sort_by(ds, & &1.updated, {:desc, DateTime})
      :timespancount -> Enum.sort_by(ds, & &1.timespancount, :asc)
      :timespancount_desc -> Enum.sort_by(ds, & &1.timespancount, :desc)
      _ -> ds
    end
  end

  defp orderby(ds, orderby, :query) do
    Logger.debug("Reorder datasources by #{orderby}")

    case orderby do
      :latestupdate -> Enum.sort_by(ds, & &1.updated, {:asc, DateTime})
      :latestupdate_desc -> Enum.sort_by(ds, & &1.updated, {:desc, DateTime})
      _ -> ds
    end
  end
end
| Line | Hits | Source |
|---|---|---|
defmodule AvyWeb.ErrorJSON do
  @moduledoc """
  This module is invoked by your endpoint in case of errors on JSON requests.

  See config/config.exs.
  """

  # If you want to customize a particular status code,
  # you may add your own clauses, such as:
  #
  #     def render("500.json", _assigns) do
  #       %{errors: %{detail: "Internal Server Error"}}
  #     end

  # By default, Phoenix derives the status message from the template
  # name. For example, "404.json" becomes "Not Found".
  def render(template, _assigns) do
    detail = Phoenix.Controller.status_message_from_template(template)
    %{errors: %{detail: detail}}
  end
end
| Line | Hits | Source |
|---|---|---|
defmodule AvyWeb.Endpoint do
  use Sentry.PlugCapture
  use Phoenix.Endpoint, otp_app: :avy

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  @session_options [
    store: :cookie,
    key: "_avy_key",
    signing_salt: "xyXumY7h",
    same_site: "Lax"
  ]

  socket "/live", Phoenix.LiveView.Socket,
    websocket: [connect_info: [session: @session_options]],
    longpoll: [connect_info: [session: @session_options]]

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :avy,
    gzip: true,
    content_types: %{"application.wadl" => "text/xml"},
    only: AvyWeb.static_paths()

  # Answer to /__health requests early, before the router is reached.
  plug AvyWeb.Plug.HealthCheck

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    plug Phoenix.CodeReloader
    plug Phoenix.Ecto.CheckRepoStatus, otp_app: :avy
  end

  plug Phoenix.LiveDashboard.RequestLogger,
    param_key: "request_logger",
    cookie_key: "request_logger"

  plug Plug.RequestId
  # Log level is computed per request by log_level/1 below, so probe
  # endpoints can be excluded from the request logs.
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint], log: {__MODULE__, :log_level, []}

  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Sentry.PlugContext
  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options

  plug AvyWeb.Plug.TrafficDrain
  plug AvyWeb.Router
  #
  # Return `false` (no logging) for the __health and __traffic probe endpoints
  def log_level(%{path_info: ["__health" | _]}), do: false
  def log_level(%{path_info: ["__traffic" | _]}), do: false
  # Default to the normal loglevel
  def log_level(_), do: :info
end
| Line | Hits | Source |
|---|---|---|
defmodule AvyWeb.Plugs.Filters do
  @moduledoc """
  A plug to add a Filter struct to the connection assigns.
  The filter is made of the source identifier + includerestricted + quality
  NOTE: this is maybe redundant with FdsnPlugs.RequestParams but is more suited to filtering by Avy.Repo
  Refactoring may be.
  """
  import Plug.Conn

  def init(opts), do: opts

  # Assigns one Avy.Filter per requested source identifier, or an empty
  # list when no FDSN parameters were parsed onto the connection.
  def call(conn, _opts) do
    assign(conn, :filters, build_filters(conn))
  end

  defp build_filters(%{assigns: %{fdsn_parameters: params}}) do
    Enum.map(params.source_identifiers, fn identifier ->
      Avy.Filter.from_source_identifier(
        identifier,
        params.includerestricted,
        params.quality
      )
    end)
  end

  defp build_filters(_conn), do: []
end
| Line | Hits | Source |
|---|---|---|
defmodule AvyWeb.Plug.HealthCheck do
  @moduledoc """
  Answers health-check probes on `/__health` with an empty 200 response,
  halting the pipeline so no later plug (including the router) runs.
  All other requests pass through untouched.
  """
  import Plug.Conn

  def init(opts), do: opts

  # If the request path matches "/__health", we return a 200 response.
  # (Note the double underscore — the path is /__health, not /_health.)
  def call(%Plug.Conn{request_path: "/__health"} = conn, _opts) do
    conn
    |> send_resp(
      200,
      ""
    )
    |> halt()
  end

  # If the request path is anything else, we pass the connection along.
  def call(conn, _opts), do: conn
end
| Line | Hits | Source |
|---|---|---|
defmodule AvyWeb.Plug.TrafficDrain do
  @moduledoc """
  Plug for handling Kubernetes readinessProbe.

  Plug starts responding with 503 - Service Unavailable from `/__traffic`, when traffic is being drained.
  Otherwise we respond with 200 - OK.
  """

  import Plug.Conn

  @behaviour Plug

  @impl true
  def init(opts), do: opts

  # Probe requests: pick status/body from the graceful-stop state, then
  # answer and halt the pipeline.
  @impl true
  def call(%Plug.Conn{path_info: ["__traffic"]} = conn, _opts) do
    {status, body} =
      case GracefulStop.get_status() do
        :stopping -> {:service_unavailable, "Draining"}
        :running -> {:ok, "Serving"}
      end

    conn
    |> put_resp_content_type("text/plain")
    |> send_resp(status, body)
    |> halt()
  end

  # Every other request continues through the pipeline unchanged.
  @impl true
  def call(conn, _opts) do
    conn
  end
end
| Line | Hits | Source |
|---|---|---|
defmodule AvyWeb.Router do
  use AvyWeb, :router

  # Pipeline for the FDSN web-service routes: negotiates json/text and
  # runs FdsnAvailabilityPlugs (presumably FDSN parameter parsing —
  # confirm in that plug's module).
  pipeline :fdsn do
    plug :accepts, ["json", "text"]

    plug FdsnAvailabilityPlugs
  end

  # Pipeline that makes the OpenAPI spec available to RenderSpec below.
  pipeline :openapi do
    plug OpenApiSpex.Plug.PutApiSpec, module: Avy.ApiSpec
  end

  # Standard HTML pipeline, used only for the SwaggerUI page.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    # plug :fetch_live_flash
    plug :put_root_layout, html: {AvyWeb.Layouts, :root}
    plug :protect_from_forgery
    plug :put_secure_browser_headers
  end

  # NOTE: this route bypasses all pipelines.
  get "/version", AvyWeb.AvyController, :version

  # FDSN availability endpoints; /query accepts both GET and POST.
  scope "/", AvyWeb do
    pipe_through :fdsn
    get "/extent", AvyController, :extent
    post "/extent", AvyController, :extent_post
    get "/query", AvyController, :query
    post "/query", AvyController, :query
    get "/statetimeline", AvyController, :statetimeline
  end

  scope "/" do
    pipe_through :browser

    # To serve the UI, a static path has to be given at compile time.
    # I declare here the official FDSN location it should work in
    # most cases.
    get "/", OpenApiSpex.Plug.SwaggerUI, path: Application.compile_env(:avy, :open_api_path)
  end

  # Raw OpenAPI document (JSON) for tooling.
  scope "/" do
    pipe_through :openapi
    get "/openapi", OpenApiSpex.Plug.RenderSpec, []
  end

  # Enable LiveDashboard and Swoosh mailbox preview in development
  if Application.compile_env(:avy, :dev_routes) do
    # If you want to use the LiveDashboard in production, you should put
    # it behind authentication and allow only admins to access it.
    # If your application does not have an admins-only section yet,
    # you can use Plug.BasicAuth to set up some basic authentication
    # as long as you are also using SSL (which you should anyway).
    import Phoenix.LiveDashboard.Router

    scope "/dev" do
      pipe_through [:fetch_session, :protect_from_forgery]

      live_dashboard "/dashboard",
        metrics: AvyWeb.Telemetry,
        additional_pages: [
          flame_on: FlameOn.DashboardPage
        ]

      forward "/mailbox", Plug.Swoosh.MailboxPreview
    end
  end
end