Compare commits

10 Commits

Author SHA1 Message Date
fe55eefe36 configure release 2024-05-13 02:22:04 +02:00
b1605cef70 fix match classifier 2024-05-12 18:22:23 +02:00
ca75944558 update config 2024-05-12 02:46:01 +02:00
fb7679b028 update config 2024-05-12 02:44:05 +02:00
304adfe245 update config for prod endpoint 2024-05-12 02:00:47 +02:00
23d581c4cd add release config 2024-05-12 01:51:52 +02:00
c109a958ec fix store match 2024-05-12 01:28:51 +02:00
f8dbf17c73 update config for storage 2024-05-12 01:22:45 +02:00
3239f09cbd update config 2024-05-12 00:31:51 +02:00
918cf497da processing champion win rates 2024-05-12 00:22:40 +02:00
28 changed files with 507 additions and 23 deletions

40
.dockerignore Normal file
View File

@ -0,0 +1,40 @@
# This file excludes paths from the Docker build context.
#
# By default, Docker's build context includes all files (and folders) in the
# current directory. Even if a file isn't copied into the container it is still sent to
# the Docker daemon.
#
# There are multiple reasons to exclude files from the build context:
#
# 1. Prevent nested folders from being copied into the container (ex: exclude
# /assets/node_modules when copying /assets)
# 2. Reduce the size of the build context and improve build time (ex. /build, /deps, /doc)
# 3. Avoid sending files containing sensitive information
#
# More information on using .dockerignore is available here:
# https://docs.docker.com/engine/reference/builder/#dockerignore-file
.dockerignore
# Ignore git, but keep git HEAD and refs to access current commit hash if needed:
#
# $ cat .git/HEAD | awk '{print ".git/"$2}' | xargs cat
# d0b8727759e1e0e7aa3d41707d12376e373d5ecc
.git
!.git/HEAD
!.git/refs
# Common development/test artifacts
/cover/
/doc/
/test/
/tmp/
.elixir_ls
# Mix artifacts
*.ez
# Generated on crash by the VM
erl_crash.dump
# Static artifacts - These should be fetched and built inside the Docker image

101
Dockerfile Normal file
View File

@ -0,0 +1,101 @@
# Find eligible builder and runner images on Docker Hub. We use Ubuntu/Debian
# instead of Alpine to avoid DNS resolution issues in production.
#
# https://hub.docker.com/r/hexpm/elixir/tags?page=1&name=ubuntu
# https://hub.docker.com/_/ubuntu?tab=tags
#
# This file is based on these images:
#
# - https://hub.docker.com/r/hexpm/elixir/tags - for the build image
# - https://hub.docker.com/_/debian?tab=tags&page=1&name=bullseye-20231009-slim - for the release image
# - https://pkgs.org/ - resource for finding needed packages
# - Ex: hexpm/elixir:1.16.0-erlang-26.2.1-debian-bullseye-20231009-slim
#
ARG ELIXIR_VERSION=1.16.0
ARG OTP_VERSION=26.2.1
ARG DEBIAN_VERSION=bullseye-20231009-slim
ARG BUILDER_IMAGE="hexpm/elixir:${ELIXIR_VERSION}-erlang-${OTP_VERSION}-debian-${DEBIAN_VERSION}"
ARG RUNNER_IMAGE="debian:${DEBIAN_VERSION}"
FROM ${BUILDER_IMAGE} as builder
# install build dependencies
RUN apt-get update -y && apt-get install -y build-essential git \
&& apt-get clean && rm -f /var/lib/apt/lists/*_*
# prepare build dir
WORKDIR /app
# install hex + rebar
RUN mix local.hex --force && \
mix local.rebar --force
# set build ENV
ENV MIX_ENV="prod"
# install mix dependencies
COPY mix.exs mix.lock ./
COPY apps/ ./apps/
RUN mix deps.get --only $MIX_ENV
RUN mkdir config
# copy compile-time config files before we compile dependencies
# to ensure any relevant config change will trigger the dependencies
# to be re-compiled.
COPY config/config.exs config/${MIX_ENV}.exs config/
RUN mix deps.compile
COPY apps/lol_analytics_web/priv apps/lol_analytics_web/priv
COPY apps/lol_analytics_web/assets apps/lol_analytics_web/assets
COPY apps/lol_analytics/priv apps/lol_analytics/priv
COPY apps apps
COPY assets assets
# compile assets
RUN cd apps/lol_analytics_web && mix assets.deploy
# Compile the release
RUN mix compile
# Changes to config/runtime.exs don't require recompiling the code
COPY config/runtime.exs config/
COPY rel rel
RUN mix release
# start a new build stage so that the final image will only contain
# the compiled release and other runtime necessities
FROM ${RUNNER_IMAGE}
RUN apt-get update -y && \
apt-get install -y libstdc++6 openssl libncurses5 locales ca-certificates \
&& apt-get clean && rm -f /var/lib/apt/lists/*_*
# Set the locale
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
WORKDIR "/app"
RUN chown nobody /app
# set runner ENV
ENV MIX_ENV="prod"
# Only copy the final release from the build stage
COPY --from=builder --chown=nobody:root /app/_build/${MIX_ENV}/rel/prod ./
USER nobody
# If using an environment that doesn't automatically reap zombie processes, it is
# advised to add an init process such as tini via `apt-get install`
# above and add an entrypoint. See https://github.com/krallin/tini for details
# ENTRYPOINT ["/tini", "--"]
ENTRYPOINT [ "/app/bin/prod" ]

View File

@ -1,3 +1,3 @@
defmodule LolAnalytics.Analyzer.BaseAnalyzer do
defmodule LolAnalytics.Analyzer do
@callback analyze(:url, path :: String.t()) :: :ok
end
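
For reference, a minimal sketch of a module adopting the renamed LolAnalytics.Analyzer behaviour; the module name here is hypothetical and not part of this change:

defmodule MyApp.NoopAnalyzer do
  @behaviour LolAnalytics.Analyzer

  # Satisfies the callback above: take the :url tag plus a path and return :ok.
  @impl true
  def analyze(:url, _path), do: :ok
end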

View File

@ -1,9 +1,17 @@
defmodule LolAnalytics.Analyzer.ChampionAnalyzer do
alias Hex.HTTP
@behaviour LolAnalytics.Analyzer.BaseAnalyzer
@behaviour LolAnalytics.Analyzer
def analyze_all_matches do
Storage.MatchStorage.S3MatchStorage.list_files("ranked")
|> Enum.map(& &1.key)
|> Enum.each(fn path ->
LolAnalytics.Analyzer.ChampionAnalyzer.analyze(:url, "http://localhost:9000/ranked/#{path}")
end)
end
@doc """
iex> LolAnalytics.Analyzer.ChampionAnalyzer.analyze(:url, "http://localhost:9000/ranked/14.9.580.2108/EUW1_6923309745.json")
:ok
"""
@impl true
@spec analyze(atom(), String.t()) :: :ok
@ -13,19 +21,30 @@ defmodule LolAnalytics.Analyzer.ChampionAnalyzer do
:ok
end
@doc """
iex> LolAnalytics.Analyzer.ChampionAnalyzer.analyze(:url, "http://localhost:9000/ranked/14.9.580.2108/EUW1_6923309745.json")
"""
@impl true
@spec analyze(atom(), any()) :: :ok
def analyze(:data, data) do
decoded = Poison.decode!(data)
%{"info" => %{"participants" => participants}} = decoded
decoded_match = Poison.decode!(data, as: %LoLAPI.Model.MatchResponse{})
participants = decoded_match.info.participants
version = extract_game_version(decoded_match)
participants
|> Enum.each(fn %{"win" => win, "championId" => champion_id} ->
IO.inspect(%{win: win, champion_id: champion_id})
|> Enum.each(fn participant = %LoLAPI.Model.Participant{} ->
if participant.teamPosition != "" do
LolAnalytics.ChampionWinRate.ChampionWinRateRepo.add_champion_win_rate(
participant.championId,
version,
participant.teamPosition,
participant.win
)
end
end)
end
defp extract_game_version(game_data) do
game_data.info.gameVersion
|> String.split(".")
|> Enum.take(2)
|> Enum.join(".")
end
end
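
The private extract_game_version/1 above keeps only the major.minor component of gameVersion; a standalone sketch of the same pipeline:

# "14.9.580.2108" -> "14.9"
"14.9.580.2108"
|> String.split(".")
|> Enum.take(2)
|> Enum.join(".")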

View File

@ -0,0 +1,56 @@
defmodule LolAnalytics.ChampionWinRate.ChampionWinRateRepo do
import Ecto.Query
alias LolAnalytics.ChampionWinRate.ChampionWinRateSchema
alias LoLAnalytics.Repo
@spec add_champion_win_rate(
champion_id :: String.t(),
patch :: String.t(),
position :: String.t(),
win? :: boolean
) :: {:ok, ChampionWinRateSchema.t()} | {:error, Ecto.Changeset.t()}
def add_champion_win_rate(champion_id, patch, position, win?) do
Repo.transaction(fn ->
champion_query =
from cwr in LolAnalytics.ChampionWinRate.ChampionWinRateSchema,
where: cwr.champion_id == ^champion_id and cwr.position == ^position,
lock: "FOR UPDATE"
champion_data = Repo.one(champion_query)
case champion_data do
nil ->
ChampionWinRateSchema.changeset(%ChampionWinRateSchema{}, %{
champion_id: champion_id,
patch: patch,
total_games: 1,
position: position,
total_wins: if(win?, do: 1, else: 0)
})
|> Repo.insert!()
_ ->
total_games = champion_data.total_games + 1
total_wins = champion_data.total_wins + if win?, do: 1, else: 0
ChampionWinRateSchema.changeset(champion_data, %{
total_games: total_games,
total_wins: total_wins
})
|> Repo.update!()
end
end)
end
def list_win_rates() do
Repo.all(ChampionWinRateSchema)
end
def get_champion_win_rate(champion_id, patch) do
champion_query =
from cwr in LolAnalytics.ChampionWinRate.ChampionWinRateSchema,
where: cwr.champion_id == ^champion_id and cwr.patch == ^patch
Repo.one(champion_query)
end
end
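
A hypothetical call against the new repo module, assuming the application and its Repo are started; the champion id, patch and position are illustrative values:

alias LolAnalytics.ChampionWinRate.ChampionWinRateRepo

# Records one win for champion 222 on patch 14.9 in the BOTTOM position:
# the row is inserted on first sight and the counters are incremented afterwards.
{:ok, row} = ChampionWinRateRepo.add_champion_win_rate(222, "14.9", "BOTTOM", true)

# A win-rate percentage can then be derived from the accumulated counters.
row.total_wins / row.total_games * 100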

View File

@ -0,0 +1,20 @@
defmodule LolAnalytics.ChampionWinRate.ChampionWinRateSchema do
use Ecto.Schema
import Ecto.Changeset
schema "champion_win_rate" do
field :champion_id, :integer
field :total_games, :integer
field :patch, :string
field :position, :string
field :total_wins, :integer
timestamps()
end
def changeset(%__MODULE__{} = champion_win_rate, attrs) do
champion_win_rate
|> cast(attrs, [:champion_id, :total_games, :patch, :total_wins, :position])
|> validate_required([:champion_id, :total_games, :patch, :total_wins, :position])
end
end
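
The changeset requires all five fields, so partial attrs are rejected; a quick sketch with illustrative values:

alias LolAnalytics.ChampionWinRate.ChampionWinRateSchema

# Missing :total_games, :total_wins and :position, so the changeset is invalid.
changeset = ChampionWinRateSchema.changeset(%ChampionWinRateSchema{}, %{champion_id: 222, patch: "14.9"})
changeset.valid?
# => false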

View File

@ -0,0 +1,21 @@
defmodule LoLAnalytics.Repo.Migrations.ChampionWinRate do
use Ecto.Migration
def change do
create table("champion_win_rate") do
add :champion_id, :integer
add :patch, :string
add :position, :string
add :total_games, :integer
add :total_wins, :integer
timestamps()
end
alter table("match") do
add :win_rate_processed, :boolean, default: false
end
create index("champion_win_rate", [:champion_id], unique: false)
create index("champion_win_rate", [:champion_id, :patch], unique: true)
end
end

View File

@ -0,0 +1,45 @@
# This file excludes paths from the Docker build context.
#
# By default, Docker's build context includes all files (and folders) in the
# current directory. Even if a file isn't copied into the container it is still sent to
# the Docker daemon.
#
# There are multiple reasons to exclude files from the build context:
#
# 1. Prevent nested folders from being copied into the container (ex: exclude
# /assets/node_modules when copying /assets)
# 2. Reduce the size of the build context and improve build time (ex. /build, /deps, /doc)
# 3. Avoid sending files containing sensitive information
#
# More information on using .dockerignore is available here:
# https://docs.docker.com/engine/reference/builder/#dockerignore-file
.dockerignore
# Ignore git, but keep git HEAD and refs to access current commit hash if needed:
#
# $ cat .git/HEAD | awk '{print ".git/"$2}' | xargs cat
# d0b8727759e1e0e7aa3d41707d12376e373d5ecc
.git
!.git/HEAD
!.git/refs
# Common development/test artifacts
/cover/
/doc/
/test/
/tmp/
.elixir_ls
# Mix artifacts
/_build/
/deps/
*.ez
# Generated on crash by the VM
erl_crash.dump
# Static artifacts - These should be fetched and built inside the Docker image
/assets/node_modules/
/priv/static/assets/
/priv/static/cache_manifest.json

View File

@ -0,0 +1,96 @@
# Find eligible builder and runner images on Docker Hub. We use Ubuntu/Debian
# instead of Alpine to avoid DNS resolution issues in production.
#
# https://hub.docker.com/r/hexpm/elixir/tags?page=1&name=ubuntu
# https://hub.docker.com/_/ubuntu?tab=tags
#
# This file is based on these images:
#
# - https://hub.docker.com/r/hexpm/elixir/tags - for the build image
# - https://hub.docker.com/_/debian?tab=tags&page=1&name=bullseye-20231009-slim - for the release image
# - https://pkgs.org/ - resource for finding needed packages
# - Ex: hexpm/elixir:1.16.0-erlang-26.2.1-debian-bullseye-20231009-slim
#
ARG ELIXIR_VERSION=1.16.0
ARG OTP_VERSION=26.2.1
ARG DEBIAN_VERSION=bullseye-20231009-slim
ARG BUILDER_IMAGE="hexpm/elixir:${ELIXIR_VERSION}-erlang-${OTP_VERSION}-debian-${DEBIAN_VERSION}"
ARG RUNNER_IMAGE="debian:${DEBIAN_VERSION}"
FROM ${BUILDER_IMAGE} as builder
# install build dependencies
RUN apt-get update -y && apt-get install -y build-essential git \
&& apt-get clean && rm -f /var/lib/apt/lists/*_*
# prepare build dir
WORKDIR /app
# install hex + rebar
RUN mix local.hex --force && \
mix local.rebar --force
# set build ENV
ENV MIX_ENV="prod"
# install mix dependencies
COPY mix.exs ./
RUN mix deps.get --only $MIX_ENV
RUN mkdir config
# copy compile-time config files before we compile dependencies
# to ensure any relevant config change will trigger the dependencies
# to be re-compiled.
COPY config/config.exs config/${MIX_ENV}.exs config/runtime.exs config/
RUN mix deps.compile
COPY priv priv
COPY lib lib
COPY assets assets
# compile assets
RUN mix assets.deploy
# Compile the release
RUN mix compile
# Changes to config/runtime.exs don't require recompiling the code
COPY rel rel
RUN mix release
# start a new build stage so that the final image will only contain
# the compiled release and other runtime necessities
FROM ${RUNNER_IMAGE}
RUN apt-get update -y && \
apt-get install -y libstdc++6 openssl libncurses5 locales ca-certificates \
&& apt-get clean && rm -f /var/lib/apt/lists/*_*
# Set the locale
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
WORKDIR "/app"
RUN chown nobody /app
# set runner ENV
ENV MIX_ENV="prod"
# Only copy the final release from the build stage
COPY --from=builder --chown=nobody:root /app/_build/${MIX_ENV}/rel/prod ./
USER nobody
# If using an environment that doesn't automatically reap zombie processes, it is
# advised to add an init process such as tini via `apt-get install`
# above and add an entrypoint. See https://github.com/krallin/tini for details
# ENTRYPOINT ["/tini", "--"]
CMD ["/app/bin/server"]

View File

@ -0,0 +1,28 @@
defmodule LoLAnalyticsWeb.Release do
@moduledoc """
Used for executing DB release tasks when run in production without Mix
installed.
"""
@app :lol_analytics_web
def migrate do
load_app()
for repo <- repos() do
{:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :up, all: true))
end
end
def rollback(repo, version) do
load_app()
{:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :down, to: version))
end
defp repos do
Application.fetch_env!(@app, :ecto_repos)
end
defp load_app do
Application.load(@app)
end
end
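
The rel/overlays scripts further down invoke this module via bin/... eval; from an attached IEx session the same entry points can be called directly. A sketch assuming LoLAnalytics.Repo is the configured repo; the version number is illustrative:

# Run all pending migrations for every repo configured under :lol_analytics_web.
LoLAnalyticsWeb.Release.migrate()

# Roll a single repo back to a specific migration version.
LoLAnalyticsWeb.Release.rollback(LoLAnalytics.Repo, 20_240_512_000_000)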

View File

@ -41,7 +41,6 @@ defmodule LoLAnalyticsWeb.MixProject do
{:phoenix_html, "~> 4.0"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_view, "~> 0.20.2"},
{:floki, ">= 0.30.0", only: :test},
{:phoenix_live_dashboard, "~> 0.8.3"},
{:esbuild, "~> 0.8", runtime: Mix.env() == :dev},
{:tailwind, "~> 0.2", runtime: Mix.env() == :dev},

View File

@ -0,0 +1,5 @@
#!/bin/sh
set -eu
cd -P -- "$(dirname -- "$0")"
exec ./lol_analytics_web eval LoLAnalyticsWeb.Release.migrate

View File

@ -0,0 +1 @@
call "%~dp0\lol_analytics_web" eval LoLAnalyticsWeb.Release.migrate

View File

@ -0,0 +1,5 @@
#!/bin/sh
set -eu
cd -P -- "$(dirname -- "$0")"
PHX_SERVER=true exec ./lol_analytics_web start

View File

@ -0,0 +1,2 @@
set PHX_SERVER=true
call "%~dp0\lol_analytics_web" start

View File

@ -0,0 +1 @@
import Config

View File

@ -8,7 +8,7 @@ defmodule Scrapper.MatchClassifier do
@spec classify_match_by_queue(String.t()) :: nil
def classify_match_by_queue("420") do
matches = Storage.MatchStorage.S3MatchStorage.list_matches()
matches = Storage.MatchStorage.S3MatchStorage.list_files("matches")
total_matches = Enum.count(matches)
matches
@ -16,7 +16,11 @@ defmodule Scrapper.MatchClassifier do
|> Scrapper.Parallel.peach(fn {match, index} ->
%{key: json_file} = match
[key | _] = String.split(json_file, ".")
response = HTTPoison.get!("http://localhost:9000/matches/#{key}.json", [], timeout: 5000)
response =
HTTPoison.get!("http://#{System.get_env("EX_AWS_ENDPOINT")}:9000/matches/#{key}.json", [],
timeout: 5000
)
%{"info" => %{"gameVersion" => gameVersion, "queueId" => queueId}} =
Poison.decode!(response.body)
@ -30,8 +34,6 @@ defmodule Scrapper.MatchClassifier do
end)
end
# pass functions, not data
def classify_match_by_queue(_) do
end
end
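
classify_match_by_queue/1 now interpolates EX_AWS_ENDPOINT into the object URL; if the variable is unset, System.get_env/1 returns nil and the host interpolates as an empty string. A hypothetical guard, not part of this change:

endpoint = System.get_env("EX_AWS_ENDPOINT") || raise "EX_AWS_ENDPOINT is not set"
url = "http://#{endpoint}:9000/matches/EUW1_6923309745.json"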

View File

@ -44,7 +44,7 @@ defmodule Scrapper.Processor.MatchProcessor do
def process_resp({:ok, raw_match}, match_id) do
decoded_match = Poison.decode!(raw_match, as: %LoLAPI.Model.MatchResponse{})
match_url = Storage.MatchStorage.S3MatchStorage.store_match(match_id, raw_match)
match_url = Storage.MatchStorage.S3MatchStorage.store_match(match_id, raw_match, "matches")
match = LolAnalytics.Match.MatchRepo.get_match(match_id)
case match do

View File

@ -8,10 +8,12 @@ import Config
# secret_access_key: System.get_env("EX_AWS_SECRET_KEY")
config :ex_aws,
access_key_id: "3zwMWl4RPCs8CHzhKmIX",
secret_access_key: "79B6LmryjJElkrIiHgDcfIxSpmvrLdVy75MyAJC2",
# "EX_AWS_SECRET_KEY",
# EX_AWS_ACCESS_KEY
access_key_id: System.get_env("EX_AWS_ACCESS_KEY"),
secret_access_key: System.get_env("EX_AWS_SECRET_KEY"),
s3: [
scheme: "http://",
host: "localhost",
port: "9000"
host: System.get_env("EX_AWS_ENDPOINT"),
port: System.get_env("EX_AWS_PORT")
]

View File

@ -0,0 +1,12 @@
import Config
config :ex_aws,
# "EX_AWS_SECRET_KEY",
# EX_AWS_ACCESS_KEY
access_key_id: System.get_env("EX_AWS_ACCESS_KEY"),
secret_access_key: System.get_env("EX_AWS_SECRET_KEY"),
s3: [
scheme: "http://",
host: System.get_env("EX_AWS_ENDPOINT"),
port: System.get_env("EX_AWS_PORT")
]

View File

@ -1 +1,3 @@
import Config
import_config("libs/ex_aws_prod.exs")

View File

@ -6,7 +6,9 @@ import Config
# which you should run after static files are built and
# before starting your production server.
config :lol_analytics_web, LoLAnalyticsWeb.Endpoint,
url: [host: "example.com", port: 80],
server: true,
http: [ip: {0, 0, 0, 0}, port: 4000],
url: [host: "lol-analytics.kaizer.cloud", port: 80],
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production

View File

@ -37,6 +37,7 @@ if config_env() == :prod do
"""
config :lol_analytics_web, LoLAnalyticsWeb.Endpoint,
server: true,
http: [
# Enable IPv6 and bind on all interfaces.
# Set it to {0, 0, 0, 0, 0, 0, 0, 1} for local network only access.

13
mix.exs
View File

@ -7,7 +7,18 @@ defmodule LoLAnalytics.Umbrella.MixProject do
version: "0.1.0",
start_permanent: Mix.env() == :prod,
deps: deps(),
aliases: aliases()
aliases: aliases(),
releases: [
prod: [
applications: [
scrapper: :permanent,
lol_analytics: :permanent,
lol_analytics_web: :permanent,
storage: :permanent,
lol_api: :permanent
]
]
]
]
end

5
rel/overlays/bin/migrate Executable file
View File

@ -0,0 +1,5 @@
#!/bin/sh
set -eu
cd -P -- "$(dirname -- "$0")"
exec ./lol_analytics_web eval LoLAnalyticsWeb.Release.migrate

1
rel/overlays/bin/migrate.bat Executable file
View File

@ -0,0 +1 @@
call "%~dp0\lol_analytics_web" eval LoLAnalyticsWeb.Release.migrate

5
rel/overlays/bin/server Executable file
View File

@ -0,0 +1,5 @@
#!/bin/sh
set -eu
cd -P -- "$(dirname -- "$0")"
PHX_SERVER=true exec ./lol_analytics_web start

2
rel/overlays/bin/server.bat Executable file
View File

@ -0,0 +1,2 @@
set PHX_SERVER=true
call "%~dp0\lol_analytics_web" start