TD-275: Add CI through GH Actions (#1)

* Switch to valitydev upstreams
* valitydev/thrift_erlang@c280ff2
* valitydev/scoper@7f3183d
* valitydev/genlib@82c5ff3
* valitydev/erlang-health@5958e2f
* valitydev/damsel@1d60b20
* Drop how_are_you
This commit is contained in:
Andrew Mayorov 2022-04-18 11:00:48 +03:00 committed by GitHub
parent 9a5ae488c2
commit f577d1292a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
25 changed files with 613 additions and 395 deletions

4
.env Normal file
View File

@@ -0,0 +1,4 @@
# Single source of truth for component versions, shared between the Makefile,
# the Dockerfiles and the GitHub Actions workflows (all consumers load it with
# `grep -v '^#' .env`, so comment lines here are safe).
SERVICE_NAME=limiter
# Erlang/OTP release used for both the build and runtime images.
OTP_VERSION=24.2.0
# rebar3 version used by CI checks (see erlang-checks.yml).
REBAR_VERSION=3.18
# valitydev/thrift compiler release installed into the images.
THRIFT_VERSION=0.14.2.2

53
.github/workflows/build-image.yml vendored Normal file
View File

@@ -0,0 +1,53 @@
# Builds the service Docker image and (on push only) publishes it to GHCR,
# tagged by commit SHA. Version pins come from the repository's .env file.
name: Build Docker image
on:
  push:
    branches:
      - 'master'
      - 'epic/**'
  pull_request:
    branches: ['**']
env:
  REGISTRY: ghcr.io
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Buildx
        uses: docker/setup-buildx-action@v1
      # Import SERVICE_NAME / *_VERSION pins from .env, skipping comment lines.
      # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#setting-an-environment-variable
      - name: Update environment variables
        run: grep -v '^#' .env >> $GITHUB_ENV
      - name: Log in to the Container registry
        uses: docker/login-action@v1.12.0
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Construct tags / labels for an image
        id: meta
        uses: docker/metadata-action@v3.6.2
        with:
          images: |
            ${{ env.REGISTRY }}/${{ github.repository }}
          tags: |
            type=sha
      # Pushes only on branch pushes; pull requests just verify the build.
      - name: Build and push Docker image
        uses: docker/build-push-action@v2.9.0
        with:
          push: ${{ github.event_name == 'push' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          build-args: |
            SERVICE_NAME=${{ env.SERVICE_NAME }}
            OTP_VERSION=${{ env.OTP_VERSION }}
            THRIFT_VERSION=${{ env.THRIFT_VERSION }}

39
.github/workflows/erlang-checks.yml vendored Normal file
View File

@@ -0,0 +1,39 @@
# Runs the shared Erlang CI workflow (build, xref, lint, dialyzer, tests),
# with toolchain versions loaded from the repository's .env file.
name: Erlang CI Checks
on:
  push:
    branches:
      - 'master'
      - 'epic/**'
  pull_request:
    branches: ['**']
jobs:
  setup:
    name: Load .env
    runs-on: ubuntu-latest
    # Re-exported as job outputs so the reusable workflow below can consume them.
    outputs:
      otp-version: ${{ steps.otp-version.outputs.version }}
      rebar-version: ${{ steps.rebar-version.outputs.version }}
      thrift-version: ${{ steps.thrift-version.outputs.version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
      # Import *_VERSION pins from .env, skipping comment lines.
      - run: grep -v '^#' .env >> $GITHUB_ENV
      # NOTE(review): the original used the `::set-output` workflow command,
      # which GitHub has deprecated and disabled; write to $GITHUB_OUTPUT instead.
      - id: otp-version
        run: echo "version=$OTP_VERSION" >> "$GITHUB_OUTPUT"
      - id: rebar-version
        run: echo "version=$REBAR_VERSION" >> "$GITHUB_OUTPUT"
      - id: thrift-version
        run: echo "version=$THRIFT_VERSION" >> "$GITHUB_OUTPUT"
  run:
    name: Run checks
    needs: setup
    uses: valitydev/erlang-workflows/.github/workflows/erlang-parallel-build.yml@v1.0.1
    with:
      otp-version: ${{ needs.setup.outputs.otp-version }}
      rebar-version: ${{ needs.setup.outputs.rebar-version }}
      use-thrift: true
      thrift-version: ${{ needs.setup.outputs.thrift-version }}
      run-ct-with-compose: true

6
.gitignore vendored
View File

@@ -10,4 +10,8 @@ erl_crash.dump
.DS_Store
/.idea/
*.beam
rebar3.crashdump
rebar3.crashdump
# make stuff
/.image.*
Makefile.env

42
Dockerfile Normal file
View File

@@ -0,0 +1,42 @@
# Two-stage build: compile the release on a full Erlang image, then copy it
# into a slim runtime image of the same OTP version.
ARG OTP_VERSION

# Build the release
FROM docker.io/library/erlang:${OTP_VERSION} AS builder
# Fail pipelines (e.g. the wget | tar below) on the first failing command.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Install thrift compiler
# TARGETARCH is provided by BuildKit and selects the matching release binary.
ARG THRIFT_VERSION
ARG TARGETARCH
RUN wget -q -O- "https://github.com/valitydev/thrift/releases/download/${THRIFT_VERSION}/thrift-${THRIFT_VERSION}-linux-${TARGETARCH}.tar.gz" \
    | tar -xvz -C /usr/local/bin/

# Copy sources
RUN mkdir /build
COPY . /build/

# Build the release
WORKDIR /build
RUN rebar3 compile && \
    rebar3 as prod release

# Make a runner image
FROM docker.io/library/erlang:${OTP_VERSION}-slim

ARG SERVICE_NAME

# Set env
ENV CHARSET=UTF-8
ENV LANG=C.UTF-8

# Set runtime
WORKDIR /opt/${SERVICE_NAME}

COPY --from=builder /build/_build/prod/rel/${SERVICE_NAME} /opt/${SERVICE_NAME}

# Generate a tiny launcher script; build-time ARG values are baked into it
# because ARG is not available to the container at run time.
RUN echo "#!/bin/sh" >> /entrypoint.sh && \
    echo "exec /opt/${SERVICE_NAME}/bin/${SERVICE_NAME} foreground" >> /entrypoint.sh && \
    chmod +x /entrypoint.sh
ENTRYPOINT []
CMD ["/entrypoint.sh"]

EXPOSE 8022

17
Dockerfile.dev Normal file
View File

@@ -0,0 +1,17 @@
# Development image: full Erlang toolchain plus the thrift compiler, used by
# the Makefile's `wc-*` / `wdeps-*` tasks to run builds and checks in a container.
ARG OTP_VERSION
FROM docker.io/library/erlang:${OTP_VERSION}
# Fail pipelines (e.g. the wget | tar below) on the first failing command.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Install thrift compiler
# TARGETARCH is provided by BuildKit and selects the matching release binary.
ARG THRIFT_VERSION
ARG TARGETARCH
RUN wget -q -O- "https://github.com/valitydev/thrift/releases/download/${THRIFT_VERSION}/thrift-${THRIFT_VERSION}-linux-${TARGETARCH}.tar.gz" \
    | tar -xvz -C /usr/local/bin/

# Set env
ENV CHARSET=UTF-8
ENV LANG=C.UTF-8

# Set runtime
CMD ["/bin/bash"]

108
Makefile Normal file
View File

@@ -0,0 +1,108 @@
# HINT
# Use this file to override variables here.
# For example, to run with podman put `DOCKER=podman` there.
-include Makefile.env

# NOTE
# Variables specified in `.env` file are used to pick and setup specific
# component versions, both when building a development image and when running
# CI workflows on GH Actions. This ensures that tasks run with `wc-` prefix
# (like `wc-dialyze`) are reproducible between local machine and CI runners.
DOTENV := $(shell grep -v '^\#' .env)

# Development images
DEV_IMAGE_TAG = limiter-dev
DEV_IMAGE_ID = $(file < .image.dev)

DOCKER ?= docker
DOCKERCOMPOSE ?= docker-compose
DOCKERCOMPOSE_W_ENV = DEV_IMAGE_TAG=$(DEV_IMAGE_TAG) $(DOCKERCOMPOSE)
REBAR ?= rebar3
TEST_CONTAINER_NAME ?= testrunner

all: compile

# Declare every command-style target phony so a file with the same name can
# never shadow it (the original list covered only a handful of them).
.PHONY: all dev-image clean-dev-image wc-shell wdeps-shell \
	rebar-shell compile xref lint check-format dialyze release \
	eunit common-test cover format clean distclean test cover-report

dev-image: .image.dev

# Build the dev image via compose and record its ID as a freshness stamp.
.image.dev: Dockerfile.dev .env
	env $(DOTENV) $(DOCKERCOMPOSE_W_ENV) build $(TEST_CONTAINER_NAME)
	$(DOCKER) image ls -q -f "reference=$(DEV_IMAGE_TAG)" | head -n1 > $@

clean-dev-image:
# Only attempt removal when the stamp file recorded an image ID.
ifneq ($(DEV_IMAGE_ID),)
	$(DOCKER) image rm -f $(DEV_IMAGE_TAG)
	rm .image.dev
endif

DOCKER_WC_OPTIONS := -v $(PWD):$(PWD) --workdir $(PWD)
DOCKER_WC_EXTRA_OPTIONS ?= --rm
DOCKER_RUN = $(DOCKER) run -t $(DOCKER_WC_OPTIONS) $(DOCKER_WC_EXTRA_OPTIONS)
DOCKERCOMPOSE_RUN = $(DOCKERCOMPOSE_W_ENV) run --rm $(DOCKER_WC_OPTIONS)

# Utility tasks

wc-shell: dev-image
	$(DOCKER_RUN) --interactive --tty $(DEV_IMAGE_TAG)

# `wc-<task>`: run `make <task>` inside the dev container (no dependencies).
wc-%: dev-image
	$(DOCKER_RUN) $(DEV_IMAGE_TAG) make $*

# TODO docker compose down doesn't work yet
wdeps-shell: dev-image
	$(DOCKERCOMPOSE_RUN) $(TEST_CONTAINER_NAME) su; \
	$(DOCKERCOMPOSE_W_ENV) down

# `wdeps-<task>`: run `make <task>` with compose-managed dependencies up,
# preserving the task's exit status across the teardown.
wdeps-%: dev-image
	$(DOCKERCOMPOSE_RUN) -T $(TEST_CONTAINER_NAME) make $*; \
	res=$$?; \
	$(DOCKERCOMPOSE_W_ENV) down; \
	exit $$res

# Rebar tasks

rebar-shell:
	$(REBAR) shell

compile:
	$(REBAR) compile

xref:
	$(REBAR) xref

lint:
	$(REBAR) lint

check-format:
	$(REBAR) fmt -c

dialyze:
	$(REBAR) as test dialyzer

release:
	$(REBAR) as prod release

eunit:
	$(REBAR) eunit --cover

common-test:
	$(REBAR) ct --cover

cover:
	$(REBAR) covertool generate

format:
	$(REBAR) fmt -w

clean:
	$(REBAR) clean

# FIX: the original depended on `clean-build-image`, a target that is not
# defined anywhere in this file; the defined target is `clean-dev-image`.
distclean: clean-dev-image
	rm -rf _build

test: eunit common-test

cover-report:
	$(REBAR) cover

View File

@ -109,7 +109,7 @@ construct_plan(PlanID, Batches) ->
id = BatchID,
postings = Postings
}
|| {BatchID, Postings} <- Batches
|| {BatchID, Postings} <- Batches
]
}.

View File

@ -281,16 +281,19 @@ get_handler(ID, LimitContext) ->
-spec calculate_time_range(timestamp(), config()) -> time_range().
calculate_time_range(Timestamp, Config) ->
StartedAt = started_at(Config),
{StartDateTime, _USec0} = lim_range_codec:parse_timestamp(StartedAt),
{CurrentDateTime, _USec1} = lim_range_codec:parse_timestamp(Timestamp),
CurrentSec = calendar:datetime_to_gregorian_seconds(CurrentDateTime),
case time_range_type(Config) of
{calendar, Range} ->
calculate_calendar_time_range(Range, CurrentSec, CurrentDateTime, StartDateTime);
calculate_calendar_time_range(Range, Timestamp, StartedAt);
{interval, _Interval} ->
erlang:error({interval_time_range_not_implemented, Config})
end.
calculate_calendar_time_range(Range, Timestamp, StartedAt) ->
{StartDatetime, _USec0} = lim_range_codec:parse_timestamp(StartedAt),
{CurrentDatetime, _USec1} = lim_range_codec:parse_timestamp(Timestamp),
CurrentSec = calendar:datetime_to_gregorian_seconds(CurrentDatetime),
calculate_calendar_time_range(Range, CurrentSec, CurrentDatetime, StartDatetime).
calculate_calendar_time_range(year, CurrentSec, {CurrentDate, _CurrentTime}, {StartDate, StartTime}) ->
{_StartYear, StartMonth, StartDay} = StartDate,
{CurrentYear, _CurrentMonth, _} = CurrentDate,
@ -427,57 +430,43 @@ marshal_timestamp(DateTime) ->
calculate_shard_id(Timestamp, Config) ->
StartedAt = started_at(Config),
ShardSize = shard_size(Config),
{StartDateTime, _USec0} = lim_range_codec:parse_timestamp(StartedAt),
{CurrentDateTime, _USec1} = lim_range_codec:parse_timestamp(Timestamp),
case time_range_type(Config) of
{calendar, Range} ->
Units = calculate_time_units(Range, CurrentDateTime, StartDateTime),
SignPrefix = mk_sign_prefix(Units),
RangePrefix = mk_prefix(Range),
mk_shard_id(<<SignPrefix/binary, "/", RangePrefix/binary>>, Units, ShardSize);
calculate_calendar_shard_id(Range, Timestamp, StartedAt, ShardSize);
{interval, _Interval} ->
erlang:error({interval_time_range_not_implemented, Config})
end.
calculate_time_units(year, {CurrentDate, CurrentTime}, {StartDate, StartTime}) ->
{StartYear, _, _} = StartDate,
{CurrentYear, _, _} = CurrentDate,
calculate_calendar_shard_id(Range, Timestamp, StartedAt, ShardSize) ->
{StartDatetime, _USec0} = lim_range_codec:parse_timestamp(StartedAt),
{CurrentDatetime, _USec1} = lim_range_codec:parse_timestamp(Timestamp),
Units = calculate_time_units(Range, CurrentDatetime, StartDatetime),
SignPrefix = mk_sign_prefix(Units),
RangePrefix = mk_prefix(Range),
mk_shard_id(<<SignPrefix/binary, "/", RangePrefix/binary>>, Units, ShardSize).
StartSecBase = calendar:datetime_to_gregorian_seconds({{StartYear, 1, 1}, {0, 0, 0}}),
StartSec = calendar:datetime_to_gregorian_seconds({StartDate, StartTime}),
CurrentSecBase = calendar:datetime_to_gregorian_seconds({{CurrentYear, 1, 1}, {0, 0, 0}}),
CurrentSec = calendar:datetime_to_gregorian_seconds({CurrentDate, CurrentTime}),
calculate_time_units(year, CurrentDatetime, StartDatetime) ->
StartSecBase = calculate_start_of_year_seconds(StartDatetime),
StartSec = calendar:datetime_to_gregorian_seconds(StartDatetime),
CurrentSecBase = calculate_start_of_year_seconds(CurrentDatetime),
CurrentSec = calendar:datetime_to_gregorian_seconds(CurrentDatetime),
StartDelta = StartSec - StartSecBase,
CurrentDelta = CurrentSec - (CurrentSecBase + StartDelta),
maybe_previous_unit(CurrentDelta, year(CurrentDatetime) - year(StartDatetime));
calculate_time_units(month, CurrentDatetime, StartDatetime) ->
StartSecBase = calculate_start_of_month_seconds(StartDatetime),
StartSec = calendar:datetime_to_gregorian_seconds(StartDatetime),
CurrentSecBase = calculate_start_of_month_seconds(CurrentDatetime),
CurrentSec = calendar:datetime_to_gregorian_seconds(CurrentDatetime),
StartDelta = StartSec - StartSecBase,
CurrentDelta = CurrentSec - (CurrentSecBase + StartDelta),
case CurrentDelta >= 0 of
true ->
CurrentYear - StartYear;
false ->
CurrentYear - StartYear - 1
end;
calculate_time_units(month, {CurrentDate, CurrentTime}, {StartDate, StartTime}) ->
{StartYear, StartMonth, _} = StartDate,
{CurrentYear, CurrentMonth, _} = CurrentDate,
YearDiff = year(CurrentDatetime) - year(StartDatetime),
MonthDiff = month(CurrentDatetime) - month(StartDatetime),
StartSecBase = calendar:datetime_to_gregorian_seconds({{StartYear, StartMonth, 1}, {0, 0, 0}}),
StartSec = calendar:datetime_to_gregorian_seconds({StartDate, StartTime}),
CurrentSecBase = calendar:datetime_to_gregorian_seconds({{CurrentYear, CurrentMonth, 1}, {0, 0, 0}}),
CurrentSec = calendar:datetime_to_gregorian_seconds({CurrentDate, CurrentTime}),
StartDelta = StartSec - StartSecBase,
CurrentDelta = CurrentSec - (CurrentSecBase + StartDelta),
YearDiff = CurrentYear - StartYear,
MonthDiff = CurrentMonth - StartMonth,
case CurrentDelta >= 0 of
true ->
YearDiff * 12 + MonthDiff;
false ->
YearDiff * 12 + MonthDiff - 1
end;
maybe_previous_unit(CurrentDelta, YearDiff * 12 + MonthDiff);
calculate_time_units(week, {CurrentDate, CurrentTime}, {StartDate, StartTime}) ->
StartWeekRem = calendar:date_to_gregorian_days(StartDate) rem 7,
StartWeekBase = (calendar:date_to_gregorian_days(StartDate) div 7) * 7,
@ -501,12 +490,7 @@ calculate_time_units(week, {CurrentDate, CurrentTime}, {StartDate, StartTime}) -
StartWeeks = calendar:date_to_gregorian_days(StartDate) div 7,
CurrentWeeks = calendar:date_to_gregorian_days(CurrentDate) div 7,
case CurrentDelta >= 0 of
true ->
CurrentWeeks - StartWeeks;
false ->
CurrentWeeks - StartWeeks - 1
end;
maybe_previous_unit(CurrentDelta, CurrentWeeks - StartWeeks);
calculate_time_units(day, {CurrentDate, CurrentTime}, {StartDate, StartTime}) ->
StartSecBase = calendar:datetime_to_gregorian_seconds({StartDate, {0, 0, 0}}),
StartSec = calendar:datetime_to_gregorian_seconds({StartDate, StartTime}),
@ -516,12 +500,24 @@ calculate_time_units(day, {CurrentDate, CurrentTime}, {StartDate, StartTime}) ->
CurrentDelta = CurrentSec - (CurrentSecBase + StartDelta),
StartDays = calendar:date_to_gregorian_days(StartDate),
CurrentDays = calendar:date_to_gregorian_days(CurrentDate),
case CurrentDelta >= 0 of
true ->
CurrentDays - StartDays;
false ->
CurrentDays - StartDays - 1
end.
maybe_previous_unit(CurrentDelta, CurrentDays - StartDays).
maybe_previous_unit(Delta, Unit) when Delta < 0 ->
Unit - 1;
maybe_previous_unit(_Delta, Unit) ->
Unit.
calculate_start_of_year_seconds({{Year, _, _}, _Time}) ->
calendar:datetime_to_gregorian_seconds({{Year, 1, 1}, {0, 0, 0}}).
calculate_start_of_month_seconds({{Year, Month, _}, _Time}) ->
calendar:datetime_to_gregorian_seconds({{Year, Month, 1}, {0, 0, 0}}).
year({{Year, _, _}, _Time}) ->
Year.
month({{_Year, Month, _}, _Time}) ->
Month.
mk_prefix(day) -> <<"day">>;
mk_prefix(week) -> <<"week">>;
@ -614,167 +610,135 @@ check_sign_prefix_test() ->
-spec check_calculate_day_time_range_test() -> _.
check_calculate_day_time_range_test() ->
Config0 = #{
started_at => <<"2000-01-01T00:00:00Z">>,
time_range_type => {calendar, day}
},
StartedAt1 = <<"2000-01-01T00:00:00Z">>,
?assertEqual(
#{lower => <<"2000-01-01T00:00:00Z">>, upper => <<"2000-01-02T00:00:00Z">>},
calculate_time_range(<<"2000-01-01T02:00:00Z">>, Config0)
calculate_calendar_time_range(day, <<"2000-01-01T02:00:00Z">>, StartedAt1)
),
?assertEqual(
#{lower => <<"1999-12-31T00:00:00Z">>, upper => <<"2000-01-01T00:00:00Z">>},
calculate_time_range(<<"1999-12-31T02:00:00Z">>, Config0)
calculate_calendar_time_range(day, <<"1999-12-31T02:00:00Z">>, StartedAt1)
),
?assertEqual(
#{lower => <<"2000-01-10T00:00:00Z">>, upper => <<"2000-01-11T00:00:00Z">>},
calculate_time_range(<<"2000-01-10T02:00:00Z">>, Config0)
calculate_calendar_time_range(day, <<"2000-01-10T02:00:00Z">>, StartedAt1)
),
Config1 = Config0#{started_at => <<"2000-01-01T03:00:00Z">>},
?assertEqual(
#{lower => <<"1999-12-31T03:00:00Z">>, upper => <<"2000-01-01T03:00:00Z">>},
calculate_time_range(<<"2000-01-01T02:00:00Z">>, Config1)
calculate_calendar_time_range(day, <<"2000-01-01T02:00:00Z">>, <<"2000-01-01T03:00:00Z">>)
).
-spec check_calculate_week_time_range_test() -> _.
check_calculate_week_time_range_test() ->
Config0 = #{
started_at => <<"2000-01-01T00:00:00Z">>,
time_range_type => {calendar, week}
},
StartedAt = <<"2000-01-01T00:00:00Z">>,
?assertEqual(
#{lower => <<"2000-01-01T00:00:00Z">>, upper => <<"2000-01-08T00:00:00Z">>},
calculate_time_range(<<"2000-01-01T02:00:00Z">>, Config0)
calculate_calendar_time_range(week, <<"2000-01-01T02:00:00Z">>, StartedAt)
),
?assertEqual(
#{lower => <<"1999-12-25T00:00:00Z">>, upper => <<"2000-01-01T00:00:00Z">>},
calculate_time_range(<<"1999-12-31T02:00:00Z">>, Config0)
calculate_calendar_time_range(week, <<"1999-12-31T02:00:00Z">>, StartedAt)
),
?assertEqual(
#{lower => <<"2000-09-30T00:00:00Z">>, upper => <<"2000-10-07T00:00:00Z">>},
calculate_time_range(<<"2000-10-03T02:00:00Z">>, Config0)
calculate_calendar_time_range(week, <<"2000-10-03T02:00:00Z">>, StartedAt)
),
Config1 = Config0#{started_at => <<"2000-01-01T03:00:00Z">>},
?assertEqual(
#{lower => <<"1999-12-25T03:00:00Z">>, upper => <<"2000-01-01T03:00:00Z">>},
calculate_time_range(<<"2000-01-01T02:00:00Z">>, Config1)
calculate_calendar_time_range(week, <<"2000-01-01T02:00:00Z">>, <<"2000-01-01T03:00:00Z">>)
).
-spec check_calculate_month_time_range_test() -> _.
check_calculate_month_time_range_test() ->
Config0 = #{
started_at => <<"2000-01-01T00:00:00Z">>,
time_range_type => {calendar, month}
},
StartedAt = <<"2000-01-01T00:00:00Z">>,
?assertEqual(
#{lower => <<"2000-01-01T00:00:00Z">>, upper => <<"2000-02-01T00:00:00Z">>},
calculate_time_range(<<"2000-01-01T02:00:00Z">>, Config0)
calculate_calendar_time_range(month, <<"2000-01-01T02:00:00Z">>, StartedAt)
),
?assertEqual(
#{lower => <<"1999-12-01T00:00:00Z">>, upper => <<"2000-01-01T00:00:00Z">>},
calculate_time_range(<<"1999-12-31T02:00:00Z">>, Config0)
calculate_calendar_time_range(month, <<"1999-12-31T02:00:00Z">>, StartedAt)
),
?assertEqual(
#{lower => <<"2000-10-01T00:00:00Z">>, upper => <<"2000-11-01T00:00:00Z">>},
calculate_time_range(<<"2000-10-03T02:00:00Z">>, Config0)
calculate_calendar_time_range(month, <<"2000-10-03T02:00:00Z">>, StartedAt)
),
Config1 = Config0#{started_at => <<"2000-01-01T03:00:00Z">>},
?assertEqual(
#{lower => <<"1999-12-01T03:00:00Z">>, upper => <<"2000-01-01T03:00:00Z">>},
calculate_time_range(<<"2000-01-01T02:00:00Z">>, Config1)
calculate_calendar_time_range(month, <<"2000-01-01T02:00:00Z">>, <<"2000-01-01T03:00:00Z">>)
).
-spec check_calculate_year_time_range_test() -> _.
check_calculate_year_time_range_test() ->
Config0 = #{
started_at => <<"2000-01-01T00:00:00Z">>,
time_range_type => {calendar, year}
},
StartedAt = <<"2000-01-01T00:00:00Z">>,
?assertEqual(
#{lower => <<"2000-01-01T00:00:00Z">>, upper => <<"2001-01-01T00:00:00Z">>},
calculate_time_range(<<"2000-01-01T02:00:00Z">>, Config0)
calculate_calendar_time_range(year, <<"2000-01-01T02:00:00Z">>, StartedAt)
),
?assertEqual(
#{lower => <<"1999-01-01T00:00:00Z">>, upper => <<"2000-01-01T00:00:00Z">>},
calculate_time_range(<<"1999-12-31T02:00:00Z">>, Config0)
calculate_calendar_time_range(year, <<"1999-12-31T02:00:00Z">>, StartedAt)
),
?assertEqual(
#{lower => <<"2010-01-01T00:00:00Z">>, upper => <<"2011-01-01T00:00:00Z">>},
calculate_time_range(<<"2010-10-03T02:00:00Z">>, Config0)
calculate_calendar_time_range(year, <<"2010-10-03T02:00:00Z">>, StartedAt)
),
Config1 = Config0#{started_at => <<"2000-01-01T03:00:00Z">>},
?assertEqual(
#{lower => <<"1999-01-01T03:00:00Z">>, upper => <<"2000-01-01T03:00:00Z">>},
calculate_time_range(<<"2000-01-01T02:00:00Z">>, Config1)
calculate_calendar_time_range(year, <<"2000-01-01T02:00:00Z">>, <<"2000-01-01T03:00:00Z">>)
).
-spec check_calculate_day_shard_id_test() -> _.
check_calculate_day_shard_id_test() ->
Config0 = #{
started_at => <<"2000-01-01T00:00:00Z">>,
shard_size => 1,
time_range_type => {calendar, day}
},
?assertEqual(<<"future/day/0">>, calculate_shard_id(<<"2000-01-01T00:00:00Z">>, Config0)),
?assertEqual(<<"future/day/2">>, calculate_shard_id(<<"2000-01-03T00:00:00Z">>, Config0)),
?assertEqual(<<"past/day/1">>, calculate_shard_id(<<"1999-12-31T00:00:00Z">>, Config0)),
?assertEqual(<<"future/day/1">>, calculate_shard_id(<<"2000-01-02T23:59:59Z">>, Config0)),
?assertEqual(<<"future/day/1">>, calculate_shard_id(<<"2000-01-04T00:00:00Z">>, Config0#{shard_size => 2})),
?assertEqual(<<"future/day/366">>, calculate_shard_id(<<"2001-01-01T00:00:00Z">>, Config0)),
?assertEqual(<<"future/day/12">>, calculate_shard_id(<<"2001-01-01T00:00:00Z">>, Config0#{shard_size => 30})),
Config1 = Config0#{started_at => <<"2000-01-01T03:00:00Z">>},
?assertEqual(<<"past/day/1">>, calculate_shard_id(<<"2000-01-01T00:00:00Z">>, Config1)),
?assertEqual(<<"future/day/1">>, calculate_shard_id(<<"2000-01-03T00:00:00Z">>, Config1)).
StartedAt1 = <<"2000-01-01T00:00:00Z">>,
?assertEqual(<<"future/day/0">>, calculate_calendar_shard_id(day, <<"2000-01-01T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/day/2">>, calculate_calendar_shard_id(day, <<"2000-01-03T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"past/day/1">>, calculate_calendar_shard_id(day, <<"1999-12-31T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/day/1">>, calculate_calendar_shard_id(day, <<"2000-01-02T23:59:59Z">>, StartedAt1, 1)),
?assertEqual(<<"future/day/1">>, calculate_calendar_shard_id(day, <<"2000-01-04T00:00:00Z">>, StartedAt1, 2)),
?assertEqual(<<"future/day/366">>, calculate_calendar_shard_id(day, <<"2001-01-01T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/day/12">>, calculate_calendar_shard_id(day, <<"2001-01-01T00:00:00Z">>, StartedAt1, 30)),
StartedAt2 = <<"2000-01-01T03:00:00Z">>,
?assertEqual(<<"past/day/1">>, calculate_calendar_shard_id(day, <<"2000-01-01T00:00:00Z">>, StartedAt2, 1)),
?assertEqual(<<"future/day/1">>, calculate_calendar_shard_id(day, <<"2000-01-03T00:00:00Z">>, StartedAt2, 1)).
-spec check_calculate_week_shard_id_test() -> _.
check_calculate_week_shard_id_test() ->
Config0 = #{
started_at => <<"2000-01-01T00:00:00Z">>,
shard_size => 1,
time_range_type => {calendar, week}
},
?assertEqual(<<"future/week/0">>, calculate_shard_id(<<"2000-01-01T00:00:00Z">>, Config0)),
?assertEqual(<<"past/week/1">>, calculate_shard_id(<<"1999-12-31T00:00:00Z">>, Config0)),
?assertEqual(<<"future/week/1">>, calculate_shard_id(<<"2000-01-08T00:00:00Z">>, Config0)),
?assertEqual(<<"future/week/1">>, calculate_shard_id(<<"2000-01-15T00:00:00Z">>, Config0#{shard_size => 2})),
?assertEqual(<<"future/week/52">>, calculate_shard_id(<<"2001-01-01T00:00:00Z">>, Config0)),
?assertEqual(<<"future/week/13">>, calculate_shard_id(<<"2001-01-01T00:00:00Z">>, Config0#{shard_size => 4})),
Config1 = Config0#{started_at => <<"2000-01-02T03:00:00Z">>},
?assertEqual(<<"past/week/1">>, calculate_shard_id(<<"2000-01-02T00:00:00Z">>, Config1)),
?assertEqual(<<"future/week/0">>, calculate_shard_id(<<"2000-01-09T00:00:00Z">>, Config1)).
StartedAt1 = <<"2000-01-01T00:00:00Z">>,
?assertEqual(<<"future/week/0">>, calculate_calendar_shard_id(week, <<"2000-01-01T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"past/week/1">>, calculate_calendar_shard_id(week, <<"1999-12-31T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/week/1">>, calculate_calendar_shard_id(week, <<"2000-01-08T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/week/1">>, calculate_calendar_shard_id(week, <<"2000-01-15T00:00:00Z">>, StartedAt1, 2)),
?assertEqual(<<"future/week/52">>, calculate_calendar_shard_id(week, <<"2001-01-01T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/week/13">>, calculate_calendar_shard_id(week, <<"2001-01-01T00:00:00Z">>, StartedAt1, 4)),
StartedAt2 = <<"2000-01-02T03:00:00Z">>,
?assertEqual(<<"past/week/1">>, calculate_calendar_shard_id(week, <<"2000-01-02T00:00:00Z">>, StartedAt2, 1)),
?assertEqual(<<"future/week/0">>, calculate_calendar_shard_id(week, <<"2000-01-09T00:00:00Z">>, StartedAt2, 1)).
-spec check_calculate_month_shard_id_test() -> _.
check_calculate_month_shard_id_test() ->
Config0 = #{
started_at => <<"2000-01-01T00:00:00Z">>,
shard_size => 1,
time_range_type => {calendar, month}
},
?assertEqual(<<"future/month/0">>, calculate_shard_id(<<"2000-01-01T00:00:00Z">>, Config0)),
?assertEqual(<<"past/month/1">>, calculate_shard_id(<<"1999-12-31T00:00:00Z">>, Config0)),
?assertEqual(<<"future/month/1">>, calculate_shard_id(<<"2000-02-01T00:00:00Z">>, Config0)),
?assertEqual(<<"future/month/1">>, calculate_shard_id(<<"2000-03-01T00:00:00Z">>, Config0#{shard_size => 2})),
?assertEqual(<<"future/month/12">>, calculate_shard_id(<<"2001-01-01T00:00:00Z">>, Config0)),
?assertEqual(<<"future/month/1">>, calculate_shard_id(<<"2001-01-01T00:00:00Z">>, Config0#{shard_size => 12})),
Config1 = Config0#{started_at => <<"2000-01-02T03:00:00Z">>},
?assertEqual(<<"past/month/1">>, calculate_shard_id(<<"2000-01-02T00:00:00Z">>, Config1)),
?assertEqual(<<"future/month/0">>, calculate_shard_id(<<"2000-02-02T00:00:00Z">>, Config1)).
StartedAt1 = <<"2000-01-01T00:00:00Z">>,
?assertEqual(<<"future/month/0">>, calculate_calendar_shard_id(month, <<"2000-01-01T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"past/month/1">>, calculate_calendar_shard_id(month, <<"1999-12-31T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/month/1">>, calculate_calendar_shard_id(month, <<"2000-02-01T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/month/1">>, calculate_calendar_shard_id(month, <<"2000-03-01T00:00:00Z">>, StartedAt1, 2)),
?assertEqual(<<"future/month/12">>, calculate_calendar_shard_id(month, <<"2001-01-01T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/month/1">>, calculate_calendar_shard_id(month, <<"2001-01-01T00:00:00Z">>, StartedAt1, 12)),
StartedAt2 = <<"2000-01-02T03:00:00Z">>,
?assertEqual(<<"past/month/1">>, calculate_calendar_shard_id(month, <<"2000-01-02T00:00:00Z">>, StartedAt2, 1)),
?assertEqual(<<"future/month/0">>, calculate_calendar_shard_id(month, <<"2000-02-02T00:00:00Z">>, StartedAt2, 1)).
-spec check_calculate_year_shard_id_test() -> _.
check_calculate_year_shard_id_test() ->
Config0 = #{
started_at => <<"2000-01-01T00:00:00Z">>,
shard_size => 1,
time_range_type => {calendar, year}
},
?assertEqual(<<"future/year/0">>, calculate_shard_id(<<"2000-01-01T00:00:00Z">>, Config0)),
?assertEqual(<<"past/year/1">>, calculate_shard_id(<<"1999-12-31T00:00:00Z">>, Config0)),
?assertEqual(<<"future/year/1">>, calculate_shard_id(<<"2001-01-01T00:00:00Z">>, Config0)),
?assertEqual(<<"future/year/1">>, calculate_shard_id(<<"2003-01-01T00:00:00Z">>, Config0#{shard_size => 2})),
?assertEqual(<<"future/year/10">>, calculate_shard_id(<<"2010-01-01T00:00:00Z">>, Config0)),
?assertEqual(<<"future/year/2">>, calculate_shard_id(<<"2020-01-01T00:00:00Z">>, Config0#{shard_size => 10})),
Config1 = Config0#{started_at => <<"2000-01-02T03:00:00Z">>},
?assertEqual(<<"past/year/1">>, calculate_shard_id(<<"2000-01-01T00:00:00Z">>, Config1)),
?assertEqual(<<"future/year/0">>, calculate_shard_id(<<"2001-01-01T00:00:00Z">>, Config1)).
StartedAt1 = <<"2000-01-01T00:00:00Z">>,
?assertEqual(<<"future/year/0">>, calculate_calendar_shard_id(year, <<"2000-01-01T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"past/year/1">>, calculate_calendar_shard_id(year, <<"1999-12-31T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/year/1">>, calculate_calendar_shard_id(year, <<"2001-01-01T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/year/1">>, calculate_calendar_shard_id(year, <<"2003-01-01T00:00:00Z">>, StartedAt1, 2)),
?assertEqual(<<"future/year/10">>, calculate_calendar_shard_id(year, <<"2010-01-01T00:00:00Z">>, StartedAt1, 1)),
?assertEqual(<<"future/year/2">>, calculate_calendar_shard_id(year, <<"2020-01-01T00:00:00Z">>, StartedAt1, 10)),
StartedAt2 = <<"2000-01-02T03:00:00Z">>,
?assertEqual(<<"past/year/1">>, calculate_calendar_shard_id(year, <<"2000-01-01T00:00:00Z">>, StartedAt2, 1)),
?assertEqual(<<"future/year/0">>, calculate_calendar_shard_id(year, <<"2001-01-01T00:00:00Z">>, StartedAt2, 1)).
-endif.

View File

@ -90,9 +90,10 @@ unmarshal_event(1, EncodedChange, Context) ->
-spec marshal_unmarshal_created_test() -> _.
marshal_unmarshal_created_test() ->
ID = <<"id">>,
Created =
{created, #{
id => <<"id">>,
id => ID,
processor_type => <<"type">>,
created_at => lim_time:now(),
body_type => {cash, <<"RUB">>},
@ -104,9 +105,10 @@ marshal_unmarshal_created_test() ->
scope => {scope, party},
description => <<"description">>
}},
Context = #{machine_ref => ID, machine_ns => config},
Event = {ev, lim_time:machinery_now(), Created},
{Marshaled, _} = marshal_event(1, Event, {}),
{Unmarshaled, _} = unmarshal_event(1, Marshaled, {}),
{Marshaled, _} = marshal_event(1, Event, Context),
{Unmarshaled, _} = unmarshal_event(1, Marshaled, Context),
?assertEqual(Event, Unmarshaled).
-endif.

View File

@ -51,18 +51,18 @@ reverse_postings(Postings) ->
from_id = AccountTo,
to_id = AccountFrom
}
|| Posting = #accounter_Posting{from_id = AccountFrom, to_id = AccountTo} <- Postings
|| Posting = #accounter_Posting{from_id = AccountFrom, to_id = AccountTo} <- Postings
].
-spec assert_partial_posting_amount([posting()], [posting()]) -> ok | {error, forbidden_operation_amount_error()}.
assert_partial_posting_amount(
[#accounter_Posting{amount = Partial, currency_sym_code = Currency} | _Rest],
[#accounter_Posting{amount = Full, currency_sym_code = Currency} | _Rest]
[#accounter_Posting{amount = Partial, currency_sym_code = Currency} | _],
[#accounter_Posting{amount = Full, currency_sym_code = Currency} | _]
) ->
compare_amount(Partial, Full, Currency);
assert_partial_posting_amount(
[#accounter_Posting{amount = Partial, currency_sym_code = PartialCurrency} | _Rest],
[#accounter_Posting{amount = Full, currency_sym_code = FullCurrency} | _Rest]
[#accounter_Posting{amount = Partial, currency_sym_code = PartialCurrency} | _],
[#accounter_Posting{amount = Full, currency_sym_code = FullCurrency} | _]
) ->
erlang:error({invalid_partial_cash, {Partial, PartialCurrency}, {Full, FullCurrency}}).

View File

@ -27,7 +27,7 @@ do(Fun) ->
R ->
{ok, R}
catch
Thrown -> {error, Thrown}
throw:Thrown -> {error, Thrown}
end.
-spec do(Tag, fun(() -> ok | T | thrown(E))) -> ok | result(T, {Tag, E}).

View File

@ -90,16 +90,18 @@ unmarshal_event(1, EncodedChange, Context) ->
-spec marshal_unmarshal_created_test() -> _.
marshal_unmarshal_created_test() ->
ID = <<"id">>,
Created =
{created, #{
id => <<"id">>,
id => ID,
type => {calendar, day},
created_at => <<"2000-01-01T00:00:00Z">>,
currency => <<"USD">>
}},
Context = #{machine_ref => ID, machine_ns => limrange},
Event = {ev, lim_time:machinery_now(), Created},
{Marshaled, _} = marshal_event(1, Event, {}),
{Unmarshaled, _} = unmarshal_event(1, Marshaled, {}),
{Marshaled, _} = marshal_event(1, Event, Context),
{Unmarshaled, _} = unmarshal_event(1, Marshaled, Context),
?assertEqual(Event, Unmarshaled).
-spec marshal_unmarshal_time_range_created_test() -> _.
@ -111,9 +113,10 @@ marshal_unmarshal_time_range_created_test() ->
upper => <<"2000-01-01T00:00:00Z">>,
lower => <<"2000-01-01T00:00:00Z">>
}},
Context = #{machine_ref => <<"id">>, machine_ns => limrange},
Event = {ev, lim_time:machinery_now(), TimeRangeCreated},
{Marshaled, _} = marshal_event(1, Event, {}),
{Unmarshaled, _} = unmarshal_event(1, Marshaled, {}),
{Marshaled, _} = marshal_event(1, Event, Context),
{Unmarshaled, _} = unmarshal_event(1, Marshaled, Context),
?assertEqual(Event, Unmarshaled).
-endif.

View File

@ -24,7 +24,4 @@ get_woody_client(Url) ->
-spec get_woody_event_handlers() -> woody:ev_handlers().
get_woody_event_handlers() ->
genlib_app:env(limiter, woody_event_handlers, [
scoper_woody_event_handler,
hay_woody_event_handler
]).
genlib_app:env(limiter, woody_event_handlers, [scoper_woody_event_handler]).

View File

@ -10,11 +10,8 @@
xrates_proto,
machinery,
woody,
how_are_you, % must be after ranch and before any woody usage
scoper, % should be before any scoper event handler usage
erl_health,
prometheus,
prometheus_cowboy
erl_health
]},
{mod, {limiter, []}},
{env, []}

View File

@ -1,7 +1,6 @@
-module(lim_client).
-include_lib("limiter_proto/include/lim_limiter_thrift.hrl").
-include_lib("limiter_proto/include/lim_configurator_thrift.hrl").
-export([new/0]).
-export([get/3]).
@ -18,7 +17,8 @@
-type limit_change() :: lim_limiter_thrift:'LimitChange'().
-type limit_context() :: lim_limiter_thrift:'LimitContext'().
-type clock() :: lim_limiter_thrift:'Clock'().
-type limit_config_params() :: lim_limiter_config_thrift:'LimitCreateParams'().
-type legacy_create_params() :: lim_configurator_thrift:'LimitCreateParams'().
-type limit_config_params() :: lim_limiter_config_thrift:'LimitConfigParams'().
%%% API
@ -40,7 +40,7 @@ commit(LimitChange, Context, Client) ->
%%
-spec legacy_create_config(limit_config_params(), client()) -> woody:result() | no_return().
-spec legacy_create_config(legacy_create_params(), client()) -> woody:result() | no_return().
legacy_create_config(LimitCreateParams, Client) ->
call_configurator('CreateLegacy', {LimitCreateParams}, Client).

View File

@ -17,13 +17,14 @@
-export([create_config/1]).
-export([get_config/1]).
-type group_name() :: atom().
-type test_case_name() :: atom().
-define(RATE_SOURCE_ID, <<"dummy_source_id">>).
%% tests descriptions
-spec all() -> [test_case_name()].
-spec all() -> [{group, group_name()}].
all() ->
[
{group, default}
@ -60,14 +61,13 @@ init_per_suite(Config) ->
-spec end_per_suite(config()) -> _.
end_per_suite(Config) ->
[application:stop(App) || App <- proplists:get_value(apps, Config)],
Config.
_ = [application:stop(App) || App <- proplists:get_value(apps, Config)].
-spec init_per_testcase(test_case_name(), config()) -> config().
init_per_testcase(_Name, C) ->
C.
-spec end_per_testcase(test_case_name(), config()) -> config().
-spec end_per_testcase(test_case_name(), config()) -> ok.
end_per_testcase(_Name, _C) ->
ok.

View File

@ -4,6 +4,10 @@
-export([handle_function/4]).
-spec handle_function(woody:func(), woody:args(), woody_context:ctx(), #{}) -> {ok, term()}.
-type opts() :: #{
function := fun((woody:func(), woody:args()) -> woody:result())
}.
-spec handle_function(woody:func(), woody:args(), woody_context:ctx(), opts()) -> {ok, term()}.
handle_function(FunName, Args, _, #{function := Fun}) ->
Fun(FunName, Args).

View File

@ -26,13 +26,14 @@
-export([refund_ok/1]).
-export([get_config_ok/1]).
-type group_name() :: atom().
-type test_case_name() :: atom().
-define(RATE_SOURCE_ID, <<"dummy_source_id">>).
%% tests descriptions
-spec all() -> [test_case_name()].
-spec all() -> [{group, group_name()}].
all() ->
[
{group, default}
@ -84,16 +85,16 @@ init_per_suite(Config) ->
-spec end_per_suite(config()) -> _.
end_per_suite(Config) ->
[application:stop(App) || App <- proplists:get_value(apps, Config)],
_ = [application:stop(App) || App <- proplists:get_value(apps, Config)],
Config.
-spec init_per_testcase(test_case_name(), config()) -> config().
init_per_testcase(_Name, C) ->
[{test_sup, lim_mock:start_mocked_service_sup()} | C].
-spec end_per_testcase(test_case_name(), config()) -> config().
-spec end_per_testcase(test_case_name(), config()) -> ok.
end_per_testcase(_Name, C) ->
lim_mock:stop_mocked_service_sup(?config(test_sup, C)),
_ = lim_mock:stop_mocked_service_sup(?config(test_sup, C)),
ok.
%%
@ -101,7 +102,7 @@ end_per_testcase(_Name, C) ->
-spec commit_with_default_exchange(config()) -> _.
commit_with_default_exchange(C) ->
Rational = #base_Rational{p = 1000000, q = 100},
mock_exchange(Rational, C),
_ = mock_exchange(Rational, C),
ID = lim_time:to_rfc3339(lim_time:now()),
#{client := Client} = prepare_environment(ID, <<"GlobalMonthTurnover">>, C),
Context = #limiter_context_LimitContext{
@ -128,7 +129,7 @@ commit_with_default_exchange(C) ->
-spec partial_commit_with_exchange(config()) -> _.
partial_commit_with_exchange(C) ->
Rational = #base_Rational{p = 800000, q = 100},
mock_exchange(Rational, C),
_ = mock_exchange(Rational, C),
ID = lim_time:to_rfc3339(lim_time:now()),
#{client := Client} = prepare_environment(ID, <<"GlobalMonthTurnover">>, C),
Context = #limiter_context_LimitContext{
@ -161,7 +162,7 @@ partial_commit_with_exchange(C) ->
-spec commit_with_exchange(config()) -> _.
commit_with_exchange(C) ->
Rational = #base_Rational{p = 1000000, q = 100},
mock_exchange(Rational, C),
_ = mock_exchange(Rational, C),
ID = lim_time:to_rfc3339(lim_time:now()),
#{client := Client} = prepare_environment(ID, <<"GlobalMonthTurnover">>, C),
Context = #limiter_context_LimitContext{
@ -188,7 +189,7 @@ commit_with_exchange(C) ->
-spec get_rate(config()) -> _.
get_rate(C) ->
Rational = #base_Rational{p = 10, q = 10},
mock_exchange(Rational, C),
_ = mock_exchange(Rational, C),
Request = #rate_ConversionRequest{
source = <<"RUB">>,
destination = <<"USD">>,

60
compose.yml Normal file
View File

@ -0,0 +1,60 @@
version: '3'
services:
testrunner:
image: $DEV_IMAGE_TAG
build:
dockerfile: Dockerfile.dev
context: .
args:
OTP_VERSION: $OTP_VERSION
THRIFT_VERSION: $THRIFT_VERSION
volumes:
- .:$PWD
hostname: $SERVICE_NAME
working_dir: $PWD
command: /sbin/init
depends_on:
machinegun:
condition: service_healthy
shumway:
condition: service_healthy
machinegun:
image: ghcr.io/valitydev/machinegun:sha-7e785cd
volumes:
- ./test/machinegun/config.yaml:/opt/machinegun/etc/config.yaml
- ./test/log/machinegun:/var/log/machinegun
- ./test/machinegun/cookie:/opt/machinegun/etc/cookie
healthcheck:
test: "/opt/machinegun/bin/machinegun ping"
interval: 5s
timeout: 1s
retries: 10
shumway:
image: docker.io/rbkmoney/shumway:44eb989065b27be619acd16b12ebdb2288b46c36
restart: unless-stopped
entrypoint:
- java
- -Xmx512m
- -jar
- /opt/shumway/shumway.jar
- --spring.datasource.url=jdbc:postgresql://shumway-db:5432/shumway
- --spring.datasource.username=postgres
- --spring.datasource.password=postgres
- --management.metrics.export.statsd.enabled=false
depends_on:
- shumway-db
healthcheck:
test: "curl http://localhost:8022/actuator/health"
interval: 5s
timeout: 1s
retries: 20
shumway-db:
image: docker.io/library/postgres:9.6
environment:
- POSTGRES_DB=shumway
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres

View File

@ -44,7 +44,6 @@
{shutdown_timeout, 1000},
{woody_event_handlers, [
hay_woody_event_handler,
{scoper_woody_event_handler, #{
event_handler_opts => #{
formatter_opts => #{
@ -77,12 +76,8 @@
{logger, [
{handler, default, logger_std_h, #{
level => error,
config => #{
type => standard_error
},
formatter => {logger_formatter, #{
depth => 30
}}
config => #{type => standard_error},
formatter => {logger_formatter, #{depth => 30}}
}},
{handler, console, logger_std_h, #{
config => #{
@ -93,16 +88,6 @@
]}
]},
% {how_are_you, [
% {metrics_publishers, [
% % {hay_statsd_publisher, #{
% % key_prefix => <<"limiter.">>,
% % host => "localhost",
% % port => 8125
% % }}
% ]}
% ]},
{os_mon, [
% for better compatibility with busybox coreutils
{disksup_posix_only, true}

View File

@ -1,50 +1,30 @@
[
{elvis, [
{verbose, true},
{config, [
#{
dirs => ["apps/*/src"],
dirs => ["apps/*/src", "apps/*/include"],
filter => "*.erl",
ruleset => erl_files,
rules => [
{elvis_style, line_length, #{limit => 120, skip_comments => false}},
{elvis_style, no_tabs},
{elvis_style, no_trailing_whitespace},
{elvis_style, macro_module_names},
{elvis_style, operator_spaces, #{rules => [{right, ","}, {right, "++"}, {left, "++"}]}},
{elvis_text_style, line_length, #{limit => 120}},
{elvis_style, nesting_level, #{level => 3}},
{elvis_style, god_modules, #{limit => 25}},
{elvis_style, no_if_expression},
{elvis_style, invalid_dynamic_call, #{ignore => [elvis]}},
{elvis_style, used_ignored_variable},
{elvis_style, no_behavior_info},
{elvis_style, module_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*(_SUITE)?$"}},
{elvis_style, function_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*$"}},
{elvis_style, state_record_and_type},
{elvis_style, no_spec_with_records},
{elvis_style, dont_repeat_yourself, #{min_complexity => 10, ignore => [lim_config_machine]}},
{elvis_style, no_debug_call, #{ignore => [elvis, elvis_utils]}}
{elvis_style, no_if_expression, disable}
]
},
#{
dirs => ["apps/*/test"],
filter => "*.erl",
ruleset => erl_files,
rules => [
{elvis_style, line_length, #{limit => 120, skip_comments => false}},
{elvis_style, no_tabs},
{elvis_style, no_trailing_whitespace},
{elvis_style, macro_module_names},
{elvis_style, operator_spaces, #{rules => [{right, ","}, {right, "++"}, {left, "++"}]}},
{elvis_style, nesting_level, #{level => 3}},
{elvis_style, god_modules, #{limit => 25}},
{elvis_style, no_if_expression},
{elvis_style, invalid_dynamic_call, #{ignore => [elvis]}},
{elvis_style, used_ignored_variable},
{elvis_style, no_behavior_info},
{elvis_style, module_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*(_SUITE)?$"}},
{elvis_style, function_naming_convention, #{regex => "^([a-z][a-z0-9]*_?)*$"}},
{elvis_style, state_record_and_type},
{elvis_style, no_spec_with_records},
{elvis_style, dont_repeat_yourself, #{min_complexity => 10, ignore => [lim_turnover_SUITE]}},
{elvis_style, no_debug_call, #{ignore => [elvis, elvis_utils]}}
{elvis_text_style, line_length, #{limit => 120}},
% We want to use `ct:pal/2` and friends in test code.
{elvis_style, no_debug_call, disable},
% Assert macros can trigger use of ignored binding, yet we want them for better
% readability.
{elvis_style, used_ignored_variable, disable},
% Tests are usually more comprehensible when a bit more verbose.
{elvis_style, dont_repeat_yourself, #{min_complexity => 20}}
]
},
#{
@ -58,30 +38,24 @@
ruleset => elvis_config
},
#{
dirs => ["."],
dirs => [".", "apps/*"],
filter => "rebar.config",
ruleset => rebar_config,
rules => [
{elvis_style, line_length, #{limit => 120, skip_comments => false}},
{elvis_style, no_tabs},
{elvis_style, no_trailing_whitespace}
{elvis_text_style, line_length, #{limit => 120}},
{elvis_text_style, no_tabs},
{elvis_text_style, no_trailing_whitespace},
%% Temporarily disabled till regex pattern is available
{elvis_project, no_deps_master_rebar, disable}
]
},
#{
dirs => ["."],
filter => "rebar.config",
rules => [
{elvis_style, line_length, #{limit => 120, skip_comments => false}},
{elvis_style, no_tabs},
{elvis_style, no_trailing_whitespace}
]
},
#{
dirs => ["src"],
dirs => ["apps/*/src"],
filter => "*.app.src",
rules => [
{elvis_style, line_length, #{limit => 120, skip_comments => false}},
{elvis_style, no_tabs},
{elvis_style, no_trailing_whitespace}
{elvis_text_style, line_length, #{limit => 120}},
{elvis_text_style, no_tabs},
{elvis_text_style, no_trailing_whitespace}
]
}
]}

View File

@ -26,50 +26,16 @@
%% Common project dependencies.
{deps, [
{damsel,
{git, "https://github.com/rbkmoney/damsel.git",
{branch, "release/erlang/master"}
}
},
{limiter_proto,
{git, "git@github.com:rbkmoney/limiter-proto.git",
{branch, "master"}
}
},
{xrates_proto,
{git, "git@github.com:rbkmoney/xrates-proto.git",
{branch, "master"}
}
},
{machinery,
{git, "https://github.com/rbkmoney/machinery.git",
{branch, "master"}}
},
{erl_health,
{git, "https://github.com/rbkmoney/erlang-health.git",
{branch, "master"}}
},
{genlib ,
{git, "https://github.com/rbkmoney/genlib.git",
{branch, "master"}}
},
{scoper,
{git, "https://github.com/rbkmoney/scoper.git",
{branch, "master"}}
},
{how_are_you,
{git, "https://github.com/rbkmoney/how_are_you.git",
{branch, "master"}}
},
{woody,
{git, "https://github.com/rbkmoney/woody_erlang.git",
{branch, "master"}}
},
{prometheus, "4.6.0"},
{prometheus_cowboy, "0.1.8"}
{damsel, {git, "https://github.com/valitydev/damsel.git", {branch, "master"}}},
{limiter_proto, {git, "https://github.com/valitydev/limiter-proto.git", {branch, "master"}}},
{xrates_proto, {git, "https://github.com/valitydev/xrates-proto.git", {branch, "master"}}},
{machinery, {git, "https://github.com/valitydev/machinery.git", {branch, "master"}}},
{erl_health, {git, "https://github.com/valitydev/erlang-health.git", {branch, "master"}}},
{genlib, {git, "https://github.com/valitydev/genlib.git", {branch, "master"}}},
{scoper, {git, "https://github.com/valitydev/scoper.git", {branch, "master"}}},
{woody, {git, "https://github.com/valitydev/woody_erlang.git", {branch, "master"}}}
]}.
%% XRef checks
{xref_checks, [
undefined_function_calls,
@ -78,9 +44,6 @@
deprecated_functions
]}.
%% Tests
{cover_enabled, true}.
%% Dialyzer static analyzing
{dialyzer, [
{warnings, [
@ -89,9 +52,6 @@
error_handling,
race_conditions,
unknown
% hardcore mode
% overspecs,
% underspecs
]},
{plt_apps, all_deps}
]}.
@ -99,37 +59,61 @@
{profiles, [
{prod, [
{deps, [
{logger_logstash_formatter,
{git, "https://github.com/rbkmoney/logger_logstash_formatter.git",
{ref, "87e52c755"}}
},
% for introspection on production
{recon, "2.3.2"}
{recon, "2.5.2"},
% Because of a dependency conflict, prometheus libs are only included in the prod profile for now
% https://github.com/project-fifo/rebar3_lint/issues/42
% https://github.com/valitydev/hellgate/pull/2/commits/884724c1799703cee4d1033850fe32c17f986d9e
{prometheus, "4.8.1"},
{prometheus_cowboy, "0.1.8"},
{logger_logstash_formatter,
{git, "https://github.com/valitydev/logger_logstash_formatter.git", {ref, "2c7b716"}}},
{iosetopts, {git, "https://github.com/valitydev/iosetopts.git", {ref, "edb445c"}}}
]},
{relx, [
{release, {limiter, "1.0.0"}, [
{recon, load}, % tools for introspection
{runtime_tools, load}, % debugger
{tools, load}, % profiler
{logger_logstash_formatter, load}, % log formatter
iosetopts,
{recon, load},
{runtime_tools, load},
{tools, load},
{logger_logstash_formatter, load},
prometheus,
prometheus_cowboy,
sasl,
limiter
]},
{mode, minimal},
{sys_config, "./config/sys.config"},
{vm_args, "./config/vm.args"},
{mode, minimal},
{extended_start_script, true}
]}
]},
{test, [
{cover_enabled, true},
{deps, []},
{dialyzer, [
{plt_extra_apps, [eunit, common_test]}
]}
]}
]}.
{plugins, [
{erlfmt, "0.8.0"},
{rebar3_thrift_compiler,
{git, "https://github.com/rbkmoney/rebar3_thrift_compiler.git", {tag, "0.3.1"}}}
{project_plugins, [
{rebar3_lint, "1.0.1"},
{erlfmt, "1.0.0"},
{covertool, "2.0.4"}
]}.
%% Linter config.
{elvis_output_format, colors}.
{erlfmt, [
{print_width, 120},
{files, "apps/*/{src,include,test}/*.{hrl,erl}"}
{files, ["apps/*/{src,include,test}/*.{hrl,erl}", "rebar.config", "elvis.config", "config/sys.config"]}
]}.
{covertool, [
{coverdata_files, [
"eunit.coverdata",
"ct.coverdata"
]}
]}.

View File

@ -1,44 +1,34 @@
{"1.2.0",
[{<<"accept">>,{pkg,<<"accept">>,<<"0.3.5">>},2},
{<<"bear">>,{pkg,<<"bear">>,<<"0.8.7">>},2},
{<<"cache">>,{pkg,<<"cache">>,<<"2.2.0">>},1},
{<<"certifi">>,{pkg,<<"certifi">>,<<"2.5.1">>},2},
[{<<"cache">>,{pkg,<<"cache">>,<<"2.3.3">>},1},
{<<"certifi">>,{pkg,<<"certifi">>,<<"2.8.0">>},2},
{<<"cg_mon">>,
{git,"https://github.com/rbkmoney/cg_mon.git",
{ref,"5a87a37694e42b6592d3b4164ae54e0e87e24e18"}},
1},
{<<"cowboy">>,{pkg,<<"cowboy">>,<<"2.8.0">>},1},
{<<"cowlib">>,{pkg,<<"cowlib">>,<<"2.9.1">>},2},
{<<"cowboy">>,{pkg,<<"cowboy">>,<<"2.9.0">>},1},
{<<"cowlib">>,{pkg,<<"cowlib">>,<<"2.11.0">>},2},
{<<"damsel">>,
{git,"https://github.com/rbkmoney/damsel.git",
{ref,"9e0e884bfeaf8ad1cadd01802200f4e204cf27e3"}},
{git,"https://github.com/valitydev/damsel.git",
{ref,"1d60b20f2136938c43902dd38a80ae70f03e4a14"}},
0},
{<<"erl_health">>,
{git,"https://github.com/rbkmoney/erlang-health.git",
{ref,"982af88738ca062eea451436d830eef8c1fbe3f9"}},
{git,"https://github.com/valitydev/erlang-health.git",
{ref,"5958e2f35cd4d09f40685762b82b82f89b4d9333"}},
0},
{<<"folsom">>,
{git,"https://github.com/folsom-project/folsom.git",
{ref,"eeb1cc467eb64bd94075b95b8963e80d8b4df3df"}},
1},
{<<"genlib">>,
{git,"https://github.com/rbkmoney/genlib.git",
{ref,"4565a8d73f34a0b78cca32c9cd2b97d298bdadf8"}},
{git,"https://github.com/valitydev/genlib.git",
{ref,"82c5ff3866e3019eb347c7f1d8f1f847bed28c10"}},
0},
{<<"gproc">>,{pkg,<<"gproc">>,<<"0.8.0">>},1},
{<<"hackney">>,{pkg,<<"hackney">>,<<"1.15.2">>},1},
{<<"how_are_you">>,
{git,"https://github.com/rbkmoney/how_are_you.git",
{ref,"29f9d3d7c35f7a2d586c8571f572838df5ec91dd"}},
0},
{<<"idna">>,{pkg,<<"idna">>,<<"6.0.0">>},2},
{<<"jsx">>,{pkg,<<"jsx">>,<<"3.0.0">>},1},
{<<"gproc">>,{pkg,<<"gproc">>,<<"0.9.0">>},1},
{<<"hackney">>,{pkg,<<"hackney">>,<<"1.18.0">>},1},
{<<"idna">>,{pkg,<<"idna">>,<<"6.1.1">>},2},
{<<"jsx">>,{pkg,<<"jsx">>,<<"3.1.0">>},1},
{<<"limiter_proto">>,
{git,"git@github.com:rbkmoney/limiter-proto.git",
{git,"https://github.com/valitydev/limiter-proto.git",
{ref,"2e2cdab859222648e389dc74867b5273e73583e9"}},
0},
{<<"machinery">>,
{git,"https://github.com/rbkmoney/machinery.git",
{git,"https://github.com/valitydev/machinery.git",
{ref,"db7c94b9913451e9558afa19f2fe77bf48d391da"}},
0},
{<<"metrics">>,{pkg,<<"metrics">>,<<"1.0.1">>},2},
@ -47,72 +37,59 @@
{ref,"d814d6948d4ff13f6f41d12c6613f59c805750b2"}},
1},
{<<"mimerl">>,{pkg,<<"mimerl">>,<<"1.2.0">>},2},
{<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.3.0">>},3},
{<<"prometheus">>,{pkg,<<"prometheus">>,<<"4.6.0">>},0},
{<<"prometheus_cowboy">>,{pkg,<<"prometheus_cowboy">>,<<"0.1.8">>},0},
{<<"prometheus_httpd">>,{pkg,<<"prometheus_httpd">>,<<"2.1.11">>},1},
{<<"ranch">>,{pkg,<<"ranch">>,<<"1.7.1">>},2},
{<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.3.1">>},2},
{<<"ranch">>,{pkg,<<"ranch">>,<<"1.8.0">>},2},
{<<"scoper">>,
{git,"https://github.com/rbkmoney/scoper.git",
{ref,"89a973bf3cedc5a48c9fd89d719d25e79fe10027"}},
{git,"https://github.com/valitydev/scoper.git",
{ref,"7f3183df279bc8181efe58dafd9cae164f495e6f"}},
0},
{<<"snowflake">>,
{git,"https://github.com/rbkmoney/snowflake.git",
{ref,"7f379ad5e389e1c96389a8d60bae8117965d6a6d"}},
{git,"https://github.com/valitydev/snowflake.git",
{ref,"de159486ef40cec67074afe71882bdc7f7deab72"}},
1},
{<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.5">>},2},
{<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.6">>},2},
{<<"thrift">>,
{git,"https://github.com/rbkmoney/thrift_erlang.git",
{ref,"846a0819d9b6d09d0c31f160e33a78dbad2067b4"}},
{git,"https://github.com/valitydev/thrift_erlang.git",
{ref,"c280ff266ae1c1906fb0dcee8320bb8d8a4a3c75"}},
1},
{<<"unicode_util_compat">>,{pkg,<<"unicode_util_compat">>,<<"0.4.1">>},3},
{<<"unicode_util_compat">>,{pkg,<<"unicode_util_compat">>,<<"0.7.0">>},2},
{<<"woody">>,
{git,"https://github.com/rbkmoney/woody_erlang.git",
{ref,"58f56b462429ab1fee65e1bdb34b73512406ba00"}},
{git,"https://github.com/valitydev/woody_erlang.git",
{ref,"3ddacb9296691aa8ddad05498d1fd34b078eda75"}},
0},
{<<"xrates_proto">>,
{git,"git@github.com:rbkmoney/xrates-proto.git",
{git,"https://github.com/valitydev/xrates-proto.git",
{ref,"66906cd0a8ee9a00fb447f3c3e5b09d3c6fab942"}},
0}]}.
[
{pkg_hash,[
{<<"accept">>, <<"B33B127ABCA7CC948BBE6CAA4C263369ABF1347CFA9D8E699C6D214660F10CD1">>},
{<<"bear">>, <<"16264309AE5D005D03718A5C82641FCC259C9E8F09ADEB6FD79CA4271168656F">>},
{<<"cache">>, <<"3C11DBF4CD8FCD5787C95A5FB2A04038E3729CFCA0386016EEA8C953AB48A5AB">>},
{<<"certifi">>, <<"867CE347F7C7D78563450A18A6A28A8090331E77FA02380B4A21962A65D36EE5">>},
{<<"cowboy">>, <<"F3DC62E35797ECD9AC1B50DB74611193C29815401E53BAC9A5C0577BD7BC667D">>},
{<<"cowlib">>, <<"61A6C7C50CF07FDD24B2F45B89500BB93B6686579B069A89F88CB211E1125C78">>},
{<<"gproc">>, <<"CEA02C578589C61E5341FCE149EA36CCEF236CC2ECAC8691FBA408E7EA77EC2F">>},
{<<"hackney">>, <<"07E33C794F8F8964EE86CEBEC1A8ED88DB5070E52E904B8F12209773C1036085">>},
{<<"idna">>, <<"689C46CBCDF3524C44D5F3DDE8001F364CD7608A99556D8FBD8239A5798D4C10">>},
{<<"jsx">>, <<"20A170ABD4335FC6DB24D5FAD1E5D677C55DADF83D1B20A8A33B5FE159892A39">>},
{<<"cache">>, <<"B23A5FE7095445A88412A6E614C933377E0137B44FFED77C9B3FEF1A731A20B2">>},
{<<"certifi">>, <<"D4FB0A6BB20B7C9C3643E22507E42F356AC090A1DCEA9AB99E27E0376D695EBA">>},
{<<"cowboy">>, <<"865DD8B6607E14CF03282E10E934023A1BD8BE6F6BACF921A7E2A96D800CD452">>},
{<<"cowlib">>, <<"0B9FF9C346629256C42EBE1EEB769A83C6CB771A6EE5960BD110AB0B9B872063">>},
{<<"gproc">>, <<"853CCB7805E9ADA25D227A157BA966F7B34508F386A3E7E21992B1B484230699">>},
{<<"hackney">>, <<"C4443D960BB9FBA6D01161D01CD81173089686717D9490E5D3606644C48D121F">>},
{<<"idna">>, <<"8A63070E9F7D0C62EB9D9FCB360A7DE382448200FBBD1B106CC96D3D8099DF8D">>},
{<<"jsx">>, <<"D12516BAA0BB23A59BB35DCCAF02A1BD08243FCBB9EFE24F2D9D056CCFF71268">>},
{<<"metrics">>, <<"25F094DEA2CDA98213CECC3AEFF09E940299D950904393B2A29D191C346A8486">>},
{<<"mimerl">>, <<"67E2D3F571088D5CFD3E550C383094B47159F3EEE8FFA08E64106CDF5E981BE3">>},
{<<"parse_trans">>, <<"09765507A3C7590A784615CFD421D101AEC25098D50B89D7AA1D66646BC571C1">>},
{<<"prometheus">>, <<"20510F381DB1CCAB818B4CF2FAC5FA6AB5CC91BC364A154399901C001465F46F">>},
{<<"prometheus_cowboy">>, <<"CFCE0BC7B668C5096639084FCD873826E6220EA714BF60A716F5BD080EF2A99C">>},
{<<"prometheus_httpd">>, <<"F616ED9B85B536B195D94104063025A91F904A4CFC20255363F49A197D96C896">>},
{<<"ranch">>, <<"6B1FAB51B49196860B733A49C07604465A47BDB78AA10C1C16A3D199F7F8C881">>},
{<<"ssl_verify_fun">>, <<"6EAF7AD16CB568BB01753DBBD7A95FF8B91C7979482B95F38443FE2C8852A79B">>},
{<<"unicode_util_compat">>, <<"D869E4C68901DD9531385BB0C8C40444EBF624E60B6962D95952775CAC5E90CD">>}]},
{<<"parse_trans">>, <<"16328AB840CC09919BD10DAB29E431DA3AF9E9E7E7E6F0089DD5A2D2820011D8">>},
{<<"ranch">>, <<"8C7A100A139FD57F17327B6413E4167AC559FBC04CA7448E9BE9057311597A1D">>},
{<<"ssl_verify_fun">>, <<"CF344F5692C82D2CD7554F5EC8FD961548D4FD09E7D22F5B62482E5AEAEBD4B0">>},
{<<"unicode_util_compat">>, <<"BC84380C9AB48177092F43AC89E4DFA2C6D62B40B8BD132B1059ECC7232F9A78">>}]},
{pkg_hash_ext,[
{<<"accept">>, <<"11B18C220BCC2EAB63B5470C038EF10EB6783BCB1FCDB11AA4137DEFA5AC1BB8">>},
{<<"bear">>, <<"534217DCE6A719D59E54FB0EB7A367900DBFC5F85757E8C1F94269DF383F6D9B">>},
{<<"cache">>, <<"3E7D6706DE5DF76C4D71C895B4BE62B01C3DE6EDB63197035E465C3BCE63F19B">>},
{<<"certifi">>, <<"805ABD97539CAF89EC6D4732C91E62BA9DA0CDA51AC462380BBD28EE697A8C42">>},
{<<"cowboy">>, <<"4643E4FBA74AC96D4D152C75803DE6FAD0B3FA5DF354C71AFDD6CBEEB15FAC8A">>},
{<<"cowlib">>, <<"E4175DC240A70D996156160891E1C62238EDE1729E45740BDD38064DAD476170">>},
{<<"gproc">>, <<"580ADAFA56463B75263EF5A5DF4C86AF321F68694E7786CB057FD805D1E2A7DE">>},
{<<"hackney">>, <<"E0100F8EF7D1124222C11AD362C857D3DF7CB5F4204054F9F0F4A728666591FC">>},
{<<"idna">>, <<"4BDD305EB64E18B0273864920695CB18D7A2021F31A11B9C5FBCD9A253F936E2">>},
{<<"jsx">>, <<"37BECA0435F5CA8A2F45F76A46211E76418FBEF80C36F0361C249FC75059DC6D">>},
{<<"cache">>, <<"44516CE6FA03594D3A2AF025DD3A87BFE711000EB730219E1DDEFC816E0AA2F4">>},
{<<"certifi">>, <<"6AC7EFC1C6F8600B08D625292D4BBF584E14847CE1B6B5C44D983D273E1097EA">>},
{<<"cowboy">>, <<"2C729F934B4E1AA149AFF882F57C6372C15399A20D54F65C8D67BEF583021BDE">>},
{<<"cowlib">>, <<"2B3E9DA0B21C4565751A6D4901C20D1B4CC25CBB7FD50D91D2AB6DD287BC86A9">>},
{<<"gproc">>, <<"587E8AF698CCD3504CF4BA8D90F893EDE2B0F58CABB8A916E2BF9321DE3CF10B">>},
{<<"hackney">>, <<"9AFCDA620704D720DB8C6A3123E9848D09C87586DC1C10479C42627B905B5C5E">>},
{<<"idna">>, <<"92376EB7894412ED19AC475E4A86F7B413C1B9FBB5BD16DCCD57934157944CEA">>},
{<<"jsx">>, <<"0C5CC8FDC11B53CC25CF65AC6705AD39E54ECC56D1C22E4ADB8F5A53FB9427F3">>},
{<<"metrics">>, <<"69B09ADDDC4F74A40716AE54D140F93BEB0FB8978D8636EADED0C31B6F099F16">>},
{<<"mimerl">>, <<"F278585650AA581986264638EBF698F8BB19DF297F66AD91B18910DFC6E19323">>},
{<<"parse_trans">>, <<"17EF63ABDE837AD30680EA7F857DD9E7CED9476CDD7B0394432AF4BFC241B960">>},
{<<"prometheus">>, <<"4905FD2992F8038ECCD7AA0CD22F40637ED618C0BED1F75C05AACEC15B7545DE">>},
{<<"prometheus_cowboy">>, <<"BA286BECA9302618418892D37BCD5DC669A6CC001F4EB6D6AF85FF81F3F4F34C">>},
{<<"prometheus_httpd">>, <<"0BBE831452CFDF9588538EB2F570B26F30C348ADAE5E95A7D87F35A5910BCF92">>},
{<<"ranch">>, <<"451D8527787DF716D99DC36162FCA05934915DB0B6141BBDAC2EA8D3C7AFC7D7">>},
{<<"ssl_verify_fun">>, <<"13104D7897E38ED7F044C4DE953A6C28597D1C952075EB2E328BC6D6F2BFC496">>},
{<<"unicode_util_compat">>, <<"1D1848C40487CDB0B30E8ED975E34E025860C02E419CB615D255849F3427439D">>}]}
{<<"parse_trans">>, <<"07CD9577885F56362D414E8C4C4E6BDF10D43A8767ABB92D24CBE8B24C54888B">>},
{<<"ranch">>, <<"49FBCFD3682FAB1F5D109351B61257676DA1A2FDBE295904176D5E521A2DDFE5">>},
{<<"ssl_verify_fun">>, <<"BDB0D2471F453C88FF3908E7686F86F9BE327D065CC1EC16FA4540197EA04680">>},
{<<"unicode_util_compat">>, <<"25EEE6D67DF61960CF6A794239566599B09E17E668D3700247BC498638152521">>}]}
].

View File

@ -3,6 +3,9 @@ service_name: machinegun
erlang:
secret_cookie_file: "/opt/machinegun/etc/cookie"
logging:
out_type: stdout
namespaces:
lim/config_v1:
processor: