mirror of
https://github.com/valitydev/elixir-thrift.git
synced 2024-11-06 10:15:17 +00:00
Remove tng thrift (#67)
* Revert "Added support for ordinals (#62)" This reverts commitb3ec59a87b
. * Revert "Add serialization test for i32, i64, double (#61)" This reverts commit2145c105e8
. * Revert "Fix CompactProtocol warning (#60)" This reverts commitf5c5a1067e
. * Revert "Support file argument to thrift.generate (#59)" This reverts commita35c8a723b
. * Revert "Pure elixir implementation (#54)" This reverts commit819216f830
.
This commit is contained in:
parent
4c71e984e7
commit
1f922d9f33
2
.gitignore
vendored
2
.gitignore
vendored
@ -1,8 +1,6 @@
|
||||
/_build
|
||||
/bench/snapshots
|
||||
/deps
|
||||
/doc
|
||||
/lib/generated
|
||||
/src/thrift_lexer.erl
|
||||
/src/thrift_parser.erl
|
||||
/test/fixtures/app/src
|
||||
|
28
TODO.md
28
TODO.md
@ -1,28 +0,0 @@
|
||||
Stuff left TODO:
|
||||
|
||||
* Riffed Parity
|
||||
* [ ] Typedef resolution support
|
||||
- Typedefs can't be resolved across files
|
||||
* [ ] Better Testing Story For Model Generation
|
||||
* [ ] Presently Model Generation Tests The _output_, it should test behavior
|
||||
* [ ] Enum Improvements
|
||||
* [ ] Should have a way to convert from name to ordinal
|
||||
* [ ] Should be able to match on ordinals as well as values
|
||||
* [ ] Model Improvements
|
||||
* [ ] Support for Unions
|
||||
* [ ] Typespecs for generated models
|
||||
* [ ] Correct handing of default values
|
||||
* [ ] Create `new` function for models that does type checking of fields
|
||||
* [ ] Binary Protocol
|
||||
* [ ] Binary protocol decoder
|
||||
* [ ] Improve benchmark tests
|
||||
* [ ] Add Message handing for encoding
|
||||
* [ ] Add Function call serialization
|
||||
* [ ] Framed Client
|
||||
* [ ] Framed Server
|
||||
|
||||
* Beyond Riffed
|
||||
* [ ] Finagle Client
|
||||
* [ ] Finagle Server
|
||||
* [ ] Thrift MUX Client
|
||||
* [ ] Thrift MUX Server
|
@ -1,97 +0,0 @@
|
||||
defmodule BinaryProtocolBenchmark do
|
||||
use Benchfella
|
||||
|
||||
@thrift_file_path "./test/fixtures/app/thrift/simple.thrift"
|
||||
import ParserUtils
|
||||
|
||||
setup_all do
|
||||
parse_thrift(@thrift_file_path)
|
||||
|> compile_module
|
||||
|
||||
{:ok, :ok}
|
||||
end
|
||||
|
||||
before_each_bench _ do
|
||||
user_options = [
|
||||
is_evil: true,
|
||||
user_id: 1234567,
|
||||
number_of_hairs_on_head: 26482,
|
||||
amount_of_red: 182,
|
||||
nineties_era_color: 24345,
|
||||
mint_gum: 28.282,
|
||||
username: "esteban",
|
||||
friends: [],
|
||||
# # my_map: %{1 => "abc", 2 => "def", 3 => "asldfkjlasdkjf"},
|
||||
# blocked_user_ids: [2234, 2345, 654365, 4356, 3456, 1234, 234, 2345, 3456, 4567],
|
||||
optional_integers: [2234, 2345, 654365, 4356, 3456, 1234, 234, 2345, 3456, 4567],
|
||||
]
|
||||
|
||||
erlang_users = for _ <- 1..1000 do
|
||||
user(:erlang, user_options)
|
||||
end
|
||||
|
||||
elixir_users = for _ <- 1..1000 do
|
||||
user(:elixir, user_options)
|
||||
end
|
||||
|
||||
user_binary = user(:elixir, user_options)
|
||||
|> serialize_user_elixir(convert_to_binary: true)
|
||||
|
||||
context = [
|
||||
elixir_users: elixir_users,
|
||||
erlang_users: erlang_users,
|
||||
user_binary: user_binary,
|
||||
]
|
||||
{:ok, context}
|
||||
end
|
||||
|
||||
bench "erlang serialization (converted to binary)" do
|
||||
for user <- bench_context[:erlang_users] do
|
||||
serialize_user_erlang(user, convert_to_binary: true)
|
||||
end
|
||||
:ok
|
||||
end
|
||||
|
||||
bench "erlang serialization left as IOList" do
|
||||
for user <- bench_context[:erlang_users] do
|
||||
serialize_user_erlang(user, convert_to_binary: false)
|
||||
end
|
||||
:ok
|
||||
end
|
||||
|
||||
bench "elixir serialization (iolist_size)" do
|
||||
for user <- bench_context[:elixir_users] do
|
||||
serialize_user_elixir(user, convert_to_binary: false)
|
||||
|> :erlang.iolist_size
|
||||
end
|
||||
:ok
|
||||
end
|
||||
|
||||
bench "elixir serialization (converted to binary)" do
|
||||
for user <- bench_context[:elixir_users] do
|
||||
serialize_user_elixir(user, convert_to_binary: true)
|
||||
end
|
||||
:ok
|
||||
end
|
||||
|
||||
bench "elixir serialization (left as IOList)" do
|
||||
for user <- bench_context[:elixir_users] do
|
||||
serialize_user_elixir(user, convert_to_binary: false)
|
||||
end
|
||||
:ok
|
||||
end
|
||||
|
||||
bench "erlang deserialization" do
|
||||
for _ <- 1..1000 do
|
||||
deserialize_user_erlang(bench_context[:user_binary])
|
||||
end
|
||||
:ok
|
||||
end
|
||||
|
||||
bench "elixir deserialization" do
|
||||
for _ <- 1..1000 do
|
||||
deserialize_user_elixir(bench_context[:user_binary])
|
||||
end
|
||||
:ok
|
||||
end
|
||||
end
|
@ -1,3 +1,3 @@
|
||||
#!/bin/bash
|
||||
rootdir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && pwd )"
|
||||
docker run -v ${rootdir}:/thrift-elixir -w /thrift-elixir/${DOCKER_THRIFT_OUT_ROOT} --rm thrift:0.9.3 thrift $*
|
||||
docker run -v ${rootdir}:/thrift-elixir -w /thrift-elixir/test/fixtures/app --rm thrift:0.9.3 thrift $*
|
||||
|
@ -1,8 +1,7 @@
|
||||
{
|
||||
"skip_files": [
|
||||
"ext",
|
||||
"src/thrift_lexer.erl",
|
||||
"src/thrift_parser.erl",
|
||||
"test/support"
|
||||
"ext",
|
||||
"src/thrift_lexer.erl",
|
||||
"src/thrift_parser.erl"
|
||||
]
|
||||
}
|
||||
|
@ -1,42 +0,0 @@
|
||||
defmodule Mix.Tasks.Thrift.Generate do
|
||||
use Mix.Task
|
||||
|
||||
@moduledoc """
|
||||
Generate Elixir modules from Thrift schema definitions.
|
||||
|
||||
Syntax:
|
||||
mix thrift.generate [options] myfile.thrift
|
||||
mix thrift.generate [options] dir_with_thrift_files
|
||||
|
||||
## Command line options
|
||||
* `--output-dir` - Directory under which to place generated .ex files. (Default: ./lib)
|
||||
"""
|
||||
|
||||
def run(args) do
|
||||
{opts, args, _} = OptionParser.parse(args)
|
||||
if args == [] do
|
||||
print_help
|
||||
exit :normal
|
||||
end
|
||||
[input | _] = args
|
||||
output_dir = Keyword.get(opts, :output_dir, "lib")
|
||||
|
||||
thrift_files = cond do
|
||||
File.dir?(input) ->
|
||||
Mix.Utils.extract_files([input], "*.thrift")
|
||||
File.regular?(input) ->
|
||||
[input]
|
||||
end
|
||||
|
||||
for thrift_file <- thrift_files do
|
||||
for output_file <- Thrift.Generator.generate!(thrift_file, output_dir) do
|
||||
Mix.shell.info "Generated #{output_file}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defp print_help do
|
||||
Mix.Task.moduledoc(__MODULE__)
|
||||
|> Mix.shell.info
|
||||
end
|
||||
end
|
67
lib/parser/file_group.ex
Normal file
67
lib/parser/file_group.ex
Normal file
@ -0,0 +1,67 @@
|
||||
defmodule Thrift.Parser.FileGroup do
|
||||
@moduledoc """
|
||||
Represents a group of parsed files. When you parse a file, it might include other thrift files.
|
||||
These files are in turn accumulated and parsed and added to this module.
|
||||
Additionally, this module allows resolution of the names of Structs / Enums / Unions etc across
|
||||
files.
|
||||
"""
|
||||
alias Thrift.Parser.{
|
||||
FileGroup,
|
||||
FileRef,
|
||||
Resolver,
|
||||
ParsedFile
|
||||
}
|
||||
|
||||
alias Thrift.Parser.Models.{
|
||||
Field,
|
||||
StructRef,
|
||||
Schema,
|
||||
}
|
||||
|
||||
@type t :: %FileGroup{
|
||||
parsed_files: %{FileRef.thrift_include => %ParsedFile{}},
|
||||
schemas: %{FileRef.thrift_include => %Schema{}}}
|
||||
|
||||
defstruct parsed_files: %{}, schemas: %{}, resolutions: %{}
|
||||
|
||||
def add(file_group, parsed_file) do
|
||||
file_group = add_includes(file_group, parsed_file)
|
||||
new_parsed_files = Map.put(file_group.parsed_files, parsed_file.name, parsed_file)
|
||||
new_schemas = Map.put(file_group.schemas, parsed_file.name, parsed_file.schema)
|
||||
|
||||
Resolver.add(parsed_file)
|
||||
%__MODULE__{file_group |
|
||||
parsed_files: new_parsed_files,
|
||||
schemas: new_schemas}
|
||||
end
|
||||
|
||||
def add_includes(%__MODULE__{} = group,
|
||||
%ParsedFile{schema: schema, file_ref: file_ref}) do
|
||||
|
||||
Enum.reduce(schema.includes, group, fn(include, file_group) ->
|
||||
parsed_file = file_ref.path
|
||||
|> Path.dirname
|
||||
|> Path.join(include.path)
|
||||
|> FileRef.new
|
||||
|> ParsedFile.new
|
||||
add(file_group, parsed_file)
|
||||
end)
|
||||
end
|
||||
|
||||
def resolve(%FileGroup{} = group, %Field{type: %StructRef{} = ref} = field) do
|
||||
%Field{field | type: resolve(group, ref)}
|
||||
end
|
||||
|
||||
def resolve(%FileGroup{resolutions: resolutions}, %StructRef{referenced_type: type_name}) do
|
||||
resolutions[type_name]
|
||||
end
|
||||
|
||||
def resolve(%FileGroup{resolutions: resolutions}, path) when is_atom(path) do
|
||||
resolutions[path]
|
||||
end
|
||||
|
||||
def resolve(_, other) do
|
||||
other
|
||||
end
|
||||
|
||||
end
|
562
lib/parser/models.ex
Normal file
562
lib/parser/models.ex
Normal file
@ -0,0 +1,562 @@
|
||||
defmodule Thrift.Parser.Types do
|
||||
@moduledoc """
|
||||
A container module for modules containing typespecs for Thrift files.
|
||||
"""
|
||||
defmodule Primitive do
|
||||
@moduledoc """
|
||||
Typespec for Thrift primitives
|
||||
"""
|
||||
@type t :: :bool | :i8 | :i16 | :i64 | :binary | :double | :byte | :string
|
||||
end
|
||||
|
||||
defmodule Ident do
|
||||
@moduledoc """
|
||||
A Thrift identifier
|
||||
"""
|
||||
@type t :: String.t
|
||||
end
|
||||
|
||||
defmodule Standalone do
|
||||
@moduledoc """
|
||||
A Thrift type that isn't a container
|
||||
"""
|
||||
@type t :: Ident.t | Primitive.t
|
||||
end
|
||||
|
||||
defmodule List do
|
||||
@moduledoc """
|
||||
A Thrift list.
|
||||
"""
|
||||
@type t :: {:list, Thrift.Parser.Types.t}
|
||||
end
|
||||
|
||||
defmodule Map do
|
||||
@moduledoc """
|
||||
A Thrift map
|
||||
"""
|
||||
@type t :: {:map, {Thrift.Parser.Types.t, Thrift.Parser.Types.t}}
|
||||
end
|
||||
|
||||
defmodule Set do
|
||||
@moduledoc """
|
||||
A Thrift set
|
||||
"""
|
||||
@type t :: {:set, Thrift.Parser.Types.t}
|
||||
end
|
||||
|
||||
defmodule Container do
|
||||
@moduledoc """
|
||||
A Thrift contianer type
|
||||
"""
|
||||
@type t :: List.t | Map.t | Set.t
|
||||
end
|
||||
|
||||
@type t :: Container.t | Standalone.t
|
||||
end
|
||||
|
||||
defmodule Thrift.Parser.Literals do
|
||||
@moduledoc """
|
||||
A module containing types for defining Thrift literals
|
||||
Thrift literals are used when setting default values and constants.
|
||||
"""
|
||||
defmodule Primitive do
|
||||
@moduledoc """
|
||||
A Thrift primitive type
|
||||
"""
|
||||
@type t :: integer | boolean | String.t | float
|
||||
end
|
||||
|
||||
defmodule List do
|
||||
@moduledoc """
|
||||
A Thrift list
|
||||
"""
|
||||
@type t :: [Thrift.Parser.Literals.t]
|
||||
end
|
||||
|
||||
defmodule Map do
|
||||
@moduledoc """
|
||||
A Thrift map
|
||||
"""
|
||||
@type t :: %{Thrift.Parser.Literals.t => Thrift.Parser.Literals.t}
|
||||
end
|
||||
|
||||
defmodule Container do
|
||||
@moduledoc """
|
||||
A Thrift container type
|
||||
"""
|
||||
@type t :: Map.t | List.t
|
||||
end
|
||||
|
||||
@type t :: Container.t | Primitive.t
|
||||
@type s :: atom
|
||||
end
|
||||
|
||||
defmodule Thrift.Parser.Conversions do
|
||||
@moduledoc """
|
||||
Conversion utilities useful for parsing Thrift.
|
||||
"""
|
||||
|
||||
@doc """
|
||||
Ensures that the argument is an atom.
|
||||
"""
|
||||
def atomify(nil), do: nil
|
||||
def atomify(l) when is_list(l) do
|
||||
List.to_atom(l)
|
||||
end
|
||||
|
||||
def cast(_, nil) do
|
||||
nil
|
||||
end
|
||||
|
||||
def cast(:double, val) do
|
||||
val
|
||||
end
|
||||
|
||||
def cast(:string, val) do
|
||||
List.to_string(val)
|
||||
end
|
||||
|
||||
def cast({:set, type}, val) do
|
||||
MapSet.new(val, &cast(type, &1))
|
||||
end
|
||||
|
||||
def cast({:map, {key_type, val_type}}, val) do
|
||||
Enum.into(val, %{}, fn {k, v} ->
|
||||
{cast(key_type, k), cast(val_type, v)}
|
||||
end)
|
||||
end
|
||||
|
||||
def cast(_, val) do
|
||||
val
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Thrift.Parser.Models do
|
||||
@moduledoc """
|
||||
Models used by the Thrift parser that represent different Thrift components.
|
||||
The models defined here are returned by the parse functions in the
|
||||
`Thrift.Parser` module.
|
||||
"""
|
||||
|
||||
alias Thrift.Parser.{Literals, Types}
|
||||
|
||||
defmodule Namespace do
|
||||
@moduledoc """
|
||||
A Thrift namespace.
|
||||
The namespace is a language-specific place where the generated structs are
|
||||
placed.
|
||||
"""
|
||||
|
||||
@type t :: %Namespace{name: String.t, path: String.t}
|
||||
defstruct name: nil, path: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list, char_list) :: %Namespace{}
|
||||
def new(name, path) do
|
||||
%Namespace{name: atomify(name), path: List.to_string(path)}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Include do
|
||||
@moduledoc """
|
||||
An included file.
|
||||
In Thrift, you can include other files to share structs, enums and the like.
|
||||
"""
|
||||
|
||||
@type t :: %Include{path: String.t}
|
||||
defstruct path: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list) :: %Include{}
|
||||
def new(path) do
|
||||
%Include{path: List.to_string(path)}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Constant do
|
||||
@moduledoc """
|
||||
A Thrift constant.
|
||||
Constants of any primitive or container type can be created in Thrift.
|
||||
"""
|
||||
|
||||
@type t :: %Constant{name: String.t, value: Literal.t, type: Types.t}
|
||||
defstruct name: nil, value: nil, type: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list, Literals.t, Types.t) :: %Constant{}
|
||||
def new(name, val, type) do
|
||||
%Constant{name: atomify(name), value: cast(type, val), type: type}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule TEnum do
|
||||
@moduledoc """
|
||||
A Thrift enumeration
|
||||
An enumeration contains names and (usually sequential) values, and
|
||||
allows you to map from one to the other.
|
||||
"""
|
||||
|
||||
@type enum_value :: bitstring | integer
|
||||
@type t :: %TEnum{name: String.t, values: %{String.t => enum_value}}
|
||||
defstruct name: nil, values: []
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list, %{char_list => enum_value}) :: %TEnum{}
|
||||
def new(name, values) do
|
||||
values = values
|
||||
|> Enum.with_index
|
||||
|> Enum.map(fn
|
||||
{{name, value}, _index} ->
|
||||
{atomify(name), value}
|
||||
|
||||
{name, index} ->
|
||||
{atomify(name), index + 1}
|
||||
end)
|
||||
|
||||
%TEnum{name: atomify(name), values: values}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Field do
|
||||
@moduledoc """
|
||||
A Thrift field.
|
||||
|
||||
Fields define a named type and can occur in functions, structs, unions,
|
||||
exceptions and the parameter list and `throws` clauses of functions.
|
||||
|
||||
Fields can refer to each other. These are represented by the FieldReference
|
||||
type.
|
||||
|
||||
This module also contains some utilities for validating and fixing up fields.
|
||||
"""
|
||||
|
||||
@type printable :: String.t | atom
|
||||
@type t :: %Field{id: integer, name: String.t, type: Types.t,
|
||||
required: boolean, default: Literals.t}
|
||||
defstruct id: nil, name: nil, type: nil, required: :default, default: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(integer, boolean, Types.t, char_list, Literals.t) :: %Field{}
|
||||
def new(id, required, type, name, default) do
|
||||
%Field{id: id,
|
||||
type: type,
|
||||
name: atomify(name),
|
||||
required: required,
|
||||
default: cast(type, default)}
|
||||
end
|
||||
|
||||
@spec build_field_list(printable, [%Field{}]) :: [%Field{}]
|
||||
def build_field_list(parent_name, fields) do
|
||||
fields
|
||||
|> update_ids(parent_name)
|
||||
|> validate_ids(parent_name)
|
||||
end
|
||||
|
||||
defp validate_ids(fields, name) do
|
||||
dupes = fields
|
||||
|> Enum.group_by(&(&1.id))
|
||||
|> Enum.filter(fn {_, v} -> length(v) > 1 end)
|
||||
|
||||
unless Enum.empty?(dupes) do
|
||||
{id, dupe_fields} = List.first(dupes)
|
||||
|
||||
names = dupe_fields
|
||||
|> Enum.map(&("#{name}.#{&1.name}"))
|
||||
|> Enum.sort
|
||||
|> Enum.join(", ")
|
||||
|
||||
raise "Error: #{names} share field number #{id}."
|
||||
end
|
||||
|
||||
fields
|
||||
end
|
||||
|
||||
defp update_ids(fields, parent_name) do
|
||||
alias Thrift.Parser.Shell
|
||||
fields
|
||||
|> Enum.with_index
|
||||
|> Enum.map(fn
|
||||
{%__MODULE__{} = field, idx} ->
|
||||
case field.id do
|
||||
nil ->
|
||||
Shell.warn "Warning: id not set for field '#{parent_name}.#{field.name}'."
|
||||
%__MODULE__{field | id: idx + 1}
|
||||
_ ->
|
||||
field
|
||||
end
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Exception do
|
||||
@moduledoc """
|
||||
A Thrift exception
|
||||
|
||||
Exceptions can happen when the remote service encounters an error.
|
||||
"""
|
||||
|
||||
@type t :: %Exception{name: String.t, fields: [%Field{}]}
|
||||
defstruct fields: %{}, name: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
alias Thrift.Parser.Models.Field
|
||||
|
||||
@spec new(char_list, [%Field{}, ...]) :: %Exception{}
|
||||
def new(name, fields) do
|
||||
ex_name = atomify(name)
|
||||
updated_fields = Field.build_field_list(ex_name, fields)
|
||||
|
||||
%Exception{name: ex_name, fields: updated_fields}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Struct do
|
||||
@moduledoc """
|
||||
A Thrift struct
|
||||
|
||||
The basic datastructure in Thrift, structs have aa name and a field list.
|
||||
"""
|
||||
|
||||
@type t :: %Struct{name: String.t, fields: %{String.t => %Field{}}}
|
||||
defstruct name: nil, fields: %{}
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
alias Thrift.Parser.Models.Field
|
||||
|
||||
@spec new(char_list, [%Field{}, ...]) :: %Struct{}
|
||||
def new(name, fields) do
|
||||
struct_name = atomify(name)
|
||||
fields = Field.build_field_list(struct_name, fields)
|
||||
|
||||
%Struct{name: struct_name, fields: fields}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Union do
|
||||
@moduledoc """
|
||||
A Thrift union
|
||||
|
||||
Unions can have one field set at a time.
|
||||
"""
|
||||
|
||||
@type t :: %Union{name: String.t, fields: %{String.t => %Field{}}}
|
||||
defstruct name: nil, fields: %{}
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
alias Thrift.Parser.Models.Field
|
||||
|
||||
@spec new(char_list, [%Field{}, ...]) :: %Union{}
|
||||
def new(name, fields) do
|
||||
name = atomify(name)
|
||||
|
||||
fields = name
|
||||
|> Field.build_field_list(fields)
|
||||
|> Enum.map(fn(%Field{} = field) ->
|
||||
# According to Thrift docs, unions have implicitly optional
|
||||
# fields. See https://thrift.apache.org/docs/idl#union
|
||||
%Field{field | required: false}
|
||||
end)
|
||||
|
||||
%Union{name: name, fields: fields}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule StructRef do
|
||||
@moduledoc """
|
||||
A reference to another struct.
|
||||
|
||||
While not a Thrift type, this represents when a Thrift type refers to
|
||||
another.
|
||||
"""
|
||||
|
||||
@type t :: %StructRef{referenced_type: String.t}
|
||||
defstruct referenced_type: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list) :: %StructRef{}
|
||||
def new(referenced_type) do
|
||||
%StructRef{referenced_type: atomify(referenced_type)}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Function do
|
||||
@moduledoc """
|
||||
A Thrift function
|
||||
|
||||
Functions are remote endpoints for Thrift services. They contain an argument list, exceptions and return a typed object.
|
||||
They can also be `oneway`, which means that Thrift doesn't have to wait for
|
||||
a reply from them.
|
||||
"""
|
||||
|
||||
@type return :: :void | Types.t
|
||||
@type t :: %Function{oneway: boolean, return_type: return, name: String.t,
|
||||
params: [%Field{}], exceptions: [%Exception{}]}
|
||||
defstruct oneway: false, return_type: :void, name: nil, params: [], exceptions: []
|
||||
alias Thrift.Parser.Models.Field
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(boolean, Types.t, char_list, [%Field{}, ...], [%Exception{}, ...]) :: %Function{}
|
||||
def new(oneway, return_type, name, params, exceptions) do
|
||||
name = atomify(name)
|
||||
params = Field.build_field_list(name, params)
|
||||
|
||||
%Function{
|
||||
oneway: oneway,
|
||||
return_type: return_type,
|
||||
name: name,
|
||||
params: params,
|
||||
exceptions: exceptions
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Service do
|
||||
@moduledoc """
|
||||
A Thrift service
|
||||
|
||||
Services hold RPC functions and can extend other services.
|
||||
"""
|
||||
|
||||
@type t :: %Service{name: String.t, extends: String.t, functions: %{atom => %Function{}}}
|
||||
defstruct name: nil, extends: nil, functions: %{}
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list, [%Function{}, ...], char_list) :: %Service{}
|
||||
def new(name, functions, extends) do
|
||||
fn_map = Enum.into(functions, %{}, fn(f) -> {f.name, f} end)
|
||||
%Service{name: atomify(name), extends: atomify(extends), functions: fn_map}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Schema do
|
||||
@moduledoc """
|
||||
A Thrift schema.
|
||||
|
||||
A program represents a single parsed file in Thrift.
|
||||
Many programs can be compiled together to build a Thrift service.
|
||||
|
||||
This is the root datastructure that the parser emits after running.
|
||||
"""
|
||||
|
||||
@type header :: %Include{} | %Namespace{}
|
||||
@type typedef :: {:typedef, Types.t, atom}
|
||||
@type definition :: %Service{} | %TEnum{} | %Exception{} | %Union{} | %Struct{} | %Constant{} | typedef
|
||||
@type model :: header | definition
|
||||
@type t :: %Schema{
|
||||
absolute_path: Path.t,
|
||||
module: String.t,
|
||||
thrift_namespace: String.t,
|
||||
namespaces: %{String.t => %Namespace{}},
|
||||
structs: %{String.t => %Struct{}},
|
||||
services: %{String.t => %Service{}},
|
||||
enums: %{String.t => %TEnum{}},
|
||||
unions: %{String.t => %Union{}},
|
||||
includes: [%Include{}],
|
||||
constants: %{String.t => Literals.t},
|
||||
exceptions: %{String.t => %Exception{}},
|
||||
typedefs: %{String.t => Types.t}
|
||||
}
|
||||
defstruct absolute_path: nil,
|
||||
module: nil,
|
||||
thrift_namespace: nil,
|
||||
namespaces: %{},
|
||||
structs: %{},
|
||||
services: %{},
|
||||
enums: %{},
|
||||
unions: %{},
|
||||
includes: [],
|
||||
constants: %{},
|
||||
exceptions: %{},
|
||||
typedefs: %{}
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
alias Thrift.Parser.Models.{Constant,
|
||||
Exception,
|
||||
Include,
|
||||
Namespace,
|
||||
Struct,
|
||||
TEnum,
|
||||
Union
|
||||
}
|
||||
|
||||
@doc """
|
||||
Constructs a schema with both headers and definitions.
|
||||
"""
|
||||
@spec new(Path.t, [header], [definition]) :: t
|
||||
def new(file_absolute_path, headers, defs) do
|
||||
orig_schema = %Schema{absolute_path: file_absolute_path,
|
||||
module: module_name(file_absolute_path)}
|
||||
|
||||
schema = headers
|
||||
|> Enum.reverse
|
||||
|> Enum.reduce(orig_schema, &merge(&2, &1))
|
||||
|
||||
defs
|
||||
|> Enum.reverse
|
||||
|> Enum.reduce(schema, &merge(&2, &1))
|
||||
end
|
||||
|
||||
defp module_name(nil), do: nil
|
||||
|
||||
defp module_name(path_name) when is_bitstring(path_name) do
|
||||
path_name
|
||||
|> Path.basename
|
||||
|> Path.rootname
|
||||
|> String.to_atom
|
||||
end
|
||||
|
||||
@spec merge(t, model) :: t
|
||||
defp merge(schema, %Include{} = inc) do
|
||||
%Schema{schema | includes: [inc | schema.includes]}
|
||||
end
|
||||
|
||||
defp merge(schema, %Namespace{} = ns) do
|
||||
%Schema{schema | namespaces: Map.put(schema.namespaces, ns.name, ns)}
|
||||
end
|
||||
|
||||
defp merge(schema, %Constant{} = const) do
|
||||
%Schema{schema | constants: Map.put(schema.constants, const.name, const)}
|
||||
end
|
||||
|
||||
defp merge(schema, %TEnum{} = enum) do
|
||||
%Schema{schema | enums: Map.put(schema.enums, enum.name, canonicalize_name(schema, enum))}
|
||||
end
|
||||
|
||||
defp merge(schema, %Exception{} = exc) do
|
||||
%Schema{schema | exceptions: Map.put(schema.exceptions, exc.name, canonicalize_name(schema, exc))}
|
||||
end
|
||||
|
||||
defp merge(schema, %Struct{} = s) do
|
||||
%Schema{schema | structs: Map.put(schema.structs, s.name, canonicalize_name(schema, s))}
|
||||
end
|
||||
|
||||
defp merge(schema, %Union{} = union) do
|
||||
%Schema{schema | unions: Map.put(schema.unions, union.name, canonicalize_name(schema, union))}
|
||||
end
|
||||
|
||||
defp merge(schema, %Service{} = service) do
|
||||
%Schema{schema | services: Map.put(schema.services, service.name, canonicalize_name(schema, service))}
|
||||
end
|
||||
|
||||
defp merge(schema, {:typedef, actual_type, type_alias}) do
|
||||
%Schema{schema | typedefs: Map.put(schema.typedefs, atomify(type_alias), actual_type)}
|
||||
end
|
||||
|
||||
defp canonicalize_name(%{module: nil}, model) do
|
||||
model
|
||||
end
|
||||
|
||||
defp canonicalize_name(schema, %{name: name} = model) do
|
||||
%{model | name: :"#{schema.module}.#{name}"}
|
||||
end
|
||||
end
|
||||
|
||||
@type all :: %Namespace{} | %Include{} | %Constant{} | %TEnum{} | %Field{} | %Exception{} | %Struct{} | %Union{} | %Function{} | %Service{} | %Schema{}
|
||||
end
|
@ -5,7 +5,7 @@ defmodule Thrift.Parser do
|
||||
|
||||
@type path_element :: String.t | atom
|
||||
|
||||
alias Thrift.Parser.{FileGroup, FileRef, Models, ParsedFile}
|
||||
alias Thrift.Parser.{FileGroup, FileRef, Models, ParsedFile, Resolver}
|
||||
alias Thrift.Parser.Models.Schema
|
||||
|
||||
@doc """
|
||||
@ -52,13 +52,17 @@ defmodule Thrift.Parser do
|
||||
"""
|
||||
@spec parse_file(Path.t) :: %FileGroup{}
|
||||
def parse_file(file_path) do
|
||||
Resolver.start_link()
|
||||
parsed_file = file_path
|
||||
|> FileRef.new
|
||||
|> ParsedFile.new
|
||||
|
||||
file_group = FileGroup.new(file_path)
|
||||
file_group = %FileGroup{}
|
||||
|> FileGroup.add(parsed_file)
|
||||
|
||||
FileGroup.update_resolutions(file_group)
|
||||
resolutions = Resolver.get()
|
||||
Resolver.stop()
|
||||
|
||||
%{file_group | resolutions: resolutions}
|
||||
end
|
||||
end
|
@ -8,38 +8,32 @@ defmodule Thrift.Parser.Resolver do
|
||||
alias Thrift.Parser.ParsedFile
|
||||
|
||||
def start_link do
|
||||
Agent.start_link(&Map.new/0)
|
||||
Agent.start_link(&Map.new/0, name: __MODULE__)
|
||||
end
|
||||
|
||||
def stop(pid) do
|
||||
Agent.stop(pid)
|
||||
def stop do
|
||||
Agent.stop(__MODULE__)
|
||||
end
|
||||
|
||||
def add(pid, %ParsedFile{} = f) do
|
||||
Agent.update(pid, fn(state) ->
|
||||
def add(%ParsedFile{} = f) do
|
||||
Agent.update(__MODULE__, fn(state) ->
|
||||
state
|
||||
|> update(f.name, f.schema.services)
|
||||
|> update(f.name, f.schema.structs)
|
||||
|> update(f.name, f.schema.exceptions)
|
||||
|> update(f.name, f.schema.unions)
|
||||
|> update(f.name, f.schema.enums)
|
||||
|> update(f.name, f.schema.typedefs)
|
||||
end)
|
||||
end
|
||||
|
||||
def get(pid) do
|
||||
Agent.get(pid, &(&1))
|
||||
def get do
|
||||
Agent.get(__MODULE__, &(&1))
|
||||
end
|
||||
|
||||
defp update(%{} = state, include_name, %{} = local_mappings) do
|
||||
new_mappings = local_mappings
|
||||
|> Map.new(fn {name, val} ->
|
||||
case val do
|
||||
val when is_atom(val) ->
|
||||
{:"#{include_name}.#{name}", val}
|
||||
val when is_map(val) ->
|
||||
{:"#{include_name}.#{name}", Map.put(val, :name, :"#{include_name}.#{name}")}
|
||||
end
|
||||
{:"#{include_name}.#{name}", Map.put(val, :name, :"#{include_name}.#{name}")}
|
||||
end)
|
||||
|
||||
Map.merge(state, new_mappings)
|
@ -1,81 +0,0 @@
|
||||
defmodule Thrift.Generator do
|
||||
alias Thrift.Parser.FileGroup
|
||||
alias Thrift.Generator.EnumGenerator
|
||||
alias Thrift.Generator.StructGenerator
|
||||
|
||||
def generate!(thrift_filename, output_dir) when is_bitstring(thrift_filename) do
|
||||
thrift_filename
|
||||
|> Thrift.Parser.parse_file
|
||||
|> generate!(output_dir)
|
||||
end
|
||||
|
||||
def generate!(%FileGroup{}=file_group, output_dir) do
|
||||
Enum.flat_map(file_group.schemas, fn {_, schema} ->
|
||||
schema
|
||||
|> Map.put(:file_group, file_group)
|
||||
|> generate_schema
|
||||
|> write_schema_to_file(output_dir)
|
||||
end)
|
||||
end
|
||||
|
||||
def generate_to_string!(%FileGroup{}=file_group) do
|
||||
Enum.flat_map(file_group.schemas, fn {_, schema} ->
|
||||
schema
|
||||
|> Map.put(:file_group, file_group)
|
||||
|> generate_schema
|
||||
end)
|
||||
|> Enum.reverse
|
||||
|> Enum.map(fn {_, code} ->
|
||||
Macro.to_string(code)
|
||||
end)
|
||||
|> Enum.join("\n")
|
||||
end
|
||||
|
||||
def generate_schema(schema) do
|
||||
List.flatten([
|
||||
generate_enum_modules(schema),
|
||||
generate_struct_modules(schema),
|
||||
generate_exception_modules(schema),
|
||||
])
|
||||
end
|
||||
|
||||
defp write_schema_to_file(generated_modules, output_dir) do
|
||||
generated_modules
|
||||
|> Enum.map(fn {name, quoted} ->
|
||||
filename = name
|
||||
|> inspect
|
||||
|> String.split(".")
|
||||
|> Enum.map(&Macro.underscore/1)
|
||||
|> Path.join
|
||||
|> Kernel.<>(".ex")
|
||||
source = Macro.to_string(quoted)
|
||||
|
||||
path = Path.join(output_dir, filename)
|
||||
path |> Path.dirname |> File.mkdir_p!
|
||||
path |> File.write!(source)
|
||||
|
||||
filename
|
||||
end)
|
||||
end
|
||||
|
||||
defp generate_enum_modules(schema) do
|
||||
for {_, enum} <- schema.enums do
|
||||
full_name = FileGroup.dest_module(schema.file_group, enum)
|
||||
{full_name, EnumGenerator.generate(full_name, enum)}
|
||||
end
|
||||
end
|
||||
|
||||
defp generate_struct_modules(schema) do
|
||||
for {_, struct} <- schema.structs do
|
||||
full_name = FileGroup.dest_module(schema.file_group, struct)
|
||||
{full_name, StructGenerator.generate("struct", schema, full_name, struct)}
|
||||
end
|
||||
end
|
||||
|
||||
defp generate_exception_modules(schema) do
|
||||
for {_, exception} <- schema.exceptions do
|
||||
full_name = FileGroup.dest_module(schema.file_group, exception)
|
||||
{full_name, StructGenerator.generate("exception", schema, full_name, exception)}
|
||||
end
|
||||
end
|
||||
end
|
@ -1,52 +0,0 @@
|
||||
defmodule Thrift.Generator.EnumGenerator do
|
||||
|
||||
def generate(name, enum) do
|
||||
macro_defs = Enum.map(enum.values, fn {key, value} ->
|
||||
macro_name = to_name(key)
|
||||
quote do
|
||||
defmacro unquote(Macro.var(macro_name, nil)), do: unquote(value)
|
||||
end
|
||||
end)
|
||||
|
||||
member_defs = Enum.map(enum.values, fn {_key, value} ->
|
||||
quote do
|
||||
def member?(unquote(value)), do: true
|
||||
end
|
||||
end)
|
||||
|
||||
value_to_name_defs = Enum.map(enum.values, fn {key, value} ->
|
||||
enum_name = to_name(key)
|
||||
quote do
|
||||
def value_to_name(unquote(value)), do: {:ok, unquote(enum_name)}
|
||||
end
|
||||
end)
|
||||
|
||||
names = enum.values
|
||||
|> Keyword.keys
|
||||
|> Enum.map(&to_name/1)
|
||||
|
||||
quote do
|
||||
defmodule unquote(name) do
|
||||
@moduledoc unquote("Auto-generated Thrift enum #{enum.name}")
|
||||
unquote_splicing(macro_defs)
|
||||
|
||||
unquote_splicing(value_to_name_defs)
|
||||
def value_to_name(v), do: {:error, {:invalid_enum_value, v}}
|
||||
|
||||
def value_to_name!(value) do
|
||||
{:ok, name} = value_to_name(value)
|
||||
name
|
||||
end
|
||||
|
||||
def names, do: unquote(names)
|
||||
|
||||
unquote_splicing(member_defs)
|
||||
def member?(_), do: false
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defp to_name(key) do
|
||||
key |> to_string |> String.downcase |> String.to_atom
|
||||
end
|
||||
end
|
@ -1,635 +0,0 @@
|
||||
defmodule Thrift.Generator.StructBinaryProtocol do
  @moduledoc """
  This module implements code generation of binary protocol deserialization.

  Consider a Thrift struct.

      struct MyStruct {
        1: i32 num;
        2: list<i32> nums;
        3: map<string, OtherStruct> structs;
      }

  You could visual this as a tree of types like the following.

      struct (MyStruct)
      ├ i32
      ├ list
      │ └ i32
      └ map
        ├ key
        │ └ string
        └ value
          └ struct (OtherStruct)

  We care about the edges of this graph. This module needs to know how to
  implement deserializers for each transition from one type to another.

    - struct -> i32
    - struct -> list
    - list -> i32
    - struct -> map
    - map key -> string
    - map value -> struct

  For the struct at the root of the graph, we generate a deserializer for each
  field. Other structs are leaf nodes in the graph. Rather than generating the
  deserialization logic inline, we make a call to the module we expect to have
  been generated for that struct.
  """

  alias Thrift.Generator.Utils
  alias Thrift.Parser.FileGroup
  alias Thrift.Parser.Models.{
    # Exception,
    Field,
    Struct,
    StructRef,
    TEnum,
  }

  @doc """
  Generate a deserializer for a Thrift struct or exception.

  At the moment it also generates an experimental serializer that may be faster.

  Emits quoted `serialize/1`, `deserialize/1`, and `bool_to_int/1` definitions,
  plus one private `deserialize/2` clause per field (built by
  `field_deserializer/4` below).
  """
  def struct_deserializer(%{fields: fields}, name, file_group) do
    # One {field_name, var} pair per field, used to destructure the struct in
    # the generated serialize/1 head.
    field_matchers = Enum.map(fields, fn %Field{name: name} ->
      {name, Macro.var(name, nil)}
    end)

    # AST for `%Name{field1: field1, ...}` — the serialize/1 argument pattern.
    struct_matcher = {:%, [], [name, {:%{}, [], field_matchers}]}

    # Per-field serializer: nil fields emit nothing; otherwise emit the
    # one-byte type id, the 16-bit field id, and the serialized value.
    field_serializers = Enum.map(fields, fn %Field{name: name, type: type, id: id} ->
      var = Macro.var(name, nil)
      quote do
        case unquote(var) do
          nil ->
            <<>>
          _ ->
            unquote([
              quote do <<unquote(type_id(type, file_group)), unquote(id) :: size(16)>> end,
              value_serializer(type, var, file_group)
            ] |> Utils.merge_binaries |> Utils.simplify_iolist)
        end
      end
    end)

    # One private deserialize/2 clause per field, flattened out of the quoted
    # blocks so they can be spliced in below.
    field_deserializers = fields
    |> Enum.map(&field_deserializer(&1.type, &1, :deserialize, file_group))
    |> Utils.merge_blocks

    quote do
      def serialize(unquote(struct_matcher)) do
        # Trailing <<0>> is the field-stop byte terminating the struct.
        unquote([field_serializers, <<0>>] |> Utils.merge_binaries)
      end
      def bool_to_int(false), do: 0
      def bool_to_int(nil), do: 0
      def bool_to_int(_), do: 1

      def deserialize(binary) do
        deserialize(binary, %unquote(name){})
      end
      # A leading 0 byte is the field-stop marker: the struct is complete.
      defp deserialize(<<0, rest::binary>>, acc=%unquote(name){}) do
        {acc, rest}
      end
      unquote_splicing(field_deserializers)
    end
  end

  # field_deserializer/4 clauses
  #
  # Each clause emits private function clauses (named `name`) that match the
  # one-byte type id (see type_id/2 at the bottom) followed by the 16-bit
  # field id, bind the decoded value, and recurse with the field stored in
  # the accumulator struct.

  def field_deserializer(:bool, field, name, _file_group) do
    quote do
      defp unquote(name)(<<2, unquote(field.id)::size(16), 1, rest::binary>>, acc) do
        unquote(name)(rest, %{acc | unquote(field.name) => true})
      end
      defp unquote(name)(<<2, unquote(field.id)::size(16), 0, rest::binary>>, acc) do
        unquote(name)(rest, %{acc | unquote(field.name) => false})
      end
    end
  end
  # :byte is wire-identical to :i8.
  def field_deserializer(:byte, field, name, file_group) do
    field_deserializer(:i8, field, name, file_group)
  end
  def field_deserializer(:i8, field, name, _file_group) do
    quote do
      defp unquote(name)(<<3, unquote(field.id)::size(16), value, rest::binary>>, acc) do
        unquote(name)(rest, %{acc | unquote(field.name) => value})
      end
    end
  end
  def field_deserializer(:double, field, name, _file_group) do
    quote do
      defp unquote(name)(<<4, unquote(field.id)::size(16), value::signed-float, rest::binary>>, acc) do
        unquote(name)(rest, %{acc | unquote(field.name) => value})
      end
    end
  end
  def field_deserializer(:i16, field, name, _file_group) do
    quote do
      defp unquote(name)(<<6, unquote(field.id)::size(16), value::size(16), rest::binary>>, acc) do
        unquote(name)(rest, %{acc | unquote(field.name) => value})
      end
    end
  end
  def field_deserializer(:i32, field, name, _file_group) do
    quote do
      defp unquote(name)(<<8, unquote(field.id)::size(16), value::size(32), rest::binary>>, acc) do
        unquote(name)(rest, %{acc | unquote(field.name) => value})
      end
    end
  end
  # Enums are carried as i32 on the wire.
  def field_deserializer(%TEnum{}, field, name, file_group) do
    field_deserializer(:i32, field, name, file_group)
  end
  def field_deserializer(:i64, field, name, _file_group) do
    quote do
      defp unquote(name)(<<10, unquote(field.id)::size(16), value::size(64), rest::binary>>, acc) do
        unquote(name)(rest, %{acc | unquote(field.name) => value})
      end
    end
  end
  # :binary is wire-identical to :string.
  def field_deserializer(:binary, field, name, file_group) do
    field_deserializer(:string, field, name, file_group)
  end
  def field_deserializer(:string, field, name, _file_group) do
    quote do
      defp unquote(name)(<<11, unquote(field.id)::16-signed, string_size::32-signed, rest::binary>>, acc) do
        <<value::binary-size(string_size), rest::binary>> = rest
        unquote(name)(rest, %{acc | unquote(field.name) => value})
      end
    end
  end
  # Nested structs delegate to that struct's own generated BinaryProtocol module.
  def field_deserializer(struct=%Struct{}, field, name, file_group) do
    dest_module = FileGroup.dest_module(file_group, struct)
    quote do
      defp unquote(name)(<<12, unquote(field.id)::16-signed, rest::binary>>, acc) do
        {value, rest} = unquote(dest_module).BinaryProtocol.deserialize(rest)
        unquote(name)(rest, %{acc | unquote(field.name) => value})
      end
    end
  end
  # Maps thread an explicit [accumulator, remaining-count, outer-struct] stack
  # through alternating key/value deserializer functions.
  def field_deserializer({:map, {key_type, value_type}}, field, name, file_group) do
    key_name = :"#{name}__#{field.name}__key"
    value_name = :"#{name}__#{field.name}__value"
    quote do
      defp unquote(name)(<<13,
                          unquote(field.id)::size(16),
                          unquote(type_id(key_type, file_group)),
                          unquote(type_id(value_type, file_group)),
                          map_size::size(32),
                          rest::binary>>, struct) do
        unquote(key_name)(rest, [%{}, map_size, struct])
      end
      # Count exhausted: store the finished map and resume the field loop.
      defp unquote(key_name)(<<rest::binary>>, [map, 0, struct]) do
        unquote(name)(rest, %{struct | unquote(field.name) => map})
      end
      unquote(map_key_deserializer(key_type, key_name, value_name, file_group))
      unquote(map_value_deserializer(value_type, key_name, value_name, file_group))
    end
  end
  # Sets are decoded like lists, then converted to a MapSet at the end.
  def field_deserializer({:set, element_type}, field, name, file_group) do
    sub_name = :"#{name}__#{field.name}"
    quote do
      defp unquote(name)(<<14, unquote(field.id)::size(16), unquote(type_id(element_type, file_group)), remaining::size(32), rest::binary>>, struct) do
        unquote(sub_name)(rest, [[], remaining, struct])
      end
      defp unquote(sub_name)(<<rest::binary>>, [list, 0, struct]) do
        unquote(name)(rest, %{struct | unquote(field.name) => MapSet.new(Enum.reverse(list))})
      end
      unquote(list_deserializer(element_type, sub_name, file_group))
    end
  end
  # Lists accumulate elements in reverse (prepend), reversing once when done.
  def field_deserializer({:list, element_type}, field, name, file_group) do
    sub_name = :"#{name}__#{field.name}"
    quote do
      defp unquote(name)(<<15, unquote(field.id)::size(16), unquote(type_id(element_type, file_group)), remaining::size(32), rest::binary>>, struct) do
        unquote(sub_name)(rest, [[], remaining, struct])
      end
      defp unquote(sub_name)(<<rest::binary>>, [list, 0, struct]) do
        unquote(name)(rest, %{struct | unquote(field.name) => Enum.reverse(list)})
      end
      unquote(list_deserializer(element_type, sub_name, file_group))
    end
  end
  # Named type references are resolved to their concrete type, then re-dispatched.
  def field_deserializer(%StructRef{referenced_type: type}, field, name, file_group) do
    FileGroup.resolve(file_group, type)
    |> field_deserializer(field, name, file_group)
  end


  # map_key_deserializer/4 clauses
  #
  # Each clause emits a `key_name` function that decodes one key and hands it
  # to `value_name` along with the in-progress stack.

  def map_key_deserializer(:bool, key_name, value_name, _file_group) do
    quote do
      defp unquote(key_name)(<<0, rest::binary>>, stack) do
        unquote(value_name)(rest, false, stack)
      end
      defp unquote(key_name)(<<1, rest::binary>>, stack) do
        unquote(value_name)(rest, true, stack)
      end
    end
  end
  def map_key_deserializer(:byte, key_name, value_name, file_group) do
    map_key_deserializer(:i8, key_name, value_name, file_group)
  end
  def map_key_deserializer(:double, key_name, value_name, _file_group) do
    quote do
      defp unquote(key_name)(<<key::signed-float, rest::binary>>, stack) do
        unquote(value_name)(rest, key, stack)
      end
    end
  end
  def map_key_deserializer(:i8, key_name, value_name, _file_group) do
    quote do
      defp unquote(key_name)(<<key, rest::binary>>, stack) do
        unquote(value_name)(rest, key, stack)
      end
    end
  end
  def map_key_deserializer(:i16, key_name, value_name, _file_group) do
    quote do
      defp unquote(key_name)(<<key::size(16), rest::binary>>, stack) do
        unquote(value_name)(rest, key, stack)
      end
    end
  end
  def map_key_deserializer(:i32, key_name, value_name, _file_group) do
    quote do
      defp unquote(key_name)(<<key::size(32), rest::binary>>, stack) do
        unquote(value_name)(rest, key, stack)
      end
    end
  end
  def map_key_deserializer(%TEnum{}, key_name, value_name, file_group) do
    map_key_deserializer(:i32, key_name, value_name, file_group)
  end
  def map_key_deserializer(:i64, key_name, value_name, _file_group) do
    quote do
      defp unquote(key_name)(<<key::size(64), rest::binary>>, stack) do
        unquote(value_name)(rest, key, stack)
      end
    end
  end
  def map_key_deserializer(:string, key_name, value_name, _file_group) do
    quote do
      defp unquote(key_name)(<<string_size::32-signed, rest::binary>>, stack) do
        <<key::binary-size(string_size), rest::binary>> = rest
        unquote(value_name)(rest, key, stack)
      end
    end
  end
  def map_key_deserializer(struct=%Struct{}, key_name, value_name, file_group) do
    dest_module = FileGroup.dest_module(file_group, struct)
    quote do
      defp unquote(key_name)(<<rest::binary>>, stack) do
        {key, rest} = unquote(dest_module).BinaryProtocol.deserialize(rest)
        unquote(value_name)(rest, key, stack)
      end
    end
  end
  # Map-typed keys push a fresh [%{}, remaining] frame onto the stack.
  def map_key_deserializer({:map, {key_type, value_type}}, key_name, value_name, file_group) do
    child_key_name = :"#{key_name}__key"
    child_value_name = :"#{key_name}__value"
    quote do
      defp unquote(key_name)(<<unquote(type_id(key_type, file_group)), unquote(type_id(value_type, file_group)), remaining::size(32), rest::binary>>, stack) do
        unquote(child_key_name)(rest, [%{}, remaining | stack])
      end
      defp unquote(child_key_name)(<<rest::binary>>, [key, 0 | stack]) do
        unquote(value_name)(rest, key, stack)
      end
      unquote(map_key_deserializer(key_type, child_key_name, child_value_name, file_group))
      unquote(map_value_deserializer(value_type, child_key_name, child_value_name, file_group))
    end
  end
  def map_key_deserializer({:set, element_type}, key_name, value_name, file_group) do
    sub_name = :"#{key_name}__element"
    quote do
      defp unquote(key_name)(<<unquote(type_id(element_type, file_group)), remaining::size(32), rest::binary>>, stack) do
        unquote(sub_name)(rest, [[], remaining | stack])
      end
      defp unquote(sub_name)(<<rest::binary>>, [key, 0 | stack]) do
        unquote(value_name)(rest, MapSet.new(Enum.reverse(key)), stack)
      end
      unquote(list_deserializer(element_type, sub_name, file_group))
    end
  end
  def map_key_deserializer({:list, element_type}, key_name, value_name, file_group) do
    sub_name = :"#{key_name}__element"
    quote do
      defp unquote(key_name)(<<unquote(type_id(element_type, file_group)), remaining::size(32), rest::binary>>, stack) do
        unquote(sub_name)(rest, [[], remaining | stack])
      end
      defp unquote(sub_name)(<<rest::binary>>, [key, 0 | stack]) do
        unquote(value_name)(rest, Enum.reverse(key), stack)
      end
      unquote(list_deserializer(element_type, sub_name, file_group))
    end
  end
  def map_key_deserializer(%StructRef{referenced_type: type}, key_name, value_name, file_group) do
    FileGroup.resolve(file_group, type)
    |> map_key_deserializer(key_name, value_name, file_group)
  end


  # map_value_deserializer/4 clauses
  #
  # Each clause emits a `value_name` function that decodes one value, stores
  # the key/value pair in the map on top of the stack, decrements the
  # remaining count, and loops back to `key_name`.

  def map_value_deserializer(:bool, key_name, value_name, _file_group) do
    quote do
      defp unquote(value_name)(<<0, rest::binary>>, key, [map, remaining | stack]) do
        unquote(key_name)(rest, [Map.put(map, key, false), remaining - 1 | stack])
      end
      defp unquote(value_name)(<<1, rest::binary>>, key, [map, remaining | stack]) do
        unquote(key_name)(rest, [Map.put(map, key, true), remaining - 1 | stack])
      end
    end
  end
  def map_value_deserializer(:byte, key_name, value_name, file_group) do
    map_value_deserializer(:i8, key_name, value_name, file_group)
  end
  def map_value_deserializer(:double, key_name, value_name, _file_group) do
    quote do
      defp unquote(value_name)(<<value::signed-float, rest::binary>>, key, [map, remaining | stack]) do
        unquote(key_name)(rest, [Map.put(map, key, value), remaining - 1 | stack])
      end
    end
  end
  def map_value_deserializer(:i8, key_name, value_name, _file_group) do
    quote do
      defp unquote(value_name)(<<value, rest::binary>>, key, [map, remaining | stack]) do
        unquote(key_name)(rest, [Map.put(map, key, value), remaining - 1 | stack])
      end
    end
  end
  def map_value_deserializer(:i16, key_name, value_name, _file_group) do
    quote do
      defp unquote(value_name)(<<value::size(16), rest::binary>>, key, [map, remaining | stack]) do
        unquote(key_name)(rest, [Map.put(map, key, value), remaining - 1 | stack])
      end
    end
  end
  def map_value_deserializer(:i32, key_name, value_name, _file_group) do
    quote do
      defp unquote(value_name)(<<value::size(32), rest::binary>>, key, [map, remaining | stack]) do
        unquote(key_name)(rest, [Map.put(map, key, value), remaining - 1 | stack])
      end
    end
  end
  def map_value_deserializer(%TEnum{}, key_name, value_name, file_group) do
    map_value_deserializer(:i32, key_name, value_name, file_group)
  end
  def map_value_deserializer(:i64, key_name, value_name, _file_group) do
    quote do
      defp unquote(value_name)(<<value::size(64), rest::binary>>, key, [map, remaining | stack]) do
        unquote(key_name)(rest, [Map.put(map, key, value), remaining - 1 | stack])
      end
    end
  end
  def map_value_deserializer(:string, key_name, value_name, _file_group) do
    quote do
      defp unquote(value_name)(<<string_size::32-signed, rest::binary>>, key, [map, remaining | stack]) do
        <<value::binary-size(string_size), rest::binary>> = rest
        unquote(key_name)(rest, [Map.put(map, key, value), remaining - 1 | stack])
      end
    end
  end
  def map_value_deserializer(struct=%Struct{}, key_name, value_name, file_group) do
    dest_module = FileGroup.dest_module(file_group, struct)
    quote do
      defp unquote(value_name)(<<rest::binary>>, key, [map, remaining | stack]) do
        {value, rest} = unquote(dest_module).BinaryProtocol.deserialize(rest)
        unquote(key_name)(rest, [Map.put(map, key, value), remaining - 1 | stack])
      end
    end
  end
  # Map-typed values push [%{}, remaining, key] so the pending key survives
  # while the nested map is decoded.
  def map_value_deserializer({:map, {key_type, value_type}}, key_name, value_name, file_group) do
    child_key_name = :"#{value_name}__key"
    child_value_name = :"#{value_name}__value"
    quote do
      defp unquote(value_name)(<<unquote(type_id(key_type, file_group)), unquote(type_id(value_type, file_group)), remaining::size(32), rest::binary>>, key, stack) do
        unquote(child_key_name)(rest, [%{}, remaining, key | stack])
      end
      defp unquote(child_key_name)(<<rest::binary>>, [value, 0, key, map, remaining | stack]) do
        unquote(key_name)(rest, [Map.put(map, key, value), remaining - 1 | stack])
      end
      unquote(map_key_deserializer(key_type, child_key_name, child_value_name, file_group))
      unquote(map_value_deserializer(value_type, child_key_name, child_value_name, file_group))
    end
  end
  def map_value_deserializer({:set, element_type}, key_name, value_name, file_group) do
    sub_name = :"#{value_name}__element"
    quote do
      defp unquote(value_name)(<<unquote(type_id(element_type, file_group)), remaining::size(32), rest::binary>>, key, stack) do
        unquote(sub_name)(rest, [[], remaining, key | stack])
      end
      defp unquote(sub_name)(<<rest::binary>>, [value, 0, key, map, remaining | stack]) do
        unquote(key_name)(rest, [Map.put(map, key, MapSet.new(Enum.reverse(value))), remaining - 1 | stack])
      end
      unquote(list_deserializer(element_type, sub_name, file_group))
    end
  end
  def map_value_deserializer({:list, element_type}, key_name, value_name, file_group) do
    sub_name = :"#{value_name}__element"
    quote do
      defp unquote(value_name)(<<unquote(type_id(element_type, file_group)), remaining::size(32), rest::binary>>, key, stack) do
        unquote(sub_name)(rest, [[], remaining, key | stack])
      end
      defp unquote(sub_name)(<<rest::binary>>, [value, 0, key, map, remaining | stack]) do
        unquote(key_name)(rest, [Map.put(map, key, Enum.reverse(value)), remaining - 1 | stack])
      end
      unquote(list_deserializer(element_type, sub_name, file_group))
    end
  end
  def map_value_deserializer(%StructRef{referenced_type: type}, key_name, value_name, file_group) do
    FileGroup.resolve(file_group, type)
    |> map_value_deserializer(key_name, value_name, file_group)
  end


  # list_deserializer/3 clauses
  #
  # Each clause emits a `name` function that decodes one element, prepends it
  # to the list on top of the stack, and decrements the remaining count.
  # Elements end up reversed; callers reverse once when the count hits zero.

  def list_deserializer(:bool, name, _file_group) do
    quote do
      defp unquote(name)(<<0, rest::binary>>, [list, remaining | stack]) do
        unquote(name)(rest, [[false | list], remaining - 1 | stack])
      end
      defp unquote(name)(<<1, rest::binary>>, [list, remaining | stack]) do
        unquote(name)(rest, [[true | list], remaining - 1 | stack])
      end
    end
  end
  def list_deserializer(:byte, name, file_group) do
    list_deserializer(:i8, name, file_group)
  end
  def list_deserializer(:double, name, _file_group) do
    quote do
      defp unquote(name)(<<element::signed-float, rest::binary>>, [list, remaining | stack]) do
        unquote(name)(rest, [[element | list], remaining - 1 | stack])
      end
    end
  end
  def list_deserializer(:i8, name, _file_group) do
    quote do
      defp unquote(name)(<<element::size(8), rest::binary>>, [list, remaining | stack]) do
        unquote(name)(rest, [[element | list], remaining - 1 | stack])
      end
    end
  end
  def list_deserializer(:i16, name, _file_group) do
    quote do
      defp unquote(name)(<<element::size(16), rest::binary>>, [list, remaining | stack]) do
        unquote(name)(rest, [[element | list], remaining - 1 | stack])
      end
    end
  end
  def list_deserializer(:i32, name, _file_group) do
    quote do
      defp unquote(name)(<<element::size(32), rest::binary>>, [list, remaining | stack]) do
        unquote(name)(rest, [[element | list], remaining - 1 | stack])
      end
    end
  end
  def list_deserializer(%TEnum{}, name, file_group) do
    list_deserializer(:i32, name, file_group)
  end
  def list_deserializer(:i64, name, _file_group) do
    quote do
      defp unquote(name)(<<element::size(64), rest::binary>>, [list, remaining | stack]) do
        unquote(name)(rest, [[element | list], remaining - 1 | stack])
      end
    end
  end
  def list_deserializer(:string, name, _file_group) do
    quote do
      defp unquote(name)(<<string_size::32-signed, rest::binary>>, [list, remaining | stack]) do
        <<element::binary-size(string_size), rest::binary>> = rest
        unquote(name)(rest, [[element | list], remaining - 1 | stack])
      end
    end
  end
  def list_deserializer(struct=%Struct{}, name, file_group) do
    dest_module = FileGroup.dest_module(file_group, struct)
    quote do
      defp unquote(name)(<<rest::binary>>, [list, remaining | stack]) do
        {element, rest} = unquote(dest_module).BinaryProtocol.deserialize(rest)
        unquote(name)(rest, [[element | list], remaining - 1 | stack])
      end
    end
  end
  def list_deserializer({:map, {key_type, value_type}}, name, file_group) do
    key_name = :"#{name}__key"
    value_name = :"#{name}__value"
    quote do
      defp unquote(name)(<<unquote(type_id(key_type, file_group)),
                          unquote(type_id(value_type, file_group)),
                          inner_remaining::size(32),
                          rest::binary>>,
                        [list, remaining | stack]) do
        unquote(key_name)(rest, [%{}, inner_remaining, list, remaining | stack])
      end
      defp unquote(key_name)(<<rest::binary>>, [map, 0, list, remaining | stack]) do
        unquote(name)(rest, [[map | list], remaining - 1 | stack])
      end
      unquote(map_key_deserializer(key_type, key_name, value_name, file_group))
      unquote(map_value_deserializer(value_type, key_name, value_name, file_group))
    end
  end
  def list_deserializer({:set, element_type}, name, file_group) do
    sub_name = :"#{name}__element"
    quote do
      defp unquote(name)(<<unquote(type_id(element_type, file_group)), inner_remaining::size(32), rest::binary>>, [list, remaining | stack]) do
        unquote(sub_name)(rest, [[], inner_remaining, list, remaining | stack])
      end
      defp unquote(sub_name)(<<rest::binary>>, [inner_list, 0, list, remaining | stack]) do
        unquote(name)(rest, [[MapSet.new(Enum.reverse(inner_list)) | list], remaining - 1 | stack])
      end
      unquote(list_deserializer(element_type, sub_name, file_group))
    end
  end
  def list_deserializer({:list, element_type}, name, file_group) do
    sub_name = :"#{name}__element"
    quote do
      defp unquote(name)(<<unquote(type_id(element_type, file_group)), inner_remaining::size(32), rest::binary>>, [list, remaining | stack]) do
        unquote(sub_name)(rest, [[], inner_remaining, list, remaining | stack])
      end
      defp unquote(sub_name)(<<rest::binary>>, [inner_list, 0, list, remaining | stack]) do
        unquote(name)(rest, [[Enum.reverse(inner_list) | list], remaining - 1 | stack])
      end
      unquote(list_deserializer(element_type, sub_name, file_group))
    end
  end
  def list_deserializer(%StructRef{referenced_type: type}, name, file_group) do
    FileGroup.resolve(file_group, type)
    |> list_deserializer(name, file_group)
  end


  # value_serializer/3 clauses
  #
  # Each clause emits the quoted expression that serializes a single value of
  # the given type (not including the type-id/field-id header, which the
  # caller emits).

  def value_serializer(:bool, var, _file_group), do: quote do: <<bool_to_int(unquote(var))>>
  def value_serializer(:byte, var, _file_group), do: quote do: <<unquote(var) :: 8-signed>>
  def value_serializer(:i8, var, _file_group), do: quote do: <<unquote(var) :: 8-signed>>
  def value_serializer(:double, var, _file_group), do: quote do: <<unquote(var) :: signed-float>>
  def value_serializer(:i16, var, _file_group), do: quote do: <<unquote(var) :: 16-signed>>
  def value_serializer(:i32, var, _file_group), do: quote do: <<unquote(var) :: 32-signed>>
  def value_serializer(%TEnum{}, var, _file_group), do: quote do: <<unquote(var) :: 32-signed>>
  def value_serializer(:i64, var, _file_group), do: quote do: <<unquote(var) :: 64-signed>>
  def value_serializer(:binary, var, _file_group), do: quote do: [<<byte_size(unquote(var)) :: size(32)>>, unquote(var)]
  def value_serializer(:string, var, _file_group), do: quote do: [<<byte_size(unquote(var)) :: size(32)>>, unquote(var)]
  def value_serializer({:map, {key_type, val_type}}, var, file_group) do
    quote do
      [
        <<unquote(type_id(key_type, file_group)),
          unquote(type_id(val_type, file_group)),
          map_size(unquote(var)) :: size(32)>>,
        for {unquote(Macro.var(:k, nil)), unquote(Macro.var(:v, nil))} <- unquote(var) do
          unquote([
            value_serializer(key_type, Macro.var(:k, nil), file_group),
            value_serializer(val_type, Macro.var(:v, nil), file_group),
          ] |> Utils.merge_binaries |> Utils.simplify_iolist)
        end
      ]
    end
  end
  def value_serializer({:set, type}, var, file_group) do
    quote do
      [
        <<unquote(type_id(type, file_group)), MapSet.size(unquote(var)) :: size(32)>>,
        for unquote(Macro.var(:e, nil)) <- unquote(var) do
          unquote(value_serializer(type, Macro.var(:e, nil), file_group) |> Utils.merge_binaries |> Utils.simplify_iolist)
        end,
      ]
    end
  end
  def value_serializer({:list, type}, var, file_group) do
    quote do
      [
        <<unquote(type_id(type, file_group)), length(unquote(var)) :: size(32)>>,
        for unquote(Macro.var(:e, nil)) <- unquote(var) do
          unquote(value_serializer(type, Macro.var(:e, nil), file_group) |> Utils.merge_binaries |> Utils.simplify_iolist)
        end,
      ]
    end
  end
  def value_serializer(struct=%Struct{name: _name}, var, file_group) do
    dest_module = FileGroup.dest_module(file_group, struct)
    quote do
      unquote(dest_module).serialize(unquote(var))
    end
  end
  def value_serializer(%StructRef{referenced_type: type}, var, file_group) do
    FileGroup.resolve(file_group, type)
    |> value_serializer(var, file_group)
  end


  # type_id/2 — the one-byte binary-protocol type ids matched and emitted by
  # all of the serializers/deserializers above.

  def type_id(:bool, _file_group), do: 2
  def type_id(:byte, _file_group), do: 3
  def type_id(:i8, _file_group), do: 3
  def type_id(:double, _file_group), do: 4
  def type_id(:i16, _file_group), do: 6
  def type_id(:i32, _file_group), do: 8
  def type_id(%TEnum{}, _file_group), do: 8
  def type_id(:i64, _file_group), do: 10
  def type_id(:string, _file_group), do: 11
  def type_id(:binary, _file_group), do: 11
  def type_id(%Struct{}, _file_group), do: 12
  def type_id({:map, _}, _file_group), do: 13
  def type_id({:set, _}, _file_group), do: 14
  def type_id({:list, _}, _file_group), do: 15
  def type_id(%StructRef{referenced_type: type}, file_group) do
    FileGroup.resolve(file_group, type)
    |> type_id(file_group)
  end
end
|
@ -1,105 +0,0 @@
|
||||
defmodule Thrift.Generator.StructGenerator do
  @moduledoc """
  Generates the Elixir module for a Thrift struct or exception: a `defstruct`
  with default values, a nested `BinaryProtocol` module, and
  `serialize`/`deserialize` wrappers delegating to it.
  """

  alias Thrift.Generator.StructBinaryProtocol
  alias Thrift.Generator.Utils
  alias Thrift.Parser.FileGroup

  # Emits the quoted module for `struct`, named `name`.  `label` is the
  # kind string ("struct" or "exception") embedded in the generated header.
  def generate(label, schema, name, struct) do
    # defstruct field list: declared default if present, otherwise the
    # zero value for the field's type.
    struct_parts = Enum.map(struct.fields, fn
      %{name: name, default: nil, type: type} ->
        {name, zero(schema, type)}
      %{name: name, default: default} when not is_nil(default) ->
        {name, default}
    end)

    binary_protocol_defs = [
      StructBinaryProtocol.struct_deserializer(struct, name, schema.file_group),
    ]
    |> Utils.merge_blocks
    |> Utils.sort_defs

    quote do
      defmodule unquote(name) do
        # `_ = "..."` lines bake a human-readable description of the struct
        # and each field into the generated source.
        _ = unquote "Auto-generated Thrift #{label} #{struct.name}"
        unquote_splicing(for field <- struct.fields do
          quote do
            _ = unquote "#{field.id}: #{to_thrift(field.type, schema.file_group)} #{field.name}"
          end
        end)
        defstruct unquote(struct_parts)
        def new, do: %__MODULE__{}
        defmodule BinaryProtocol do
          unquote_splicing(binary_protocol_defs)
        end
        def serialize(struct) do
          BinaryProtocol.serialize(struct)
        end
        def serialize(struct, :binary) do
          BinaryProtocol.serialize(struct)
        end
        def deserialize(binary) do
          BinaryProtocol.deserialize(binary)
        end
      end
    end
  end

  # Zero values for built-in types
  defp zero(_schema, :bool), do: nil
  defp zero(_schema, :byte), do: nil
  defp zero(_schema, :i8), do: nil
  defp zero(_schema, :i16), do: nil
  defp zero(_schema, :i32), do: nil
  defp zero(_schema, :i64), do: nil
  defp zero(_schema, :double), do: nil
  defp zero(_schema, :string), do: nil
  defp zero(_schema, :binary), do: nil
  defp zero(_schema, {:map, _}), do: nil
  defp zero(_schema, {:list, _}), do: nil
  defp zero(_schema, {:set, _}), do: quote do: nil
  # Enums default to the first declared value.
  defp zero(_schema, %{values: [{_, value} | _]}), do: value
  defp zero(_schema, %Thrift.Parser.Models.Struct{}), do: nil

  # Zero values for user defined types
  defp zero(schema, %{referenced_type: type}=ref) do
    cond do
      # Local references
      Map.has_key?(schema.enums, type) ->
        zero(schema, schema.enums[type])
      Map.has_key?(schema.typedefs, type) ->
        zero(schema, schema.typedefs[type])
      Map.has_key?(schema.structs, type) ->
        quote do: nil

      # Included references
      true ->
        case FileGroup.resolve(schema.file_group, ref) do
          nil ->
            raise "Unknown type: #{inspect type}"
          thing ->
            zero(schema, thing)
        end
    end
  end

  # Renders a parsed type back into Thrift IDL syntax for the generated
  # per-field description strings above.
  def to_thrift(base_type, _file_group) when is_atom(base_type) do
    Atom.to_string(base_type)
  end
  def to_thrift({:map, {key_type, val_type}}, file_group) do
    "map<#{to_thrift key_type, file_group},#{to_thrift val_type, file_group}>"
  end
  def to_thrift({:set, element_type}, file_group) do
    "set<#{to_thrift element_type, file_group}>"
  end
  def to_thrift({:list, element_type}, file_group) do
    "list<#{to_thrift element_type, file_group}>"
  end
  def to_thrift(%Thrift.Parser.Models.TEnum{name: name}, _file_group) do
    "#{name}"
  end
  def to_thrift(%Thrift.Parser.Models.Struct{name: name}, _file_group) do
    "#{name}"
  end
  def to_thrift(%Thrift.Parser.Models.StructRef{referenced_type: type}, file_group) do
    FileGroup.resolve(file_group, type) |> to_thrift(file_group)
  end
end
|
@ -1,89 +0,0 @@
|
||||
defmodule Thrift.Generator.Utils do
  @moduledoc """
  Collection of utilities for working with generated code.
  """

  @doc """
  Flattens `:__block__` nodes out of a list of quoted statements.

  When nesting a quote with multiple defs into another quote, the defs end up
  wrapped in blocks:

      (
        def foo, do: 1
        def bar, do: 2
      )
      def baz, do: 3

  Running the statements through `merge_blocks/1` yields a flat list:

      def foo, do: 1
      def bar, do: 2
      def baz, do: 3
  """
  def merge_blocks(statements) when is_list(statements) do
    Enum.flat_map(statements, fn
      {:__block__, _, contents} -> merge_blocks(contents)
      statement -> [statement]
    end)
  end

  @doc """
  Sort a list of quoted def/defp function clauses by name and arity. When
  similar clauses are not grouped together, Elixir prints a warning.
  """
  def sort_defs(statements) do
    Enum.sort_by(statements, &def_sort_key/1)
  end

  # Sort key for one quoted statement: {function_name, arity} for def/defp
  # clauses (with or without guards), nil for `_ = ...` marker statements.
  defp def_sort_key({:def, _, [{:when, _, [{name, _, args} | _]} | _]}), do: {name, length(args)}
  defp def_sort_key({:defp, _, [{:when, _, [{name, _, args} | _]} | _]}), do: {name, length(args)}
  defp def_sort_key({:def, _, [{name, _, args} | _]}), do: {name, length(args)}
  defp def_sort_key({:defp, _, [{name, _, args} | _]}), do: {name, length(args)}
  defp def_sort_key({:=, _, [{:_, _, _} | _]}), do: nil

  @doc """
  Merge the binaries in an iolist.

      ["a", "b", ["c", [var]]] => ["abc", var]
  """
  # Flatten a nested list at the head.
  def merge_binaries([head | tail]) when is_list(head) do
    merge_binaries(head ++ tail)
  end

  # Flatten a nested list in second position.
  def merge_binaries([head, next | tail]) when is_list(next) do
    merge_binaries([head | next ++ tail])
  end

  # Fuse two adjacent quoted binaries (`<<>>` AST nodes) into one.
  def merge_binaries([{:<<>>, [], left}, {:<<>>, [], right} | tail]) do
    merge_binaries([{:<<>>, [], left ++ right} | tail])
  end

  # Head is not mergeable: keep it and continue with the tail.
  def merge_binaries([head | tail]) do
    [head | merge_binaries(tail)]
  end

  # Non-list (or empty) input passes through unchanged.
  def merge_binaries(other) do
    other
  end

  @doc """
  Unwraps a single-element iolist whose only member is a quoted binary;
  anything else is returned unchanged.
  """
  def simplify_iolist([{:<<>>, _, _} = binary]) do
    binary
  end

  def simplify_iolist(other) do
    other
  end
end
|
@ -1,50 +0,0 @@
|
||||
defmodule Thrift.Parser.Conversions do
|
||||
@moduledoc """
|
||||
Conversion utilities useful for parsing Thrift.
|
||||
"""
|
||||
|
||||
@doc """
|
||||
Ensures that the argument is an atom.
|
||||
"""
|
||||
def atomify(nil), do: nil
|
||||
def atomify(l) when is_list(l) do
|
||||
List.to_atom(l)
|
||||
end
|
||||
|
||||
def cast(_, nil) do
|
||||
nil
|
||||
end
|
||||
|
||||
def cast(type, %{}=val) do
|
||||
# this is for TEnumValues
|
||||
%{val | type: type}
|
||||
end
|
||||
|
||||
def cast(:double, val) do
|
||||
val
|
||||
end
|
||||
|
||||
def cast(:string, val) do
|
||||
List.to_string(val)
|
||||
end
|
||||
|
||||
def cast({:set, type}, val) do
|
||||
MapSet.new(val, &cast(type, &1))
|
||||
end
|
||||
|
||||
def cast({:map, {key_type, val_type}}, val) do
|
||||
Enum.into(val, %{}, fn {k, v} ->
|
||||
{cast(key_type, k), cast(val_type, v)}
|
||||
end)
|
||||
end
|
||||
|
||||
def cast({:list, elem_type}, val) do
|
||||
Enum.map(val, fn elem ->
|
||||
cast(elem_type, elem)
|
||||
end)
|
||||
end
|
||||
|
||||
def cast(_, val) do
|
||||
val
|
||||
end
|
||||
end
|
@ -1,153 +0,0 @@
|
||||
defmodule Thrift.Parser.FileGroup do
|
||||
@moduledoc """
|
||||
Represents a group of parsed files. When you parse a file, it might include other thrift files.
|
||||
These files are in turn accumulated and parsed and added to this module.
|
||||
Additionally, this module allows resolution of the names of Structs / Enums / Unions etc across
|
||||
files.
|
||||
"""
|
||||
alias Thrift.Parser.{
|
||||
FileGroup,
|
||||
FileRef,
|
||||
Resolver,
|
||||
ParsedFile
|
||||
}
|
||||
|
||||
alias Thrift.Parser.Models.{
|
||||
TEnum,
|
||||
Exception,
|
||||
Field,
|
||||
Namespace,
|
||||
StructRef,
|
||||
Schema,
|
||||
Struct,
|
||||
}
|
||||
|
||||
@type t :: %FileGroup{
|
||||
resolver: pid,
|
||||
initial_file: Path.t,
|
||||
parsed_files: %{FileRef.thrift_include => %ParsedFile{}},
|
||||
schemas: %{FileRef.thrift_include => %Schema{}},
|
||||
ns_mappings: %{atom => %Namespace{}}
|
||||
}
|
||||
|
||||
defstruct resolver: nil, initial_file: nil, parsed_files: %{}, schemas: %{}, resolutions: %{}, ns_mappings: %{}
|
||||
|
||||
def new(initial_file) do
|
||||
{:ok, resolver} = Resolver.start_link()
|
||||
%FileGroup{initial_file: initial_file, resolver: resolver}
|
||||
end
|
||||
|
||||
def add(file_group, parsed_file) do
|
||||
file_group = add_includes(file_group, parsed_file)
|
||||
new_parsed_files = Map.put(file_group.parsed_files, parsed_file.name, parsed_file)
|
||||
new_schemas = Map.put(file_group.schemas, parsed_file.name, parsed_file.schema)
|
||||
|
||||
Resolver.add(file_group.resolver, parsed_file)
|
||||
%__MODULE__{file_group |
|
||||
parsed_files: new_parsed_files,
|
||||
schemas: new_schemas}
|
||||
end
|
||||
|
||||
def add_includes(%__MODULE__{} = group,
|
||||
%ParsedFile{schema: schema, file_ref: file_ref}) do
|
||||
|
||||
Enum.reduce(schema.includes, group, fn(include, file_group) ->
|
||||
parsed_file = file_ref.path
|
||||
|> Path.dirname
|
||||
|> Path.join(include.path)
|
||||
|> FileRef.new
|
||||
|> ParsedFile.new
|
||||
add(file_group, parsed_file)
|
||||
end)
|
||||
end
|
||||
|
||||
def update_resolutions(file_group) do
|
||||
# since in a file, we can refer to things defined in that file in a non-qualified
|
||||
# way, we add unqualified names to the resolutions map.
|
||||
resolutions = Resolver.get(file_group.resolver)
|
||||
to_update = resolutions
|
||||
|> Enum.map(fn {name, v}=kvp ->
|
||||
case String.split(Atom.to_string(name), ".") do
|
||||
[_initial_module, rest] ->
|
||||
{:"#{rest}", v}
|
||||
_ ->
|
||||
kvp
|
||||
end
|
||||
end)
|
||||
|> Map.new
|
||||
|
||||
resolutions = Map.merge(resolutions, to_update)
|
||||
ns_mappings = build_ns_mappings(file_group.schemas)
|
||||
Resolver.stop(file_group.resolver)
|
||||
|
||||
%FileGroup{file_group |
|
||||
resolutions: resolutions,
|
||||
ns_mappings: ns_mappings}
|
||||
end
|
||||
|
||||
for type <- [:bool, :byte, :i8, :i16, :i32, :i64, :double, :string, :binary] do
|
||||
def resolve(_, unquote(type)), do: unquote(type)
|
||||
end
|
||||
def resolve(%FileGroup{} = group, %Field{type: %StructRef{} = ref} = field) do
|
||||
%Field{field | type: resolve(group, ref)}
|
||||
end
|
||||
def resolve(%FileGroup{}=group, %Field{type: {:list, elem_type}}=field) do
|
||||
%Field{field | type: {:list, resolve(group, elem_type)}}
|
||||
end
|
||||
def resolve(%FileGroup{}=group, %Field{type: {:set, elem_type}}=field) do
|
||||
%Field{field | type: {:set, resolve(group, elem_type)}}
|
||||
end
|
||||
def resolve(%FileGroup{}=group, %Field{type: {:map, {key_type, val_type}}}=field) do
|
||||
%Field{field | type: {:map, {resolve(group, key_type), resolve(group, val_type)}}}
|
||||
end
|
||||
def resolve(%FileGroup{resolutions: resolutions}, %StructRef{referenced_type: type_name}) do
|
||||
resolutions[type_name]
|
||||
end
|
||||
def resolve(%FileGroup{resolutions: resolutions}, path) when is_atom(path) do
|
||||
# this can resolve local mappings like :Weather or
|
||||
# remote mappings like :"common.Weather"
|
||||
resolutions[path]
|
||||
end
|
||||
def resolve(_, other) do
|
||||
other
|
||||
end
|
||||
|
||||
def dest_module(file_group, %Struct{name: name}) do
|
||||
dest_module(file_group, name)
|
||||
end
|
||||
|
||||
def dest_module(file_group, %Exception{name: name}) do
|
||||
dest_module(file_group, name)
|
||||
end
|
||||
|
||||
def dest_module(file_group, %TEnum{name: name}) do
|
||||
dest_module(file_group, name)
|
||||
end
|
||||
|
||||
def dest_module(file_group, name) do
|
||||
[thrift_module, struct_name] = name
|
||||
|> Atom.to_string
|
||||
|> String.split(".")
|
||||
|> Enum.map(&String.to_atom/1)
|
||||
|
||||
case file_group.ns_mappings[thrift_module] do
|
||||
nil ->
|
||||
Module.concat(Elixir, struct_name)
|
||||
namespace = %Namespace{} ->
|
||||
namespace.path
|
||||
|> String.split(".")
|
||||
|> Enum.map(&Macro.camelize/1)
|
||||
|> Enum.join(".")
|
||||
|> String.to_atom
|
||||
|> Module.concat(struct_name)
|
||||
end
|
||||
end
|
||||
|
||||
defp build_ns_mappings(schemas) do
|
||||
schemas
|
||||
|> Enum.map(fn {module_name, %Schema{namespaces: ns}} ->
|
||||
{String.to_atom(module_name), ns[:elixir]}
|
||||
end)
|
||||
|> Map.new
|
||||
end
|
||||
end
|
@ -1,36 +0,0 @@
|
||||
defmodule Thrift.Parser.Literals do
|
||||
@moduledoc """
|
||||
A module containing types for defining Thrift literals
|
||||
Thrift literals are used when setting default values and constants.
|
||||
"""
|
||||
defmodule Primitive do
|
||||
@moduledoc """
|
||||
A Thrift primitive type
|
||||
"""
|
||||
@type t :: integer | boolean | String.t | float
|
||||
end
|
||||
|
||||
defmodule List do
|
||||
@moduledoc """
|
||||
A Thrift list
|
||||
"""
|
||||
@type t :: [Thrift.Parser.Literals.t]
|
||||
end
|
||||
|
||||
defmodule Map do
|
||||
@moduledoc """
|
||||
A Thrift map
|
||||
"""
|
||||
@type t :: %{Thrift.Parser.Literals.t => Thrift.Parser.Literals.t}
|
||||
end
|
||||
|
||||
defmodule Container do
|
||||
@moduledoc """
|
||||
A Thrift container type
|
||||
"""
|
||||
@type t :: Map.t | List.t
|
||||
end
|
||||
|
||||
@type t :: Container.t | Primitive.t
|
||||
@type s :: atom
|
||||
end
|
@ -1,452 +0,0 @@
|
||||
defmodule Thrift.Parser.Models do
|
||||
@moduledoc """
|
||||
Models used by the Thrift parser that represent different Thrift components.
|
||||
The models defined here are returned by the parse functions in the
|
||||
`Thrift.Parser` module.
|
||||
"""
|
||||
|
||||
alias Thrift.Parser.{Literals, Types}
|
||||
|
||||
defmodule Namespace do
|
||||
@moduledoc """
|
||||
A Thrift namespace.
|
||||
The namespace is a language-specific place where the generated structs are
|
||||
placed.
|
||||
"""
|
||||
|
||||
@type t :: %Namespace{name: String.t, path: String.t}
|
||||
defstruct name: nil, path: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list, char_list) :: %Namespace{}
|
||||
def new(name, path) do
|
||||
%Namespace{name: atomify(name), path: List.to_string(path)}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Include do
|
||||
@moduledoc """
|
||||
An included file.
|
||||
In Thrift, you can include other files to share structs, enums and the like.
|
||||
"""
|
||||
|
||||
@type t :: %Include{path: String.t}
|
||||
defstruct path: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list) :: %Include{}
|
||||
def new(path) do
|
||||
%Include{path: List.to_string(path)}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Constant do
|
||||
@moduledoc """
|
||||
A Thrift constant.
|
||||
Constants of any primitive or container type can be created in Thrift.
|
||||
"""
|
||||
|
||||
@type t :: %Constant{name: String.t, value: Literal.t, type: Types.t}
|
||||
defstruct name: nil, value: nil, type: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list, Literals.t, Types.t) :: %Constant{}
|
||||
def new(name, val, type) do
|
||||
%Constant{name: atomify(name), value: cast(type, val), type: type}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule TEnum do
|
||||
@moduledoc """
|
||||
A Thrift enumeration
|
||||
An enumeration contains names and (usually sequential) values, and
|
||||
allows you to map from one to the other.
|
||||
"""
|
||||
|
||||
@type enum_value :: bitstring | integer
|
||||
@type t :: %TEnum{name: String.t, values: %{String.t => enum_value}}
|
||||
defstruct name: nil, values: []
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list, %{char_list => enum_value}) :: %TEnum{}
|
||||
def new(name, values) do
|
||||
values = values
|
||||
|> Enum.with_index
|
||||
|> Enum.map(fn
|
||||
{{name, value}, _index} ->
|
||||
{atomify(name), value}
|
||||
|
||||
{name, index} ->
|
||||
{atomify(name), index + 1}
|
||||
end)
|
||||
|
||||
%TEnum{name: atomify(name), values: values}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule TEnumValue do
|
||||
@moduledoc """
|
||||
A reference to an enum value
|
||||
For example, in a constant or default value.
|
||||
|
||||
const string DEFAULT_WEATHER = Weather.SUNNY;
|
||||
"""
|
||||
@type t :: %TEnumValue{enum_name: atom, enum_value: atom, type: atom}
|
||||
defstruct enum_name: nil, enum_value: nil, type: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list) :: %TEnumValue{}
|
||||
def new(enum_value) do
|
||||
[enum_name, enum_value] = enum_value
|
||||
|> List.to_string
|
||||
|> String.split(".")
|
||||
|> Enum.map(&String.to_atom/1)
|
||||
|
||||
%TEnumValue{enum_name: enum_name, enum_value: enum_value}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Field do
|
||||
@moduledoc """
|
||||
A Thrift field.
|
||||
|
||||
Fields define a named type and can occur in functions, structs, unions,
|
||||
exceptions and the parameter list and `throws` clauses of functions.
|
||||
|
||||
Fields can refer to each other. These are represented by the FieldReference
|
||||
type.
|
||||
|
||||
This module also contains some utilities for validating and fixing up fields.
|
||||
"""
|
||||
|
||||
@type printable :: String.t | atom
|
||||
@type t :: %Field{id: integer, name: String.t, type: Types.t,
|
||||
required: boolean, default: Literals.t}
|
||||
defstruct id: nil, name: nil, type: nil, required: :default, default: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(integer, boolean, Types.t, char_list, Literals.t) :: %Field{}
|
||||
def new(id, required, type, name, default) do
|
||||
%Field{id: id,
|
||||
type: type,
|
||||
name: atomify(name),
|
||||
required: required,
|
||||
default: cast(type, default)}
|
||||
end
|
||||
|
||||
@spec build_field_list(printable, [%Field{}]) :: [%Field{}]
|
||||
def build_field_list(parent_name, fields) do
|
||||
fields
|
||||
|> update_ids(parent_name)
|
||||
|> validate_ids(parent_name)
|
||||
end
|
||||
|
||||
defp validate_ids(fields, name) do
|
||||
dupes = fields
|
||||
|> Enum.group_by(&(&1.id))
|
||||
|> Enum.filter(fn {_, v} -> length(v) > 1 end)
|
||||
|
||||
unless Enum.empty?(dupes) do
|
||||
{id, dupe_fields} = List.first(dupes)
|
||||
|
||||
names = dupe_fields
|
||||
|> Enum.map(&("#{name}.#{&1.name}"))
|
||||
|> Enum.sort
|
||||
|> Enum.join(", ")
|
||||
|
||||
raise "Error: #{names} share field number #{id}."
|
||||
end
|
||||
|
||||
fields
|
||||
end
|
||||
|
||||
defp update_ids(fields, parent_name) do
|
||||
alias Thrift.Parser.Shell
|
||||
fields
|
||||
|> Enum.with_index
|
||||
|> Enum.map(fn
|
||||
{%__MODULE__{} = field, idx} ->
|
||||
case field.id do
|
||||
nil ->
|
||||
Shell.warn "Warning: id not set for field '#{parent_name}.#{field.name}'."
|
||||
%__MODULE__{field | id: idx + 1}
|
||||
_ ->
|
||||
field
|
||||
end
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Exception do
|
||||
@moduledoc """
|
||||
A Thrift exception
|
||||
|
||||
Exceptions can happen when the remote service encounters an error.
|
||||
"""
|
||||
|
||||
@type t :: %Exception{name: String.t, fields: [%Field{}]}
|
||||
defstruct fields: %{}, name: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
alias Thrift.Parser.Models.Field
|
||||
|
||||
@spec new(char_list, [%Field{}, ...]) :: %Exception{}
|
||||
def new(name, fields) do
|
||||
ex_name = atomify(name)
|
||||
updated_fields = Field.build_field_list(ex_name, fields)
|
||||
|
||||
%Exception{name: ex_name, fields: updated_fields}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Struct do
|
||||
@moduledoc """
|
||||
A Thrift struct
|
||||
|
||||
The basic datastructure in Thrift, structs have aa name and a field list.
|
||||
"""
|
||||
|
||||
@type t :: %Struct{name: String.t, fields: %{String.t => %Field{}}}
|
||||
defstruct name: nil, fields: %{}
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
alias Thrift.Parser.Models.Field
|
||||
|
||||
@spec new(char_list, [%Field{}, ...]) :: %Struct{}
|
||||
def new(name, fields) do
|
||||
struct_name = atomify(name)
|
||||
fields = Field.build_field_list(struct_name, fields)
|
||||
|
||||
%Struct{name: struct_name, fields: fields}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Union do
|
||||
@moduledoc """
|
||||
A Thrift union
|
||||
|
||||
Unions can have one field set at a time.
|
||||
"""
|
||||
|
||||
@type t :: %Union{name: String.t, fields: %{String.t => %Field{}}}
|
||||
defstruct name: nil, fields: %{}
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
alias Thrift.Parser.Models.Field
|
||||
|
||||
@spec new(char_list, [%Field{}, ...]) :: %Union{}
|
||||
def new(name, fields) do
|
||||
name = atomify(name)
|
||||
|
||||
fields = name
|
||||
|> Field.build_field_list(fields)
|
||||
|> Enum.map(fn(%Field{} = field) ->
|
||||
# According to Thrift docs, unions have implicitly optional
|
||||
# fields. See https://thrift.apache.org/docs/idl#union
|
||||
%Field{field | required: false}
|
||||
end)
|
||||
|
||||
%Union{name: name, fields: fields}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule StructRef do
|
||||
@moduledoc """
|
||||
A reference to another struct.
|
||||
|
||||
While not a Thrift type, this represents when a Thrift type refers to
|
||||
another.
|
||||
"""
|
||||
|
||||
@type t :: %StructRef{referenced_type: String.t}
|
||||
defstruct referenced_type: nil
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list) :: %StructRef{}
|
||||
def new(referenced_type) do
|
||||
%StructRef{referenced_type: atomify(referenced_type)}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Function do
|
||||
@moduledoc """
|
||||
A Thrift function
|
||||
|
||||
Functions are remote endpoints for Thrift services. They contain an argument list, exceptions and return a typed object.
|
||||
They can also be `oneway`, which means that Thrift doesn't have to wait for
|
||||
a reply from them.
|
||||
"""
|
||||
|
||||
@type return :: :void | Types.t
|
||||
@type t :: %Function{oneway: boolean, return_type: return, name: String.t,
|
||||
params: [%Field{}], exceptions: [%Exception{}]}
|
||||
defstruct oneway: false, return_type: :void, name: nil, params: [], exceptions: []
|
||||
alias Thrift.Parser.Models.Field
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(boolean, Types.t, char_list, [%Field{}, ...], [%Exception{}, ...]) :: %Function{}
|
||||
def new(oneway, return_type, name, params, exceptions) do
|
||||
name = atomify(name)
|
||||
params = Field.build_field_list(name, params)
|
||||
|
||||
%Function{
|
||||
oneway: oneway,
|
||||
return_type: return_type,
|
||||
name: name,
|
||||
params: params,
|
||||
exceptions: exceptions
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Service do
|
||||
@moduledoc """
|
||||
A Thrift service
|
||||
|
||||
Services hold RPC functions and can extend other services.
|
||||
"""
|
||||
|
||||
@type t :: %Service{name: String.t, extends: String.t, functions: %{atom => %Function{}}}
|
||||
defstruct name: nil, extends: nil, functions: %{}
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
|
||||
@spec new(char_list, [%Function{}, ...], char_list) :: %Service{}
|
||||
def new(name, functions, extends) do
|
||||
fn_map = Enum.into(functions, %{}, fn(f) -> {f.name, f} end)
|
||||
%Service{name: atomify(name), extends: atomify(extends), functions: fn_map}
|
||||
end
|
||||
end
|
||||
|
||||
defmodule Schema do
|
||||
@moduledoc """
|
||||
A Thrift schema.
|
||||
|
||||
A program represents a single parsed file in Thrift.
|
||||
Many programs can be compiled together to build a Thrift service.
|
||||
|
||||
This is the root datastructure that the parser emits after running.
|
||||
"""
|
||||
|
||||
@type header :: %Include{} | %Namespace{}
|
||||
@type typedef :: {:typedef, Types.t, atom}
|
||||
@type definition :: %Service{} | %TEnum{} | %Exception{} | %Union{} | %Struct{} | %Constant{} | typedef
|
||||
@type model :: header | definition
|
||||
@type t :: %Schema{
|
||||
absolute_path: Path.t,
|
||||
module: String.t,
|
||||
thrift_namespace: String.t,
|
||||
namespaces: %{String.t => %Namespace{}},
|
||||
structs: %{String.t => %Struct{}},
|
||||
services: %{String.t => %Service{}},
|
||||
enums: %{String.t => %TEnum{}},
|
||||
unions: %{String.t => %Union{}},
|
||||
includes: [%Include{}],
|
||||
constants: %{String.t => Literals.t},
|
||||
exceptions: %{String.t => %Exception{}},
|
||||
typedefs: %{String.t => Types.t}
|
||||
}
|
||||
defstruct absolute_path: nil,
|
||||
module: nil,
|
||||
thrift_namespace: nil,
|
||||
namespaces: %{},
|
||||
structs: %{},
|
||||
services: %{},
|
||||
enums: %{},
|
||||
unions: %{},
|
||||
includes: [],
|
||||
constants: %{},
|
||||
exceptions: %{},
|
||||
typedefs: %{}
|
||||
|
||||
import Thrift.Parser.Conversions
|
||||
alias Thrift.Parser.Models.{Constant,
|
||||
Exception,
|
||||
Include,
|
||||
Namespace,
|
||||
Struct,
|
||||
TEnum,
|
||||
Union
|
||||
}
|
||||
|
||||
@doc """
|
||||
Constructs a schema with both headers and definitions.
|
||||
"""
|
||||
@spec new(Path.t, [header], [definition]) :: t
|
||||
def new(file_absolute_path, headers, defs) do
|
||||
orig_schema = %Schema{absolute_path: file_absolute_path,
|
||||
module: module_name(file_absolute_path)}
|
||||
|
||||
schema = headers
|
||||
|> Enum.reverse
|
||||
|> Enum.reduce(orig_schema, &merge(&2, &1))
|
||||
|
||||
defs
|
||||
|> Enum.reverse
|
||||
|> Enum.reduce(schema, &merge(&2, &1))
|
||||
end
|
||||
|
||||
defp module_name(nil), do: nil
|
||||
|
||||
defp module_name(path_name) when is_bitstring(path_name) do
|
||||
path_name
|
||||
|> Path.basename
|
||||
|> Path.rootname
|
||||
|> String.to_atom
|
||||
end
|
||||
|
||||
@spec merge(t, model) :: t
|
||||
defp merge(schema, %Include{} = inc) do
|
||||
%Schema{schema | includes: [inc | schema.includes]}
|
||||
end
|
||||
|
||||
defp merge(schema, %Namespace{} = ns) do
|
||||
%Schema{schema | namespaces: Map.put(schema.namespaces, ns.name, ns)}
|
||||
end
|
||||
|
||||
defp merge(schema, %Constant{} = const) do
|
||||
%Schema{schema | constants: Map.put(schema.constants, const.name, const)}
|
||||
end
|
||||
|
||||
defp merge(schema, %TEnum{} = enum) do
|
||||
%Schema{schema | enums: Map.put(schema.enums, enum.name, canonicalize_name(schema, enum))}
|
||||
end
|
||||
|
||||
defp merge(schema, %Exception{} = exc) do
|
||||
%Schema{schema | exceptions: Map.put(schema.exceptions, exc.name, canonicalize_name(schema, exc))}
|
||||
end
|
||||
|
||||
defp merge(schema, %Struct{} = s) do
|
||||
%Schema{schema | structs: Map.put(schema.structs, s.name, canonicalize_name(schema, s))}
|
||||
end
|
||||
|
||||
defp merge(schema, %Union{} = union) do
|
||||
%Schema{schema | unions: Map.put(schema.unions, union.name, canonicalize_name(schema, union))}
|
||||
end
|
||||
|
||||
defp merge(schema, %Service{} = service) do
|
||||
%Schema{schema | services: Map.put(schema.services, service.name, canonicalize_name(schema, service))}
|
||||
end
|
||||
|
||||
defp merge(schema, {:typedef, actual_type, type_alias}) do
|
||||
%Schema{schema | typedefs: Map.put(schema.typedefs, atomify(type_alias), actual_type)}
|
||||
end
|
||||
|
||||
defp canonicalize_name(%{module: nil}, model) do
|
||||
model
|
||||
end
|
||||
|
||||
defp canonicalize_name(schema, %{name: name} = model) do
|
||||
%{model | name: :"#{schema.module}.#{name}"}
|
||||
end
|
||||
end
|
||||
|
||||
@type all :: %Namespace{} | %Include{} | %Constant{} | %TEnum{} | %Field{} | %Exception{} | %Struct{} | %Union{} | %Function{} | %Service{} | %Schema{}
|
||||
end
|
@ -1,55 +0,0 @@
|
||||
defmodule Thrift.Parser.Types do
|
||||
@moduledoc """
|
||||
A container module for modules containing typespecs for Thrift files.
|
||||
"""
|
||||
defmodule Primitive do
|
||||
@moduledoc """
|
||||
Typespec for Thrift primitives
|
||||
"""
|
||||
@type t :: :bool | :i8 | :i16 | :i64 | :binary | :double | :byte | :string
|
||||
end
|
||||
|
||||
defmodule Ident do
|
||||
@moduledoc """
|
||||
A Thrift identifier
|
||||
"""
|
||||
@type t :: String.t
|
||||
end
|
||||
|
||||
defmodule Standalone do
|
||||
@moduledoc """
|
||||
A Thrift type that isn't a container
|
||||
"""
|
||||
@type t :: Ident.t | Primitive.t
|
||||
end
|
||||
|
||||
defmodule List do
|
||||
@moduledoc """
|
||||
A Thrift list.
|
||||
"""
|
||||
@type t :: {:list, Thrift.Parser.Types.t}
|
||||
end
|
||||
|
||||
defmodule Map do
|
||||
@moduledoc """
|
||||
A Thrift map
|
||||
"""
|
||||
@type t :: {:map, {Thrift.Parser.Types.t, Thrift.Parser.Types.t}}
|
||||
end
|
||||
|
||||
defmodule Set do
|
||||
@moduledoc """
|
||||
A Thrift set
|
||||
"""
|
||||
@type t :: {:set, Thrift.Parser.Types.t}
|
||||
end
|
||||
|
||||
defmodule Container do
|
||||
@moduledoc """
|
||||
A Thrift contianer type
|
||||
"""
|
||||
@type t :: List.t | Map.t | Set.t
|
||||
end
|
||||
|
||||
@type t :: Container.t | Standalone.t
|
||||
end
|
@ -1,104 +0,0 @@
|
||||
defmodule Thrift.Protocols.Binary do
|
||||
# field types, which are the type ids from the thrift spec.
|
||||
@bool 2
|
||||
@byte 3
|
||||
@double 4
|
||||
@i16 6
|
||||
@i32 8
|
||||
@i64 10
|
||||
@string 11
|
||||
@struct 12
|
||||
@map 13
|
||||
@set 14
|
||||
@list 15
|
||||
|
||||
@types %{bool: @bool,
|
||||
byte: @byte,
|
||||
double: @double,
|
||||
i8: @byte,
|
||||
i16: @i16,
|
||||
i32: @i32,
|
||||
i64: @i64,
|
||||
string: @string,
|
||||
binary: @string,
|
||||
struct: @struct,
|
||||
map: @map,
|
||||
set: @set,
|
||||
list: @list
|
||||
}
|
||||
|
||||
for {atom_type, int_type} <- @types do
|
||||
def int_type(unquote(atom_type)) do
|
||||
unquote(int_type)
|
||||
end
|
||||
end
|
||||
def int_type({:map, _}), do: 13
|
||||
def int_type({:set, _}), do: 14
|
||||
def int_type({:list, _}), do: 15
|
||||
|
||||
defp bool_to_int(false), do: 0
|
||||
defp bool_to_int(nil), do: 0
|
||||
defp bool_to_int(_), do: 1
|
||||
|
||||
defp to_message_type(:call), do: 1
|
||||
defp to_message_type(:reply), do: 2
|
||||
defp to_message_type(:exception), do: 3
|
||||
defp to_message_type(:oneway), do: 4
|
||||
|
||||
def serialize(_, nil) do
|
||||
[]
|
||||
end
|
||||
def serialize(:bool, value) do
|
||||
value = bool_to_int(value)
|
||||
<<value::8-signed>>
|
||||
end
|
||||
def serialize(:i8, value) do
|
||||
<<value::8-signed>>
|
||||
end
|
||||
def serialize(:i16, value) do
|
||||
<<value::16-signed>>
|
||||
end
|
||||
def serialize(:i32, value) do
|
||||
<<value::32-signed>>
|
||||
end
|
||||
def serialize(:i64, value) do
|
||||
<<value::64-signed>>
|
||||
end
|
||||
def serialize(:double, value) do
|
||||
<<value::signed-float>>
|
||||
end
|
||||
def serialize(:string, value) do
|
||||
[<<byte_size(value)::size(32)>>, value]
|
||||
end
|
||||
def serialize(:binary, value) do
|
||||
[<<byte_size(value)::size(32)>>, value]
|
||||
end
|
||||
def serialize({:list, elem_type}, elems) when is_list(elems) do
|
||||
rest = Enum.map(elems, &serialize(elem_type, &1))
|
||||
|
||||
[<<int_type(elem_type)::size(8), Enum.count(elems)::32-signed>>, rest]
|
||||
end
|
||||
def serialize({:set, elem_type}, %MapSet{}=elems) do
|
||||
rest = Enum.map(elems, &serialize(elem_type, &1))
|
||||
|
||||
[<<int_type(elem_type)::size(8), Enum.count(elems)::32-signed>>, rest]
|
||||
end
|
||||
def serialize({:map, {key_type, val_type}}, map) when is_map(map) do
|
||||
elem_count = map_size(map)
|
||||
rest = Enum.map(map, fn {key, value} ->
|
||||
[serialize(key_type, key), serialize(val_type, value)]
|
||||
end)
|
||||
[<<int_type(key_type)::size(8), int_type(val_type)::size(8), elem_count::32-signed>>, rest]
|
||||
end
|
||||
def serialize(:struct, %{__struct__: mod}=struct) do
|
||||
mod.serialize(struct, :binary)
|
||||
end
|
||||
def serialize(:message_begin, {sequence_id, message_type, name}) do
|
||||
# Taken from https://erikvanoosten.github.io/thrift-missing-specification/#_message_encoding
|
||||
|
||||
<<1::size(1), 1::size(15), 0::size(8),
|
||||
# ^^ Strange, I know. We could integrate the 8-bit zero here with the 5 bit zero below.
|
||||
0::size(5), to_message_type(message_type)::size(3),
|
||||
byte_size(name)::32-signed, sequence_id::32-signed>>
|
||||
end
|
||||
end
|
22
mix.exs
22
mix.exs
@ -11,9 +11,8 @@ defmodule Thrift.Mixfile do
|
||||
deps: deps(),
|
||||
|
||||
# Build Environment
|
||||
erlc_paths: erlc_paths(Mix.env),
|
||||
erlc_paths: ["src", "ext/thrift/lib/erl/src"],
|
||||
erlc_include_path: "ext/thrift/lib/erl/include",
|
||||
elixirc_paths: elixirc_paths(Mix.env),
|
||||
compilers: [:leex, :yecc, :erlang, :elixir, :app],
|
||||
|
||||
# Testing
|
||||
@ -37,29 +36,12 @@ defmodule Thrift.Mixfile do
|
||||
[]
|
||||
end
|
||||
|
||||
defp erlc_paths(:prod) do
|
||||
["src", "ext/thrift/lib/erl/src"]
|
||||
end
|
||||
|
||||
defp erlc_paths(_) do
|
||||
erlc_paths(:prod) ++ ["test/support/src"]
|
||||
end
|
||||
|
||||
defp elixirc_paths(:prod) do
|
||||
["lib"]
|
||||
end
|
||||
|
||||
defp elixirc_paths(_) do
|
||||
elixirc_paths(:prod) ++ ["test/support/lib"]
|
||||
end
|
||||
|
||||
defp deps do
|
||||
[{:ex_doc, "~> 0.14.3", only: :dev},
|
||||
{:earmark, "~> 1.0.2", only: :dev},
|
||||
{:excoveralls, "~> 0.5.7", only: :test},
|
||||
{:credo, "~> 0.5.2", only: [:dev, :test]},
|
||||
{:dialyxir, "~> 0.4.0", only: [:dev, :test]},
|
||||
{:benchfella, "~> 0.3.0", only: [:dev, :test]}
|
||||
{:dialyxir, "~> 0.4.0", only: [:dev, :test]}
|
||||
]
|
||||
end
|
||||
|
||||
|
3
mix.lock
3
mix.lock
@ -1,5 +1,4 @@
|
||||
%{"benchfella": {:hex, :benchfella, "0.3.3", "bbde48b5fe1ef556baa7ad933008e214e050e81ddb0916350715f5759fb35c0c", [:mix], []},
|
||||
"bunt": {:hex, :bunt, "0.1.6", "5d95a6882f73f3b9969fdfd1953798046664e6f77ec4e486e6fafc7caad97c6f", [:mix], []},
|
||||
%{"bunt": {:hex, :bunt, "0.1.6", "5d95a6882f73f3b9969fdfd1953798046664e6f77ec4e486e6fafc7caad97c6f", [:mix], []},
|
||||
"certifi": {:hex, :certifi, "0.4.0", "a7966efb868b179023618d29a407548f70c52466bf1849b9e8ebd0e34b7ea11f", [:rebar3], []},
|
||||
"credo": {:hex, :credo, "0.5.2", "92e8c9f86e0ffbf9f688595e9f4e936bc96a52e5606d2c19713e9e4d191d5c74", [:mix], [{:bunt, "~> 0.1.6", [hex: :bunt, optional: false]}]},
|
||||
"dialyxir": {:hex, :dialyxir, "0.4.0", "53ac3014bb4aef647728a697052b4db3a84c6742de7aab0e0a1c863ea274007b", [:mix], []},
|
||||
|
@ -122,7 +122,6 @@ ns_name -> ident: unwrap('$1').
|
||||
|
||||
%% JS Style mapping "foo": 32
|
||||
mapping -> literal ':' literal: {'$1', '$3'}.
|
||||
mappings -> '$empty': [].
|
||||
mappings -> mapping: ['$1'].
|
||||
mappings -> mapping ',' mappings: ['$1'] ++ '$3'.
|
||||
|
||||
@ -130,7 +129,6 @@ mappings -> mapping ',' mappings: ['$1'] ++ '$3'.
|
||||
literal_list -> literal: ['$1'].
|
||||
literal_list -> literal ',' literal_list: ['$1'] ++ '$3'.
|
||||
|
||||
literal -> ident: 'Elixir.Thrift.Parser.Models.TEnumValue':new(unwrap('$1')).
|
||||
literal -> true: unwrap('$1').
|
||||
literal -> false: unwrap('$1').
|
||||
literal -> int: unwrap('$1').
|
||||
|
1
test/fixtures/app/thrift/shared.thrift
vendored
1
test/fixtures/app/thrift/shared.thrift
vendored
@ -29,7 +29,6 @@ namespace java shared
|
||||
namespace perl shared
|
||||
namespace php shared
|
||||
namespace haxe shared
|
||||
namespace elixir shared
|
||||
|
||||
struct SharedStruct {
|
||||
1: i32 key
|
||||
|
20
test/fixtures/app/thrift/simple.thrift
vendored
20
test/fixtures/app/thrift/simple.thrift
vendored
@ -1,20 +0,0 @@
|
||||
include "shared.thrift"
|
||||
|
||||
struct User {
|
||||
1: bool is_evil,
|
||||
2: i64 user_id,
|
||||
3: i32 number_of_hairs_on_head,
|
||||
4: byte amount_of_red,
|
||||
5: i16 nineties_era_color,
|
||||
6: double mint_gum,
|
||||
7: string username,
|
||||
8: list<User> friends,
|
||||
9: map<byte, string> my_map,
|
||||
10: set<i32> blocked_user_ids,
|
||||
11: optional list<i32> optional_integers,
|
||||
}
|
||||
|
||||
struct Nesting {
|
||||
1: User user,
|
||||
2: shared.SharedStruct nested
|
||||
}
|
1
test/fixtures/app/thrift/tutorial.thrift
vendored
1
test/fixtures/app/thrift/tutorial.thrift
vendored
@ -69,7 +69,6 @@ namespace java tutorial
|
||||
namespace php tutorial
|
||||
namespace perl tutorial
|
||||
namespace haxe tutorial
|
||||
namespace elixir tutorial
|
||||
|
||||
/**
|
||||
* Thrift lets you do typedefs to get pretty names for your types. Standard
|
||||
|
@ -1,248 +0,0 @@
|
||||
defmodule Thrift.Generator.BinaryProtocolTest do
|
||||
use ThriftTestCase
|
||||
|
||||
alias Thrift.Protocols.Binary
|
||||
|
||||
def assert_serializes(struct=%{__struct__: mod}, binary) do
|
||||
assert binary == Binary.serialize(:struct, struct) |> IO.iodata_to_binary
|
||||
assert {^struct, ""} = mod.deserialize(binary)
|
||||
end
|
||||
|
||||
def assert_serializes(struct=%{__struct__: mod}, binary, deserialized_struct=%{__struct__: mod}) do
|
||||
assert binary == Binary.serialize(:struct, struct) |> IO.iodata_to_binary
|
||||
assert {^deserialized_struct, ""} = mod.deserialize(binary)
|
||||
end
|
||||
|
||||
@thrift_file name: "bool.thrift", contents: """
|
||||
struct Bool {
|
||||
1: bool val;
|
||||
2: map<bool, bool> val_map;
|
||||
3: set<bool> val_set;
|
||||
4: list<bool> val_list;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "bool serialization" do
|
||||
assert_serializes %Bool{}, <<0>>
|
||||
assert_serializes %Bool{val: false}, <<2, 0, 1, 0, 0>>
|
||||
assert_serializes %Bool{val: true}, <<2, 0, 1, 1, 0>>
|
||||
assert_serializes %Bool{val_map: %{}}, <<13, 0, 2, 2, 2, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Bool{val_map: %{false => true}}, <<13, 0, 2, 2, 2, 0, 0, 0, 1, 0, 1, 0>>
|
||||
assert_serializes %Bool{val_set: MapSet.new}, <<14, 0, 3, 2, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Bool{val_set: MapSet.new([true])}, <<14, 0, 3, 2, 0, 0, 0, 1, 1, 0>>
|
||||
assert_serializes %Bool{val_list: []}, <<15, 0, 4, 2, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Bool{val_list: [true]}, <<15, 0, 4, 2, 0, 0, 0, 1, 1, 0>>
|
||||
assert_serializes %Bool{val_list: [true, false]}, <<15, 0, 4, 2, 0, 0, 0, 2, 1, 0, 0>>
|
||||
end
|
||||
|
||||
@thrift_file name: "byte.thrift", contents: """
|
||||
struct Byte {
|
||||
1: byte val;
|
||||
2: map<byte, byte> val_map;
|
||||
3: set<byte> val_set;
|
||||
4: list<byte> val_list;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "byte serialization" do
|
||||
assert_serializes %Byte{}, <<0>>
|
||||
assert_serializes %Byte{val: 0}, <<3, 0, 1, 0, 0>>
|
||||
assert_serializes %Byte{val: 1}, <<3, 0, 1, 1, 0>>
|
||||
assert_serializes %Byte{val: 255}, <<3, 0, 1, 255, 0>>
|
||||
assert_serializes %Byte{val: 256}, <<3, 0, 1, 0, 0>>, %Byte{val: 0}
|
||||
assert_serializes %Byte{val_map: %{}}, <<13, 0, 2, 3, 3, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Byte{val_map: %{91 => 92}}, <<13, 0, 2, 3, 3, 0, 0, 0, 1, 91, 92, 0>>
|
||||
assert_serializes %Byte{val_set: MapSet.new}, <<14, 0, 3, 3, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Byte{val_set: MapSet.new([91])}, <<14, 0, 3, 3, 0, 0, 0, 1, 91, 0>>
|
||||
assert_serializes %Byte{val_list: []}, <<15, 0, 4, 3, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Byte{val_list: [91]}, <<15, 0, 4, 3, 0, 0, 0, 1, 91, 0>>
|
||||
assert_serializes %Byte{val_list: [91, 92]}, <<15, 0, 4, 3, 0, 0, 0, 2, 91, 92, 0>>
|
||||
assert_serializes %Byte{val_list: [91, 92, 93]}, <<15, 0, 4, 3, 0, 0, 0, 3, 91, 92, 93, 0>>
|
||||
end
|
||||
|
||||
@thrift_file name: "double.thrift", contents: """
|
||||
struct Double {
|
||||
1: double val;
|
||||
2: map<double, double> val_map;
|
||||
3: set<double> val_set;
|
||||
4: list<double> val_list;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "double serialization" do
|
||||
assert_serializes %Double{}, <<0>>
|
||||
assert_serializes %Double{val: 0.0}, <<4, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Double{val: 1.0}, <<4, 0, 1, 1::signed-float, 0>>
|
||||
assert_serializes %Double{val: 255.0}, <<4, 0, 1, 255::signed-float, 0>>
|
||||
assert_serializes %Double{val_map: %{}}, <<13, 0, 2, 4, 4, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Double{val_map: %{91.0 => 92.0}}, <<13, 0, 2, 4, 4, 0, 0, 0, 1, 91::signed-float, 92::signed-float, 0>>
|
||||
assert_serializes %Double{val_set: MapSet.new}, <<14, 0, 3, 4, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Double{val_set: MapSet.new([91.0])}, <<14, 0, 3, 4, 0, 0, 0, 1, 91::signed-float, 0>>
|
||||
assert_serializes %Double{val_list: []}, <<15, 0, 4, 4, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Double{val_list: [91.0]}, <<15, 0, 4, 4, 0, 0, 0, 1, 91::signed-float, 0>>
|
||||
end
|
||||
|
||||
@thrift_file name: "i16.thrift", contents: """
|
||||
struct I16 {
|
||||
1: i16 val;
|
||||
2: map<i16, i16> val_map;
|
||||
3: set<i16> val_set;
|
||||
4: list<i16> val_list;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "i16 serialization" do
|
||||
assert_serializes %I16{}, <<0>>
|
||||
assert_serializes %I16{val: 0}, <<6, 0, 1, 0, 0, 0>>
|
||||
assert_serializes %I16{val: 1}, <<6, 0, 1, 0, 1, 0>>
|
||||
assert_serializes %I16{val: 255}, <<6, 0, 1, 0, 255, 0>>
|
||||
assert_serializes %I16{val: 256}, <<6, 0, 1, 1, 0, 0>>
|
||||
assert_serializes %I16{val: 65535}, <<6, 0, 1, 255, 255, 0>>
|
||||
assert_serializes %I16{val: 65536}, <<6, 0, 1, 0, 0, 0>>, %I16{val: 0}
|
||||
assert_serializes %I16{val_map: %{}}, <<13, 0, 2, 6, 6, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I16{val_map: %{91 => 92}}, <<13, 0, 2, 6, 6, 0, 0, 0, 1, 0, 91, 0, 92, 0>>
|
||||
assert_serializes %I16{val_set: MapSet.new}, <<14, 0, 3, 6, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I16{val_set: MapSet.new([91])}, <<14, 0, 3, 6, 0, 0, 0, 1, 0, 91, 0>>
|
||||
assert_serializes %I16{val_list: []}, <<15, 0, 4, 6, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I16{val_list: [91]}, <<15, 0, 4, 6, 0, 0, 0, 1, 0, 91, 0>>
|
||||
assert_serializes %I16{val_list: [91, 92]}, <<15, 0, 4, 6, 0, 0, 0, 2, 0, 91, 0, 92, 0>>
|
||||
assert_serializes %I16{val_list: [91, 92, 93]}, <<15, 0, 4, 6, 0, 0, 0, 3, 0, 91, 0, 92, 0, 93, 0>>
|
||||
end
|
||||
|
||||
@thrift_file name: "i32.thrift", contents: """
|
||||
struct I32 {
|
||||
1: i32 val;
|
||||
2: map<i32, i32> val_map;
|
||||
3: set<i32> val_set;
|
||||
4: list<i32> val_list;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "i32 serialization" do
|
||||
assert_serializes %I32{}, <<0>>
|
||||
assert_serializes %I32{val: 0}, <<8, 0, 1, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I32{val: 1}, <<8, 0, 1, 0, 0, 0, 1, 0>>
|
||||
assert_serializes %I32{val: 255}, <<8, 0, 1, 0, 0, 0, 255, 0>>
|
||||
assert_serializes %I32{val: 256}, <<8, 0, 1, 0, 0, 1, 0, 0>>
|
||||
assert_serializes %I32{val: 65535}, <<8, 0, 1, 0, 0, 255, 255, 0>>
|
||||
assert_serializes %I32{val_map: %{}}, <<13, 0, 2, 8, 8, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I32{val_map: %{91 => 92}}, <<13, 0, 2, 8, 8, 0, 0, 0, 1, 0, 0, 0, 91, 0, 0, 0, 92, 0>>
|
||||
assert_serializes %I32{val_set: MapSet.new}, <<14, 0, 3, 8, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I32{val_set: MapSet.new([91])}, <<14, 0, 3, 8, 0, 0, 0, 1, 0, 0, 0, 91, 0>>
|
||||
assert_serializes %I32{val_list: []}, <<15, 0, 4, 8, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I32{val_list: [91]}, <<15, 0, 4, 8, 0, 0, 0, 1, 0, 0, 0, 91, 0>>
|
||||
assert_serializes %I32{val_list: [91, 92]}, <<15, 0, 4, 8, 0, 0, 0, 2, 0, 0, 0, 91, 0, 0, 0, 92, 0>>
|
||||
end
|
||||
|
||||
@thrift_file name: "i64.thrift", contents: """
|
||||
struct I64 {
|
||||
1: i64 val;
|
||||
2: map<i64, i64> val_map;
|
||||
3: set<i64> val_set;
|
||||
4: list<i64> val_list;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "i64 serialization" do
|
||||
assert_serializes %I64{}, <<0>>
|
||||
assert_serializes %I64{val: 0}, <<10, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I64{val: 1}, <<10, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0>>
|
||||
assert_serializes %I64{val: 255}, <<10, 0, 1, 0, 0, 0, 0, 0, 0, 0, 255, 0>>
|
||||
assert_serializes %I64{val: 256}, <<10, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0>>
|
||||
assert_serializes %I64{val: 65535}, <<10, 0, 1, 0, 0, 0, 0, 0, 0, 255, 255, 0>>
|
||||
assert_serializes %I64{val_map: %{}}, <<13, 0, 2, 10, 10, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I64{val_map: %{91 => 92}}, <<13, 0, 2, 10, 10, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 91, 0, 0, 0, 0, 0, 0, 0, 92, 0>>
|
||||
assert_serializes %I64{val_set: MapSet.new}, <<14, 0, 3, 10, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I64{val_set: MapSet.new([91])}, <<14, 0, 3, 10, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 91, 0>>
|
||||
assert_serializes %I64{val_list: []}, <<15, 0, 4, 10, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %I64{val_list: [91]}, <<15, 0, 4, 10, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 91, 0>>
|
||||
assert_serializes %I64{val_list: [91, 92]}, <<15, 0, 4, 10, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 91, 0, 0, 0, 0, 0, 0, 0, 92, 0>>
|
||||
end
|
||||
|
||||
@thrift_file name: "string.thrift", contents: """
|
||||
struct String {
|
||||
1: string val;
|
||||
2: map<string, string> val_map;
|
||||
3: set<string> val_set;
|
||||
4: list<string> val_list;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "string serialization" do
|
||||
assert_serializes %String{}, <<0>>
|
||||
assert_serializes %String{val: ""}, <<11, 0, 1, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %String{val: "abc"}, <<11, 0, 1, 0, 0, 0, 3, "abc", 0>>
|
||||
assert_serializes %String{val_map: %{}}, <<13, 0, 2, 11, 11, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %String{val_map: %{"abc" => "def"}}, <<13, 0, 2, 11, 11, 0, 0, 0, 1, 0, 0, 0, 3, "abc", 0, 0, 0, 3, "def", 0>>
|
||||
assert_serializes %String{val_set: MapSet.new}, <<14, 0, 3, 11, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %String{val_set: MapSet.new(["abc"])}, <<14, 0, 3, 11, 0, 0, 0, 1, 0, 0, 0, 3, "abc", 0>>
|
||||
assert_serializes %String{val_list: []}, <<15, 0, 4, 11, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %String{val_list: ["abc"]}, <<15, 0, 4, 11, 0, 0, 0, 1, 0, 0, 0, 3, "abc", 0>>
|
||||
assert_serializes %String{val_list: ["abc", "def"]}, <<15, 0, 4, 11, 0, 0, 0, 2, 0, 0, 0, 3, "abc", 0, 0, 0, 3, "def", 0>>
|
||||
end
|
||||
|
||||
@thrift_file name: "struct.thrift", contents: """
|
||||
struct Val {
|
||||
99: byte num;
|
||||
}
|
||||
struct Struct {
|
||||
1: Val val;
|
||||
2: map<Val, Val> val_map;
|
||||
3: set<Val> val_set;
|
||||
4: list<Val> val_list;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "struct serialization" do
|
||||
assert_serializes %Struct{}, <<0>>
|
||||
assert_serializes %Struct{val: %Val{}}, <<12, 0, 1, 0, 0>>
|
||||
assert_serializes %Struct{val: %Val{num: 91}}, <<12, 0, 1, 3, 0, 99, 91, 0, 0>>
|
||||
assert_serializes %Struct{val_map: %{}}, <<13, 0, 2, 12, 12, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Struct{val_map: %{%Val{num: 91} => %Val{num: 92}}},
|
||||
<<13, 0, 2, 12, 12, 0, 0, 0, 1, 3, 0, 99, 91, 0, 3, 0, 99, 92, 0, 0>>
|
||||
assert_serializes %Struct{val_set: MapSet.new}, <<14, 0, 3, 12, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Struct{val_set: MapSet.new([%Val{num: 91}])}, <<14, 0, 3, 12, 0, 0, 0, 1, 3, 0, 99, 91, 0, 0>>
|
||||
assert_serializes %Struct{val_list: []}, <<15, 0, 4, 12, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Struct{val_list: [%Val{num: 91}]}, <<15, 0, 4, 12, 0, 0, 0, 1, 3, 0, 99, 91, 0, 0>>
|
||||
end
|
||||
|
||||
@thrift_file name: "composite.thrift", contents: """
|
||||
struct Composite {
|
||||
1: map<map<byte, byte>, map<byte, byte>> map_of_maps;
|
||||
2: map<set<byte>, set<byte>> map_of_sets;
|
||||
3: map<list<byte>, list<byte>> map_of_lists;
|
||||
4: set<map<byte, byte>> set_of_maps;
|
||||
5: set<set<byte>> set_of_sets;
|
||||
6: set<list<byte>> set_of_lists;
|
||||
7: list<map<byte, byte>> list_of_maps;
|
||||
8: list<set<byte>> list_of_sets;
|
||||
9: list<list<byte>> list_of_lists;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "composite serialization" do
|
||||
assert_serializes %Composite{}, <<0>>
|
||||
assert_serializes %Composite{map_of_maps: %{}}, <<13, 0, 1, 13, 13, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Composite{map_of_maps: %{%{91 => 92} => %{93 => 94, 95 => 96}}},
|
||||
<<13, 0, 1, 13, 13, 0, 0, 0, 1, 3, 3, 0, 0, 0, 1, 91, 92, 3, 3, 0, 0, 0, 2, 93, 94, 95, 96, 0>>
|
||||
assert_serializes %Composite{map_of_sets: %{}}, <<13, 0, 2, 14, 14, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Composite{map_of_sets: %{MapSet.new([91]) => MapSet.new([92, 93])}},
|
||||
<<13, 0, 2, 14, 14, 0, 0, 0, 1, 3, 0, 0, 0, 1, 91, 3, 0, 0, 0, 2, 92, 93, 0>>
|
||||
assert_serializes %Composite{map_of_lists: %{}}, <<13, 0, 3, 15, 15, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Composite{map_of_lists: %{[91] => [92, 93]}}, <<13, 0, 3, 15, 15, 0, 0, 0, 1, 3, 0, 0, 0, 1, 91, 3, 0, 0, 0, 2, 92, 93, 0>>
|
||||
assert_serializes %Composite{set_of_maps: MapSet.new}, <<14, 0, 4, 13, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Composite{set_of_maps: MapSet.new([%{91 => 92}])}, <<14, 0, 4, 13, 0, 0, 0, 1, 3, 3, 0, 0, 0, 1, 91, 92, 0>>
|
||||
assert_serializes %Composite{set_of_sets: MapSet.new}, <<14, 0, 5, 14, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Composite{set_of_sets: MapSet.new([MapSet.new([91]), MapSet.new([92, 93])])},
|
||||
<<14, 0, 5, 14, 0, 0, 0, 2, 3, 0, 0, 0, 1, 91, 3, 0, 0, 0, 2, 92, 93, 0>>
|
||||
assert_serializes %Composite{set_of_lists: MapSet.new}, <<14, 0, 6, 15, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Composite{set_of_lists: MapSet.new([[91], [92, 93]])}, <<14, 0, 6, 15, 0, 0, 0, 2, 3, 0, 0, 0, 1, 91, 3, 0, 0, 0, 2, 92, 93, 0>>
|
||||
assert_serializes %Composite{list_of_maps: []}, <<15, 0, 7, 13, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Composite{list_of_maps: [%{91 => 92}]}, <<15, 0, 7, 13, 0, 0, 0, 1, 3, 3, 0, 0, 0, 1, 91, 92, 0>>
|
||||
assert_serializes %Composite{list_of_sets: []}, <<15, 0, 8, 14, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Composite{list_of_sets: [MapSet.new([91]), MapSet.new([92, 93])]},
|
||||
<<15, 0, 8, 14, 0, 0, 0, 2, 3, 0, 0, 0, 1, 91, 3, 0, 0, 0, 2, 92, 93, 0>>
|
||||
assert_serializes %Composite{list_of_lists: []}, <<15, 0, 9, 15, 0, 0, 0, 0, 0>>
|
||||
assert_serializes %Composite{list_of_lists: [[91], [92, 93]]}, <<15, 0, 9, 15, 0, 0, 0, 2, 3, 0, 0, 0, 1, 91, 3, 0, 0, 0, 2, 92, 93, 0>>
|
||||
end
|
||||
end
|
@ -1,152 +0,0 @@
|
||||
defmodule Thrift.Generator.ModelsTest do
|
||||
use ThriftTestCase
|
||||
|
||||
@thrift_file name: "enums.thrift", contents: """
|
||||
enum Status {
|
||||
ACTIVE,
|
||||
INACTIVE,
|
||||
BANNED = 6,
|
||||
EVIL = 0x20,
|
||||
}
|
||||
struct StructWithEnum {
|
||||
1: Status status_field,
|
||||
2: map<Status, Status> status_map,
|
||||
3: set<Status> status_set,
|
||||
4: list<Status> status_list,
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "generating enum" do
|
||||
assert Status.active == 1
|
||||
assert Status.inactive == 2
|
||||
assert Status.banned == 6
|
||||
assert Status.evil == 32
|
||||
|
||||
assert Status.member?(0) == false
|
||||
assert Status.member?(1) == true
|
||||
assert Status.member?(2) == true
|
||||
assert Status.member?(3) == false
|
||||
assert Status.member?(4) == false
|
||||
assert Status.member?(5) == false
|
||||
assert Status.member?(6) == true
|
||||
assert Status.member?(7) == false
|
||||
|
||||
assert Status.value_to_name(1) == {:ok, :active}
|
||||
assert Status.value_to_name(2) == {:ok, :inactive}
|
||||
assert Status.value_to_name(6) == {:ok, :banned}
|
||||
assert Status.value_to_name(32) == {:ok, :evil}
|
||||
assert Status.value_to_name(65536) == {:error, {:invalid_enum_value, 65536}}
|
||||
|
||||
assert Status.value_to_name!(1) == :active
|
||||
assert Status.value_to_name!(2) == :inactive
|
||||
assert Status.value_to_name!(6) == :banned
|
||||
assert Status.value_to_name!(32) == :evil
|
||||
assert_raise MatchError, fn -> Status.value_to_name!(38210) end
|
||||
|
||||
assert Status.names == [:active, :inactive, :banned, :evil]
|
||||
|
||||
struct = %StructWithEnum{}
|
||||
assert struct.status_field == Status.active
|
||||
assert struct.status_map == nil
|
||||
assert struct.status_set == nil
|
||||
assert struct.status_list == nil
|
||||
end
|
||||
|
||||
@thrift_file name: "exceptions.thrift", contents: """
|
||||
exception ApplicationException {
|
||||
1: string message,
|
||||
2: required i32 count,
|
||||
3: optional string reason
|
||||
4: optional string other;
|
||||
5: optional string fixed = "foo"
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "generating exception" do
|
||||
e = %ApplicationException{}
|
||||
assert e.message == nil
|
||||
assert e.count == nil
|
||||
assert e.reason == nil
|
||||
assert e.other == nil
|
||||
assert e.fixed == "foo"
|
||||
end
|
||||
|
||||
@thrift_file name: "struct_includes.thrift", contents: """
|
||||
struct RemoteStruct {
|
||||
1: optional i32 num;
|
||||
}
|
||||
"""
|
||||
|
||||
@thrift_file name: "structs.thrift", contents: """
|
||||
include "struct_includes.thrift"
|
||||
struct LocalStruct {
|
||||
1: optional i32 num;
|
||||
}
|
||||
struct MyStruct {
|
||||
1: optional bool my_bool;
|
||||
2: optional byte my_byte;
|
||||
3: optional double my_double;
|
||||
4: optional i8 my_i8;
|
||||
5: optional i16 my_i16;
|
||||
6: optional i32 my_i32;
|
||||
7: optional i64 my_i64;
|
||||
8: optional string my_string;
|
||||
9: optional LocalStruct local_struct;
|
||||
10: optional struct_includes.RemoteStruct remote_struct;
|
||||
11: optional list<LocalStruct> local_struct_list;
|
||||
12: optional map<LocalStruct, LocalStruct> local_struct_map;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "generating struct" do
|
||||
s = %MyStruct{}
|
||||
assert s.my_bool == nil
|
||||
assert s.my_byte == nil
|
||||
assert s.my_double == nil
|
||||
assert s.my_i8 == nil
|
||||
assert s.my_i16 == nil
|
||||
assert s.my_i32 == nil
|
||||
assert s.my_i64 == nil
|
||||
assert s.my_string == nil
|
||||
assert s.local_struct == nil
|
||||
assert s.remote_struct == nil
|
||||
assert s.local_struct_list == nil
|
||||
assert s.local_struct_map == nil
|
||||
end
|
||||
|
||||
@thrift_file name: "typedefs.thrift", contents: """
|
||||
typedef i32 MyInteger
|
||||
typedef string MyString
|
||||
typedef MyInteger DefinitelyNumber
|
||||
|
||||
struct StructWithTypedefs {
|
||||
1: optional MyString str;
|
||||
2: optional MyInteger num1 = 1;
|
||||
3: optional DefinitelyNumber num2 = 2;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "generating typedefs" do
|
||||
s = %StructWithTypedefs{}
|
||||
assert s.str == nil
|
||||
assert s.num1 == 1
|
||||
assert s.num2 == 2
|
||||
end
|
||||
|
||||
@thrift_file name: "shared.thrift", contents: """
|
||||
typedef i32 MyInteger
|
||||
"""
|
||||
|
||||
@thrift_file name: "includes.thrift", contents: """
|
||||
include "shared.thrift"
|
||||
|
||||
struct StructWithIncludedNum {
|
||||
1: optional MyInteger num = 5;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "includes" do
|
||||
struct = %StructWithIncludedNum{}
|
||||
assert struct.num == 5
|
||||
end
|
||||
end
|
@ -242,20 +242,4 @@ defmodule LexerTest do
|
||||
{:namespace, 3}, {:*, 3}, {:ident, 3, 'foo.bar'}
|
||||
]
|
||||
end
|
||||
|
||||
test "a const definition" do
|
||||
assert tokenize("const my_const = 32")
|
||||
==
|
||||
[
|
||||
{:const, 1}, {:ident, 1, 'my_const'}, {:"=", 1}, {:int, 1, 32}
|
||||
]
|
||||
end
|
||||
|
||||
test "a const definition of an enum value" do
|
||||
assert tokenize("const string my_const = MyEnum.VALUE")
|
||||
==
|
||||
[
|
||||
{:const, 1}, {:string, 1}, {:ident, 1, 'my_const'}, {:"=", 1}, {:ident, 1, 'MyEnum.VALUE'}
|
||||
]
|
||||
end
|
||||
end
|
||||
|
@ -4,15 +4,9 @@ defmodule Mix.Tasks.Compile.ThriftTest do
|
||||
import Mix.Tasks.Compile.Thrift, only: [run: 1]
|
||||
import ExUnit.CaptureIO
|
||||
|
||||
@project_root Path.expand("../../../", __DIR__)
|
||||
@fixture_project_relative "test/fixtures/app"
|
||||
@fixture_project Path.join(@project_root, @fixture_project_relative)
|
||||
@fixture_project Path.expand("../../fixtures/app", __DIR__)
|
||||
|
||||
setup do
|
||||
# so that the docker-based tests will work
|
||||
System.put_env("DOCKER_THRIFT_OUT_ROOT", @fixture_project_relative)
|
||||
on_exit fn -> System.delete_env("DOCKER_THRIFT_OUT_ROOT") end
|
||||
|
||||
in_fixture(fn -> File.rm_rf!("src") end)
|
||||
:ok
|
||||
end
|
||||
@ -22,7 +16,6 @@ defmodule Mix.Tasks.Compile.ThriftTest do
|
||||
with_project_config [], fn ->
|
||||
assert capture_io(fn -> run([]) end) =~ """
|
||||
Compiled thrift/shared.thrift
|
||||
Compiled thrift/simple.thrift
|
||||
Compiled thrift/tutorial.thrift
|
||||
"""
|
||||
assert File.exists?("src/shared_types.hrl")
|
||||
|
@ -1,76 +0,0 @@
|
||||
defmodule Mix.Tasks.Thrift.GenerateTest do
|
||||
use ExUnit.Case
|
||||
|
||||
import Mix.Tasks.Thrift.Generate, only: [run: 1]
|
||||
import ExUnit.CaptureIO
|
||||
|
||||
setup %{test: test} do
|
||||
dir = Path.join([System.tmp_dir!, to_string(__MODULE__), to_string(test)])
|
||||
File.rm_rf!(dir)
|
||||
File.mkdir_p!(dir)
|
||||
{:ok, dir: dir}
|
||||
end
|
||||
|
||||
test "generates files", %{dir: dir} do
|
||||
File.write! "#{dir}/shared.thrift", """
|
||||
namespace elixir shared
|
||||
|
||||
struct SharedStruct {
|
||||
1: i32 key
|
||||
2: string value
|
||||
}
|
||||
|
||||
exception SharedException {
|
||||
1: string message,
|
||||
2: i32 code
|
||||
}
|
||||
|
||||
service SharedService {
|
||||
SharedStruct getStruct(1: i32 key)
|
||||
}
|
||||
"""
|
||||
|
||||
File.write! "#{dir}/tutorial.thrift", """
|
||||
include "shared.thrift"
|
||||
|
||||
namespace elixir tutorial
|
||||
|
||||
typedef i32 MyInteger
|
||||
|
||||
const i32 INT32CONSTANT = 9853
|
||||
const map<string,string> MAPCONSTANT = {'hello':'world', 'goodnight':'moon'}
|
||||
|
||||
enum Operation {
|
||||
ADD = 1,
|
||||
SUBTRACT = 2,
|
||||
MULTIPLY = 3,
|
||||
DIVIDE = 4
|
||||
}
|
||||
|
||||
struct Work {
|
||||
1: i32 num1 = 0,
|
||||
2: i32 num2,
|
||||
3: Operation op,
|
||||
4: optional string comment,
|
||||
}
|
||||
|
||||
exception InvalidOperation {
|
||||
1: i32 whatOp,
|
||||
2: string why
|
||||
}
|
||||
"""
|
||||
|
||||
output = capture_io(fn -> run([dir, "--output-dir", dir]) end)
|
||||
|
||||
assert String.contains? output, "Generated shared/shared_struct.ex"
|
||||
assert String.contains? output, "Generated shared/shared_exception.ex"
|
||||
assert String.contains? output, "Generated shared/shared_struct.ex"
|
||||
assert String.contains? output, "Generated shared/shared_exception.ex"
|
||||
assert String.contains? output, "Generated tutorial/operation.ex"
|
||||
assert String.contains? output, "Generated tutorial/work.ex"
|
||||
assert String.contains? output, "Generated tutorial/invalid_operation.ex"
|
||||
|
||||
assert File.exists? "#{dir}/shared/shared_struct.ex"
|
||||
assert File.exists? "#{dir}/tutorial/invalid_operation.ex"
|
||||
end
|
||||
end
|
@ -14,7 +14,6 @@ defmodule ParserTest do
|
||||
alias Thrift.Parser.Models.Struct
|
||||
alias Thrift.Parser.Models.StructRef
|
||||
alias Thrift.Parser.Models.TEnum
|
||||
alias Thrift.Parser.Models.TEnumValue
|
||||
alias Thrift.Parser.Models.Union
|
||||
|
||||
import ExUnit.CaptureIO
|
||||
@ -175,132 +174,6 @@ defmodule ParserTest do
|
||||
type: {:list, :i32}}
|
||||
end
|
||||
|
||||
test "parsing an enum value constant" do
|
||||
constant = "const string SUNNY = Weather.SUNNY;"
|
||||
|> parse([:constants, :SUNNY])
|
||||
|
||||
assert constant == %Constant{
|
||||
name: :SUNNY,
|
||||
value: %TEnumValue{enum_name: :Weather, enum_value: :SUNNY, type: :string},
|
||||
type: :string}
|
||||
end
|
||||
|
||||
test "parsing a list constant with enum values" do
|
||||
constant = """
|
||||
const list<string> WEATHER_TYPES = [
|
||||
Weather.SUNNY,
|
||||
Weather.CLOUDY,
|
||||
Weather.RAINY,
|
||||
Weather.SNOWY
|
||||
]
|
||||
"""
|
||||
|> parse([:constants, :WEATHER_TYPES])
|
||||
|
||||
assert constant == %Constant{
|
||||
name: :WEATHER_TYPES,
|
||||
type: {:list, :string},
|
||||
value: [
|
||||
%TEnumValue{enum_name: :Weather, enum_value: :SUNNY, type: :string},
|
||||
%TEnumValue{enum_name: :Weather, enum_value: :CLOUDY, type: :string},
|
||||
%TEnumValue{enum_name: :Weather, enum_value: :RAINY, type: :string},
|
||||
%TEnumValue{enum_name: :Weather, enum_value: :SNOWY, type: :string},
|
||||
]}
|
||||
end
|
||||
|
||||
test "parsing a set constant with enum values" do
|
||||
constant = """
|
||||
const set<string> WEATHER_TYPES = {
|
||||
Weather.SUNNY,
|
||||
Weather.CLOUDY,
|
||||
Weather.RAINY,
|
||||
Weather.SNOWY
|
||||
}
|
||||
"""
|
||||
|> parse([:constants, :WEATHER_TYPES])
|
||||
|
||||
assert constant == %Constant{
|
||||
name: :WEATHER_TYPES,
|
||||
type: {:set, :string},
|
||||
value: MapSet.new([
|
||||
%TEnumValue{enum_name: :Weather, enum_value: :SUNNY, type: :string},
|
||||
%TEnumValue{enum_name: :Weather, enum_value: :CLOUDY, type: :string},
|
||||
%TEnumValue{enum_name: :Weather, enum_value: :RAINY, type: :string},
|
||||
%TEnumValue{enum_name: :Weather, enum_value: :SNOWY, type: :string},
|
||||
])}
|
||||
end
|
||||
|
||||
test "parsing a map constant with enum keys" do
|
||||
constant = """
|
||||
const map<Weather, string> weather_messages = {
|
||||
Weather.SUNNY: "Yay, it's sunny!",
|
||||
Weather.CLOUDY: "Welcome to Cleveland!",
|
||||
Weather.RAINY: "Welcome to Seattle!",
|
||||
Weather.SNOWY: "Welcome to Canada!"
|
||||
}
|
||||
"""
|
||||
|> parse([:constants, :weather_messages])
|
||||
|
||||
assert constant == %Constant{
|
||||
name: :weather_messages,
|
||||
type: {:map, {%StructRef{referenced_type: :Weather}, :string}},
|
||||
value: %{
|
||||
%TEnumValue{
|
||||
enum_name: :Weather,
|
||||
enum_value: :CLOUDY,
|
||||
type: %StructRef{referenced_type: :Weather}} => "Welcome to Cleveland!",
|
||||
|
||||
%TEnumValue{
|
||||
enum_name: :Weather,
|
||||
enum_value: :RAINY,
|
||||
type: %StructRef{referenced_type: :Weather}} => "Welcome to Seattle!",
|
||||
|
||||
%TEnumValue{
|
||||
enum_name: :Weather,
|
||||
enum_value: :SNOWY,
|
||||
type: %StructRef{referenced_type: :Weather}} => "Welcome to Canada!",
|
||||
|
||||
%TEnumValue{
|
||||
enum_name: :Weather,
|
||||
enum_value: :SUNNY,
|
||||
type: %StructRef{referenced_type: :Weather}} => "Yay, it's sunny!"}}
|
||||
end
|
||||
|
||||
test "parsing a map constant with enum values as values" do
|
||||
constant = """
|
||||
const map<string, Weather> clothes_to_wear = {
|
||||
"gloves": Weather.SNOWY,
|
||||
"umbrella": Weather.RAINY,
|
||||
"sweater": Weather.CLOUDY,
|
||||
"sunglasses": Weather.SUNNY
|
||||
}
|
||||
"""
|
||||
|> parse([:constants, :clothes_to_wear])
|
||||
|
||||
assert constant == %Constant{
|
||||
name: :clothes_to_wear,
|
||||
type: {:map, {:string, %StructRef{referenced_type: :Weather}}},
|
||||
value: %{
|
||||
"gloves" => %TEnumValue{
|
||||
enum_name: :Weather,
|
||||
enum_value: :SNOWY,
|
||||
type: %StructRef{referenced_type: :Weather}},
|
||||
|
||||
"sunglasses" => %TEnumValue{
|
||||
enum_name: :Weather,
|
||||
enum_value: :SUNNY,
|
||||
type: %StructRef{referenced_type: :Weather}},
|
||||
|
||||
"sweater" => %TEnumValue{
|
||||
enum_name: :Weather,
|
||||
enum_value: :CLOUDY,
|
||||
type: %StructRef{referenced_type: :Weather}},
|
||||
|
||||
"umbrella" => %TEnumValue{
|
||||
enum_name: :Weather,
|
||||
enum_value: :RAINY,
|
||||
type: %StructRef{referenced_type: :Weather}}}}
|
||||
end
|
||||
|
||||
test "parsing an enum" do
|
||||
user_status = """
|
||||
enum UserStatus {
|
||||
@ -458,24 +331,6 @@ defmodule ParserTest do
|
||||
end)
|
||||
end
|
||||
|
||||
test "parsing an empty map default value" do
|
||||
struct = """
|
||||
struct EmptyDefault {
|
||||
1: i64 id,
|
||||
2: map<string, string> myMap={},
|
||||
}
|
||||
"""
|
||||
|> parse([:structs, :EmptyDefault])
|
||||
|
||||
assert struct == %Struct{
|
||||
name: :EmptyDefault,
|
||||
fields: [
|
||||
%Field{default: nil, id: 1, name: :id, required: :default, type: :i64},
|
||||
%Field{default: %{}, id: 2, name: :myMap,
|
||||
required: :default, type: {:map, {:string, :string}}}
|
||||
]}
|
||||
end
|
||||
|
||||
test "when default ids conflict with explicit ids" do
|
||||
|
||||
assert_raise RuntimeError, fn ->
|
||||
|
@ -1,183 +0,0 @@
|
||||
defmodule BinaryProtocolTest do
|
||||
use ThriftTestCase, gen_erl: true
|
||||
use ExUnit.Case
|
||||
|
||||
@moduletag :integration
|
||||
|
||||
def round_trip_struct(data, serializer_mf, deserializer_mf) do
|
||||
{serializer_mod, serializer_fn} = serializer_mf
|
||||
{deserializer_mod, deserializer_fn} = deserializer_mf
|
||||
|
||||
serialized = :erlang.apply(serializer_mod, serializer_fn, [data, :binary])
|
||||
|> IO.iodata_to_binary
|
||||
|
||||
:erlang.apply(deserializer_mod, deserializer_fn, [serialized])
|
||||
end
|
||||
|
||||
@thrift_file name: "enums.thrift", contents: """
|
||||
enum Status {
|
||||
ACTIVE,
|
||||
INACTIVE,
|
||||
BANNED = 6,
|
||||
EVIL = 0x20,
|
||||
}
|
||||
|
||||
struct StructWithEnum {
|
||||
1: Status status
|
||||
}
|
||||
"""
|
||||
thrift_test "encoding enums" do
|
||||
encoder = {StructWithEnum, :serialize}
|
||||
decoder = {Erlang.Enums, :deserialize_struct_with_enum}
|
||||
|
||||
assert {:StructWithEnum, 1} == round_trip_struct(StructWithEnum.new, encoder, decoder)
|
||||
assert {:StructWithEnum, 32} == round_trip_struct(%StructWithEnum{status: Status.evil}, encoder, decoder)
|
||||
assert {:StructWithEnum, 6} == round_trip_struct(%StructWithEnum{status: Status.banned}, encoder, decoder)
|
||||
end
|
||||
|
||||
@thrift_file name: "scalars.thrift", contents: """
|
||||
enum Weather {
|
||||
SUNNY,
|
||||
CLOUDY,
|
||||
RAINY,
|
||||
SNOWY
|
||||
}
|
||||
|
||||
struct Scalars {
|
||||
1: bool is_true,
|
||||
2: byte byte_value,
|
||||
3: i16 sixteen_bits,
|
||||
4: i32 thirty_two_bits,
|
||||
5: i64 sixty_four_bits,
|
||||
6: double double_value,
|
||||
7: string string_value,
|
||||
8: binary raw_binary
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "it should be able to encode scalar values" do
|
||||
encoder = {Scalars, :serialize}
|
||||
decoder = {Erlang.Scalars, :deserialize_scalars}
|
||||
|
||||
assert Erlang.Scalars.new_scalars(is_true: true) == round_trip_struct(%Scalars{is_true: true}, encoder, decoder)
|
||||
|
||||
assert Erlang.Scalars.new_scalars(byte_value: 127) == round_trip_struct(%Scalars{byte_value: 127}, encoder, decoder)
|
||||
|
||||
assert Erlang.Scalars.new_scalars(sixteen_bits: 12723) == round_trip_struct(%Scalars{sixteen_bits: 12723}, encoder, decoder)
|
||||
|
||||
assert Erlang.Scalars.new_scalars(thirty_two_bits: 1_8362_832) == round_trip_struct(%Scalars{thirty_two_bits: 1_8362_832}, encoder, decoder)
|
||||
|
||||
assert Erlang.Scalars.new_scalars(sixty_four_bits: 8872372) == round_trip_struct(%Scalars{sixty_four_bits: 8872372}, encoder, decoder)
|
||||
|
||||
assert Erlang.Scalars.new_scalars(double_value: 2.37219) == round_trip_struct(%Scalars{double_value: 2.37219}, encoder, decoder)
|
||||
|
||||
assert Erlang.Scalars.new_scalars(string_value: "I am a string") == round_trip_struct(%Scalars{string_value: "I am a string"}, encoder, decoder)
|
||||
|
||||
assert Erlang.Scalars.new_scalars(raw_binary: <<224, 186, 2, 1, 0>>) == round_trip_struct(%Scalars{raw_binary: <<224, 186, 2, 1, 0>>}, encoder, decoder)
|
||||
end
|
||||
|
||||
thrift_test "it should not encode unset fields" do
|
||||
encoded = Scalars.serialize(%Scalars{})
|
||||
|> IO.iodata_to_binary
|
||||
|
||||
assert <<0>> == encoded
|
||||
end
|
||||
|
||||
@thrift_file name: "containers.thrift", contents: """
|
||||
enum Weather {
|
||||
SUNNY,
|
||||
CLOUDY,
|
||||
RAINY,
|
||||
SNOWY
|
||||
}
|
||||
|
||||
struct Friend {
|
||||
1: i64 id,
|
||||
2: string username,
|
||||
}
|
||||
|
||||
struct Containers {
|
||||
1: list<i64> users,
|
||||
2: list<Weather> weekly_forecast,
|
||||
3: set<string> taken_usernames,
|
||||
// Lists of structs are broken
|
||||
// 4: list<Friend> friends,
|
||||
// Deserializers for maps break the build
|
||||
// 3: map<i64, Weather> user_forecasts,
|
||||
// 4: map<string, User> users_by_username
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "containers serialize properly" do
|
||||
encoder = {Containers, :serialize}
|
||||
decoder = {Erlang.Containers, :deserialize_containers}
|
||||
|
||||
# unset containers become undefined
|
||||
assert Erlang.Containers.new_containers() == round_trip_struct(%Containers{}, encoder, decoder)
|
||||
|
||||
# empty containers are sent
|
||||
assert Erlang.Containers.new_containers(users: []) == round_trip_struct(%Containers{users: []}, encoder, decoder)
|
||||
|
||||
# containers can contain enums
|
||||
forecast = [Weather.sunny,
|
||||
Weather.sunny,
|
||||
Weather.sunny,
|
||||
Weather.sunny,
|
||||
Weather.cloudy,
|
||||
Weather.sunny,
|
||||
Weather.sunny]
|
||||
assert Erlang.Containers.new_containers(weekly_forecast: [1, 1, 1, 1, 2, 1, 1]) == round_trip_struct(%Containers{weekly_forecast: forecast}, encoder, decoder)
|
||||
taken_usernames = ["scohen", "pguillory"]
|
||||
assert Erlang.Containers.new_containers(taken_usernames: :sets.from_list(taken_usernames)) == round_trip_struct(%Containers{taken_usernames: MapSet.new(taken_usernames)}, encoder, decoder)
|
||||
|
||||
# # containers can contain structs
|
||||
# erlang_friends = [
|
||||
# Erlang.Containers.new_friend(id: 1, username: "scohen"),
|
||||
# Erlang.Containers.new_friend(id: 2, username: "pguillory"),
|
||||
# Erlang.Containers.new_friend(id: 3, username: "dantswain"),
|
||||
# ]
|
||||
# assert Erlang.Containers.new_containers(friends: erlang_friends) == round_trip_struct(%Containers{
|
||||
# friends:
|
||||
# [%Friend{id: 1, username: "scohen"},
|
||||
# %Friend{id: 2, username: "pguillory"},
|
||||
# %Friend{id: 3, username: "dantswain"}
|
||||
# ]}, encoder, decoder)
|
||||
end
|
||||
|
||||
|
||||
@thrift_file name: "across.thrift", contents: """
|
||||
include "containers.thrift"
|
||||
|
||||
struct User {
|
||||
1: i64 id,
|
||||
2: containers.Friend best_friend;
|
||||
}
|
||||
"""
|
||||
|
||||
thrift_test "serializing structs across modules" do
|
||||
encoder = {User, :serialize}
|
||||
decoder = {Erlang.Across, :deserialize_user}
|
||||
|
||||
erl_user = Erlang.Across.new_user(
|
||||
id: 1234,
|
||||
best_friend: Erlang.Containers.new_friend(id: 3282, username: "stinkypants"))
|
||||
|
||||
assert erl_user == round_trip_struct(%User{
|
||||
id: 1234,
|
||||
best_friend: %Friend{id: 3282, username: "stinkypants"}}, encoder, decoder)
|
||||
end
|
||||
|
||||
# test "nil nested fields get their default value" do
|
||||
# erlang_nested = serialize_nesting_to_erlang(user: user(:elixir, username: "frank"))
|
||||
|
||||
# assert {:Nesting, user, nested} = erlang_nested
|
||||
# assert user == user(:erlang, username: "frank")
|
||||
# assert nested == {:SharedStruct, 44291, "Look at my value..."}
|
||||
|
||||
# erlang_nested = serialize_nesting_to_erlang(nested: %Shared.SharedStruct{key: 2916, value: "my value"})
|
||||
|
||||
# assert {:Nesting, user, nested} = erlang_nested
|
||||
# assert nested == {:SharedStruct, 2916, "my value"}
|
||||
# assert user == user(:erlang)
|
||||
# end
|
||||
end
|
@ -1,169 +0,0 @@
|
||||
defmodule User do
|
||||
defstruct is_evil: false, user_id: 0, number_of_hairs_on_head: 0, amount_of_red: 0, nineties_era_color: 0, mint_gum: 0.0, username: "", friends: [], my_map: %{}, blocked_user_ids: MapSet.new(), optional_integers: []
|
||||
end
|
||||
|
||||
defmodule Nesting do
|
||||
defstruct user: nil, nested: nil
|
||||
end
|
||||
|
||||
defmodule Shared.SharedStruct do
|
||||
defstruct key: nil, value: nil
|
||||
end
|
||||
|
||||
defmodule ParserUtils do
|
||||
alias Thrift.Parser
|
||||
|
||||
def parse_thrift(file_path) do
|
||||
Parser.parse_file(file_path)
|
||||
end
|
||||
|
||||
def compile_module(file_group) do
|
||||
Thrift.Generator.generate_to_string!(file_group)
|
||||
|> Code.compile_string
|
||||
end
|
||||
|
||||
# Debugging aid. Non-private in order to mute "function is unused" warning.
|
||||
def print_compiled_code(code_string) do
|
||||
code_string
|
||||
|> String.split("\n")
|
||||
|> Enum.with_index
|
||||
|> Enum.each(fn {line, idx} ->
|
||||
IO.puts "#{idx + 1} #{line}"
|
||||
end)
|
||||
|
||||
code_string
|
||||
end
|
||||
|
||||
def user(type, opts \\ [])
|
||||
def user(:erlang, opts) do
|
||||
is_evil = Keyword.get(opts, :is_evil, :undefined)
|
||||
user_id = Keyword.get(opts, :user_id, :undefined)
|
||||
number_of_hairs_on_head = Keyword.get(opts, :number_of_hairs_on_head, :undefined)
|
||||
amount_of_red = Keyword.get(opts, :amount_of_red, :undefined)
|
||||
nineties_era_color = Keyword.get(opts, :nineties_era_color, :undefined)
|
||||
mint_gum = Keyword.get(opts, :mint_gum, :undefined)
|
||||
username = Keyword.get(opts, :username, :undefined)
|
||||
friends = Keyword.get(opts, :friends, :undefined)
|
||||
my_map = Keyword.get(opts, :my_map, :undefined)
|
||||
blocked_user_ids = case Keyword.get(opts, :blocked_user_ids) do
|
||||
nil -> :undefined
|
||||
list when is_list(list) -> :sets.from_list(list)
|
||||
end
|
||||
optional_integers = Keyword.get(opts, :optional_integers, :undefined)
|
||||
|
||||
{:User, is_evil, user_id, number_of_hairs_on_head,
|
||||
amount_of_red, nineties_era_color, mint_gum, username,
|
||||
friends, my_map, blocked_user_ids, optional_integers}
|
||||
end
|
||||
def user(:elixir, opts) do
|
||||
%{__struct__: User,
|
||||
is_evil: Keyword.get(opts, :is_evil),
|
||||
user_id: Keyword.get(opts, :user_id),
|
||||
number_of_hairs_on_head: Keyword.get(opts, :number_of_hairs_on_head),
|
||||
amount_of_red: Keyword.get(opts, :amount_of_red),
|
||||
nineties_era_color: Keyword.get(opts, :nineties_era_color),
|
||||
mint_gum: Keyword.get(opts, :mint_gum),
|
||||
friends: Keyword.get(opts, :friends),
|
||||
my_map: Keyword.get(opts, :my_map),
|
||||
blocked_user_ids: case Keyword.get(opts, :blocked_user_ids) do
|
||||
nil -> nil
|
||||
list when is_list(list) -> MapSet.new(list)
|
||||
end,
|
||||
username: Keyword.get(opts, :username),
|
||||
optional_integers: Keyword.get(opts, :optional_integers)
|
||||
}
|
||||
end
|
||||
|
||||
def serialize_user_erlang(user, opts \\ []) do
|
||||
struct_info = {:struct, {:simple_types, :User}}
|
||||
serialize_to_erlang(user, struct_info, opts)
|
||||
end
|
||||
def serialize_user_elixir(user, opts \\ []) do
|
||||
serialized = User.BinaryProtocol.serialize(user)
|
||||
|
||||
if Keyword.get(opts, :convert_to_binary, true) do
|
||||
IO.iodata_to_binary(serialized)
|
||||
else
|
||||
serialized
|
||||
end
|
||||
end
|
||||
|
||||
def deserialize_user_elixir(binary_data) do
|
||||
{%User{}, ""} = User.BinaryProtocol.deserialize(binary_data)
|
||||
end
|
||||
|
||||
def deserialize_user_erlang(binary_data) do
|
||||
struct_definition = {:struct, {:simple_types, :User}}
|
||||
deserialize_to_erlang(binary_data, struct_definition)
|
||||
end
|
||||
|
||||
def shared_struct(:elixir, opts \\ []) do
|
||||
%Shared.SharedStruct{
|
||||
key: Keyword.get(opts, :key, 44291),
|
||||
value: Keyword.get(opts, :value, "Look at my value...")
|
||||
}
|
||||
end
|
||||
|
||||
def nesting(:elixir, opts) do
|
||||
%Nesting{
|
||||
user: Keyword.get(opts, :user, user(:elixir)),
|
||||
nested: Keyword.get(opts, :nested, shared_struct(:elixir))}
|
||||
end
|
||||
|
||||
def serialize_nesting_to_erlang(opts) do
|
||||
nesting(:elixir, opts)
|
||||
|> serialize_nesting
|
||||
|> deserialize_nesting(:erlang)
|
||||
end
|
||||
|
||||
def serialize_nesting(nesting, opts \\ [])
|
||||
def serialize_nesting(nesting, opts) when is_map(nesting) do
|
||||
alias Nesting.BinaryProtocol
|
||||
serialized = BinaryProtocol.serialize(:struct, nesting)
|
||||
if Keyword.get(opts, :convert_to_binary, true) do
|
||||
IO.iodata_to_binary(serialized)
|
||||
else
|
||||
serialized
|
||||
end
|
||||
end
|
||||
|
||||
def serialize_nesting(erlang_nesting, opts) when is_tuple(erlang_nesting) do
|
||||
struct_info = {:struct, {:simple_types, :Nesting}}
|
||||
serialize_to_erlang(erlang_nesting, struct_info, opts)
|
||||
end
|
||||
|
||||
def deserialize_nesting(nesting_binary, :erlang) do
|
||||
struct_definition = {:struct, {:simple_types, :Nesting}}
|
||||
deserialize_to_erlang(nesting_binary, struct_definition)
|
||||
end
|
||||
|
||||
defp serialize_to_erlang(thrift_structure, struct_info, opts) do
|
||||
iolist_struct = with({:ok, tf} <- :thrift_memory_buffer.new_transport_factory(),
|
||||
{:ok, pf} <- :thrift_binary_protocol.new_protocol_factory(tf, []),
|
||||
{:ok, binary_protocol} <- pf.()) do
|
||||
|
||||
{proto, :ok} = :thrift_protocol.write(binary_protocol, {struct_info, thrift_structure})
|
||||
{_, data} = :thrift_protocol.flush_transport(proto)
|
||||
data
|
||||
end
|
||||
|
||||
if Keyword.get(opts, :convert_to_binary, true) do
|
||||
:erlang.iolist_to_binary(iolist_struct)
|
||||
else
|
||||
iolist_struct
|
||||
end
|
||||
end
|
||||
|
||||
def deserialize_to_erlang(binary_data, struct_definition) do
|
||||
try do
|
||||
with({:ok, memory_buffer_transport} <- :thrift_memory_buffer.new(binary_data),
|
||||
{:ok, binary_protocol} <- :thrift_binary_protocol.new(memory_buffer_transport),
|
||||
{_, {:ok, record}} <- :thrift_protocol.read(binary_protocol, struct_definition)) do
|
||||
|
||||
record
|
||||
end
|
||||
rescue _ ->
|
||||
{:error, :cant_decode}
|
||||
end
|
||||
end
|
||||
end
|
@ -1,239 +0,0 @@
|
||||
defmodule ThriftTestCase do
|
||||
@project_root Path.expand("../../../", __DIR__)
|
||||
|
||||
defmacro __using__(opts \\ []) do
|
||||
quote do
|
||||
@thrift_test_opts unquote(opts)
|
||||
import unquote(__MODULE__)
|
||||
Module.register_attribute(__MODULE__, :thrift_file, accumulate: true)
|
||||
Module.register_attribute(__MODULE__, :thrift_test, accumulate: true)
|
||||
@before_compile unquote(__MODULE__)
|
||||
use ExUnit.Case, async: true
|
||||
end
|
||||
end
|
||||
|
||||
defmacro __before_compile__(env) do
|
||||
tag = Module.get_attribute(__CALLER__.module, :moduletag)
|
||||
|> Map.new(fn tag -> {tag, true} end)
|
||||
|
||||
config = ExUnit.configuration
|
||||
case ExUnit.Filters.eval(config[:include], config[:exclude], tag, []) do
|
||||
:ok ->
|
||||
compile_and_build_erlang_helpers(__CALLER__, env)
|
||||
{:error, _} ->
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
defp compile_and_build_erlang_helpers(caller, env) do
|
||||
opts = Module.get_attribute(caller.module, :thrift_test_opts)
|
||||
|
||||
namespace = inspect(env.module)
|
||||
|
||||
out_root = Path.join(@project_root, "tmp")
|
||||
dir = Path.join([out_root, inspect(__MODULE__), namespace])
|
||||
File.rm_rf!(dir)
|
||||
File.mkdir_p!(dir)
|
||||
|
||||
|
||||
modules = caller.module
|
||||
|> Module.get_attribute(:thrift_file)
|
||||
|> Enum.reverse
|
||||
|> Enum.map(fn [name: filename, contents: contents] ->
|
||||
filename = Path.expand(filename, dir)
|
||||
File.write!(filename, "namespace elixir #{namespace}\n" <> contents)
|
||||
filename
|
||||
end)
|
||||
|> Enum.flat_map(&Thrift.Generator.generate!(&1, dir))
|
||||
|> Enum.uniq
|
||||
|> Enum.map(fn output_file ->
|
||||
output_file
|
||||
|> Path.expand(dir)
|
||||
|> Code.eval_file
|
||||
|
||||
namespace_module = output_file
|
||||
|> Path.dirname
|
||||
|> String.split("/")
|
||||
|> Enum.map(&Macro.camelize/1)
|
||||
|> Enum.join(".")
|
||||
|
||||
basename_module = output_file
|
||||
|> Path.basename(".ex")
|
||||
|> Macro.camelize
|
||||
|
||||
:"Elixir.#{namespace_module}.#{basename_module}"
|
||||
end)
|
||||
|
||||
record_requires = if opts[:gen_erl] do
|
||||
caller.module
|
||||
|> Module.get_attribute(:thrift_file)
|
||||
|> Enum.reverse
|
||||
|> generate_erlang_files(dir)
|
||||
else
|
||||
[]
|
||||
end
|
||||
|
||||
tests = caller.module
|
||||
|> Module.get_attribute(:thrift_test)
|
||||
|> Enum.reverse
|
||||
|> Enum.map(fn {test_name, block} ->
|
||||
quote location: :keep do
|
||||
test unquote(test_name) do
|
||||
unquote(block)
|
||||
end
|
||||
end
|
||||
end)
|
||||
|
||||
quote do
|
||||
unquote_splicing(Enum.map(modules, fn module ->
|
||||
quote do: alias unquote(module)
|
||||
end))
|
||||
unquote_splicing(Enum.map(modules, fn module ->
|
||||
quote do: require unquote(module)
|
||||
end))
|
||||
|
||||
unquote_splicing(record_requires)
|
||||
setup_all do
|
||||
on_exit fn ->
|
||||
unquote(if Keyword.get(opts, :cleanup, true) do
|
||||
quote do: File.rm_rf!(unquote(dir))
|
||||
else
|
||||
quote do: IO.puts IO.ANSI.format([:yellow, unquote("Leaving files in #{inspect dir}")])
|
||||
end)
|
||||
end
|
||||
:ok
|
||||
end
|
||||
|
||||
unquote_splicing(tests)
|
||||
end
|
||||
end
|
||||
|
||||
defp generate_erlang_files(list_of_files, dir) do
|
||||
erlang_source_dir = Path.join(dir, "src")
|
||||
|
||||
File.mkdir(erlang_source_dir)
|
||||
|
||||
outdir = Path.relative_to(erlang_source_dir, @project_root)
|
||||
reldir = Path.relative_to(dir, @project_root)
|
||||
|
||||
list_of_files
|
||||
|> Enum.map(fn file ->
|
||||
filename = Path.join(reldir, file[:name])
|
||||
System.cmd(System.get_env("THRIFT") || "thrift",
|
||||
["-out", outdir,
|
||||
"--gen", "erl", "-r", filename],
|
||||
cd: @project_root)
|
||||
end)
|
||||
|
||||
Path.wildcard("#{erlang_source_dir}/*.erl")
|
||||
|> Enum.map(fn source_file ->
|
||||
{:ok, mod_name, code} = source_file
|
||||
|> String.to_char_list
|
||||
|> :compile.file([:binary])
|
||||
|
||||
:code.load_binary(mod_name, [], code)
|
||||
end)
|
||||
|
||||
Path.wildcard("#{erlang_source_dir}/*_types.hrl")
|
||||
|> Enum.map(&build_records/1)
|
||||
end
|
||||
|
||||
defp build_records(file_path) do
|
||||
erlang_module = file_path
|
||||
|> Path.basename
|
||||
|> Path.rootname
|
||||
|> String.to_atom
|
||||
|
||||
record_module_name = erlang_module
|
||||
|> Atom.to_string
|
||||
|> String.replace("_types", "")
|
||||
|> Macro.camelize
|
||||
|> String.to_atom
|
||||
|
||||
module_name = Module.concat(Erlang, record_module_name)
|
||||
|
||||
records = Record.extract_all(from: file_path)
|
||||
|> Enum.map(fn {record_name, fields} ->
|
||||
underscored_record_name = record_name
|
||||
|> Atom.to_string
|
||||
|> Macro.underscore
|
||||
|> String.to_atom
|
||||
|
||||
new_fn_name = :"new_#{underscored_record_name}"
|
||||
serialize_fn_name = :"serialize_#{underscored_record_name}"
|
||||
deserialize_fn_name = :"deserialize_#{underscored_record_name}"
|
||||
|
||||
match = Enum.map(fields, fn _ -> Macro.var(:_, nil) end)
|
||||
kw_match = Enum.map(fields, fn {name, _} -> {name, Macro.var(name, nil)} end)
|
||||
variable_assigns = Enum.map(fields, fn {name, default} ->
|
||||
field_var = Macro.var(name, nil)
|
||||
quote do
|
||||
unquote(field_var) = Keyword.get(opts, unquote(name), unquote(default))
|
||||
end
|
||||
end)
|
||||
|
||||
quote do
|
||||
Record.defrecord unquote(underscored_record_name), unquote(fields)
|
||||
def unquote(new_fn_name)(opts \\ []) do
|
||||
unquote_splicing(variable_assigns)
|
||||
record = unquote(underscored_record_name)(unquote(kw_match))
|
||||
:erlang.setelement(1, record, unquote(record_name))
|
||||
end
|
||||
|
||||
def unquote(serialize_fn_name)({unquote(underscored_record_name), unquote_splicing(match)}=record, opts \\ []) do
|
||||
record = :erlang.setelement(1, record, unquote(record_name))
|
||||
struct_info = {:struct, {unquote(erlang_module), unquote(record_name)}}
|
||||
iolist_struct = with({:ok, tf} <- :thrift_memory_buffer.new_transport_factory(),
|
||||
{:ok, pf} <- :thrift_binary_protocol.new_protocol_factory(tf, []),
|
||||
{:ok, binary_protocol} <- pf.()) do
|
||||
|
||||
{proto, :ok} = :thrift_protocol.write(binary_protocol, {struct_info, record})
|
||||
{_, data} = :thrift_protocol.flush_transport(proto)
|
||||
data
|
||||
end
|
||||
|
||||
if Keyword.get(opts, :convert_to_binary, true) do
|
||||
:erlang.iolist_to_binary(iolist_struct)
|
||||
else
|
||||
iolist_struct
|
||||
end
|
||||
end
|
||||
|
||||
def unquote(deserialize_fn_name)(binary_data) do
|
||||
struct_info = {:struct, {unquote(erlang_module), unquote(record_name)}}
|
||||
try do
|
||||
with({:ok, memory_buffer_transport} <- :thrift_memory_buffer.new(binary_data),
|
||||
{:ok, binary_protocol} <- :thrift_binary_protocol.new(memory_buffer_transport),
|
||||
{_, {:ok, record}} <- :thrift_protocol.read(binary_protocol, struct_info)) do
|
||||
|
||||
record
|
||||
end
|
||||
rescue _ ->
|
||||
{:error, :cant_decode}
|
||||
end
|
||||
end
|
||||
end
|
||||
end)
|
||||
|
||||
quote do
|
||||
defmodule unquote(module_name) do
|
||||
require Record
|
||||
unquote_splicing(records)
|
||||
end
|
||||
end
|
||||
|> Code.compile_quoted
|
||||
|
||||
quote do: require unquote(module_name)
|
||||
end
|
||||
|
||||
defmacro thrift_test(name, do: block) do
|
||||
quote do
|
||||
@thrift_test {unquote(name), unquote({:quote, [], [[do: block]]})}
|
||||
end
|
||||
end
|
||||
|
||||
def inspect_quoted(block) do
|
||||
block |> Macro.to_string |> IO.puts
|
||||
block
|
||||
end
|
||||
end
|
@ -1,9 +0,0 @@
|
||||
%%
|
||||
%% Autogenerated by Thrift Compiler (1.0.0-dev)
|
||||
%%
|
||||
%% DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||
%%
|
||||
|
||||
|
||||
-include("shared_types.hrl").
|
||||
|
@ -1,30 +0,0 @@
|
||||
%%
|
||||
%% Autogenerated by Thrift Compiler (1.0.0-dev)
|
||||
%%
|
||||
%% DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||
%%
|
||||
|
||||
-module(shared_service_thrift).
|
||||
-behaviour(thrift_service).
|
||||
|
||||
|
||||
-include("shared_service_thrift.hrl").
|
||||
|
||||
-export([struct_info/1, function_info/2, function_names/0]).
|
||||
|
||||
struct_info(_) -> erlang:error(function_clause).
|
||||
%%% interface
|
||||
% getStruct(This, Key)
|
||||
function_info('getStruct', params_type) ->
|
||||
{struct, [{1, i32}]}
|
||||
;
|
||||
function_info('getStruct', reply_type) ->
|
||||
{struct, {'shared_types', 'SharedStruct'}};
|
||||
function_info('getStruct', exceptions) ->
|
||||
{struct, []}
|
||||
;
|
||||
function_info(_Func, _Info) -> erlang:error(function_clause).
|
||||
|
||||
function_names() ->
|
||||
['getStruct'].
|
||||
|
@ -1,5 +0,0 @@
|
||||
-ifndef(_shared_service_included).
|
||||
-define(_shared_service_included, yeah).
|
||||
-include("shared_types.hrl").
|
||||
|
||||
-endif.
|
@ -1,47 +0,0 @@
|
||||
%%
|
||||
%% Autogenerated by Thrift Compiler (1.0.0-dev)
|
||||
%%
|
||||
%% DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||
%%
|
||||
|
||||
-module(shared_types).
|
||||
|
||||
-include("shared_types.hrl").
|
||||
|
||||
-export([struct_info/1, struct_info_ext/1, enum_info/1, enum_names/0, struct_names/0, exception_names/0]).
|
||||
|
||||
struct_info('SharedStruct') ->
|
||||
{struct, [{1, i32},
|
||||
{2, string}]}
|
||||
;
|
||||
|
||||
struct_info('SharedException') ->
|
||||
{struct, [{1, string},
|
||||
{2, i32}]}
|
||||
;
|
||||
|
||||
struct_info(_) -> erlang:error(function_clause).
|
||||
|
||||
struct_info_ext('SharedStruct') ->
|
||||
{struct, [{1, undefined, i32, 'key', undefined},
|
||||
{2, undefined, string, 'value', undefined}]}
|
||||
;
|
||||
|
||||
struct_info_ext('SharedException') ->
|
||||
{struct, [{1, undefined, string, 'message', undefined},
|
||||
{2, undefined, i32, 'code', undefined}]}
|
||||
;
|
||||
|
||||
struct_info_ext(_) -> erlang:error(function_clause).
|
||||
|
||||
struct_names() ->
|
||||
['SharedStruct'].
|
||||
|
||||
enum_info(_) -> erlang:error(function_clause).
|
||||
|
||||
enum_names() ->
|
||||
[].
|
||||
|
||||
exception_names() ->
|
||||
['SharedException'].
|
||||
|
@ -1,16 +0,0 @@
|
||||
-ifndef(_shared_types_included).
|
||||
-define(_shared_types_included, yeah).
|
||||
|
||||
%% struct 'SharedStruct'
|
||||
|
||||
-record('SharedStruct', {'key' :: integer(),
|
||||
'value' :: string() | binary()}).
|
||||
-type 'SharedStruct'() :: #'SharedStruct'{}.
|
||||
|
||||
%% struct 'SharedException'
|
||||
|
||||
-record('SharedException', {'message' :: string() | binary(),
|
||||
'code' :: integer()}).
|
||||
-type 'SharedException'() :: #'SharedException'{}.
|
||||
|
||||
-endif.
|
@ -1,9 +0,0 @@
|
||||
%%
|
||||
%% Autogenerated by Thrift Compiler (1.0.0-dev)
|
||||
%%
|
||||
%% DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||
%%
|
||||
|
||||
|
||||
-include("simple_types.hrl").
|
||||
|
@ -1,65 +0,0 @@
|
||||
%%
|
||||
%% Autogenerated by Thrift Compiler (1.0.0-dev)
|
||||
%%
|
||||
%% DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
|
||||
%%
|
||||
|
||||
-module(simple_types).
|
||||
|
||||
-include("simple_types.hrl").
|
||||
|
||||
-export([struct_info/1, struct_info_ext/1, enum_info/1, enum_names/0, struct_names/0, exception_names/0]).
|
||||
|
||||
struct_info('User') ->
|
||||
{struct, [{1, bool},
|
||||
{2, i64},
|
||||
{3, i32},
|
||||
{4, byte},
|
||||
{5, i16},
|
||||
{6, double},
|
||||
{7, string},
|
||||
{8, {list, {struct, {'simple_types', 'User'}}}},
|
||||
{9, {map, byte, string}},
|
||||
{10, {set, i32}},
|
||||
{11, {list, i32}}]}
|
||||
;
|
||||
|
||||
struct_info('Nesting') ->
|
||||
{struct, [{1, {struct, {'simple_types', 'User'}}},
|
||||
{2, {struct, {'shared_types', 'SharedStruct'}}}]}
|
||||
;
|
||||
|
||||
struct_info(_) -> erlang:error(function_clause).
|
||||
|
||||
struct_info_ext('User') ->
|
||||
{struct, [{1, undefined, bool, 'is_evil', undefined},
|
||||
{2, undefined, i64, 'user_id', undefined},
|
||||
{3, undefined, i32, 'number_of_hairs_on_head', undefined},
|
||||
{4, undefined, byte, 'amount_of_red', undefined},
|
||||
{5, undefined, i16, 'nineties_era_color', undefined},
|
||||
{6, undefined, double, 'mint_gum', undefined},
|
||||
{7, undefined, string, 'username', undefined},
|
||||
{8, undefined, {list, {struct, {'simple_types', 'User'}}}, 'friends', []},
|
||||
{9, undefined, {map, byte, string}, 'my_map', dict:new()},
|
||||
{10, undefined, {set, i32}, 'blocked_user_ids', sets:new()},
|
||||
{11, optional, {list, i32}, 'optional_integers', []}]}
|
||||
;
|
||||
|
||||
struct_info_ext('Nesting') ->
|
||||
{struct, [{1, undefined, {struct, {'simple_types', 'User'}}, 'user', #'User'{}},
|
||||
{2, undefined, {struct, {'shared_types', 'SharedStruct'}}, 'nested', #'SharedStruct'{}}]}
|
||||
;
|
||||
|
||||
struct_info_ext(_) -> erlang:error(function_clause).
|
||||
|
||||
struct_names() ->
|
||||
['User', 'Nesting'].
|
||||
|
||||
enum_info(_) -> erlang:error(function_clause).
|
||||
|
||||
enum_names() ->
|
||||
[].
|
||||
|
||||
exception_names() ->
|
||||
[].
|
||||
|
@ -1,27 +0,0 @@
|
||||
-ifndef(_simple_types_included).
|
||||
-define(_simple_types_included, yeah).
|
||||
-include("shared_types.hrl").
|
||||
|
||||
|
||||
%% struct 'User'
|
||||
|
||||
-record('User', {'is_evil' :: boolean(),
|
||||
'user_id' :: integer(),
|
||||
'number_of_hairs_on_head' :: integer(),
|
||||
'amount_of_red' :: integer(),
|
||||
'nineties_era_color' :: integer(),
|
||||
'mint_gum' :: float(),
|
||||
'username' :: string() | binary(),
|
||||
'friends' :: list(),
|
||||
'my_map' :: dict:dict(),
|
||||
'blocked_user_ids' :: sets:set(),
|
||||
'optional_integers' :: list()}).
|
||||
-type 'User'() :: #'User'{}.
|
||||
|
||||
%% struct 'Nesting'
|
||||
|
||||
-record('Nesting', {'user' :: 'User'(),
|
||||
'nested' :: 'SharedStruct'()}).
|
||||
-type 'Nesting'() :: #'Nesting'{}.
|
||||
|
||||
-endif.
|
@ -1,73 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
%%% -*- mode:erlang -*-
|
||||
{application, thrift, [
|
||||
% A quick description of the application.
|
||||
{description, "Thrift bindings"},
|
||||
|
||||
% The version of the applicaton
|
||||
{vsn, "0.9.3"},
|
||||
|
||||
% All modules used by the application.
|
||||
{modules, [
|
||||
thrift_base64_transport,
|
||||
thrift_binary_protocol,
|
||||
thrift_buffered_transport,
|
||||
thrift_client_util,
|
||||
thrift_client,
|
||||
thrift_disk_log_transport,
|
||||
thrift_file_transport,
|
||||
thrift_framed_transport,
|
||||
thrift_http_transport,
|
||||
thrift_json_parser,
|
||||
thrift_json_protocol,
|
||||
thrift_memory_buffer,
|
||||
thrift_processor,
|
||||
thrift_protocol,
|
||||
thrift_reconnecting_client,
|
||||
thrift_server,
|
||||
thrift_service,
|
||||
thrift_socket_server,
|
||||
thrift_socket_transport,
|
||||
thrift_transport_state_test,
|
||||
thrift_transport
|
||||
]},
|
||||
|
||||
% All of the registered names the application uses. This can be ignored.
|
||||
{registered, []},
|
||||
|
||||
% Applications that are to be started prior to this one. This can be ignored
|
||||
% leave it alone unless you understand it well and let the .rel files in
|
||||
% your release handle this.
|
||||
{applications, [kernel, stdlib]},
|
||||
|
||||
% OTP application loader will load, but not start, included apps. Again
|
||||
% this can be ignored as well. To load but not start an application it
|
||||
% is easier to include it in the .rel file followed by the atom 'none'
|
||||
{included_applications, []},
|
||||
|
||||
% configuration parameters similar to those in the config file specified
|
||||
% on the command line. can be fetched with gas:get_env
|
||||
{env, [
|
||||
% If an error/crash occurs during processing of a function,
|
||||
% should the TApplicationException serialized back to the client
|
||||
% include the erlang backtrace?
|
||||
{exceptions_include_traces, true}
|
||||
]}
|
||||
]}.
|
@ -1,69 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_base64_transport).
|
||||
|
||||
-behaviour(thrift_transport).
|
||||
|
||||
%% API
|
||||
-export([new/1, new_transport_factory/1]).
|
||||
|
||||
%% thrift_transport callbacks
|
||||
-export([write/2, read/2, flush/1, close/1]).
|
||||
|
||||
%% State
|
||||
-record(b64_transport, {wrapped}).
|
||||
-type state() :: #b64_transport{}.
|
||||
-include("thrift_transport_behaviour.hrl").
|
||||
|
||||
new(Wrapped) ->
|
||||
State = #b64_transport{wrapped = Wrapped},
|
||||
thrift_transport:new(?MODULE, State).
|
||||
|
||||
|
||||
write(This = #b64_transport{wrapped = Wrapped}, Data) ->
|
||||
{NewWrapped, Result} = thrift_transport:write(Wrapped, base64:encode(iolist_to_binary(Data))),
|
||||
{This#b64_transport{wrapped = NewWrapped}, Result}.
|
||||
|
||||
|
||||
%% base64 doesn't support reading quite yet since it would involve
|
||||
%% nasty buffering and such
|
||||
read(This = #b64_transport{}, _Data) ->
|
||||
{This, {error, no_reads_allowed}}.
|
||||
|
||||
|
||||
flush(This = #b64_transport{wrapped = Wrapped0}) ->
|
||||
{Wrapped1, ok} = thrift_transport:write(Wrapped0, <<"\n">>),
|
||||
{Wrapped2, ok} = thrift_transport:flush(Wrapped1),
|
||||
{This#b64_transport{wrapped = Wrapped2}, ok}.
|
||||
|
||||
|
||||
close(This0) ->
|
||||
{This1 = #b64_transport{wrapped = Wrapped}, ok} = flush(This0),
|
||||
{NewWrapped, ok} = thrift_transport:close(Wrapped),
|
||||
{This1#b64_transport{wrapped = NewWrapped}, ok}.
|
||||
|
||||
|
||||
%%%% FACTORY GENERATION %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
new_transport_factory(WrapFactory) ->
|
||||
F = fun() ->
|
||||
{ok, Wrapped} = WrapFactory(),
|
||||
new(Wrapped)
|
||||
end,
|
||||
{ok, F}.
|
@ -1,347 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_binary_protocol).
|
||||
|
||||
-behaviour(thrift_protocol).
|
||||
|
||||
-include("thrift_constants.hrl").
|
||||
-include("thrift_protocol.hrl").
|
||||
|
||||
-export([new/1, new/2,
|
||||
read/2,
|
||||
write/2,
|
||||
flush_transport/1,
|
||||
close_transport/1,
|
||||
|
||||
new_protocol_factory/2
|
||||
]).
|
||||
|
||||
-record(binary_protocol, {transport,
|
||||
strict_read=true,
|
||||
strict_write=true
|
||||
}).
|
||||
-type state() :: #binary_protocol{}.
|
||||
-include("thrift_protocol_behaviour.hrl").
|
||||
|
||||
-define(VERSION_MASK, 16#FFFF0000).
|
||||
-define(VERSION_1, 16#80010000).
|
||||
-define(TYPE_MASK, 16#000000ff).
|
||||
|
||||
new(Transport) ->
|
||||
new(Transport, _Options = []).
|
||||
|
||||
new(Transport, Options) ->
|
||||
State = #binary_protocol{transport = Transport},
|
||||
State1 = parse_options(Options, State),
|
||||
thrift_protocol:new(?MODULE, State1).
|
||||
|
||||
parse_options([], State) ->
|
||||
State;
|
||||
parse_options([{strict_read, Bool} | Rest], State) when is_boolean(Bool) ->
|
||||
parse_options(Rest, State#binary_protocol{strict_read=Bool});
|
||||
parse_options([{strict_write, Bool} | Rest], State) when is_boolean(Bool) ->
|
||||
parse_options(Rest, State#binary_protocol{strict_write=Bool}).
|
||||
|
||||
|
||||
flush_transport(This = #binary_protocol{transport = Transport}) ->
|
||||
{NewTransport, Result} = thrift_transport:flush(Transport),
|
||||
{This#binary_protocol{transport = NewTransport}, Result}.
|
||||
|
||||
close_transport(This = #binary_protocol{transport = Transport}) ->
|
||||
{NewTransport, Result} = thrift_transport:close(Transport),
|
||||
{This#binary_protocol{transport = NewTransport}, Result}.
|
||||
|
||||
%%%
|
||||
%%% instance methods
|
||||
%%%
|
||||
|
||||
write(This0, #protocol_message_begin{
|
||||
name = Name,
|
||||
type = Type,
|
||||
seqid = Seqid}) ->
|
||||
case This0#binary_protocol.strict_write of
|
||||
true ->
|
||||
{This1, ok} = write(This0, {i32, ?VERSION_1 bor Type}),
|
||||
{This2, ok} = write(This1, {string, Name}),
|
||||
{This3, ok} = write(This2, {i32, Seqid}),
|
||||
{This3, ok};
|
||||
false ->
|
||||
{This1, ok} = write(This0, {string, Name}),
|
||||
{This2, ok} = write(This1, {byte, Type}),
|
||||
{This3, ok} = write(This2, {i32, Seqid}),
|
||||
{This3, ok}
|
||||
end;
|
||||
|
||||
write(This, message_end) -> {This, ok};
|
||||
|
||||
write(This0, #protocol_field_begin{
|
||||
name = _Name,
|
||||
type = Type,
|
||||
id = Id}) ->
|
||||
{This1, ok} = write(This0, {byte, Type}),
|
||||
{This2, ok} = write(This1, {i16, Id}),
|
||||
{This2, ok};
|
||||
|
||||
write(This, field_stop) ->
|
||||
write(This, {byte, ?tType_STOP});
|
||||
|
||||
write(This, field_end) -> {This, ok};
|
||||
|
||||
write(This0, #protocol_map_begin{
|
||||
ktype = Ktype,
|
||||
vtype = Vtype,
|
||||
size = Size}) ->
|
||||
{This1, ok} = write(This0, {byte, Ktype}),
|
||||
{This2, ok} = write(This1, {byte, Vtype}),
|
||||
{This3, ok} = write(This2, {i32, Size}),
|
||||
{This3, ok};
|
||||
|
||||
write(This, map_end) -> {This, ok};
|
||||
|
||||
write(This0, #protocol_list_begin{
|
||||
etype = Etype,
|
||||
size = Size}) ->
|
||||
{This1, ok} = write(This0, {byte, Etype}),
|
||||
{This2, ok} = write(This1, {i32, Size}),
|
||||
{This2, ok};
|
||||
|
||||
write(This, list_end) -> {This, ok};
|
||||
|
||||
write(This0, #protocol_set_begin{
|
||||
etype = Etype,
|
||||
size = Size}) ->
|
||||
{This1, ok} = write(This0, {byte, Etype}),
|
||||
{This2, ok} = write(This1, {i32, Size}),
|
||||
{This2, ok};
|
||||
|
||||
write(This, set_end) -> {This, ok};
|
||||
|
||||
write(This, #protocol_struct_begin{}) -> {This, ok};
|
||||
write(This, struct_end) -> {This, ok};
|
||||
|
||||
write(This, {bool, true}) -> write(This, {byte, 1});
|
||||
write(This, {bool, false}) -> write(This, {byte, 0});
|
||||
|
||||
write(This, {byte, Byte}) ->
|
||||
write(This, <<Byte:8/big-signed>>);
|
||||
|
||||
write(This, {i16, I16}) ->
|
||||
write(This, <<I16:16/big-signed>>);
|
||||
|
||||
write(This, {i32, I32}) ->
|
||||
write(This, <<I32:32/big-signed>>);
|
||||
|
||||
write(This, {i64, I64}) ->
|
||||
write(This, <<I64:64/big-signed>>);
|
||||
|
||||
write(This, {double, Double}) ->
|
||||
write(This, <<Double:64/big-signed-float>>);
|
||||
|
||||
write(This0, {string, Str}) when is_list(Str) ->
|
||||
{This1, ok} = write(This0, {i32, length(Str)}),
|
||||
{This2, ok} = write(This1, list_to_binary(Str)),
|
||||
{This2, ok};
|
||||
|
||||
write(This0, {string, Bin}) when is_binary(Bin) ->
|
||||
{This1, ok} = write(This0, {i32, size(Bin)}),
|
||||
{This2, ok} = write(This1, Bin),
|
||||
{This2, ok};
|
||||
|
||||
%% Data :: iolist()
|
||||
write(This = #binary_protocol{transport = Trans}, Data) ->
|
||||
{NewTransport, Result} = thrift_transport:write(Trans, Data),
|
||||
{This#binary_protocol{transport = NewTransport}, Result}.
|
||||
|
||||
%%
|
||||
|
||||
%% read(ProtocolState, Item) -> {NewProtocolState, Result}
%%
%% Deserializes one protocol element from the wrapped transport.  Mirrors the
%% write/2 clauses: structural markers (message/struct/field/map/list/set
%% begin/end) and primitive values (bool, byte, i16, i32, i64, double,
%% string).  Every clause threads the protocol state through and returns
%% {NewState, ok | {ok, Value} | {error, Reason} | #protocol_*{}}.

%% Message header: the first 32 bits are either a strict-mode version word
%% (high bits = ?VERSION_1, low bits = message type) or, for old-style
%% peers, the length of the method name.
read(This0, message_begin) ->
    {This1, Initial} = read(This0, ui32),
    case Initial of
        {ok, Sz} when Sz band ?VERSION_MASK =:= ?VERSION_1 ->
            %% we're at version 1
            {This2, {ok, Name}} = read(This1, string),
            {This3, {ok, SeqId}} = read(This2, i32),
            Type = Sz band ?TYPE_MASK,
            {This3, #protocol_message_begin{name = binary_to_list(Name),
                                            type = Type,
                                            seqid = SeqId}};

        {ok, Sz} when Sz < 0 ->
            %% there's a version number but it's unexpected
            {This1, {error, {bad_binary_protocol_version, Sz}}};

        {ok, _Sz} when This1#binary_protocol.strict_read =:= true ->
            %% strict_read is true and there's no version header; that's an error
            {This1, {error, no_binary_protocol_version}};

        {ok, Sz} when This1#binary_protocol.strict_read =:= false ->
            %% strict_read is false, so just read the old way:
            %% Sz is the name length, then a type byte, then the seqid
            {This2, {ok, Name}} = read_data(This1, Sz),
            {This3, {ok, Type}} = read(This2, byte),
            {This4, {ok, SeqId}} = read(This3, i32),
            {This4, #protocol_message_begin{name = binary_to_list(Name),
                                            type = Type,
                                            seqid = SeqId}};

        Else ->
            %% transport error while reading the header; pass it through
            {This1, Else}
    end;

read(This, message_end) -> {This, ok};

%% Struct markers carry no bytes on the wire in the binary protocol.
read(This, struct_begin) -> {This, ok};
read(This, struct_end) -> {This, ok};

%% Field header: a type byte, then (unless it is ?tType_STOP) a 16-bit id.
read(This0, field_begin) ->
    {This1, Result} = read(This0, byte),
    case Result of
        {ok, Type = ?tType_STOP} ->
            %% STOP terminates the enclosing struct and has no id
            {This1, #protocol_field_begin{type = Type}};
        {ok, Type} ->
            {This2, {ok, Id}} = read(This1, i16),
            {This2, #protocol_field_begin{type = Type,
                                          id = Id}}
    end;

read(This, field_end) -> {This, ok};

%% Map header: key type byte, value type byte, 32-bit element count.
read(This0, map_begin) ->
    {This1, {ok, Ktype}} = read(This0, byte),
    {This2, {ok, Vtype}} = read(This1, byte),
    {This3, {ok, Size}} = read(This2, i32),
    {This3, #protocol_map_begin{ktype = Ktype,
                                vtype = Vtype,
                                size = Size}};
read(This, map_end) -> {This, ok};

%% List header: element type byte, 32-bit element count.
read(This0, list_begin) ->
    {This1, {ok, Etype}} = read(This0, byte),
    {This2, {ok, Size}} = read(This1, i32),
    {This2, #protocol_list_begin{etype = Etype,
                                 size = Size}};
read(This, list_end) -> {This, ok};

%% Set header: identical wire layout to a list header.
read(This0, set_begin) ->
    {This1, {ok, Etype}} = read(This0, byte),
    {This2, {ok, Size}} = read(This1, i32),
    {This2, #protocol_set_begin{etype = Etype,
                                size = Size}};
read(This, set_end) -> {This, ok};

%% Asserts the next byte is the STOP marker (badmatch crash otherwise).
read(This0, field_stop) ->
    {This1, {ok, ?tType_STOP}} = read(This0, byte),
    {This1, ok};

%% ---- primitive values ------------------------------------------------

%% A bool is one byte; any non-zero value reads as true.
read(This0, bool) ->
    {This1, Result} = read(This0, byte),
    case Result of
        {ok, Byte} -> {This1, {ok, Byte /= 0}};
        Else -> {This1, Else}
    end;

%% Fixed-width big-endian signed integers.  The trailing _/binary in each
%% match tolerates a transport that returns more bytes than requested.
read(This0, byte) ->
    {This1, Bytes} = read_data(This0, 1),
    case Bytes of
        {ok, <<Val:8/integer-signed-big, _/binary>>} -> {This1, {ok, Val}};
        Else -> {This1, Else}
    end;

read(This0, i16) ->
    {This1, Bytes} = read_data(This0, 2),
    case Bytes of
        {ok, <<Val:16/integer-signed-big, _/binary>>} -> {This1, {ok, Val}};
        Else -> {This1, Else}
    end;

read(This0, i32) ->
    {This1, Bytes} = read_data(This0, 4),
    case Bytes of
        {ok, <<Val:32/integer-signed-big, _/binary>>} -> {This1, {ok, Val}};
        Else -> {This1, Else}
    end;

%% unsigned ints aren't used by thrift itself, but it's used for the parsing
%% of the packet version header. Without this special function BEAM works fine
%% but hipe thinks it received a bad version header.
read(This0, ui32) ->
    {This1, Bytes} = read_data(This0, 4),
    case Bytes of
        {ok, <<Val:32/integer-unsigned-big, _/binary>>} -> {This1, {ok, Val}};
        Else -> {This1, Else}
    end;

read(This0, i64) ->
    {This1, Bytes} = read_data(This0, 8),
    case Bytes of
        {ok, <<Val:64/integer-signed-big, _/binary>>} -> {This1, {ok, Val}};
        Else -> {This1, Else}
    end;

read(This0, double) ->
    {This1, Bytes} = read_data(This0, 8),
    case Bytes of
        {ok, <<Val:64/float-signed-big, _/binary>>} -> {This1, {ok, Val}};
        Else -> {This1, Else}
    end;

% returns a binary directly, call binary_to_list if necessary
read(This0, string) ->
    {This1, {ok, Sz}} = read(This0, i32),
    read_data(This1, Sz).
|
||||
|
||||
%% Reads exactly Len bytes from the underlying transport, threading the
%% (possibly updated) transport back into the protocol state.  Zero-length
%% reads complete immediately without touching the transport.
-spec read_data(#binary_protocol{}, non_neg_integer()) ->
    {#binary_protocol{}, {ok, binary()} | {error, _Reason}}.
read_data(Proto, 0) ->
    {Proto, {ok, <<>>}};
read_data(Proto = #binary_protocol{transport = Transport0}, Len)
  when is_integer(Len), Len > 0 ->
    {Transport1, Outcome} = thrift_transport:read(Transport0, Len),
    {Proto#binary_protocol{transport = Transport1}, Outcome}.
|
||||
|
||||
|
||||
%%%% FACTORY GENERATION %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% Options accepted by new_protocol_factory/2, with thrift's defaults.
-record(tbp_opts, {strict_read = true,
                   strict_write = true}).

%% Folds a proplist of factory options into a #tbp_opts{} record.
%% Unknown keys or non-boolean values crash with function_clause
%% (assertive by design).
parse_factory_options([{strict_read, Flag} | More], Acc) when is_boolean(Flag) ->
    parse_factory_options(More, Acc#tbp_opts{strict_read = Flag});
parse_factory_options([{strict_write, Flag} | More], Acc) when is_boolean(Flag) ->
    parse_factory_options(More, Acc#tbp_opts{strict_write = Flag});
parse_factory_options([], Acc) ->
    Acc.
|
||||
|
||||
|
||||
%% returns a (fun() -> thrift_protocol())
%%
%% Builds a protocol factory: each call of the returned fun creates a fresh
%% transport via TransportFactory and wraps it in a binary protocol carrying
%% the parsed strict_read/strict_write settings.
new_protocol_factory(TransportFactory, Options) ->
    #tbp_opts{strict_read = StrictRead,
              strict_write = StrictWrite} =
        parse_factory_options(Options, #tbp_opts{}),
    Factory =
        fun() ->
                case TransportFactory() of
                    {ok, Transport} ->
                        thrift_binary_protocol:new(
                          Transport,
                          [{strict_read, StrictRead},
                           {strict_write, StrictWrite}]);
                    {error, Error} ->
                        {error, Error}
                end
        end,
    {ok, Factory}.
|
||||
|
@ -1,77 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_buffered_transport).
|
||||
|
||||
-behaviour(thrift_transport).
|
||||
|
||||
%% API
|
||||
-export([new/1, new_transport_factory/1]).
|
||||
|
||||
%% thrift_transport callbacks
|
||||
-export([write/2, read/2, flush/1, close/1]).
|
||||
|
||||
-record(buffered_transport, {wrapped, % a thrift_transport
|
||||
write_buffer % iolist()
|
||||
}).
|
||||
-type state() :: #buffered_transport{}.
|
||||
-include("thrift_transport_behaviour.hrl").
|
||||
|
||||
|
||||
%% Creates a buffered transport wrapping WrappedTransport, starting with an
%% empty write buffer.
new(WrappedTransport) ->
    thrift_transport:new(?MODULE,
                         #buffered_transport{wrapped = WrappedTransport,
                                             write_buffer = []}).
|
||||
|
||||
|
||||
%% Appends Data to the in-memory write buffer; nothing reaches the wrapped
%% transport until flush/1.  The buffer is a nested iolist so appending is O(1).
write(State = #buffered_transport{write_buffer = Buffered}, Data) ->
    NewBuffer = [Buffered, Data],
    {State#buffered_transport{write_buffer = NewBuffer}, ok}.
|
||||
|
||||
%% Flushes the buffer through to the wrapped transport.
%%
%% Returns the result of the wrapped write; the result of the wrapped flush
%% is deliberately ignored (best effort), matching the original behaviour.
%%
%% Fix: an empty buffer previously still issued a write of [] and a flush on
%% the wrapped transport; now flushing an empty buffer is a no-op.
flush(State = #buffered_transport{write_buffer = []}) ->
    {State, ok};
flush(State = #buffered_transport{write_buffer = WBuf,
                                  wrapped = Wrapped0}) ->
    {Wrapped1, Response} = thrift_transport:write(Wrapped0, WBuf),
    {Wrapped2, _} = thrift_transport:flush(Wrapped1),
    NewState = State#buffered_transport{write_buffer = [],
                                        wrapped = Wrapped2},
    {NewState, Response}.
|
||||
|
||||
%% Closes the wrapped transport.  Any buffered-but-unflushed data is
%% discarded (close does not flush).
close(State = #buffered_transport{wrapped = Inner0}) ->
    {Inner1, Outcome} = thrift_transport:close(Inner0),
    {State#buffered_transport{wrapped = Inner1}, Outcome}.
|
||||
|
||||
%% Reads pass straight through to the wrapped transport; the buffer only
%% holds outgoing data.
read(State = #buffered_transport{wrapped = Inner0}, Len) when is_integer(Len) ->
    {Inner1, Outcome} = thrift_transport:read(Inner0, Len),
    {State#buffered_transport{wrapped = Inner1}, Outcome}.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%%% Internal functions
|
||||
%%--------------------------------------------------------------------
|
||||
%%%% FACTORY GENERATION %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% Returns {ok, Fun}; each call of Fun builds an inner transport via
%% WrapFactory and layers a buffered transport on top of it.
new_transport_factory(WrapFactory) ->
    Make =
        fun() ->
                {ok, Inner} = WrapFactory(),
                new(Inner)
        end,
    {ok, Make}.
|
@ -1,150 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_client).
|
||||
|
||||
%% API
|
||||
-export([new/2, call/3, send_call/3, close/1]).
|
||||
|
||||
-include("thrift_constants.hrl").
|
||||
-include("thrift_protocol.hrl").
|
||||
|
||||
-record(tclient, {service, protocol, seqid}).
|
||||
|
||||
|
||||
%% Creates a thrift client for the generated Service module speaking over
%% Protocol.  The sequence id starts at zero.
new(Protocol, Service) when is_atom(Service) ->
    Client = #tclient{service = Service,
                      protocol = Protocol,
                      seqid = 0},
    {ok, Client}.
|
||||
|
||||
%% Performs a synchronous RPC: sends the call, then (for non-oneway
%% functions) reads back the reply.  Declared thrift exceptions are thrown
%% as {NewClient, {exception, X}} rather than returned.
-spec call(#tclient{}, atom(), list()) -> {#tclient{}, {ok, any()} | {error, any()}}.
call(Client = #tclient{}, Function, Args)
  when is_atom(Function), is_list(Args) ->
    case send_function_call(Client, Function, Args) of
        {Client1, ok} -> receive_function_result(Client1, Function);
        Failure -> Failure
    end.
|
||||
|
||||
|
||||
%% Sends a function call but does not read the result. This is useful
%% if you're trying to log non-oneway function calls to write-only
%% transports like thrift_disk_log_transport.
-spec send_call(#tclient{}, atom(), list()) -> {#tclient{}, ok}.
send_call(Client = #tclient{}, Function, Args) when is_atom(Function), is_list(Args) ->
    send_function_call(Client, Function, Args).
|
||||
|
||||
%% Closes the client's underlying transport; the client record is not
%% usable afterwards.
-spec close(#tclient{}) -> ok.
close(#tclient{protocol = Proto}) ->
    thrift_protocol:close_transport(Proto).
|
||||
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%%% Internal functions
|
||||
%%--------------------------------------------------------------------
|
||||
%% Serializes one outgoing call: message header, argument struct (built
%% from the generated service metadata), message end, then a transport
%% flush.  Returns {Client, ok} on success, or {Client, {error, Reason}}
%% when the function is unknown or the argument count does not match.
-spec send_function_call(#tclient{}, atom(), list()) -> {#tclient{}, ok | {error, any()}}.
send_function_call(Client = #tclient{protocol = Proto0,
                                     service = Service,
                                     seqid = SeqId},
                   Function,
                   Args) ->
    %% the generated module raises function_clause for unknown functions;
    %% map that to a no_function error instead of crashing the caller
    Params = try Service:function_info(Function, params_type)
             catch error:function_clause -> no_function
             end,
    case Params of
        no_function ->
            {Client, {error, {no_function, Function}}};
        {struct, PList} when length(PList) =/= length(Args) ->
            %% arity mismatch between caller args and the thrift signature
            {Client, {error, {bad_args, Function, Args}}};
        {struct, _PList} ->
            Begin = #protocol_message_begin{name = atom_to_list(Function),
                                            type = ?tMessageType_CALL,
                                            seqid = SeqId},
            {Proto1, ok} = thrift_protocol:write(Proto0, Begin),
            %% the args are written as a struct tagged with the function name
            {Proto2, ok} = thrift_protocol:write(Proto1, {Params, list_to_tuple([Function | Args])}),
            {Proto3, ok} = thrift_protocol:write(Proto2, message_end),
            {Proto4, ok} = thrift_protocol:flush_transport(Proto3),
            {Client#tclient{protocol = Proto4}, ok}
    end.
|
||||
|
||||
%% Looks up the reply type for Function in the generated service module
%% and reads the server's answer accordingly.
-spec receive_function_result(#tclient{}, atom()) -> {#tclient{}, {ok, any()} | {error, any()}}.
receive_function_result(Client = #tclient{service = Service}, Function) ->
    read_result(Client, Function, Service:function_info(Function, reply_type)).
|
||||
|
||||
%% Oneway calls have no reply on the wire; succeed immediately.
read_result(Client, _Function, oneway_void) ->
    {Client, {ok, ok}};

%% Reads the reply message header and dispatches on its contents:
%% mismatched sequence id -> error; EXCEPTION message -> application
%% exception (thrown); REPLY message -> decoded via handle_reply/3.
read_result(Client = #tclient{protocol = Proto0,
                              seqid = SeqId},
            Function,
            ReplyType) ->
    case thrift_protocol:read(Proto0, message_begin) of
        {Proto1, {error, Reason}} ->
            NewClient = Client#tclient{protocol = Proto1},
            {NewClient, {error, Reason}};
        {Proto1, MessageBegin} ->
            NewClient = Client#tclient{protocol = Proto1},
            case MessageBegin of
                #protocol_message_begin{seqid = RetSeqId} when RetSeqId =/= SeqId ->
                    %% the server answered a different request than we sent
                    {NewClient, {error, {bad_seq_id, SeqId}}};
                #protocol_message_begin{type = ?tMessageType_EXCEPTION} ->
                    handle_application_exception(NewClient);
                #protocol_message_begin{type = ?tMessageType_REPLY} ->
                    handle_reply(NewClient, Function, ReplyType)
            end
    end.
|
||||
|
||||
|
||||
%% Decodes a REPLY message.  The reply struct places the return value at
%% field 0 followed by one field per declared exception; at most one slot
%% is set.  A set exception slot is thrown as
%% {NewClient, {exception, Exception}}; otherwise the return value (or ok
%% for void functions) is returned.
handle_reply(Client = #tclient{protocol = Proto0,
                               service = Service},
             Function,
             ReplyType) ->
    {struct, ExceptionFields} = Service:function_info(Function, exceptions),
    ReplyStructDef = {struct, [{0, ReplyType}] ++ ExceptionFields},
    {Proto1, {ok, Reply}} = thrift_protocol:read(Proto0, ReplyStructDef),
    {Proto2, ok} = thrift_protocol:read(Proto1, message_end),
    NewClient = Client#tclient{protocol = Proto2},
    ReplyList = tuple_to_list(Reply),
    %% sanity check: one slot for the return value plus one per exception
    true = length(ReplyList) == length(ExceptionFields) + 1,
    ExceptionVals = tl(ReplyList),
    Thrown = [X || X <- ExceptionVals,
                   X =/= undefined],
    case Thrown of
        [] when ReplyType == {struct, []} ->
            %% void return
            {NewClient, {ok, ok}};
        [] ->
            %% head of ReplyList is the field-0 return value
            {NewClient, {ok, hd(ReplyList)}};
        [Exception] ->
            throw({NewClient, {exception, Exception}})
    end.
|
||||
|
||||
%% Decodes a server-side TApplicationException from an EXCEPTION message
%% and throws it as {NewClient, {exception, XRecord}}.
%%
%% Fix: removed the leftover error_logger:error_msg("X: ~p~n", ...) debug
%% statement — it logged every application exception with a meaningless
%% "X:" prefix even though the exception is delivered to the caller anyway.
handle_application_exception(Client = #tclient{protocol = Proto0}) ->
    {Proto1, {ok, Exception}} =
        thrift_protocol:read(Proto0, ?TApplicationException_Structure),
    {Proto2, ok} = thrift_protocol:read(Proto1, message_end),
    %% rebuild the record from the raw field tuple and assert its shape
    XRecord = list_to_tuple(
                ['TApplicationException' | tuple_to_list(Exception)]),
    true = is_record(XRecord, 'TApplicationException'),
    NewClient = Client#tclient{protocol = Proto2},
    throw({NewClient, {exception, XRecord}}).
|
@ -1,65 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_client_util).
|
||||
|
||||
-export([new/4]).
|
||||
|
||||
%%
%% Splits client options into protocol options and transport options.
%% Unknown options crash with function_clause (assertive by design).
%%
%% split_options([Option...]) -> {ProtocolOptions, TransportOptions}
%%
split_options(Options) ->
    split_options(Options, [], []).

split_options([], ProtoAcc, TransAcc) ->
    {ProtoAcc, TransAcc};
split_options([Option = {Key, _} | Remaining], ProtoAcc, TransAcc)
  when Key =:= strict_read;
       Key =:= strict_write ->
    split_options(Remaining, [Option | ProtoAcc], TransAcc);
split_options([Option = {Key, _} | Remaining], ProtoAcc, TransAcc)
  when Key =:= framed;
       Key =:= connect_timeout;
       Key =:= recv_timeout;
       Key =:= sockopts ->
    split_options(Remaining, ProtoAcc, [Option | TransAcc]).
|
||||
|
||||
|
||||
%% Client constructor for the common-case of socket transports
%% with the binary protocol.
new(Host, Port, Service, Options)
  when is_integer(Port), is_atom(Service), is_list(Options) ->
    {ProtoOpts, TransOpts} = split_options(Options),
    {ok, TransportFactory} =
        thrift_socket_transport:new_transport_factory(Host, Port, TransOpts),
    {ok, ProtocolFactory} =
        thrift_binary_protocol:new_protocol_factory(TransportFactory, ProtoOpts),
    case ProtocolFactory() of
        {ok, Protocol} ->
            thrift_client:new(Protocol, Service);
        {error, _} = Error ->
            Error
    end.
|
@ -1,58 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
%% TType: wire-level type tags used in field and collection headers.
-define(tType_STOP, 0).
-define(tType_VOID, 1).
-define(tType_BOOL, 2).
-define(tType_BYTE, 3).
-define(tType_DOUBLE, 4).
%% note: 5, 7 and 9 are intentionally unassigned here
-define(tType_I16, 6).
-define(tType_I32, 8).
-define(tType_I64, 10).
-define(tType_STRING, 11).
-define(tType_STRUCT, 12).
-define(tType_MAP, 13).
-define(tType_SET, 14).
-define(tType_LIST, 15).

% TMessageType: message-header type values.
-define(tMessageType_CALL, 1).
-define(tMessageType_REPLY, 2).
-define(tMessageType_EXCEPTION, 3).
-define(tMessageType_ONEWAY, 4).

% TApplicationException
%% Struct definition used to (de)serialize a TApplicationException:
%% field 1 = message (string), field 2 = type (i32).
-define(TApplicationException_Structure,
        {struct, [{1, string},
                  {2, i32}]}).

%% In-memory representation of a decoded TApplicationException.
-record('TApplicationException', {message, type}).

%% Standard TApplicationException type codes.
-define(TApplicationException_UNKNOWN, 0).
-define(TApplicationException_UNKNOWN_METHOD, 1).
-define(TApplicationException_INVALID_MESSAGE_TYPE, 2).
-define(TApplicationException_WRONG_METHOD_NAME, 3).
-define(TApplicationException_BAD_SEQUENCE_ID, 4).
-define(TApplicationException_MISSING_RESULT, 5).
-define(TApplicationException_INTERNAL_ERROR, 6).
-define(TApplicationException_PROTOCOL_ERROR, 7).
-define(TApplicationException_INVALID_TRANSFORM, 8).
-define(TApplicationException_INVALID_PROTOCOL, 9).
-define(TApplicationException_UNSUPPORTED_CLIENT_TYPE, 10).
|
@ -1,123 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
%%% Todo: this might be better off as a gen_server type of transport
|
||||
%%% that handles stuff like group commit, similar to TFileTransport
|
||||
%%% in cpp land
|
||||
-module(thrift_disk_log_transport).
|
||||
|
||||
-behaviour(thrift_transport).
|
||||
|
||||
%% API
|
||||
-export([new/2, new_transport_factory/2, new_transport_factory/3]).
|
||||
|
||||
%% thrift_transport callbacks
|
||||
-export([read/2, write/2, force_flush/1, flush/1, close/1]).
|
||||
|
||||
%% state
|
||||
-record(dl_transport, {log,
|
||||
close_on_close = false,
|
||||
sync_every = infinity,
|
||||
sync_tref}).
|
||||
-type state() :: #dl_transport{}.
|
||||
-include("thrift_transport_behaviour.hrl").
|
||||
|
||||
|
||||
%% Create a transport attached to an already open log.
%% If you'd like this transport to close the disk_log using disk_log:lclose()
%% when the transport is closed, pass a {close_on_close, true} tuple in the
%% Opts list.
new(LogName, Opts) when is_atom(LogName), is_list(Opts) ->
    State = parse_opts(Opts, #dl_transport{log = LogName}),

    State2 =
        case State#dl_transport.sync_every of
            N when is_integer(N), N > 0 ->
                %% periodic sync: call force_flush/1 every N milliseconds.
                %% NOTE(review): the timer closes over State *before*
                %% sync_tref is set; force_flush/1 only reads the log name,
                %% so that is harmless — but the TRef is never cancelled
                %% anywhere in this module; confirm the timer's lifetime.
                {ok, TRef} = timer:apply_interval(N, ?MODULE, force_flush, [State]),
                State#dl_transport{sync_tref = TRef};
            _ -> State
        end,

    thrift_transport:new(?MODULE, State2).
|
||||
|
||||
|
||||
%% Folds transport options into the #dl_transport{} state; unknown options
%% crash with function_clause.
parse_opts([{close_on_close, Flag} | More], Acc) when is_boolean(Flag) ->
    parse_opts(More, Acc#dl_transport{close_on_close = Flag});
parse_opts([{sync_every, Millis} | More], Acc) when is_integer(Millis), Millis > 0 ->
    parse_opts(More, Acc#dl_transport{sync_every = Millis});
parse_opts([], Acc) ->
    Acc.
|
||||
|
||||
|
||||
%%%% TRANSPORT IMPLENTATION %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% disk_log_transport is write-only; every read fails uniformly.
read(State, _Len) ->
    {State, {error, no_read_from_disk_log}}.
|
||||
|
||||
%% Asynchronously appends Data to the log (balog does not wait for disk).
write(This = #dl_transport{log = Log}, Data) ->
    Payload = erlang:iolist_to_binary(Data),
    {This, disk_log:balog(Log, Payload)}.
|
||||
|
||||
%% Synchronously flushes the log to disk; invoked both directly and by the
%% periodic timer set up in new/2.
force_flush(#dl_transport{log = Log}) ->
    error_logger:info_msg("~p syncing~n", [?MODULE]),
    disk_log:sync(Log).
|
||||
|
||||
%% Flushes the log unless a periodic sync timer is active (in which case
%% syncing happens automatically via force_flush/1).
%%
%% Fix: the old code only synced when sync_every was 'undefined', but the
%% #dl_transport{} record default is 'infinity', so with the default
%% configuration flush/1 was a silent no-op.  Sync whenever no timer-based
%% sync is configured instead.
flush(This = #dl_transport{log = Log, sync_every = SyncEvery}) ->
    case SyncEvery of
        N when is_integer(N), N > 0 ->
            %% sync will happen automagically via the timer
            ok;
        _ ->
            %% no time-based sync configured: sync now
            disk_log:sync(Log)
    end,
    {This, ok}.
|
||||
|
||||
|
||||
|
||||
|
||||
%% On close, close the underlying log if we're configured to do so.
close(This = #dl_transport{close_on_close = false}) ->
    {This, ok};
close(This = #dl_transport{log = Log}) ->
    Result = disk_log:lclose(Log),
    {This, Result}.
|
||||
|
||||
|
||||
%%%% FACTORY GENERATION %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%% Factory with default transport options: close the log when the
%% transport closes and sync every 500 ms.
new_transport_factory(Name, ExtraLogOpts) ->
    Defaults = [{close_on_close, true},
                {sync_every, 500}],
    new_transport_factory(Name, ExtraLogOpts, Defaults).

%% Returns {ok, Fun}; Fun opens (or repairs) the disk_log and wraps it.
new_transport_factory(Name, ExtraLogOpts, TransportOpts) ->
    {ok, fun() -> factory_impl(Name, ExtraLogOpts, TransportOpts) end}.

%% Opens the named wrap-type external log and builds a transport on it.
factory_impl(Name, ExtraLogOpts, TransportOpts) ->
    LogOpts = [{name, Name},
               {format, external},
               {type, wrap} |
               ExtraLogOpts],
    Log =
        case disk_log:open(LogOpts) of
            {ok, OpenedLog} ->
                OpenedLog;
            {repaired, OpenedLog, Info1, Info2} ->
                %% the log was damaged but disk_log recovered it; note that
                error_logger:info_msg("Disk log ~p repaired: ~p, ~p~n", [OpenedLog, Info1, Info2]),
                OpenedLog
        end,
    new(Log, TransportOpts).
|
@ -1,89 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_file_transport).
|
||||
|
||||
-behaviour(thrift_transport).
|
||||
|
||||
-export([new_reader/1,
|
||||
new/1,
|
||||
new/2,
|
||||
write/2, read/2, flush/1, close/1]).
|
||||
|
||||
-record(t_file_transport, {device,
|
||||
should_close = true,
|
||||
mode = write}).
|
||||
-type state() :: #t_file_transport{}.
|
||||
-include("thrift_transport_behaviour.hrl").
|
||||
|
||||
%%%% CONSTRUCTION %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% Opens Filename for buffered binary reading and wraps it in a read-mode
%% transport that closes the file when the transport closes.
new_reader(Filename) ->
    OpenResult = file:open(Filename, [read, binary, {read_ahead, 1024*1024}]),
    case OpenResult of
        {ok, IODevice} ->
            new(IODevice, [{should_close, true}, {mode, read}]);
        Error ->
            Error
    end.
|
||||
|
||||
%% Wraps Device with the default options (write mode, close on close).
new(Device) -> new(Device, []).
|
||||
|
||||
%% Device :: io_device()
%%
%% Device should be opened in raw and binary mode.
new(Device, Opts) when is_list(Opts) ->
    Transport = parse_opts(Opts, #t_file_transport{device = Device}),
    thrift_transport:new(?MODULE, Transport).
|
||||
|
||||
|
||||
%% Folds options into the transport state; unknown options crash with
%% function_clause.
parse_opts([{should_close, Flag} | More], Acc) when is_boolean(Flag) ->
    parse_opts(More, Acc#t_file_transport{should_close = Flag});
parse_opts([{mode, Mode} | More], Acc) when Mode =:= write; Mode =:= read ->
    parse_opts(More, Acc#t_file_transport{mode = Mode});
parse_opts([], Acc) ->
    Acc.
|
||||
|
||||
|
||||
%%%% TRANSPORT IMPL %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% Writes Data to the device; only legal when the transport is in write mode.
write(This = #t_file_transport{device = Device, mode = write}, Data) ->
    {This, file:write(Device, Data)};
write(This, _D) ->
    %% transport is in read mode, so writing is refused
    {This, {error, read_mode}}.
|
||||
|
||||
|
||||
%% Reads up to Len bytes from the device; only legal in read mode.
read(This = #t_file_transport{device = Device, mode = read}, Len)
  when is_integer(Len), Len >= 0 ->
    {This, file:read(Device, Len)};
read(This, _D) ->
    %% NOTE(review): this clause fires when the transport is in *write*
    %% mode, so the atom 'read_mode' looks like a copy-paste of the write/2
    %% fallback (arguably it should be 'write_mode'); callers may match on
    %% it, so it is left unchanged here.
    {This, {error, read_mode}}.
|
||||
|
||||
%% Syncs the device in write mode.  In read mode there is nothing to
%% flush, so succeed instead of crashing.
%%
%% Fix: the old single clause only matched mode = write, so flushing a
%% read-mode transport (e.g. one built by new_reader/1) crashed with
%% function_clause; a read-mode flush is now a successful no-op.
flush(This = #t_file_transport{device = Device, mode = write}) ->
    {This, file:sync(Device)};
flush(This = #t_file_transport{}) ->
    {This, ok}.
|
||||
|
||||
%% Closes the device when should_close is set; otherwise leaves it open
%% for its owner to manage.
close(This = #t_file_transport{should_close = false}) ->
    {This, ok};
close(This = #t_file_transport{device = Device}) ->
    {This, file:close(Device)}.
|
@ -1,103 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_framed_transport).
|
||||
|
||||
-behaviour(thrift_transport).
|
||||
|
||||
%% API
|
||||
-export([new/1]).
|
||||
|
||||
%% thrift_transport callbacks
|
||||
-export([write/2, read/2, flush/1, close/1]).
|
||||
|
||||
-record(framed_transport, {wrapped, % a thrift_transport
|
||||
read_buffer, % iolist()
|
||||
write_buffer % iolist()
|
||||
}).
|
||||
-type state() :: #framed_transport{}.
|
||||
-include("thrift_transport_behaviour.hrl").
|
||||
|
||||
%% Wraps WrappedTransport in a framing layer with empty read and write
%% buffers.
new(WrappedTransport) ->
    thrift_transport:new(?MODULE,
                         #framed_transport{wrapped = WrappedTransport,
                                           read_buffer = [],
                                           write_buffer = []}).
|
||||
|
||||
%% Buffers Data in memory; nothing is sent until flush/1 emits a frame.
write(State = #framed_transport{write_buffer = Pending}, Data) ->
    {State#framed_transport{write_buffer = [Pending, Data]}, ok}.
|
||||
|
||||
%% Emits the buffered data as one frame — a 4-byte big-endian length
%% prefix followed by the payload — then flushes the wrapped transport
%% (whose flush result is intentionally ignored).
%%
%% Fix: an empty buffer previously produced a zero-length frame
%% (<<0,0,0,0>>) on every flush; nothing is sent in that case now.
flush(State0 = #framed_transport{write_buffer = Buffer,
                                 wrapped = Wrapped0}) ->
    case iolist_size(Buffer) of
        0 ->
            %% nothing buffered: don't emit an empty frame
            {State0, ok};
        FrameLen ->
            Data = [<<FrameLen:32/integer-signed-big>>, Buffer],
            {Wrapped1, Response} = thrift_transport:write(Wrapped0, Data),
            {Wrapped2, _} = thrift_transport:flush(Wrapped1),
            State1 = State0#framed_transport{wrapped = Wrapped2,
                                             write_buffer = []},
            {State1, Response}
    end.
|
||||
|
||||
%% Closes the wrapped transport; any unflushed frame data is discarded.
close(State = #framed_transport{wrapped = Inner0}) ->
    {Inner1, Outcome} = thrift_transport:close(Inner0),
    {State#framed_transport{wrapped = Inner1}, Outcome}.
|
||||
|
||||
%% Reads up to Len bytes of frame payload.  When the internal read buffer
%% is empty, a whole frame (4-byte big-endian length prefix plus body) is
%% pulled from the wrapped transport first; otherwise the buffered
%% remainder is consumed.  Returns {NewState, {ok, Bin}} on success or
%% {NewState, {error, Reason}} on a transport error.
read(State0 = #framed_transport{wrapped = Wrapped0, read_buffer = RBuf},
     Len) when is_integer(Len) ->
    {Wrapped1, {RBuf1, RBuf1Size}} =
        %% if the read buffer is empty, read another frame
        %% otherwise, just read from what's left in the buffer
        case iolist_size(RBuf) of
            0 ->
                %% read the frame length
                case thrift_transport:read(Wrapped0, 4) of
                    {WrappedS1,
                     {ok, <<FrameLen:32/integer-signed-big, _/binary>>}} ->
                        %% then read the data
                        case thrift_transport:read(WrappedS1, FrameLen) of
                            {WrappedS2, {ok, Bin}} ->
                                {WrappedS2, {Bin, erlang:byte_size(Bin)}};
                            {WrappedS2, {error, Reason1}} ->
                                {WrappedS2, {error, Reason1}}
                        end;
                    {WrappedS1, {error, Reason2}} ->
                        {WrappedS1, {error, Reason2}}
                end;
            Sz ->
                {Wrapped0, {RBuf, Sz}}
        end,

    %% pull off Give bytes, return them to the user, leave the rest in the buffer
    case RBuf1 of
        error ->
            %% an {error, Reason} tuple flowed into the {RBuf1, RBuf1Size}
            %% destructuring above, so RBuf1 =:= error and RBuf1Size is the
            %% reason; {RBuf1, RBuf1Size} reconstitutes the error tuple
            { State0#framed_transport {wrapped = Wrapped1, read_buffer = [] },
              {RBuf1, RBuf1Size} };
        _ ->
            Give = min(RBuf1Size, Len),
            <<Data:Give/binary, RBuf2/binary>> = iolist_to_binary(RBuf1),

            { State0#framed_transport{wrapped = Wrapped1, read_buffer=RBuf2},
              {ok, Data} }
    end.
|
@ -1,116 +0,0 @@
|
||||
%%
%% Licensed to the Apache Software Foundation (ASF) under one
%% or more contributor license agreements. See the NOTICE file
%% distributed with this work for additional information
%% regarding copyright ownership. The ASF licenses this file
%% to you under the Apache License, Version 2.0 (the
%% "License"); you may not use this file except in compliance
%% with the License. You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%

-module(thrift_http_transport).

-behaviour(thrift_transport).

%% API
-export([new/2, new/3]).

%% thrift_transport callbacks
-export([write/2, read/2, flush/1, close/1]).

%% Transport state: target host/path, pending read/write iodata, and the
%% httpc request configuration.
-record(http_transport, {host, % string()
                         path, % string()
                         read_buffer, % iolist()
                         write_buffer, % iolist()
                         http_options, % see http(3)
                         extra_headers % [{str(), str()}, ...]
                        }).
-type state() :: #http_transport{}.
-include("thrift_transport_behaviour.hrl").

%% Create an HTTP transport for Host/Path with default options.
new(Host, Path) ->
    new(Host, Path, []).

%%--------------------------------------------------------------------
%% Options include:
%%   {http_options, HttpOptions}   = See http(3)
%%   {extra_headers, ExtraHeaders} = List of extra HTTP headers
%%--------------------------------------------------------------------
new(Host, Path, Options) ->
    Defaults = #http_transport{host = Host,
                               path = Path,
                               read_buffer = [],
                               write_buffer = [],
                               http_options = [],
                               extra_headers = []},
    %% Fold each option into the record; the first unrecognized option
    %% replaces the accumulator with {invalid_option, _}, which the final
    %% clause then threads through unchanged.
    Fold =
        fun({http_options, HttpOpts}, Acc = #http_transport{}) ->
                Acc#http_transport{http_options = HttpOpts};
           ({extra_headers, Headers}, Acc = #http_transport{}) ->
                Acc#http_transport{extra_headers = Headers};
           (Unknown, #http_transport{}) ->
                {invalid_option, Unknown};
           (_, Error) ->
                Error
        end,
    case lists:foldl(Fold, Defaults, Options) of
        Applied = #http_transport{} ->
            thrift_transport:new(?MODULE, Applied);
        Invalid ->
            {error, Invalid}
    end.

%% Append Data (as iodata) to the write buffer; nothing is sent yet.
write(State = #http_transport{write_buffer = Pending}, Data) ->
    {State#http_transport{write_buffer = [Pending, Data]}, ok}.

%% POST the buffered request body and append the reply body to the read
%% buffer for subsequent read/2 calls.
flush(State = #http_transport{host = Host,
                              path = Path,
                              read_buffer = ReadBuf,
                              write_buffer = WriteBuf,
                              http_options = HttpOptions,
                              extra_headers = ExtraHeaders}) ->
    case iolist_to_binary(WriteBuf) of
        <<>> ->
            %% Don't bother flushing empty buffers.
            {State, ok};
        RequestBody ->
            %% Any non-200 reply fails the match and crashes the caller
            %% (deliberate let-it-crash).
            {ok, {{_Version, 200, _ReasonPhrase}, _Headers, ReplyBody}} =
                httpc:request(post,
                              {"http://" ++ Host ++ Path,
                               [{"User-Agent", "Erlang/thrift_http_transport"} | ExtraHeaders],
                               "application/x-thrift",
                               RequestBody},
                              HttpOptions,
                              [{body_format, binary}]),
            {State#http_transport{read_buffer = [ReadBuf, ReplyBody],
                                  write_buffer = []}, ok}
    end.

%% Nothing to tear down for a stateless HTTP transport.
close(State) ->
    {State, ok}.

%% Hand back up to Len buffered bytes, leaving the remainder buffered.
read(State = #http_transport{read_buffer = ReadBuf}, Len) when is_integer(Len) ->
    Give = min(iolist_size(ReadBuf), Len),
    case iolist_to_binary(ReadBuf) of
        <<Chunk:Give/binary, Remaining/binary>> ->
            {State#http_transport{read_buffer = Remaining}, {ok, Chunk}};
        _ ->
            {State, {error, 'EOF'}}
    end.
|
@ -1,419 +0,0 @@
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
%% or more contributor license agreements. See the NOTICE file
%% distributed with this work for additional information
%% regarding copyright ownership. The ASF licenses this file
%% to you under the Apache License, Version 2.0 (the
%% "License"); you may not use this file except in compliance
%% with the License. You may obtain a copy of the License at
%%
%% http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% The json parser implementation was created by
%% alisdair sullivan <alisdair@hartbrake.com> based on
%% the jsx json library

%% Event-based JSON parser (a trimmed-down jsx): the input binary is
%% walked by mutually recursive state functions (value/4, object/4,
%% string/5, ...) that emit events into a pluggable handler.
%% NOTE(review): clause order in string/5 and unescape/5 is significant;
%% do not reorder.
-module(thrift_json_parser).
-export([parser/0, handle_event/2]).


%% strict_utf8: when true, malformed UTF-8 would raise badarg instead of
%% being replaced with U+FFFD. Only the 'false' path is exercised here.
-record(config, {strict_utf8 = false :: boolean()}).


%% Returns a fun that parses a JSON binary and yields the event list
%% accumulated by the default handler (this module, initial state []).
parser() -> fun(JSON) -> start(JSON, {?MODULE, []}, [], #config{}) end.


%% Internal dispatcher: forward Event to the active handler module.
handle_event(Event, {Handler, State}, _Config) -> {Handler, Handler:handle_event(Event, State)}.

%% Default handler callback: events are prepended as they arrive, then
%% reversed once at end_json so callers see emission order.
handle_event(end_json, State) -> lists:reverse([end_json] ++ State);
handle_event(Event, State) -> [Event] ++ State.


%% whitespace
-define(space, 16#20).
-define(tab, 16#09).
-define(cr, 16#0D).
-define(newline, 16#0A).

%% object delimiters
-define(start_object, 16#7B).
-define(end_object, 16#7D).

%% array delimiters
-define(start_array, 16#5B).
-define(end_array, 16#5D).

%% kv separator
-define(comma, 16#2C).
-define(doublequote, 16#22).
-define(singlequote, 16#27).
-define(colon, 16#3A).

%% string escape sequences
-define(rsolidus, 16#5C).
-define(solidus, 16#2F).

%% math
-define(zero, 16#30).
-define(decimalpoint, 16#2E).
-define(negative, 16#2D).
-define(positive, 16#2B).

%% comments
-define(star, 16#2A).


%% some useful guards
-define(is_hex(Symbol),
    (Symbol >= $a andalso Symbol =< $f) orelse
    (Symbol >= $A andalso Symbol =< $F) orelse
    (Symbol >= $0 andalso Symbol =< $9)
).

-define(is_nonzero(Symbol),
    Symbol >= $1 andalso Symbol =< $9
).

-define(is_whitespace(Symbol),
    Symbol =:= ?space; Symbol =:= ?tab; Symbol =:= ?cr; Symbol =:= ?newline
).


%% lists are benchmarked to be faster (tho higher in memory usage) than binaries
%% Accumulators build strings/numbers as reversed codepoint lists.
new_seq() -> [].
new_seq(C) -> [C].

%% Prepend C (or a list of codepoints, reversed first) onto the
%% reversed accumulator Seq.
acc_seq(Seq, C) when is_list(C) -> lists:reverse(C) ++ Seq;
acc_seq(Seq, C) -> [C] ++ Seq.

%% Un-reverse the accumulator and encode it as a UTF-8 binary.
end_seq(Seq) -> unicode:characters_to_binary(lists:reverse(Seq)).

end_seq(Seq, _) -> end_seq(Seq).


%% Entry state: skip a UTF-8 BOM if present, then parse a value.
start(<<16#ef, 16#bb, 16#bf, Rest/binary>>, Handler, Stack, Config) ->
    value(Rest, Handler, Stack, Config);
start(Bin, Handler, Stack, Config) ->
    value(Bin, Handler, Stack, Config).


%% Dispatch on the first significant byte of a JSON value.
value(<<?doublequote, Rest/binary>>, Handler, Stack, Config) ->
    string(Rest, Handler, new_seq(), Stack, Config);
value(<<$t, Rest/binary>>, Handler, Stack, Config) ->
    true(Rest, Handler, Stack, Config);
value(<<$f, Rest/binary>>, Handler, Stack, Config) ->
    false(Rest, Handler, Stack, Config);
value(<<$n, Rest/binary>>, Handler, Stack, Config) ->
    null(Rest, Handler, Stack, Config);
value(<<?negative, Rest/binary>>, Handler, Stack, Config) ->
    negative(Rest, Handler, new_seq($-), Stack, Config);
value(<<?zero, Rest/binary>>, Handler, Stack, Config) ->
    zero(Rest, Handler, new_seq($0), Stack, Config);
value(<<S, Rest/binary>>, Handler, Stack, Config) when ?is_nonzero(S) ->
    integer(Rest, Handler, new_seq(S), Stack, Config);
value(<<?start_object, Rest/binary>>, Handler, Stack, Config) ->
    object(Rest, handle_event(start_object, Handler, Config), [key|Stack], Config);
value(<<?start_array, Rest/binary>>, Handler, Stack, Config) ->
    array(Rest, handle_event(start_array, Handler, Config), [array|Stack], Config);
value(<<S, Rest/binary>>, Handler, Stack, Config) when ?is_whitespace(S) ->
    value(Rest, Handler, Stack, Config);
value(_Bin, _Handler, _Stack, _Config) ->
    erlang:error(badarg).


%% Inside an object, expecting a key or the closing brace.
object(<<?doublequote, Rest/binary>>, Handler, Stack, Config) ->
    string(Rest, Handler, new_seq(), Stack, Config);
object(<<?end_object, Rest/binary>>, Handler, [key|Stack], Config) ->
    maybe_done(Rest, handle_event(end_object, Handler, Config), Stack, Config);
object(<<S, Rest/binary>>, Handler, Stack, Config) when ?is_whitespace(S) ->
    object(Rest, Handler, Stack, Config);
object(_Bin, _Handler, _Stack, _Config) ->
    erlang:error(badarg).


%% Inside an array, expecting a value or the closing bracket.
array(<<?end_array, Rest/binary>>, Handler, [array|Stack], Config) ->
    maybe_done(Rest, handle_event(end_array, Handler, Config), Stack, Config);
array(<<S, Rest/binary>>, Handler, Stack, Config) when ?is_whitespace(S) ->
    array(Rest, Handler, Stack, Config);
array(Bin, Handler, Stack, Config) ->
    value(Bin, Handler, Stack, Config).


%% After an object key: expect ':' and swap 'key' for 'object' on the stack.
colon(<<?colon, Rest/binary>>, Handler, [key|Stack], Config) ->
    value(Rest, Handler, [object|Stack], Config);
colon(<<S, Rest/binary>>, Handler, Stack, Config) when ?is_whitespace(S) ->
    colon(Rest, Handler, Stack, Config);
colon(_Bin, _Handler, _Stack, _Config) ->
    erlang:error(badarg).


%% After a comma inside an object: expect the next quoted key.
key(<<?doublequote, Rest/binary>>, Handler, Stack, Config) ->
    string(Rest, Handler, new_seq(), Stack, Config);
key(<<S, Rest/binary>>, Handler, Stack, Config) when ?is_whitespace(S) ->
    key(Rest, Handler, Stack, Config);
key(_Bin, _Handler, _Stack, _Config) ->
    erlang:error(badarg).


%% note that if you encounter an error from string and you can't find the clause that
%% caused it here, it might be in unescape below
%% Accumulates string codepoints; the long guard ladder admits only
%% valid, non-noncharacter codepoints, and the trailing clauses replace
%% invalid UTF-8 with U+FFFD when strict_utf8 is false.
string(<<?doublequote, Rest/binary>>, Handler, Acc, Stack, Config) ->
    doublequote(Rest, Handler, Acc, Stack, Config);
string(<<?solidus, Rest/binary>>, Handler, Acc, Stack, Config) ->
    string(Rest, Handler, acc_seq(Acc, ?solidus), Stack, Config);
string(<<?rsolidus/utf8, Rest/binary>>, Handler, Acc, Stack, Config) ->
    unescape(Rest, Handler, Acc, Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#20, X < 16#2028 ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X == 16#2028; X == 16#2029 ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X > 16#2029, X < 16#d800 ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X > 16#dfff, X < 16#fdd0 ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X > 16#fdef, X < 16#fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#10000, X < 16#1fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#20000, X < 16#2fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#30000, X < 16#3fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#40000, X < 16#4fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#50000, X < 16#5fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#60000, X < 16#6fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#70000, X < 16#7fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#80000, X < 16#8fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#90000, X < 16#9fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#a0000, X < 16#afffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#b0000, X < 16#bfffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#c0000, X < 16#cfffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#d0000, X < 16#dfffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#e0000, X < 16#efffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#f0000, X < 16#ffffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
string(<<X/utf8, Rest/binary>>, Handler, Acc, Stack, Config) when X >= 16#100000, X < 16#10fffe ->
    string(Rest, Handler, acc_seq(Acc, X), Stack, Config);
%% surrogates
string(<<237, X, _, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false})
        when X >= 160 ->
    string(Rest, Handler, acc_seq(Acc, 16#fffd), Stack, Config);
%% u+xfffe, u+xffff, control codes and other noncharacters
string(<<_/utf8, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false}) ->
    string(Rest, Handler, acc_seq(Acc, 16#fffd), Stack, Config);
%% u+fffe and u+ffff for R14BXX (subsequent runtimes will happily match the
%% preceding clause
string(<<239, 191, X, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false})
        when X == 190; X == 191 ->
    string(Rest, Handler, acc_seq(Acc, 16#fffd), Stack, Config);
%% overlong encodings and missing continuations of a 2 byte sequence
string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false})
        when X >= 192, X =< 223 ->
    strip_continuations(Rest, Handler, Acc, Stack, Config, 1);
%% overlong encodings and missing continuations of a 3 byte sequence
string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false})
        when X >= 224, X =< 239 ->
    strip_continuations(Rest, Handler, Acc, Stack, Config, 2);
%% overlong encodings and missing continuations of a 4 byte sequence
string(<<X, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false})
        when X >= 240, X =< 247 ->
    strip_continuations(Rest, Handler, Acc, Stack, Config, 3);
%% incompletes and unexpected bytes, including orphan continuations
string(<<_, Rest/binary>>, Handler, Acc, Stack, Config=#config{strict_utf8=false}) ->
    string(Rest, Handler, acc_seq(Acc, 16#fffd), Stack, Config);
string(_Bin, _Handler, _Acc, _Stack, _Config) ->
    erlang:error(badarg).


%% Closing quote reached: emit either a key event (when expecting a key)
%% or a string event, then continue in the appropriate state.
doublequote(Rest, Handler, Acc, [key|_] = Stack, Config) ->
    colon(Rest, handle_event({key, end_seq(Acc, Config)}, Handler, Config), Stack, Config);
doublequote(Rest, Handler, Acc, Stack, Config) ->
    maybe_done(Rest, handle_event({string, end_seq(Acc, Config)}, Handler, Config), Stack, Config).


%% strips continuation bytes after bad utf bytes, guards against both too short
%% and overlong sequences. N is the maximum number of bytes to strip
strip_continuations(<<Rest/binary>>, Handler, Acc, Stack, Config, 0) ->
    string(Rest, Handler, acc_seq(Acc, 16#fffd), Stack, Config);
strip_continuations(<<X, Rest/binary>>, Handler, Acc, Stack, Config, N) when X >= 128, X =< 191 ->
    strip_continuations(Rest, Handler, Acc, Stack, Config, N - 1);
%% not a continuation byte, insert a replacement character for sequence thus
%% far and dispatch back to string
strip_continuations(<<Rest/binary>>, Handler, Acc, Stack, Config, _) ->
    string(Rest, Handler, acc_seq(Acc, 16#fffd), Stack, Config).


%% this all gets really gross and should probably eventually be folded into
%% but for now it fakes being part of string on incompletes and errors
%% Handles the byte(s) following a backslash, including \uXXXX escapes
%% and UTF-16 surrogate pairs.
unescape(<<$b, Rest/binary>>, Handler, Acc, Stack, Config) ->
    string(Rest, Handler, acc_seq(Acc, $\b), Stack, Config);
unescape(<<$f, Rest/binary>>, Handler, Acc, Stack, Config) ->
    string(Rest, Handler, acc_seq(Acc, $\f), Stack, Config);
unescape(<<$n, Rest/binary>>, Handler, Acc, Stack, Config) ->
    string(Rest, Handler, acc_seq(Acc, $\n), Stack, Config);
unescape(<<$r, Rest/binary>>, Handler, Acc, Stack, Config) ->
    string(Rest, Handler, acc_seq(Acc, $\r), Stack, Config);
unescape(<<$t, Rest/binary>>, Handler, Acc, Stack, Config) ->
    string(Rest, Handler, acc_seq(Acc, $\t), Stack, Config);
unescape(<<?doublequote, Rest/binary>>, Handler, Acc, Stack, Config) ->
    string(Rest, Handler, acc_seq(Acc, $\"), Stack, Config);
unescape(<<?rsolidus, Rest/binary>>, Handler, Acc, Stack, Config) ->
    string(Rest, Handler, acc_seq(Acc, $\\), Stack, Config);
unescape(<<?solidus, Rest/binary>>, Handler, Acc, Stack, Config) ->
    string(Rest, Handler, acc_seq(Acc, $/), Stack, Config);
%% \uDXXX\uDXXX surrogate pair: decode to the supplementary-plane codepoint
unescape(<<$u, $d, A, B, C, ?rsolidus, $u, $d, X, Y, Z, Rest/binary>>, Handler, Acc, Stack, Config)
        when (A == $8 orelse A == $9 orelse A == $a orelse A == $b),
             (X == $c orelse X == $d orelse X == $e orelse X == $f),
             ?is_hex(B), ?is_hex(C), ?is_hex(Y), ?is_hex(Z)
        ->
    High = erlang:list_to_integer([$d, A, B, C], 16),
    Low = erlang:list_to_integer([$d, X, Y, Z], 16),
    Codepoint = (High - 16#d800) * 16#400 + (Low - 16#dc00) + 16#10000,
    string(Rest, Handler, acc_seq(Acc, Codepoint), Stack, Config);
%% high surrogate followed by a non-low-surrogate escape: both replaced
unescape(<<$u, $d, A, B, C, ?rsolidus, $u, W, X, Y, Z, Rest/binary>>, Handler, Acc, Stack, Config)
        when (A == $8 orelse A == $9 orelse A == $a orelse A == $b),
             ?is_hex(B), ?is_hex(C), ?is_hex(W), ?is_hex(X), ?is_hex(Y), ?is_hex(Z)
        ->
    string(Rest, Handler, acc_seq(Acc, [16#fffd, 16#fffd]), Stack, Config);
%% plain \uXXXX; lone surrogates degrade to U+FFFD
unescape(<<$u, A, B, C, D, Rest/binary>>, Handler, Acc, Stack, Config)
        when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) ->
    case erlang:list_to_integer([A, B, C, D], 16) of
        Codepoint when Codepoint < 16#d800; Codepoint > 16#dfff ->
            string(Rest, Handler, acc_seq(Acc, Codepoint), Stack, Config);
        _ ->
            string(Rest, Handler, acc_seq(Acc, 16#fffd), Stack, Config)
    end;
unescape(_Bin, _Handler, _Acc, _Stack, _Config) ->
    erlang:error(badarg).


%% like in strings, there's some pseudo states in here that will never
%% show up in errors or incompletes. some show up in value, some show
%% up in integer, decimal or exp
negative(<<$0, Rest/binary>>, Handler, Acc, Stack, Config) ->
    zero(Rest, Handler, acc_seq(Acc, $0), Stack, Config);
negative(<<S, Rest/binary>>, Handler, Acc, Stack, Config) when ?is_nonzero(S) ->
    integer(Rest, Handler, acc_seq(Acc, S), Stack, Config);
negative(_Bin, _Handler, _Acc, _Stack, _Config) ->
    erlang:error(badarg).


%% Just saw a leading zero; only '.', exponent, or end may follow.
zero(<<?decimalpoint, Rest/binary>>, Handler, Acc, Stack, Config) ->
    decimal(Rest, Handler, acc_seq(Acc, ?decimalpoint), Stack, Config);
zero(<<S, Rest/binary>>, Handler, Acc, Stack, Config) when S =:= $e; S =:= $E ->
    %% ".0e" is inserted so list_to_float/1 accepts the final literal
    e(Rest, Handler, acc_seq(Acc, ".0e"), Stack, Config);
zero(Bin, Handler, Acc, Stack, Config) ->
    finish_number(Bin, Handler, {zero, Acc}, Stack, Config).


integer(<<S, Rest/binary>>, Handler, Acc, Stack, Config) when S =:= ?zero; ?is_nonzero(S) ->
    integer(Rest, Handler, acc_seq(Acc, S), Stack, Config);
integer(<<?decimalpoint, Rest/binary>>, Handler, Acc, Stack, Config) ->
    initialdecimal(Rest, Handler, acc_seq(Acc, ?decimalpoint), Stack, Config);
integer(<<S, Rest/binary>>, Handler, Acc, Stack, Config) when S =:= $e; S =:= $E ->
    %% ".0e" is inserted so list_to_float/1 accepts the final literal
    e(Rest, Handler, acc_seq(Acc, ".0e"), Stack, Config);
integer(Bin, Handler, Acc, Stack, Config) ->
    finish_number(Bin, Handler, {integer, Acc}, Stack, Config).


%% A decimal point must be followed by at least one digit.
initialdecimal(<<S, Rest/binary>>, Handler, Acc, Stack, Config) when S =:= ?zero; ?is_nonzero(S) ->
    decimal(Rest, Handler, acc_seq(Acc, S), Stack, Config);
initialdecimal(_Bin, _Handler, _Acc, _Stack, _Config) ->
    erlang:error(badarg).


decimal(<<S, Rest/binary>>, Handler, Acc, Stack, Config) when S =:= ?zero; ?is_nonzero(S) ->
    decimal(Rest, Handler, acc_seq(Acc, S), Stack, Config);
decimal(<<S, Rest/binary>>, Handler, Acc, Stack, Config) when S =:= $e; S =:= $E ->
    e(Rest, Handler, acc_seq(Acc, $e), Stack, Config);
decimal(Bin, Handler, Acc, Stack, Config) ->
    finish_number(Bin, Handler, {decimal, Acc}, Stack, Config).


%% After 'e'/'E': optional sign, then at least one digit.
e(<<S, Rest/binary>>, Handler, Acc, Stack, Config) when S =:= ?zero; ?is_nonzero(S) ->
    exp(Rest, Handler, acc_seq(Acc, S), Stack, Config);
e(<<Sign, Rest/binary>>, Handler, Acc, Stack, Config) when Sign =:= ?positive; Sign =:= ?negative ->
    ex(Rest, Handler, acc_seq(Acc, Sign), Stack, Config);
e(_Bin, _Handler, _Acc, _Stack, _Config) ->
    erlang:error(badarg).


%% After an exponent sign: require at least one digit.
ex(<<S, Rest/binary>>, Handler, Acc, Stack, Config) when S =:= ?zero; ?is_nonzero(S) ->
    exp(Rest, Handler, acc_seq(Acc, S), Stack, Config);
ex(_Bin, _Handler, _Acc, _Stack, _Config) ->
    erlang:error(badarg).


exp(<<S, Rest/binary>>, Handler, Acc, Stack, Config) when S =:= ?zero; ?is_nonzero(S) ->
    exp(Rest, Handler, acc_seq(Acc, S), Stack, Config);
exp(Bin, Handler, Acc, Stack, Config) ->
    finish_number(Bin, Handler, {exp, Acc}, Stack, Config).


%% Emit the number event and resume. NOTE(review): both clauses are
%% identical in effect; the [] clause is redundant but kept as-is.
finish_number(Rest, Handler, Acc, [], Config) ->
    maybe_done(Rest, handle_event(format_number(Acc), Handler, Config), [], Config);
finish_number(Rest, Handler, Acc, Stack, Config) ->
    maybe_done(Rest, handle_event(format_number(Acc), Handler, Config), Stack, Config).


%% Convert the reversed digit accumulator into the final event term.
format_number({zero, Acc}) -> {integer, list_to_integer(lists:reverse(Acc))};
format_number({integer, Acc}) -> {integer, list_to_integer(lists:reverse(Acc))};
format_number({decimal, Acc}) -> {float, list_to_float(lists:reverse(Acc))};
format_number({exp, Acc}) -> {float, list_to_float(lists:reverse(Acc))}.


%% Literal parsers: the leading character was consumed by value/4.
true(<<$r, $u, $e, Rest/binary>>, Handler, Stack, Config) ->
    maybe_done(Rest, handle_event({literal, true}, Handler, Config), Stack, Config);
true(_Bin, _Handler, _Stack, _Config) ->
    erlang:error(badarg).


false(<<$a, $l, $s, $e, Rest/binary>>, Handler, Stack, Config) ->
    maybe_done(Rest, handle_event({literal, false}, Handler, Config), Stack, Config);
false(_Bin, _Handler, _Stack, _Config) ->
    erlang:error(badarg).


null(<<$u, $l, $l, Rest/binary>>, Handler, Stack, Config) ->
    maybe_done(Rest, handle_event({literal, null}, Handler, Config), Stack, Config);
null(_Bin, _Handler, _Stack, _Config) ->
    erlang:error(badarg).


%% After a complete value: close containers, consume commas, or — when
%% the stack is empty — finish the document.
maybe_done(<<Rest/binary>>, Handler, [], Config) ->
    done(Rest, handle_event(end_json, Handler, Config), [], Config);
maybe_done(<<?end_object, Rest/binary>>, Handler, [object|Stack], Config) ->
    maybe_done(Rest, handle_event(end_object, Handler, Config), Stack, Config);
maybe_done(<<?end_array, Rest/binary>>, Handler, [array|Stack], Config) ->
    maybe_done(Rest, handle_event(end_array, Handler, Config), Stack, Config);
maybe_done(<<?comma, Rest/binary>>, Handler, [object|Stack], Config) ->
    key(Rest, Handler, [key|Stack], Config);
maybe_done(<<?comma, Rest/binary>>, Handler, [array|_] = Stack, Config) ->
    value(Rest, Handler, Stack, Config);
maybe_done(<<S, Rest/binary>>, Handler, Stack, Config) when ?is_whitespace(S) ->
    maybe_done(Rest, Handler, Stack, Config);
maybe_done(_Bin, _Handler, _Stack, _Config) ->
    erlang:error(badarg).


%% Terminal state: only trailing whitespace is allowed; return the
%% handler's final state when the input is exhausted.
done(<<S, Rest/binary>>, Handler, [], Config) when ?is_whitespace(S) ->
    done(Rest, Handler, [], Config);
done(<<>>, {_Handler, State}, [], _Config) -> State;
done(_Bin, _Handler, _Stack, _Config) -> erlang:error(badarg).
|
@ -1,566 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
%% The JSON protocol implementation was created by
|
||||
%% Peter Neumark <neumark.peter@gmail.com> based on
|
||||
%% the binary protocol implementation.
|
||||
|
||||
-module(thrift_json_protocol).
|
||||
|
||||
-behaviour(thrift_protocol).
|
||||
|
||||
-include("thrift_constants.hrl").
|
||||
-include("thrift_protocol.hrl").
|
||||
|
||||
-export([new/1, new/2,
|
||||
read/2,
|
||||
write/2,
|
||||
flush_transport/1,
|
||||
close_transport/1,
|
||||
new_protocol_factory/2
|
||||
]).
|
||||
|
||||
-record(json_context, {
|
||||
% the type of json_context: array or object
|
||||
type,
|
||||
% fields read or written
|
||||
fields_processed = 0
|
||||
}).
|
||||
|
||||
-record(json_protocol, {
|
||||
transport,
|
||||
context_stack = [],
|
||||
jsx
|
||||
}).
|
||||
-type state() :: #json_protocol{}.
|
||||
-include("thrift_protocol_behaviour.hrl").
|
||||
|
||||
-define(VERSION_1, 1).
|
||||
-define(JSON_DOUBLE_PRECISION, 16).
|
||||
|
||||
%% Map a Thrift type id to its wire name in the Thrift JSON protocol.
typeid_to_json(?tType_BOOL) -> "tf";
typeid_to_json(?tType_BYTE) -> "i8";
typeid_to_json(?tType_DOUBLE) -> "dbl";
typeid_to_json(?tType_I16) -> "i16";
typeid_to_json(?tType_I32) -> "i32";
typeid_to_json(?tType_I64) -> "i64";
typeid_to_json(?tType_STRING) -> "str";
typeid_to_json(?tType_STRUCT) -> "rec";
typeid_to_json(?tType_MAP) -> "map";
typeid_to_json(?tType_SET) -> "set";
typeid_to_json(?tType_LIST) -> "lst".

%% Inverse of typeid_to_json/1; unknown names fail with function_clause.
json_to_typeid("tf") -> ?tType_BOOL;
json_to_typeid("i8") -> ?tType_BYTE;
json_to_typeid("dbl") -> ?tType_DOUBLE;
json_to_typeid("i16") -> ?tType_I16;
json_to_typeid("i32") -> ?tType_I32;
json_to_typeid("i64") -> ?tType_I64;
json_to_typeid("str") -> ?tType_STRING;
json_to_typeid("rec") -> ?tType_STRUCT;
json_to_typeid("map") -> ?tType_MAP;
json_to_typeid("set") -> ?tType_SET;
json_to_typeid("lst") -> ?tType_LIST.

%% Opening delimiter for a JSON context.
start_context(object) -> "{";
start_context(array) -> "[".

%% Closing delimiter for a JSON context.
end_context(object) -> "}";
end_context(array) -> "]".


%% Create a JSON protocol over Transport; options are accepted for
%% interface compatibility but currently ignored.
new(Transport) ->
    new(Transport, _Options = []).

new(Transport, _Options) ->
    State = #json_protocol{transport = Transport},
    thrift_protocol:new(?MODULE, State).

%% Flush the underlying transport; the JSON context stack is reset
%% because a flush ends the current message.
flush_transport(This = #json_protocol{transport = Transport}) ->
    {NewTransport, Result} = thrift_transport:flush(Transport),
    {This#json_protocol{
        transport = NewTransport,
        context_stack = []
    }, Result}.

%% Close the underlying transport and clear all protocol state.
close_transport(This = #json_protocol{transport = Transport}) ->
    {NewTransport, Result} = thrift_transport:close(Transport),
    {This#json_protocol{
        transport = NewTransport,
        context_stack = [],
        jsx = undefined
    }, Result}.
|
||||
|
||||
%%%
|
||||
%%% instance methods
|
||||
%%%
|
||||
% places a new context on the stack:
|
||||
write(#json_protocol{context_stack = Stack} = State0, {enter_context, Type}) ->
|
||||
{State1, ok} = write_values(State0, [{context_pre_item, false}]),
|
||||
State2 = State1#json_protocol{context_stack = [
|
||||
#json_context{type=Type}|Stack]},
|
||||
write_values(State2, [list_to_binary(start_context(Type))]);
|
||||
|
||||
% removes the topmost context from stack
|
||||
write(#json_protocol{context_stack = [CurrCtxt|Stack]} = State0, {exit_context}) ->
|
||||
Type = CurrCtxt#json_context.type,
|
||||
State1 = State0#json_protocol{context_stack = Stack},
|
||||
write_values(State1, [
|
||||
list_to_binary(end_context(Type)),
|
||||
{context_post_item, false}
|
||||
]);
|
||||
|
||||
% writes necessary prelude to field or container depending on current context
|
||||
write(#json_protocol{context_stack = []} = This0,
|
||||
{context_pre_item, _}) -> {This0, ok};
|
||||
write(#json_protocol{context_stack = [Context|_CtxtTail]} = This0,
|
||||
{context_pre_item, MayNeedQuotes}) ->
|
||||
FieldNo = Context#json_context.fields_processed,
|
||||
CtxtType = Context#json_context.type,
|
||||
Rem = FieldNo rem 2,
|
||||
case {CtxtType, FieldNo, Rem, MayNeedQuotes} of
|
||||
{array, N, _, _} when N > 0 -> % array element (not first)
|
||||
write(This0, <<",">>);
|
||||
{object, 0, _, true} -> % non-string object key (first)
|
||||
write(This0, <<"\"">>);
|
||||
{object, N, 0, true} when N > 0 -> % non-string object key (not first)
|
||||
write(This0, <<",\"">>);
|
||||
{object, N, 0, false} when N > 0-> % string object key (not first)
|
||||
write(This0, <<",">>);
|
||||
_ -> % no pre-field necessary
|
||||
{This0, ok}
|
||||
end;
|
||||
|
||||
% writes necessary postlude to field or container depending on current context
|
||||
write(#json_protocol{context_stack = []} = This0,
|
||||
{context_post_item, _}) -> {This0, ok};
|
||||
write(#json_protocol{context_stack = [Context|CtxtTail]} = This0,
|
||||
{context_post_item, MayNeedQuotes}) ->
|
||||
FieldNo = Context#json_context.fields_processed,
|
||||
CtxtType = Context#json_context.type,
|
||||
Rem = FieldNo rem 2,
|
||||
{This1, ok} = case {CtxtType, Rem, MayNeedQuotes} of
|
||||
{object, 0, true} -> % non-string object key
|
||||
write(This0, <<"\":">>);
|
||||
{object, 0, false} -> % string object key
|
||||
write(This0, <<":">>);
|
||||
_ -> % no pre-field necessary
|
||||
{This0, ok}
|
||||
end,
|
||||
NewContext = Context#json_context{fields_processed = FieldNo + 1},
|
||||
{This1#json_protocol{context_stack=[NewContext|CtxtTail]}, ok};
|
||||
|
||||
write(This0, #protocol_message_begin{
|
||||
name = Name,
|
||||
type = Type,
|
||||
seqid = Seqid}) ->
|
||||
write_values(This0, [
|
||||
{enter_context, array},
|
||||
{i32, ?VERSION_1},
|
||||
{string, Name},
|
||||
{i32, Type},
|
||||
{i32, Seqid}
|
||||
]);
|
||||
|
||||
write(This, message_end) ->
|
||||
write_values(This, [{exit_context}]);
|
||||
|
||||
% Example field expression: "1":{"dbl":3.14}
|
||||
write(This0, #protocol_field_begin{
|
||||
name = _Name,
|
||||
type = Type,
|
||||
id = Id}) ->
|
||||
write_values(This0, [
|
||||
% entering 'outer' object
|
||||
{i16, Id},
|
||||
% entering 'outer' object
|
||||
{enter_context, object},
|
||||
{string, typeid_to_json(Type)}
|
||||
]);
|
||||
|
||||
write(This, field_stop) ->
|
||||
{This, ok};
|
||||
|
||||
write(This, field_end) ->
|
||||
write_values(This,[{exit_context}]);
|
||||
|
||||
% Example message with map: [1,"testMap",1,0,{"1":{"map":["i32","i32",3,{"7":77,"8":88,"9":99}]}}]
|
||||
write(This0, #protocol_map_begin{
|
||||
ktype = Ktype,
|
||||
vtype = Vtype,
|
||||
size = Size}) ->
|
||||
write_values(This0, [
|
||||
{enter_context, array},
|
||||
{string, typeid_to_json(Ktype)},
|
||||
{string, typeid_to_json(Vtype)},
|
||||
{i32, Size},
|
||||
{enter_context, object}
|
||||
]);
|
||||
|
||||
write(This, map_end) ->
|
||||
write_values(This,[
|
||||
{exit_context},
|
||||
{exit_context}
|
||||
]);
|
||||
|
||||
write(This0, #protocol_list_begin{
|
||||
etype = Etype,
|
||||
size = Size}) ->
|
||||
write_values(This0, [
|
||||
{enter_context, array},
|
||||
{string, typeid_to_json(Etype)},
|
||||
{i32, Size}
|
||||
]);
|
||||
|
||||
write(This, list_end) ->
|
||||
write_values(This,[
|
||||
{exit_context}
|
||||
]);
|
||||
|
||||
% example message with set: [1,"testSet",1,0,{"1":{"set":["i32",3,1,2,3]}}]
|
||||
write(This0, #protocol_set_begin{
|
||||
etype = Etype,
|
||||
size = Size}) ->
|
||||
write_values(This0, [
|
||||
{enter_context, array},
|
||||
{string, typeid_to_json(Etype)},
|
||||
{i32, Size}
|
||||
]);
|
||||
|
||||
write(This, set_end) ->
|
||||
write_values(This,[
|
||||
{exit_context}
|
||||
]);
|
||||
% example message with struct: [1,"testStruct",1,0,{"1":{"rec":{"1":{"str":"worked"},"4":{"i8":1},"9":{"i32":1073741824},"11":{"i64":1152921504606847000}}}}]
|
||||
write(This, #protocol_struct_begin{}) ->
|
||||
write_values(This, [
|
||||
{enter_context, object}
|
||||
]);
|
||||
|
||||
write(This, struct_end) ->
|
||||
write_values(This,[
|
||||
{exit_context}
|
||||
]);
|
||||
|
||||
write(This, {bool, true}) -> write_values(This, [
|
||||
{context_pre_item, true},
|
||||
<<"true">>,
|
||||
{context_post_item, true}
|
||||
]);
|
||||
|
||||
write(This, {bool, false}) -> write_values(This, [
|
||||
{context_pre_item, true},
|
||||
<<"false">>,
|
||||
{context_post_item, true}
|
||||
]);
|
||||
|
||||
write(This, {byte, Byte}) -> write_values(This, [
|
||||
{context_pre_item, true},
|
||||
list_to_binary(integer_to_list(Byte)),
|
||||
{context_post_item, true}
|
||||
]);
|
||||
|
||||
write(This, {i16, I16}) ->
|
||||
write(This, {byte, I16});
|
||||
|
||||
write(This, {i32, I32}) ->
|
||||
write(This, {byte, I32});
|
||||
|
||||
write(This, {i64, I64}) ->
|
||||
write(This, {byte, I64});
|
||||
|
||||
write(This, {double, Double}) -> write_values(This, [
|
||||
{context_pre_item, true},
|
||||
list_to_binary(io_lib:format("~.*f", [?JSON_DOUBLE_PRECISION,Double])),
|
||||
{context_post_item, true}
|
||||
]);
|
||||
|
||||
write(This0, {string, Str}) -> write_values(This0, [
|
||||
{context_pre_item, false},
|
||||
case is_binary(Str) of
|
||||
true -> Str;
|
||||
false -> <<"\"", (list_to_binary(Str))/binary, "\"">>
|
||||
end,
|
||||
{context_post_item, false}
|
||||
]);
|
||||
|
||||
%% TODO: binary fields should be base64 encoded?
|
||||
|
||||
%% Data :: iolist()
|
||||
write(This = #json_protocol{transport = Trans}, Data) ->
|
||||
%io:format("Data ~p Ctxt ~p~n~n", [Data, This#json_protocol.context_stack]),
|
||||
{NewTransport, Result} = thrift_transport:write(Trans, Data),
|
||||
{This#json_protocol{transport = NewTransport}, Result}.
|
||||
|
||||
write_values(This0, ValueList) ->
|
||||
FinalState = lists:foldl(
|
||||
fun(Val, ThisIn) ->
|
||||
{ThisOut, ok} = write(ThisIn, Val),
|
||||
ThisOut
|
||||
end,
|
||||
This0,
|
||||
ValueList),
|
||||
{FinalState, ok}.
|
||||
|
||||
%% I wish the erlang version of the transport interface included a
|
||||
%% read_all function (like eg. the java implementation). Since it doesn't,
|
||||
%% here's my version (even though it probably shouldn't be in this file).
|
||||
%%
|
||||
%% The resulting binary is immediately send to the JSX stream parser.
|
||||
%% Subsequent calls to read actually operate on the events returned by JSX.
|
||||
read_all(#json_protocol{transport = Transport0} = State) ->
|
||||
{Transport1, Bin} = read_all_1(Transport0, []),
|
||||
P = thrift_json_parser:parser(),
|
||||
[First|Rest] = P(Bin),
|
||||
State#json_protocol{
|
||||
transport = Transport1,
|
||||
jsx = {event, First, Rest}
|
||||
}.
|
||||
|
||||
read_all_1(Transport0, IoList) ->
|
||||
{Transport1, Result} = thrift_transport:read(Transport0, 1),
|
||||
case Result of
|
||||
{ok, <<>>} -> % nothing read: assume we're done
|
||||
{Transport1, iolist_to_binary(lists:reverse(IoList))};
|
||||
{ok, Data} -> % character successfully read; read more
|
||||
read_all_1(Transport1, [Data|IoList]);
|
||||
{error, 'EOF'} -> % we're done
|
||||
{Transport1, iolist_to_binary(lists:reverse(IoList))}
|
||||
end.
|
||||
|
||||
% Expect reads an event from the JSX event stream. It receives an event or data
|
||||
% type as input. Comparing the read event from the one is was passed, it
|
||||
% returns an error if something other than the expected value is encountered.
|
||||
% Expect also maintains the context stack in #json_protocol.
|
||||
expect(#json_protocol{jsx={event, {Type, Data}=Ev, [Next|Rest]}}=State, ExpectedType) ->
|
||||
NextState = State#json_protocol{jsx={event, Next, Rest}},
|
||||
case Type == ExpectedType of
|
||||
true ->
|
||||
{NextState, {ok, convert_data(Type, Data)}};
|
||||
false ->
|
||||
{NextState, {error, {unexpected_json_event, Ev}}}
|
||||
end;
|
||||
|
||||
expect(#json_protocol{jsx={event, Event, Next}}=State, ExpectedEvent) ->
|
||||
expect(State#json_protocol{jsx={event, {Event, none}, Next}}, ExpectedEvent).
|
||||
|
||||
convert_data(integer, I) -> list_to_integer(I);
|
||||
convert_data(float, F) -> list_to_float(F);
|
||||
convert_data(_, D) -> D.
|
||||
|
||||
expect_many(State, ExpectedList) ->
|
||||
expect_many_1(State, ExpectedList, [], ok).
|
||||
|
||||
expect_many_1(State, [], ResultList, Status) ->
|
||||
{State, {Status, lists:reverse(ResultList)}};
|
||||
expect_many_1(State, [Expected|ExpTail], ResultList, _PrevStatus) ->
|
||||
{State1, {Status, Data}} = expect(State, Expected),
|
||||
NewResultList = [Data|ResultList],
|
||||
case Status of
|
||||
% in case of error, end prematurely
|
||||
error -> expect_many_1(State1, [], NewResultList, Status);
|
||||
ok -> expect_many_1(State1, ExpTail, NewResultList, Status)
|
||||
end.
|
||||
|
||||
% wrapper around expect to make life easier for container opening/closing functions
|
||||
expect_nodata(This, ExpectedList) ->
|
||||
case expect_many(This, ExpectedList) of
|
||||
{State, {ok, _}} ->
|
||||
{State, ok};
|
||||
Error ->
|
||||
Error
|
||||
end.
|
||||
|
||||
read_field(#json_protocol{jsx={event, Field, [Next|Rest]}} = State) ->
|
||||
NewState = State#json_protocol{jsx={event, Next, Rest}},
|
||||
{NewState, Field}.
|
||||
|
||||
read(This0, message_begin) ->
|
||||
% call read_all to get the contents of the transport buffer into JSX.
|
||||
This1 = read_all(This0),
|
||||
case expect_many(This1,
|
||||
[start_array, integer, string, integer, integer]) of
|
||||
{This2, {ok, [_, Version, Name, Type, SeqId]}} ->
|
||||
case Version =:= ?VERSION_1 of
|
||||
true ->
|
||||
{This2, #protocol_message_begin{name = Name,
|
||||
type = Type,
|
||||
seqid = SeqId}};
|
||||
false ->
|
||||
{This2, {error, no_json_protocol_version}}
|
||||
end;
|
||||
Other -> Other
|
||||
end;
|
||||
|
||||
read(This, message_end) ->
|
||||
expect_nodata(This, [end_array]);
|
||||
|
||||
read(This, struct_begin) ->
|
||||
expect_nodata(This, [start_object]);
|
||||
|
||||
read(This, struct_end) ->
|
||||
expect_nodata(This, [end_object]);
|
||||
|
||||
read(This0, field_begin) ->
|
||||
{This1, Read} = expect_many(This0,
|
||||
[%field id
|
||||
key,
|
||||
% {} surrounding field
|
||||
start_object,
|
||||
% type of field
|
||||
key]),
|
||||
case Read of
|
||||
{ok, [FieldIdStr, _, FieldType]} ->
|
||||
{This1, #protocol_field_begin{
|
||||
type = json_to_typeid(FieldType),
|
||||
id = list_to_integer(FieldIdStr)}}; % TODO: do we need to wrap this in a try/catch?
|
||||
{error,[{unexpected_json_event, {end_object,none}}]} ->
|
||||
{This1, #protocol_field_begin{type = ?tType_STOP}};
|
||||
Other ->
|
||||
io:format("**** OTHER branch selected ****"),
|
||||
{This1, Other}
|
||||
end;
|
||||
|
||||
read(This, field_end) ->
|
||||
expect_nodata(This, [end_object]);
|
||||
|
||||
% Example message with map: [1,"testMap",1,0,{"1":{"map":["i32","i32",3,{"7":77,"8":88,"9":99}]}}]
|
||||
read(This0, map_begin) ->
|
||||
case expect_many(This0,
|
||||
[start_array,
|
||||
% key type
|
||||
string,
|
||||
% value type
|
||||
string,
|
||||
% size
|
||||
integer,
|
||||
% the following object contains the map
|
||||
start_object]) of
|
||||
{This1, {ok, [_, Ktype, Vtype, Size, _]}} ->
|
||||
{This1, #protocol_map_begin{ktype = Ktype,
|
||||
vtype = Vtype,
|
||||
size = Size}};
|
||||
Other -> Other
|
||||
end;
|
||||
|
||||
read(This, map_end) ->
|
||||
expect_nodata(This, [end_object, end_array]);
|
||||
|
||||
read(This0, list_begin) ->
|
||||
case expect_many(This0,
|
||||
[start_array,
|
||||
% element type
|
||||
string,
|
||||
% size
|
||||
integer]) of
|
||||
{This1, {ok, [_, Etype, Size]}} ->
|
||||
{This1, #protocol_list_begin{
|
||||
etype = Etype,
|
||||
size = Size}};
|
||||
Other -> Other
|
||||
end;
|
||||
|
||||
read(This, list_end) ->
|
||||
expect_nodata(This, [end_array]);
|
||||
|
||||
% example message with set: [1,"testSet",1,0,{"1":{"set":["i32",3,1,2,3]}}]
|
||||
read(This0, set_begin) ->
|
||||
case expect_many(This0,
|
||||
[start_array,
|
||||
% element type
|
||||
string,
|
||||
% size
|
||||
integer]) of
|
||||
{This1, {ok, [_, Etype, Size]}} ->
|
||||
{This1, #protocol_set_begin{
|
||||
etype = Etype,
|
||||
size = Size}};
|
||||
Other -> Other
|
||||
end;
|
||||
|
||||
read(This, set_end) ->
|
||||
expect_nodata(This, [end_array]);
|
||||
|
||||
read(This0, field_stop) ->
|
||||
{This0, ok};
|
||||
%%
|
||||
|
||||
read(This0, bool) ->
|
||||
{This1, Field} = read_field(This0),
|
||||
Value = case Field of
|
||||
{literal, I} ->
|
||||
{ok, I};
|
||||
_Other ->
|
||||
{error, unexpected_event_for_boolean}
|
||||
end,
|
||||
{This1, Value};
|
||||
|
||||
read(This0, byte) ->
|
||||
{This1, Field} = read_field(This0),
|
||||
Value = case Field of
|
||||
{key, K} ->
|
||||
{ok, list_to_integer(K)};
|
||||
{integer, I} ->
|
||||
{ok, list_to_integer(I)};
|
||||
_Other ->
|
||||
{error, unexpected_event_for_integer}
|
||||
end,
|
||||
{This1, Value};
|
||||
|
||||
read(This0, i16) ->
|
||||
read(This0, byte);
|
||||
|
||||
read(This0, i32) ->
|
||||
read(This0, byte);
|
||||
|
||||
read(This0, i64) ->
|
||||
read(This0, byte);
|
||||
|
||||
read(This0, double) ->
|
||||
{This1, Field} = read_field(This0),
|
||||
Value = case Field of
|
||||
{float, I} ->
|
||||
{ok, list_to_float(I)};
|
||||
_Other ->
|
||||
{error, unexpected_event_for_double}
|
||||
end,
|
||||
{This1, Value};
|
||||
|
||||
% returns a binary directly, call binary_to_list if necessary
|
||||
read(This0, string) ->
|
||||
{This1, Field} = read_field(This0),
|
||||
Value = case Field of
|
||||
{string, I} ->
|
||||
{ok, I};
|
||||
{key, J} ->
|
||||
{ok, J};
|
||||
_Other ->
|
||||
{error, unexpected_event_for_string}
|
||||
end,
|
||||
{This1, Value}.
|
||||
|
||||
%%%% FACTORY GENERATION %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
%% returns a (fun() -> thrift_protocol())
|
||||
new_protocol_factory(TransportFactory, _Options) ->
|
||||
% Only strice read/write are implemented
|
||||
F = fun() ->
|
||||
{ok, Transport} = TransportFactory(),
|
||||
thrift_json_protocol:new(Transport, [])
|
||||
end,
|
||||
{ok, F}.
|
@ -1,62 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_memory_buffer).
|
||||
|
||||
-behaviour(thrift_transport).
|
||||
|
||||
%% API
|
||||
-export([new/0, new/1, new_transport_factory/0]).
|
||||
|
||||
%% thrift_transport callbacks
|
||||
-export([write/2, read/2, flush/1, close/1]).
|
||||
|
||||
-record(memory_buffer, {buffer}).
|
||||
-type state() :: #memory_buffer{}.
|
||||
-include("thrift_transport_behaviour.hrl").
|
||||
|
||||
new() ->
|
||||
State = #memory_buffer{buffer = []},
|
||||
thrift_transport:new(?MODULE, State).
|
||||
|
||||
new (Buf) when is_list (Buf) ->
|
||||
State = #memory_buffer{buffer = Buf},
|
||||
thrift_transport:new(?MODULE, State);
|
||||
new (Buf) ->
|
||||
State = #memory_buffer{buffer = [Buf]},
|
||||
thrift_transport:new(?MODULE, State).
|
||||
|
||||
new_transport_factory() ->
|
||||
{ok, fun() -> new() end}.
|
||||
|
||||
%% Writes data into the buffer
|
||||
write(State = #memory_buffer{buffer = Buf}, Data) ->
|
||||
{State#memory_buffer{buffer = [Buf, Data]}, ok}.
|
||||
|
||||
flush(State = #memory_buffer {buffer = Buf}) ->
|
||||
{State#memory_buffer{buffer = []}, Buf}.
|
||||
|
||||
close(State) ->
|
||||
{State, ok}.
|
||||
|
||||
read(State = #memory_buffer{buffer = Buf}, Len) when is_integer(Len) ->
|
||||
Binary = iolist_to_binary(Buf),
|
||||
Give = min(iolist_size(Binary), Len),
|
||||
{Result, Remaining} = split_binary(Binary, Give),
|
||||
{State#memory_buffer{buffer = Remaining}, {ok, Result}}.
|
@ -1,207 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_processor).
|
||||
|
||||
-export([init/1]).
|
||||
|
||||
-include("thrift_constants.hrl").
|
||||
-include("thrift_protocol.hrl").
|
||||
|
||||
-record(thrift_processor, {handler, protocol, service}).
|
||||
|
||||
init({_Server, ProtoGen, Service, Handler}) when is_function(ProtoGen, 0) ->
|
||||
{ok, Proto} = ProtoGen(),
|
||||
loop(#thrift_processor{protocol = Proto,
|
||||
service = Service,
|
||||
handler = Handler}).
|
||||
|
||||
loop(State0 = #thrift_processor{protocol = Proto0,
|
||||
handler = Handler}) ->
|
||||
{Proto1, MessageBegin} = thrift_protocol:read(Proto0, message_begin),
|
||||
State1 = State0#thrift_processor{protocol = Proto1},
|
||||
case MessageBegin of
|
||||
#protocol_message_begin{name = Function,
|
||||
type = ?tMessageType_CALL,
|
||||
seqid = Seqid} ->
|
||||
case handle_function(State1, list_to_atom(Function), Seqid) of
|
||||
{State2, ok} -> loop(State2);
|
||||
{_State2, {error, Reason}} ->
|
||||
Handler:handle_error(list_to_atom(Function), Reason),
|
||||
thrift_protocol:close_transport(Proto1),
|
||||
ok
|
||||
end;
|
||||
#protocol_message_begin{name = Function,
|
||||
type = ?tMessageType_ONEWAY,
|
||||
seqid = Seqid} ->
|
||||
case handle_function(State1, list_to_atom(Function), Seqid) of
|
||||
{State2, ok} -> loop(State2);
|
||||
{_State2, {error, Reason}} ->
|
||||
Handler:handle_error(list_to_atom(Function), Reason),
|
||||
thrift_protocol:close_transport(Proto1),
|
||||
ok
|
||||
end;
|
||||
{error, timeout = Reason} ->
|
||||
Handler:handle_error(undefined, Reason),
|
||||
thrift_protocol:close_transport(Proto1),
|
||||
ok;
|
||||
{error, closed = Reason} ->
|
||||
%% error_logger:info_msg("Client disconnected~n"),
|
||||
Handler:handle_error(undefined, Reason),
|
||||
thrift_protocol:close_transport(Proto1),
|
||||
exit(shutdown);
|
||||
{error, Reason} ->
|
||||
Handler:handle_error(undefined, Reason),
|
||||
thrift_protocol:close_transport(Proto1),
|
||||
exit(shutdown)
|
||||
end.
|
||||
|
||||
handle_function(State0=#thrift_processor{protocol = Proto0,
|
||||
handler = Handler,
|
||||
service = Service},
|
||||
Function,
|
||||
Seqid) ->
|
||||
InParams = Service:function_info(Function, params_type),
|
||||
|
||||
{Proto1, {ok, Params}} = thrift_protocol:read(Proto0, InParams),
|
||||
State1 = State0#thrift_processor{protocol = Proto1},
|
||||
|
||||
try
|
||||
Result = Handler:handle_function(Function, Params),
|
||||
%% {Micro, Result} = better_timer(Handler, handle_function, [Function, Params]),
|
||||
%% error_logger:info_msg("Processed ~p(~p) in ~.4fms~n",
|
||||
%% [Function, Params, Micro/1000.0]),
|
||||
handle_success(State1, Function, Result, Seqid)
|
||||
catch
|
||||
Type:Data when Type =:= throw orelse Type =:= error ->
|
||||
handle_function_catch(State1, Function, Type, Data, Seqid)
|
||||
end.
|
||||
|
||||
handle_function_catch(State = #thrift_processor{service = Service},
|
||||
Function, ErrType, ErrData, Seqid) ->
|
||||
IsOneway = Service:function_info(Function, reply_type) =:= oneway_void,
|
||||
|
||||
case {ErrType, ErrData} of
|
||||
_ when IsOneway ->
|
||||
Stack = erlang:get_stacktrace(),
|
||||
error_logger:warning_msg(
|
||||
"oneway void ~p threw error which must be ignored: ~p",
|
||||
[Function, {ErrType, ErrData, Stack}]),
|
||||
{State, ok};
|
||||
|
||||
{throw, Exception} when is_tuple(Exception), size(Exception) > 0 ->
|
||||
%error_logger:warning_msg("~p threw exception: ~p~n", [Function, Exception]),
|
||||
handle_exception(State, Function, Exception, Seqid);
|
||||
% we still want to accept more requests from this client
|
||||
|
||||
{error, Error} ->
|
||||
handle_error(State, Function, Error, Seqid)
|
||||
end.
|
||||
|
||||
handle_success(State = #thrift_processor{service = Service},
|
||||
Function,
|
||||
Result,
|
||||
Seqid) ->
|
||||
ReplyType = Service:function_info(Function, reply_type),
|
||||
StructName = atom_to_list(Function) ++ "_result",
|
||||
|
||||
case Result of
|
||||
{reply, ReplyData} ->
|
||||
Reply = {{struct, [{0, ReplyType}]}, {StructName, ReplyData}},
|
||||
send_reply(State, Function, ?tMessageType_REPLY, Reply, Seqid);
|
||||
|
||||
ok when ReplyType == {struct, []} ->
|
||||
send_reply(State, Function, ?tMessageType_REPLY, {ReplyType, {StructName}}, Seqid);
|
||||
|
||||
ok when ReplyType == oneway_void ->
|
||||
%% no reply for oneway void
|
||||
{State, ok}
|
||||
end.
|
||||
|
||||
handle_exception(State = #thrift_processor{service = Service},
|
||||
Function,
|
||||
Exception,
|
||||
Seqid) ->
|
||||
ExceptionType = element(1, Exception),
|
||||
%% Fetch a structure like {struct, [{-2, {struct, {Module, Type}}},
|
||||
%% {-3, {struct, {Module, Type}}}]}
|
||||
|
||||
ReplySpec = Service:function_info(Function, exceptions),
|
||||
{struct, XInfo} = ReplySpec,
|
||||
|
||||
true = is_list(XInfo),
|
||||
|
||||
%% Assuming we had a type1 exception, we'd get: [undefined, Exception, undefined]
|
||||
%% e.g.: [{-1, type0}, {-2, type1}, {-3, type2}]
|
||||
ExceptionList = [case Type of
|
||||
ExceptionType -> Exception;
|
||||
_ -> undefined
|
||||
end
|
||||
|| {_Fid, {struct, {_Module, Type}}} <- XInfo],
|
||||
|
||||
ExceptionTuple = list_to_tuple([Function | ExceptionList]),
|
||||
|
||||
% Make sure we got at least one defined
|
||||
case lists:all(fun(X) -> X =:= undefined end, ExceptionList) of
|
||||
true ->
|
||||
handle_unknown_exception(State, Function, Exception, Seqid);
|
||||
false ->
|
||||
send_reply(State, Function, ?tMessageType_REPLY, {ReplySpec, ExceptionTuple}, Seqid)
|
||||
end.
|
||||
|
||||
%%
|
||||
%% Called when an exception has been explicitly thrown by the service, but it was
|
||||
%% not one of the exceptions that was defined for the function.
|
||||
%%
|
||||
handle_unknown_exception(State, Function, Exception, Seqid) ->
|
||||
handle_error(State, Function, {exception_not_declared_as_thrown,
|
||||
Exception}, Seqid).
|
||||
|
||||
handle_error(State, Function, Error, Seqid) ->
|
||||
Stack = erlang:get_stacktrace(),
|
||||
error_logger:error_msg("~p had an error: ~p~n", [Function, {Error, Stack}]),
|
||||
|
||||
Message =
|
||||
case application:get_env(thrift, exceptions_include_traces) of
|
||||
{ok, true} ->
|
||||
lists:flatten(io_lib:format("An error occurred: ~p~n",
|
||||
[{Error, Stack}]));
|
||||
_ ->
|
||||
"An unknown handler error occurred."
|
||||
end,
|
||||
Reply = {?TApplicationException_Structure,
|
||||
#'TApplicationException'{
|
||||
message = Message,
|
||||
type = ?TApplicationException_UNKNOWN}},
|
||||
send_reply(State, Function, ?tMessageType_EXCEPTION, Reply, Seqid).
|
||||
|
||||
send_reply(State = #thrift_processor{protocol = Proto0}, Function, ReplyMessageType, Reply, Seqid) ->
|
||||
try
|
||||
{Proto1, ok} = thrift_protocol:write(Proto0, #protocol_message_begin{
|
||||
name = atom_to_list(Function),
|
||||
type = ReplyMessageType,
|
||||
seqid = Seqid}),
|
||||
{Proto2, ok} = thrift_protocol:write(Proto1, Reply),
|
||||
{Proto3, ok} = thrift_protocol:write(Proto2, message_end),
|
||||
{Proto4, ok} = thrift_protocol:flush_transport(Proto3),
|
||||
{State#thrift_processor{protocol = Proto4}, ok}
|
||||
catch
|
||||
error:{badmatch, {_, {error, _} = Error}} ->
|
||||
{State, Error}
|
||||
end.
|
@ -1,407 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_protocol).
|
||||
|
||||
-export([new/2,
|
||||
write/2,
|
||||
read/2,
|
||||
read/3,
|
||||
skip/2,
|
||||
flush_transport/1,
|
||||
close_transport/1,
|
||||
typeid_to_atom/1
|
||||
]).
|
||||
|
||||
-export([behaviour_info/1]).
|
||||
|
||||
-include("thrift_constants.hrl").
|
||||
-include("thrift_protocol.hrl").
|
||||
|
||||
-record(protocol, {module, data}).
|
||||
|
||||
behaviour_info(callbacks) ->
|
||||
[
|
||||
{read, 2},
|
||||
{write, 2},
|
||||
{flush_transport, 1},
|
||||
{close_transport, 1}
|
||||
];
|
||||
behaviour_info(_Else) -> undefined.
|
||||
|
||||
new(Module, Data) when is_atom(Module) ->
|
||||
{ok, #protocol{module = Module,
|
||||
data = Data}}.
|
||||
|
||||
-spec flush_transport(#protocol{}) -> {#protocol{}, ok}.
|
||||
flush_transport(Proto = #protocol{module = Module,
|
||||
data = Data}) ->
|
||||
{NewData, Result} = Module:flush_transport(Data),
|
||||
{Proto#protocol{data = NewData}, Result}.
|
||||
|
||||
-spec close_transport(#protocol{}) -> ok.
|
||||
close_transport(#protocol{module = Module,
|
||||
data = Data}) ->
|
||||
Module:close_transport(Data).
|
||||
|
||||
typeid_to_atom(?tType_STOP) -> field_stop;
|
||||
typeid_to_atom(?tType_VOID) -> void;
|
||||
typeid_to_atom(?tType_BOOL) -> bool;
|
||||
typeid_to_atom(?tType_BYTE) -> byte;
|
||||
typeid_to_atom(?tType_DOUBLE) -> double;
|
||||
typeid_to_atom(?tType_I16) -> i16;
|
||||
typeid_to_atom(?tType_I32) -> i32;
|
||||
typeid_to_atom(?tType_I64) -> i64;
|
||||
typeid_to_atom(?tType_STRING) -> string;
|
||||
typeid_to_atom(?tType_STRUCT) -> struct;
|
||||
typeid_to_atom(?tType_MAP) -> map;
|
||||
typeid_to_atom(?tType_SET) -> set;
|
||||
typeid_to_atom(?tType_LIST) -> list.
|
||||
|
||||
term_to_typeid(void) -> ?tType_VOID;
|
||||
term_to_typeid(bool) -> ?tType_BOOL;
|
||||
term_to_typeid(byte) -> ?tType_BYTE;
|
||||
term_to_typeid(double) -> ?tType_DOUBLE;
|
||||
term_to_typeid(i16) -> ?tType_I16;
|
||||
term_to_typeid(i32) -> ?tType_I32;
|
||||
term_to_typeid(i64) -> ?tType_I64;
|
||||
term_to_typeid(string) -> ?tType_STRING;
|
||||
term_to_typeid({struct, _}) -> ?tType_STRUCT;
|
||||
term_to_typeid({map, _, _}) -> ?tType_MAP;
|
||||
term_to_typeid({set, _}) -> ?tType_SET;
|
||||
term_to_typeid({list, _}) -> ?tType_LIST.
|
||||
|
||||
%% Structure is like:
|
||||
%% [{Fid, Type}, ...]
|
||||
-spec read(#protocol{}, {struct, _StructDef}, atom()) -> {#protocol{}, {ok, tuple()}}.
|
||||
read(IProto0, {struct, Structure}, Tag)
|
||||
when is_list(Structure), is_atom(Tag) ->
|
||||
|
||||
% If we want a tagged tuple, we need to offset all the tuple indices
|
||||
% by 1 to avoid overwriting the tag.
|
||||
Offset = if Tag =/= undefined -> 1; true -> 0 end,
|
||||
IndexList = case length(Structure) of
|
||||
N when N > 0 -> lists:seq(1 + Offset, N + Offset);
|
||||
_ -> []
|
||||
end,
|
||||
|
||||
SWithIndices = [{Fid, {Type, Index}} ||
|
||||
{{Fid, Type}, Index} <-
|
||||
lists:zip(Structure, IndexList)],
|
||||
% Fid -> {Type, Index}
|
||||
SDict = dict:from_list(SWithIndices),
|
||||
|
||||
{IProto1, ok} = read(IProto0, struct_begin),
|
||||
RTuple0 = erlang:make_tuple(length(Structure) + Offset, undefined),
|
||||
RTuple1 = if Tag =/= undefined -> setelement(1, RTuple0, Tag);
|
||||
true -> RTuple0
|
||||
end,
|
||||
|
||||
{IProto2, RTuple2} = read_struct_loop(IProto1, SDict, RTuple1),
|
||||
{IProto2, {ok, RTuple2}}.
|
||||
|
||||
|
||||
%% NOTE: Keep this in sync with thrift_protocol_behaviour:read
|
||||
-spec read
|
||||
(#protocol{}, {struct, _Info}) -> {#protocol{}, {ok, tuple()} | {error, _Reason}};
|
||||
(#protocol{}, tprot_cont_tag()) -> {#protocol{}, {ok, any()} | {error, _Reason}};
|
||||
(#protocol{}, tprot_empty_tag()) -> {#protocol{}, ok | {error, _Reason}};
|
||||
(#protocol{}, tprot_header_tag()) -> {#protocol{}, tprot_header_val() | {error, _Reason}};
|
||||
(#protocol{}, tprot_data_tag()) -> {#protocol{}, {ok, any()} | {error, _Reason}}.
|
||||
|
||||
read(IProto, {struct, {Module, StructureName}}) when is_atom(Module),
|
||||
is_atom(StructureName) ->
|
||||
read(IProto, Module:struct_info(StructureName), StructureName);
|
||||
|
||||
read(IProto, S={struct, Structure}) when is_list(Structure) ->
|
||||
read(IProto, S, undefined);
|
||||
|
||||
read(IProto0, {list, Type}) ->
|
||||
{IProto1, #protocol_list_begin{etype = EType, size = Size}} =
|
||||
read(IProto0, list_begin),
|
||||
{EType, EType} = {term_to_typeid(Type), EType},
|
||||
{List, IProto2} = lists:mapfoldl(fun(_, ProtoS0) ->
|
||||
{ProtoS1, {ok, Item}} = read(ProtoS0, Type),
|
||||
{Item, ProtoS1}
|
||||
end,
|
||||
IProto1,
|
||||
lists:duplicate(Size, 0)),
|
||||
{IProto3, ok} = read(IProto2, list_end),
|
||||
{IProto3, {ok, List}};
|
||||
|
||||
read(IProto0, {map, KeyType, ValType}) ->
|
||||
{IProto1, #protocol_map_begin{size = Size, ktype = KType, vtype = VType}} =
|
||||
read(IProto0, map_begin),
|
||||
{KType, KType} = {term_to_typeid(KeyType), KType},
|
||||
{VType, VType} = {term_to_typeid(ValType), VType},
|
||||
{List, IProto2} = lists:mapfoldl(fun(_, ProtoS0) ->
|
||||
{ProtoS1, {ok, Key}} = read(ProtoS0, KeyType),
|
||||
{ProtoS2, {ok, Val}} = read(ProtoS1, ValType),
|
||||
{{Key, Val}, ProtoS2}
|
||||
end,
|
||||
IProto1,
|
||||
lists:duplicate(Size, 0)),
|
||||
{IProto3, ok} = read(IProto2, map_end),
|
||||
{IProto3, {ok, dict:from_list(List)}};
|
||||
|
||||
read(IProto0, {set, Type}) ->
|
||||
{IProto1, #protocol_set_begin{etype = EType, size = Size}} =
|
||||
read(IProto0, set_begin),
|
||||
{EType, EType} = {term_to_typeid(Type), EType},
|
||||
{List, IProto2} = lists:mapfoldl(fun(_, ProtoS0) ->
|
||||
{ProtoS1, {ok, Item}} = read(ProtoS0, Type),
|
||||
{Item, ProtoS1}
|
||||
end,
|
||||
IProto1,
|
||||
lists:duplicate(Size, 0)),
|
||||
{IProto3, ok} = read(IProto2, set_end),
|
||||
{IProto3, {ok, sets:from_list(List)}};
|
||||
|
||||
read(Protocol, ProtocolType) ->
|
||||
read_specific(Protocol, ProtocolType).
|
||||
|
||||
%% NOTE: Keep this in sync with thrift_protocol_behaviour:read
|
||||
-spec read_specific
|
||||
(#protocol{}, tprot_empty_tag()) -> {#protocol{}, ok | {error, _Reason}};
|
||||
(#protocol{}, tprot_header_tag()) -> {#protocol{}, tprot_header_val() | {error, _Reason}};
|
||||
(#protocol{}, tprot_data_tag()) -> {#protocol{}, {ok, any()} | {error, _Reason}}.
|
||||
read_specific(Proto = #protocol{module = Module,
|
||||
data = ModuleData}, ProtocolType) ->
|
||||
{NewData, Result} = Module:read(ModuleData, ProtocolType),
|
||||
{Proto#protocol{data = NewData}, Result}.
|
||||
|
||||
read_struct_loop(IProto0, SDict, RTuple) ->
|
||||
{IProto1, #protocol_field_begin{type = FType, id = Fid}} =
|
||||
thrift_protocol:read(IProto0, field_begin),
|
||||
case {FType, Fid} of
|
||||
{?tType_STOP, _} ->
|
||||
{IProto1, RTuple};
|
||||
_Else ->
|
||||
case dict:find(Fid, SDict) of
|
||||
{ok, {Type, Index}} ->
|
||||
case term_to_typeid(Type) of
|
||||
FType ->
|
||||
{IProto2, {ok, Val}} = read(IProto1, Type),
|
||||
{IProto3, ok} = thrift_protocol:read(IProto2, field_end),
|
||||
NewRTuple = setelement(Index, RTuple, Val),
|
||||
read_struct_loop(IProto3, SDict, NewRTuple);
|
||||
Expected ->
|
||||
error_logger:info_msg(
|
||||
"Skipping field ~p with wrong type (~p != ~p)~n",
|
||||
[Fid, FType, Expected]),
|
||||
skip_field(FType, IProto1, SDict, RTuple)
|
||||
end;
|
||||
_Else2 ->
|
||||
skip_field(FType, IProto1, SDict, RTuple)
|
||||
end
|
||||
end.
|
||||
|
||||
skip_field(FType, IProto0, SDict, RTuple) ->
|
||||
FTypeAtom = thrift_protocol:typeid_to_atom(FType),
|
||||
{IProto1, ok} = thrift_protocol:skip(IProto0, FTypeAtom),
|
||||
{IProto2, ok} = read(IProto1, field_end),
|
||||
read_struct_loop(IProto2, SDict, RTuple).
|
||||
|
||||
-spec skip(#protocol{}, any()) -> {#protocol{}, ok}.
|
||||
|
||||
skip(Proto0, struct) ->
|
||||
{Proto1, ok} = read(Proto0, struct_begin),
|
||||
{Proto2, ok} = skip_struct_loop(Proto1),
|
||||
{Proto3, ok} = read(Proto2, struct_end),
|
||||
{Proto3, ok};
|
||||
|
||||
skip(Proto0, map) ->
|
||||
{Proto1, Map} = read(Proto0, map_begin),
|
||||
{Proto2, ok} = skip_map_loop(Proto1, Map),
|
||||
{Proto3, ok} = read(Proto2, map_end),
|
||||
{Proto3, ok};
|
||||
|
||||
skip(Proto0, set) ->
|
||||
{Proto1, Set} = read(Proto0, set_begin),
|
||||
{Proto2, ok} = skip_set_loop(Proto1, Set),
|
||||
{Proto3, ok} = read(Proto2, set_end),
|
||||
{Proto3, ok};
|
||||
|
||||
skip(Proto0, list) ->
|
||||
{Proto1, List} = read(Proto0, list_begin),
|
||||
{Proto2, ok} = skip_list_loop(Proto1, List),
|
||||
{Proto3, ok} = read(Proto2, list_end),
|
||||
{Proto3, ok};
|
||||
|
||||
skip(Proto0, Type) when is_atom(Type) ->
|
||||
{Proto1, _Ignore} = read(Proto0, Type),
|
||||
{Proto1, ok}.
|
||||
|
||||
|
||||
skip_struct_loop(Proto0) ->
|
||||
{Proto1, #protocol_field_begin{type = Type}} = read(Proto0, field_begin),
|
||||
case Type of
|
||||
?tType_STOP ->
|
||||
{Proto1, ok};
|
||||
_Else ->
|
||||
{Proto2, ok} = skip(Proto1, Type),
|
||||
{Proto3, ok} = read(Proto2, field_end),
|
||||
skip_struct_loop(Proto3)
|
||||
end.
|
||||
|
||||
skip_map_loop(Proto0, Map = #protocol_map_begin{ktype = Ktype,
|
||||
vtype = Vtype,
|
||||
size = Size}) ->
|
||||
case Size of
|
||||
N when N > 0 ->
|
||||
{Proto1, ok} = skip(Proto0, Ktype),
|
||||
{Proto2, ok} = skip(Proto1, Vtype),
|
||||
skip_map_loop(Proto2,
|
||||
Map#protocol_map_begin{size = Size - 1});
|
||||
0 -> {Proto0, ok}
|
||||
end.
|
||||
|
||||
skip_set_loop(Proto0, Map = #protocol_set_begin{etype = Etype,
|
||||
size = Size}) ->
|
||||
case Size of
|
||||
N when N > 0 ->
|
||||
{Proto1, ok} = skip(Proto0, Etype),
|
||||
skip_set_loop(Proto1,
|
||||
Map#protocol_set_begin{size = Size - 1});
|
||||
0 -> {Proto0, ok}
|
||||
end.
|
||||
|
||||
skip_list_loop(Proto0, Map = #protocol_list_begin{etype = Etype,
|
||||
size = Size}) ->
|
||||
case Size of
|
||||
N when N > 0 ->
|
||||
{Proto1, ok} = skip(Proto0, Etype),
|
||||
skip_list_loop(Proto1,
|
||||
Map#protocol_list_begin{size = Size - 1});
|
||||
0 -> {Proto0, ok}
|
||||
end.
|
||||
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: write(OProto, {Type, Data}) -> ok
|
||||
%%
|
||||
%% Type = {struct, StructDef} |
|
||||
%% {list, Type} |
|
||||
%% {map, KeyType, ValType} |
|
||||
%% {set, Type} |
|
||||
%% BaseType
|
||||
%%
|
||||
%% Data =
|
||||
%% tuple() -- for struct
|
||||
%% | list() -- for list
|
||||
%% | dictionary() -- for map
|
||||
%% | set() -- for set
|
||||
%% | any() -- for base types
|
||||
%%
|
||||
%% Description:
|
||||
%%--------------------------------------------------------------------
|
||||
-spec write(#protocol{}, any()) -> {#protocol{}, ok | {error, _Reason}}.
|
||||
|
||||
write(Proto0, {{struct, StructDef}, Data})
|
||||
when is_list(StructDef), is_tuple(Data), length(StructDef) == size(Data) - 1 ->
|
||||
|
||||
[StructName | Elems] = tuple_to_list(Data),
|
||||
{Proto1, ok} = write(Proto0, #protocol_struct_begin{name = StructName}),
|
||||
{Proto2, ok} = struct_write_loop(Proto1, StructDef, Elems),
|
||||
{Proto3, ok} = write(Proto2, struct_end),
|
||||
{Proto3, ok};
|
||||
|
||||
write(Proto, {{struct, {Module, StructureName}}, Data})
|
||||
when is_atom(Module),
|
||||
is_atom(StructureName),
|
||||
element(1, Data) =:= StructureName ->
|
||||
write(Proto, {Module:struct_info(StructureName), Data});
|
||||
|
||||
write(_, {{struct, {Module, StructureName}}, Data})
|
||||
when is_atom(Module),
|
||||
is_atom(StructureName) ->
|
||||
erlang:error(struct_unmatched, {{provided, element(1, Data)},
|
||||
{expected, StructureName}});
|
||||
|
||||
write(Proto0, {{list, Type}, Data})
|
||||
when is_list(Data) ->
|
||||
{Proto1, ok} = write(Proto0,
|
||||
#protocol_list_begin{
|
||||
etype = term_to_typeid(Type),
|
||||
size = length(Data)
|
||||
}),
|
||||
Proto2 = lists:foldl(fun(Elem, ProtoIn) ->
|
||||
{ProtoOut, ok} = write(ProtoIn, {Type, Elem}),
|
||||
ProtoOut
|
||||
end,
|
||||
Proto1,
|
||||
Data),
|
||||
{Proto3, ok} = write(Proto2, list_end),
|
||||
{Proto3, ok};
|
||||
|
||||
write(Proto0, {{map, KeyType, ValType}, Data}) ->
|
||||
{Proto1, ok} = write(Proto0,
|
||||
#protocol_map_begin{
|
||||
ktype = term_to_typeid(KeyType),
|
||||
vtype = term_to_typeid(ValType),
|
||||
size = dict:size(Data)
|
||||
}),
|
||||
Proto2 = dict:fold(fun(KeyData, ValData, ProtoS0) ->
|
||||
{ProtoS1, ok} = write(ProtoS0, {KeyType, KeyData}),
|
||||
{ProtoS2, ok} = write(ProtoS1, {ValType, ValData}),
|
||||
ProtoS2
|
||||
end,
|
||||
Proto1,
|
||||
Data),
|
||||
{Proto3, ok} = write(Proto2, map_end),
|
||||
{Proto3, ok};
|
||||
|
||||
write(Proto0, {{set, Type}, Data}) ->
|
||||
true = sets:is_set(Data),
|
||||
{Proto1, ok} = write(Proto0,
|
||||
#protocol_set_begin{
|
||||
etype = term_to_typeid(Type),
|
||||
size = sets:size(Data)
|
||||
}),
|
||||
Proto2 = sets:fold(fun(Elem, ProtoIn) ->
|
||||
{ProtoOut, ok} = write(ProtoIn, {Type, Elem}),
|
||||
ProtoOut
|
||||
end,
|
||||
Proto1,
|
||||
Data),
|
||||
{Proto3, ok} = write(Proto2, set_end),
|
||||
{Proto3, ok};
|
||||
|
||||
write(Proto = #protocol{module = Module,
|
||||
data = ModuleData}, Data) ->
|
||||
{NewData, Result} = Module:write(ModuleData, Data),
|
||||
{Proto#protocol{data = NewData}, Result}.
|
||||
|
||||
struct_write_loop(Proto0, [{Fid, Type} | RestStructDef], [Data | RestData]) ->
|
||||
NewProto = case Data of
|
||||
undefined ->
|
||||
Proto0; % null fields are skipped in response
|
||||
_ ->
|
||||
{Proto1, ok} = write(Proto0,
|
||||
#protocol_field_begin{
|
||||
type = term_to_typeid(Type),
|
||||
id = Fid
|
||||
}),
|
||||
{Proto2, ok} = write(Proto1, {Type, Data}),
|
||||
{Proto3, ok} = write(Proto2, field_end),
|
||||
Proto3
|
||||
end,
|
||||
struct_write_loop(NewProto, RestStructDef, RestData);
|
||||
struct_write_loop(Proto, [], []) ->
|
||||
write(Proto, field_stop).
|
@ -1,66 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-ifndef(THRIFT_PROTOCOL_INCLUDED).
|
||||
-define(THRIFT_PROTOCOL_INCLUDED, true).
|
||||
|
||||
-record(protocol_message_begin, {name, type, seqid}).
|
||||
-record(protocol_struct_begin, {name}).
|
||||
-record(protocol_field_begin, {name, type, id}).
|
||||
-record(protocol_map_begin, {ktype, vtype, size}).
|
||||
-record(protocol_list_begin, {etype, size}).
|
||||
-record(protocol_set_begin, {etype, size}).
|
||||
|
||||
-type tprot_header_val() :: #protocol_message_begin{}
|
||||
| #protocol_struct_begin{}
|
||||
| #protocol_field_begin{}
|
||||
| #protocol_map_begin{}
|
||||
| #protocol_list_begin{}
|
||||
| #protocol_set_begin{}
|
||||
.
|
||||
-type tprot_empty_tag() :: message_end
|
||||
| struct_begin
|
||||
| struct_end
|
||||
| field_end
|
||||
| map_end
|
||||
| list_end
|
||||
| set_end
|
||||
.
|
||||
-type tprot_header_tag() :: message_begin
|
||||
| field_begin
|
||||
| map_begin
|
||||
| list_begin
|
||||
| set_begin
|
||||
.
|
||||
-type tprot_data_tag() :: ui32
|
||||
| bool
|
||||
| byte
|
||||
| i16
|
||||
| i32
|
||||
| i64
|
||||
| double
|
||||
| string
|
||||
.
|
||||
-type tprot_cont_tag() :: {list, _Type}
|
||||
| {map, _KType, _VType}
|
||||
| {set, _Type}
|
||||
.
|
||||
|
||||
|
||||
-endif.
|
@ -1,37 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
%% Signature specifications for protocol implementations.
|
||||
|
||||
-ifndef(THRIFT_PROTOCOL_BEHAVIOUR_INCLUDED).
|
||||
-define(THRIFT_PROTOCOL_BEHAVIOUR_INCLUDED, true).
|
||||
|
||||
-spec flush_transport(state()) -> {state(), ok | {error, _Reason}}.
|
||||
-spec close_transport(state()) -> {state(), ok | {error, _Reason}}.
|
||||
|
||||
-spec write(state(), any()) -> {state(), ok | {error, _Reason}}.
|
||||
|
||||
%% NOTE: Keep this in sync with thrift_protocol:read and read_specific.
|
||||
-spec read
|
||||
(state(), tprot_empty_tag()) -> {state(), ok | {error, _Reason}};
|
||||
(state(), tprot_header_tag()) -> {state(), tprot_header_val() | {error, _Reason}};
|
||||
(state(), tprot_data_tag()) -> {state(), {ok, any()} | {error, _Reason}}.
|
||||
|
||||
|
||||
-endif.
|
@ -1,243 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_reconnecting_client).
|
||||
|
||||
-behaviour(gen_server).
|
||||
|
||||
%% API
|
||||
-export([ call/3,
|
||||
get_stats/1,
|
||||
get_and_reset_stats/1 ]).
|
||||
|
||||
-export([ start_link/6 ]).
|
||||
|
||||
%% gen_server callbacks
|
||||
-export([ init/1,
|
||||
handle_call/3,
|
||||
handle_cast/2,
|
||||
handle_info/2,
|
||||
terminate/2,
|
||||
code_change/3 ]).
|
||||
|
||||
-record( state, { client = nil,
|
||||
host,
|
||||
port,
|
||||
thrift_svc,
|
||||
thrift_opts,
|
||||
reconn_min,
|
||||
reconn_max,
|
||||
reconn_time,
|
||||
op_cnt_dict,
|
||||
op_time_dict } ).
|
||||
|
||||
%%====================================================================
|
||||
%% API
|
||||
%%====================================================================
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: start_link() -> {ok,Pid} | ignore | {error,Error}
|
||||
%% Description: Starts the server
|
||||
%%--------------------------------------------------------------------
|
||||
start_link( Host, Port,
|
||||
ThriftSvc, ThriftOpts,
|
||||
ReconnMin, ReconnMax ) ->
|
||||
gen_server:start_link( ?MODULE,
|
||||
[ Host, Port,
|
||||
ThriftSvc, ThriftOpts,
|
||||
ReconnMin, ReconnMax ],
|
||||
[] ).
|
||||
|
||||
call( Pid, Op, Args ) ->
|
||||
gen_server:call( Pid, { call, Op, Args } ).
|
||||
|
||||
get_stats( Pid ) ->
|
||||
gen_server:call( Pid, get_stats ).
|
||||
|
||||
get_and_reset_stats( Pid ) ->
|
||||
gen_server:call( Pid, get_and_reset_stats ).
|
||||
|
||||
%%====================================================================
|
||||
%% gen_server callbacks
|
||||
%%====================================================================
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: init(Args) -> {ok, State} |
|
||||
%% {ok, State, Timeout} |
|
||||
%% ignore |
|
||||
%% {stop, Reason}
|
||||
%% Description: Start the server.
|
||||
%%--------------------------------------------------------------------
|
||||
init( [ Host, Port, TSvc, TOpts, ReconnMin, ReconnMax ] ) ->
|
||||
process_flag( trap_exit, true ),
|
||||
|
||||
State = #state{ host = Host,
|
||||
port = Port,
|
||||
thrift_svc = TSvc,
|
||||
thrift_opts = TOpts,
|
||||
reconn_min = ReconnMin,
|
||||
reconn_max = ReconnMax,
|
||||
op_cnt_dict = dict:new(),
|
||||
op_time_dict = dict:new() },
|
||||
|
||||
{ ok, try_connect( State ) }.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: %% handle_call(Request, From, State) -> {reply, Reply, State} |
|
||||
%% {reply, Reply, State, Timeout} |
|
||||
%% {noreply, State} |
|
||||
%% {noreply, State, Timeout} |
|
||||
%% {stop, Reason, Reply, State} |
|
||||
%% {stop, Reason, State}
|
||||
%% Description: Handling call messages
|
||||
%%--------------------------------------------------------------------
|
||||
handle_call( { call, Op, _ },
|
||||
_From,
|
||||
State = #state{ client = nil } ) ->
|
||||
{ reply, { error, noconn }, incr_stats( Op, "failfast", 1, State ) };
|
||||
|
||||
handle_call( { call, Op, Args },
|
||||
_From,
|
||||
State=#state{ client = Client } ) ->
|
||||
|
||||
Start = now(),
|
||||
Result = ( catch thrift_client:call( Client, Op, Args) ),
|
||||
Time = timer:now_diff( now(), Start ),
|
||||
|
||||
case Result of
|
||||
{ C, { ok, Reply } } ->
|
||||
S = incr_stats( Op, "success", Time, State#state{ client = C } ),
|
||||
{ reply, {ok, Reply }, S };
|
||||
{ _, { E, Msg } } when E == error; E == exception ->
|
||||
S = incr_stats( Op, "error", Time, try_connect( State ) ),
|
||||
{ reply, { E, Msg }, S };
|
||||
Other ->
|
||||
S = incr_stats( Op, "error", Time, try_connect( State ) ),
|
||||
{ reply, Other, S }
|
||||
end;
|
||||
|
||||
handle_call( get_stats,
|
||||
_From,
|
||||
State = #state{} ) ->
|
||||
{ reply, stats( State ), State };
|
||||
|
||||
handle_call( get_and_reset_stats,
|
||||
_From,
|
||||
State = #state{} ) ->
|
||||
{ reply, stats( State ), reset_stats( State ) }.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: handle_cast(Msg, State) -> {noreply, State} |
|
||||
%% {noreply, State, Timeout} |
|
||||
%% {stop, Reason, State}
|
||||
%% Description: Handling cast messages
|
||||
%%--------------------------------------------------------------------
|
||||
handle_cast( _Msg, State ) ->
|
||||
{ noreply, State }.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: handle_info(Info, State) -> {noreply, State} |
|
||||
%% {noreply, State, Timeout} |
|
||||
%% {stop, Reason, State}
|
||||
%% Description: Handling all non call/cast messages
|
||||
%%--------------------------------------------------------------------
|
||||
handle_info( try_connect, State ) ->
|
||||
{ noreply, try_connect( State ) };
|
||||
|
||||
handle_info( _Info, State ) ->
|
||||
{ noreply, State }.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: terminate(Reason, State) -> void()
|
||||
%% Description: This function is called by a gen_server when it is about to
|
||||
%% terminate. It should be the opposite of Module:init/1 and do any necessary
|
||||
%% cleaning up. When it returns, the gen_server terminates with Reason.
|
||||
%% The return value is ignored.
|
||||
%%--------------------------------------------------------------------
|
||||
terminate( _Reason, #state{ client = Client } ) ->
|
||||
thrift_client:close( Client ),
|
||||
ok.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Func: code_change(OldVsn, State, Extra) -> {ok, NewState}
|
||||
%% Description: Convert process state when code is changed
|
||||
%%--------------------------------------------------------------------
|
||||
code_change( _OldVsn, State, _Extra ) ->
|
||||
{ ok, State }.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%%% Internal functions
|
||||
%%--------------------------------------------------------------------
|
||||
try_connect( State = #state{ client = OldClient,
|
||||
host = Host,
|
||||
port = Port,
|
||||
thrift_svc = TSvc,
|
||||
thrift_opts = TOpts } ) ->
|
||||
|
||||
case OldClient of
|
||||
nil -> ok;
|
||||
_ -> ( catch thrift_client:close( OldClient ) )
|
||||
end,
|
||||
|
||||
case catch thrift_client_util:new( Host, Port, TSvc, TOpts ) of
|
||||
{ ok, Client } ->
|
||||
State#state{ client = Client, reconn_time = 0 };
|
||||
{ E, Msg } when E == error; E == exception ->
|
||||
ReconnTime = reconn_time( State ),
|
||||
error_logger:error_msg( "[~w] ~w connect failed (~w), trying again in ~w ms~n",
|
||||
[ self(), TSvc, Msg, ReconnTime ] ),
|
||||
erlang:send_after( ReconnTime, self(), try_connect ),
|
||||
State#state{ client = nil, reconn_time = ReconnTime }
|
||||
end.
|
||||
|
||||
|
||||
reconn_time( #state{ reconn_min = ReconnMin, reconn_time = 0 } ) ->
|
||||
ReconnMin;
|
||||
reconn_time( #state{ reconn_max = ReconnMax, reconn_time = ReconnMax } ) ->
|
||||
ReconnMax;
|
||||
reconn_time( #state{ reconn_max = ReconnMax, reconn_time = R } ) ->
|
||||
Backoff = 2 * R,
|
||||
case Backoff > ReconnMax of
|
||||
true -> ReconnMax;
|
||||
false -> Backoff
|
||||
end.
|
||||
|
||||
|
||||
incr_stats( Op, Result, Time,
|
||||
State = #state{ op_cnt_dict = OpCntDict,
|
||||
op_time_dict = OpTimeDict } ) ->
|
||||
Key = lists:flatten( [ atom_to_list( Op ), [ "_" | Result ] ] ),
|
||||
State#state{ op_cnt_dict = dict:update_counter( Key, 1, OpCntDict ),
|
||||
op_time_dict = dict:update_counter( Key, Time, OpTimeDict ) }.
|
||||
|
||||
|
||||
stats( #state{ thrift_svc = TSvc,
|
||||
op_cnt_dict = OpCntDict,
|
||||
op_time_dict = OpTimeDict } ) ->
|
||||
Svc = atom_to_list(TSvc),
|
||||
|
||||
F = fun( Key, Count, Stats ) ->
|
||||
Name = lists:flatten( [ Svc, [ "_" | Key ] ] ),
|
||||
Micros = dict:fetch( Key, OpTimeDict ),
|
||||
[ { Name, Count, Micros } | Stats ]
|
||||
end,
|
||||
|
||||
dict:fold( F, [], OpCntDict ).
|
||||
|
||||
reset_stats( State = #state{} ) ->
|
||||
State#state{ op_cnt_dict = dict:new(), op_time_dict = dict:new() }.
|
@ -1,183 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_server).
|
||||
|
||||
-behaviour(gen_server).
|
||||
|
||||
%% API
|
||||
-export([start_link/3, stop/1, take_socket/2]).
|
||||
|
||||
%% gen_server callbacks
|
||||
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
|
||||
terminate/2, code_change/3]).
|
||||
|
||||
-define(SERVER, ?MODULE).
|
||||
|
||||
-record(state, {listen_socket, acceptor_ref, service, handler}).
|
||||
|
||||
%%====================================================================
|
||||
%% API
|
||||
%%====================================================================
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: start_link() -> {ok,Pid} | ignore | {error,Error}
|
||||
%% Description: Starts the server
|
||||
%%--------------------------------------------------------------------
|
||||
start_link(Port, Service, HandlerModule) when is_integer(Port), is_atom(HandlerModule) ->
|
||||
gen_server:start_link({local, ?SERVER}, ?MODULE, {Port, Service, HandlerModule}, []).
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: stop(Pid) -> ok, {error, Reason}
|
||||
%% Description: Stops the server.
|
||||
%%--------------------------------------------------------------------
|
||||
stop(Pid) when is_pid(Pid) ->
|
||||
gen_server:call(Pid, stop).
|
||||
|
||||
|
||||
take_socket(Server, Socket) ->
|
||||
gen_server:call(Server, {take_socket, Socket}).
|
||||
|
||||
|
||||
%%====================================================================
|
||||
%% gen_server callbacks
|
||||
%%====================================================================
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: init(Args) -> {ok, State} |
|
||||
%% {ok, State, Timeout} |
|
||||
%% ignore |
|
||||
%% {stop, Reason}
|
||||
%% Description: Initiates the server
|
||||
%%--------------------------------------------------------------------
|
||||
init({Port, Service, Handler}) ->
|
||||
{ok, Socket} = gen_tcp:listen(Port,
|
||||
[binary,
|
||||
{packet, 0},
|
||||
{active, false},
|
||||
{nodelay, true},
|
||||
{reuseaddr, true}]),
|
||||
{ok, Ref} = prim_inet:async_accept(Socket, -1),
|
||||
{ok, #state{listen_socket = Socket,
|
||||
acceptor_ref = Ref,
|
||||
service = Service,
|
||||
handler = Handler}}.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: %% handle_call(Request, From, State) -> {reply, Reply, State} |
|
||||
%% {reply, Reply, State, Timeout} |
|
||||
%% {noreply, State} |
|
||||
%% {noreply, State, Timeout} |
|
||||
%% {stop, Reason, Reply, State} |
|
||||
%% {stop, Reason, State}
|
||||
%% Description: Handling call messages
|
||||
%%--------------------------------------------------------------------
|
||||
handle_call(stop, _From, State) ->
|
||||
{stop, stopped, ok, State};
|
||||
|
||||
handle_call({take_socket, Socket}, {FromPid, _Tag}, State) ->
|
||||
Result = gen_tcp:controlling_process(Socket, FromPid),
|
||||
{reply, Result, State}.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: handle_cast(Msg, State) -> {noreply, State} |
|
||||
%% {noreply, State, Timeout} |
|
||||
%% {stop, Reason, State}
|
||||
%% Description: Handling cast messages
|
||||
%%--------------------------------------------------------------------
|
||||
handle_cast(_Msg, State) ->
|
||||
{noreply, State}.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: handle_info(Info, State) -> {noreply, State} |
|
||||
%% {noreply, State, Timeout} |
|
||||
%% {stop, Reason, State}
|
||||
%% Description: Handling all non call/cast messages
|
||||
%%--------------------------------------------------------------------
|
||||
handle_info({inet_async, ListenSocket, Ref, {ok, ClientSocket}},
|
||||
State = #state{listen_socket = ListenSocket,
|
||||
acceptor_ref = Ref,
|
||||
service = Service,
|
||||
handler = Handler}) ->
|
||||
case set_sockopt(ListenSocket, ClientSocket) of
|
||||
ok ->
|
||||
%% New client connected - start processor
|
||||
start_processor(ClientSocket, Service, Handler),
|
||||
{ok, NewRef} = prim_inet:async_accept(ListenSocket, -1),
|
||||
{noreply, State#state{acceptor_ref = NewRef}};
|
||||
{error, Reason} ->
|
||||
error_logger:error_msg("Couldn't set socket opts: ~p~n",
|
||||
[Reason]),
|
||||
{stop, Reason, State}
|
||||
end;
|
||||
|
||||
handle_info({inet_async, _ListenSocket, _Ref, Error}, State) ->
|
||||
error_logger:error_msg("Error in acceptor: ~p~n", [Error]),
|
||||
{stop, Error, State};
|
||||
|
||||
handle_info(_Info, State) ->
|
||||
{noreply, State}.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Function: terminate(Reason, State) -> void()
|
||||
%% Description: This function is called by a gen_server when it is about to
|
||||
%% terminate. It should be the opposite of Module:init/1 and do any necessary
|
||||
%% cleaning up. When it returns, the gen_server terminates with Reason.
|
||||
%% The return value is ignored.
|
||||
%%--------------------------------------------------------------------
|
||||
terminate(_Reason, _State) ->
|
||||
ok.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Func: code_change(OldVsn, State, Extra) -> {ok, NewState}
|
||||
%% Description: Convert process state when code is changed
|
||||
%%--------------------------------------------------------------------
|
||||
code_change(_OldVsn, State, _Extra) ->
|
||||
{ok, State}.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%%% Internal functions
|
||||
%%--------------------------------------------------------------------
|
||||
set_sockopt(ListenSocket, ClientSocket) ->
|
||||
true = inet_db:register_socket(ClientSocket, inet_tcp),
|
||||
case prim_inet:getopts(ListenSocket,
|
||||
[active, nodelay, keepalive, delay_send, priority, tos]) of
|
||||
{ok, Opts} ->
|
||||
case prim_inet:setopts(ClientSocket, Opts) of
|
||||
ok -> ok;
|
||||
Error -> gen_tcp:close(ClientSocket),
|
||||
Error
|
||||
end;
|
||||
Error ->
|
||||
gen_tcp:close(ClientSocket),
|
||||
Error
|
||||
end.
|
||||
|
||||
start_processor(Socket, Service, Handler) ->
|
||||
Server = self(),
|
||||
|
||||
ProtoGen = fun() ->
|
||||
% Become the controlling process
|
||||
ok = take_socket(Server, Socket),
|
||||
{ok, SocketTransport} = thrift_socket_transport:new(Socket),
|
||||
{ok, BufferedTransport} = thrift_buffered_transport:new(SocketTransport),
|
||||
{ok, Protocol} = thrift_binary_protocol:new(BufferedTransport),
|
||||
{ok, Protocol}
|
||||
end,
|
||||
|
||||
spawn(thrift_processor, init, [{Server, ProtoGen, Service, Handler}]).
|
@ -1,25 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_service).
|
||||
|
||||
-export([behaviour_info/1]).
|
||||
|
||||
behaviour_info(callbacks) ->
|
||||
[{function_info, 2}].
|
@ -1,255 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_socket_server).
|
||||
|
||||
-behaviour(gen_server).
|
||||
|
||||
-export([start/1, stop/1]).
|
||||
|
||||
-export([init/1, handle_call/3, handle_cast/2, terminate/2, code_change/3,
|
||||
handle_info/2]).
|
||||
|
||||
-export([acceptor_loop/1]).
|
||||
|
||||
-record(thrift_socket_server,
|
||||
{port,
|
||||
service,
|
||||
handler,
|
||||
name,
|
||||
max=2048,
|
||||
ip=any,
|
||||
listen=null,
|
||||
acceptor=null,
|
||||
socket_opts=[{recv_timeout, 500}],
|
||||
framed=false
|
||||
}).
|
||||
|
||||
start(State=#thrift_socket_server{}) ->
|
||||
start_server(State);
|
||||
start(Options) ->
|
||||
start(parse_options(Options)).
|
||||
|
||||
stop(Name) when is_atom(Name) ->
|
||||
gen_server:cast(Name, stop);
|
||||
stop(Pid) when is_pid(Pid) ->
|
||||
gen_server:cast(Pid, stop);
|
||||
stop({local, Name}) ->
|
||||
stop(Name);
|
||||
stop({global, Name}) ->
|
||||
stop(Name);
|
||||
stop(Options) ->
|
||||
State = parse_options(Options),
|
||||
stop(State#thrift_socket_server.name).
|
||||
|
||||
%% Internal API
|
||||
|
||||
parse_options(Options) ->
|
||||
parse_options(Options, #thrift_socket_server{}).
|
||||
|
||||
parse_options([], State) ->
|
||||
State;
|
||||
parse_options([{name, L} | Rest], State) when is_list(L) ->
|
||||
Name = {local, list_to_atom(L)},
|
||||
parse_options(Rest, State#thrift_socket_server{name=Name});
|
||||
parse_options([{name, A} | Rest], State) when is_atom(A) ->
|
||||
Name = {local, A},
|
||||
parse_options(Rest, State#thrift_socket_server{name=Name});
|
||||
parse_options([{name, Name} | Rest], State) ->
|
||||
parse_options(Rest, State#thrift_socket_server{name=Name});
|
||||
parse_options([{port, L} | Rest], State) when is_list(L) ->
|
||||
Port = list_to_integer(L),
|
||||
parse_options(Rest, State#thrift_socket_server{port=Port});
|
||||
parse_options([{port, Port} | Rest], State) ->
|
||||
parse_options(Rest, State#thrift_socket_server{port=Port});
|
||||
parse_options([{ip, Ip} | Rest], State) ->
|
||||
ParsedIp = case Ip of
|
||||
any ->
|
||||
any;
|
||||
Ip when is_tuple(Ip) ->
|
||||
Ip;
|
||||
Ip when is_list(Ip) ->
|
||||
{ok, IpTuple} = inet_parse:address(Ip),
|
||||
IpTuple
|
||||
end,
|
||||
parse_options(Rest, State#thrift_socket_server{ip=ParsedIp});
|
||||
parse_options([{socket_opts, L} | Rest], State) when is_list(L), length(L) > 0 ->
|
||||
parse_options(Rest, State#thrift_socket_server{socket_opts=L});
|
||||
parse_options([{handler, Handler} | Rest], State) ->
|
||||
parse_options(Rest, State#thrift_socket_server{handler=Handler});
|
||||
parse_options([{service, Service} | Rest], State) ->
|
||||
parse_options(Rest, State#thrift_socket_server{service=Service});
|
||||
parse_options([{max, Max} | Rest], State) ->
|
||||
MaxInt = case Max of
|
||||
Max when is_list(Max) ->
|
||||
list_to_integer(Max);
|
||||
Max when is_integer(Max) ->
|
||||
Max
|
||||
end,
|
||||
parse_options(Rest, State#thrift_socket_server{max=MaxInt});
|
||||
parse_options([{framed, Framed} | Rest], State) when is_boolean(Framed) ->
|
||||
parse_options(Rest, State#thrift_socket_server{framed=Framed}).
|
||||
|
||||
start_server(State=#thrift_socket_server{name=Name}) ->
|
||||
case Name of
|
||||
undefined ->
|
||||
gen_server:start_link(?MODULE, State, []);
|
||||
_ ->
|
||||
gen_server:start_link(Name, ?MODULE, State, [])
|
||||
end.
|
||||
|
||||
init(State=#thrift_socket_server{ip=Ip, port=Port}) ->
|
||||
process_flag(trap_exit, true),
|
||||
BaseOpts = [binary,
|
||||
{reuseaddr, true},
|
||||
{packet, 0},
|
||||
{backlog, 4096},
|
||||
{recbuf, 8192},
|
||||
{active, false}],
|
||||
Opts = case Ip of
|
||||
any ->
|
||||
BaseOpts;
|
||||
Ip ->
|
||||
[{ip, Ip} | BaseOpts]
|
||||
end,
|
||||
case gen_tcp_listen(Port, Opts, State) of
|
||||
{stop, eacces} ->
|
||||
%% fdsrv module allows another shot to bind
|
||||
%% ports which require root access
|
||||
case Port < 1024 of
|
||||
true ->
|
||||
case fdsrv:start() of
|
||||
{ok, _} ->
|
||||
case fdsrv:bind_socket(tcp, Port) of
|
||||
{ok, Fd} ->
|
||||
gen_tcp_listen(Port, [{fd, Fd} | Opts], State);
|
||||
_ ->
|
||||
{stop, fdsrv_bind_failed}
|
||||
end;
|
||||
_ ->
|
||||
{stop, fdsrv_start_failed}
|
||||
end;
|
||||
false ->
|
||||
{stop, eacces}
|
||||
end;
|
||||
Other ->
|
||||
error_logger:info_msg("thrift service listening on port ~p", [Port]),
|
||||
Other
|
||||
end.
|
||||
|
||||
gen_tcp_listen(Port, Opts, State) ->
|
||||
case gen_tcp:listen(Port, Opts) of
|
||||
{ok, Listen} ->
|
||||
{ok, ListenPort} = inet:port(Listen),
|
||||
{ok, new_acceptor(State#thrift_socket_server{listen=Listen,
|
||||
port=ListenPort})};
|
||||
{error, Reason} ->
|
||||
{stop, Reason}
|
||||
end.
|
||||
|
||||
new_acceptor(State=#thrift_socket_server{max=0}) ->
|
||||
error_logger:error_msg("Not accepting new connections"),
|
||||
State#thrift_socket_server{acceptor=null};
|
||||
new_acceptor(State=#thrift_socket_server{listen=Listen,
|
||||
service=Service, handler=Handler,
|
||||
socket_opts=Opts, framed=Framed
|
||||
}) ->
|
||||
Pid = proc_lib:spawn_link(?MODULE, acceptor_loop,
|
||||
[{self(), Listen, Service, Handler, Opts, Framed}]),
|
||||
State#thrift_socket_server{acceptor=Pid}.
|
||||
|
||||
acceptor_loop({Server, Listen, Service, Handler, SocketOpts, Framed})
|
||||
when is_pid(Server), is_list(SocketOpts) ->
|
||||
case catch gen_tcp:accept(Listen) of % infinite timeout
|
||||
{ok, Socket} ->
|
||||
gen_server:cast(Server, {accepted, self()}),
|
||||
ProtoGen = fun() ->
|
||||
{ok, SocketTransport} = thrift_socket_transport:new(Socket, SocketOpts),
|
||||
{ok, Transport} =
|
||||
case Framed of
|
||||
true -> thrift_framed_transport:new(SocketTransport);
|
||||
false -> thrift_buffered_transport:new(SocketTransport)
|
||||
end,
|
||||
{ok, Protocol} = thrift_binary_protocol:new(Transport),
|
||||
{ok, Protocol}
|
||||
end,
|
||||
thrift_processor:init({Server, ProtoGen, Service, Handler});
|
||||
{error, closed} ->
|
||||
exit({error, closed});
|
||||
Other ->
|
||||
error_logger:error_report(
|
||||
[{application, thrift},
|
||||
"Accept failed error",
|
||||
lists:flatten(io_lib:format("~p", [Other]))]),
|
||||
exit({error, accept_failed})
|
||||
end.
|
||||
|
||||
handle_call({get, port}, _From, State=#thrift_socket_server{port=Port}) ->
|
||||
{reply, Port, State};
|
||||
handle_call(_Message, _From, State) ->
|
||||
Res = error,
|
||||
{reply, Res, State}.
|
||||
|
||||
handle_cast({accepted, Pid},
|
||||
State=#thrift_socket_server{acceptor=Pid, max=Max}) ->
|
||||
% io:format("accepted ~p~n", [Pid]),
|
||||
State1 = State#thrift_socket_server{max=Max - 1},
|
||||
{noreply, new_acceptor(State1)};
|
||||
handle_cast(stop, State) ->
|
||||
{stop, normal, State}.
|
||||
|
||||
terminate(_Reason, #thrift_socket_server{listen=Listen, port=Port}) ->
|
||||
gen_tcp:close(Listen),
|
||||
case Port < 1024 of
|
||||
true ->
|
||||
catch fdsrv:stop(),
|
||||
ok;
|
||||
false ->
|
||||
ok
|
||||
end.
|
||||
|
||||
code_change(_OldVsn, State, _Extra) ->
|
||||
State.
|
||||
|
||||
handle_info({'EXIT', Pid, normal},
|
||||
State=#thrift_socket_server{acceptor=Pid}) ->
|
||||
{noreply, new_acceptor(State)};
|
||||
handle_info({'EXIT', Pid, Reason},
|
||||
State=#thrift_socket_server{acceptor=Pid}) ->
|
||||
error_logger:error_report({?MODULE, ?LINE,
|
||||
{acceptor_error, Reason}}),
|
||||
timer:sleep(100),
|
||||
{noreply, new_acceptor(State)};
|
||||
handle_info({'EXIT', _LoopPid, Reason},
|
||||
State=#thrift_socket_server{acceptor=Pid, max=Max}) ->
|
||||
case Reason of
|
||||
normal -> ok;
|
||||
shutdown -> ok;
|
||||
_ -> error_logger:error_report({?MODULE, ?LINE,
|
||||
{child_error, Reason, erlang:get_stacktrace()}})
|
||||
end,
|
||||
State1 = State#thrift_socket_server{max=Max + 1},
|
||||
State2 = case Pid of
|
||||
null -> new_acceptor(State1);
|
||||
_ -> State1
|
||||
end,
|
||||
{noreply, State2};
|
||||
handle_info(Info, State) ->
|
||||
error_logger:info_report([{'INFO', Info}, {'State', State}]),
|
||||
{noreply, State}.
|
@ -1,124 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_socket_transport).
|
||||
|
||||
-behaviour(thrift_transport).
|
||||
|
||||
-export([new/1,
|
||||
new/2,
|
||||
write/2, read/2, flush/1, close/1,
|
||||
|
||||
new_transport_factory/3]).
|
||||
|
||||
-record(data, {socket,
|
||||
recv_timeout=infinity}).
|
||||
-type state() :: #data{}.
|
||||
-include("thrift_transport_behaviour.hrl").
|
||||
|
||||
new(Socket) ->
|
||||
new(Socket, []).
|
||||
|
||||
new(Socket, Opts) when is_list(Opts) ->
|
||||
State =
|
||||
case lists:keysearch(recv_timeout, 1, Opts) of
|
||||
{value, {recv_timeout, Timeout}}
|
||||
when is_integer(Timeout), Timeout > 0 ->
|
||||
#data{socket=Socket, recv_timeout=Timeout};
|
||||
_ ->
|
||||
#data{socket=Socket}
|
||||
end,
|
||||
thrift_transport:new(?MODULE, State).
|
||||
|
||||
%% Data :: iolist()
|
||||
write(This = #data{socket = Socket}, Data) ->
|
||||
{This, gen_tcp:send(Socket, Data)}.
|
||||
|
||||
read(This = #data{socket=Socket, recv_timeout=Timeout}, Len)
|
||||
when is_integer(Len), Len >= 0 ->
|
||||
case gen_tcp:recv(Socket, Len, Timeout) of
|
||||
Err = {error, timeout} ->
|
||||
gen_tcp:close(Socket),
|
||||
{This, Err};
|
||||
Data ->
|
||||
{This, Data}
|
||||
end.
|
||||
|
||||
%% We can't really flush - everything is flushed when we write
|
||||
flush(This) ->
|
||||
{This, ok}.
|
||||
|
||||
close(This = #data{socket = Socket}) ->
|
||||
{This, gen_tcp:close(Socket)}.
|
||||
|
||||
|
||||
%%%% FACTORY GENERATION %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
||||
|
||||
|
||||
%% The following "local" record is filled in by parse_factory_options/2
|
||||
%% below. These options can be passed to new_protocol_factory/3 in a
|
||||
%% proplists-style option list. They're parsed like this so it is an O(n)
|
||||
%% operation instead of O(n^2)
|
||||
-record(factory_opts, {connect_timeout = infinity,
|
||||
sockopts = [],
|
||||
framed = false}).
|
||||
|
||||
parse_factory_options([], Opts) ->
|
||||
Opts;
|
||||
parse_factory_options([{framed, Bool} | Rest], Opts) when is_boolean(Bool) ->
|
||||
parse_factory_options(Rest, Opts#factory_opts{framed=Bool});
|
||||
parse_factory_options([{sockopts, OptList} | Rest], Opts) when is_list(OptList) ->
|
||||
parse_factory_options(Rest, Opts#factory_opts{sockopts=OptList});
|
||||
parse_factory_options([{connect_timeout, TO} | Rest], Opts) when TO =:= infinity; is_integer(TO) ->
|
||||
parse_factory_options(Rest, Opts#factory_opts{connect_timeout=TO});
|
||||
parse_factory_options([{recv_timeout, TO} | Rest], Opts) when TO =:= infinity; is_integer(TO) ->
|
||||
parse_factory_options(Rest, Opts).
|
||||
|
||||
|
||||
%%
|
||||
%% Generates a "transport factory" function - a fun which returns a thrift_transport()
|
||||
%% instance.
|
||||
%% This can be passed into a protocol factory to generate a connection to a
|
||||
%% thrift server over a socket.
|
||||
%%
|
||||
new_transport_factory(Host, Port, Options) ->
|
||||
ParsedOpts = parse_factory_options(Options, #factory_opts{}),
|
||||
|
||||
F = fun() ->
|
||||
SockOpts = [binary,
|
||||
{packet, 0},
|
||||
{active, false},
|
||||
{nodelay, true} |
|
||||
ParsedOpts#factory_opts.sockopts],
|
||||
case catch gen_tcp:connect(Host, Port, SockOpts,
|
||||
ParsedOpts#factory_opts.connect_timeout) of
|
||||
{ok, Sock} ->
|
||||
{ok, Transport} =
|
||||
thrift_socket_transport:new(Sock, Options),
|
||||
{ok, BufTransport} =
|
||||
case ParsedOpts#factory_opts.framed of
|
||||
true -> thrift_framed_transport:new(Transport);
|
||||
false -> thrift_buffered_transport:new(Transport)
|
||||
end,
|
||||
{ok, BufTransport};
|
||||
Error ->
|
||||
Error
|
||||
end
|
||||
end,
|
||||
{ok, F}.
|
@ -1,78 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_transport).
|
||||
|
||||
-export([behaviour_info/1]).
|
||||
|
||||
-export([new/2,
|
||||
write/2,
|
||||
read/2,
|
||||
flush/1,
|
||||
close/1
|
||||
]).
|
||||
|
||||
behaviour_info(callbacks) ->
|
||||
[{read, 2},
|
||||
{write, 2},
|
||||
{flush, 1},
|
||||
{close, 1}
|
||||
].
|
||||
|
||||
-record(transport, {module, data}).
|
||||
|
||||
-ifdef(transport_wrapper_module).
|
||||
-define(debug_wrap(Transport),
|
||||
case Transport#transport.module of
|
||||
?transport_wrapper_module ->
|
||||
Transport;
|
||||
_Else ->
|
||||
{ok, Result} = ?transport_wrapper_module:new(Transport),
|
||||
Result
|
||||
end).
|
||||
-else.
|
||||
-define(debug_wrap(Transport), Transport).
|
||||
-endif.
|
||||
|
||||
new(Module, Data) when is_atom(Module) ->
|
||||
Transport0 = #transport{module = Module, data = Data},
|
||||
Transport1 = ?debug_wrap(Transport0),
|
||||
{ok, Transport1}.
|
||||
|
||||
-spec write(#transport{}, iolist() | binary()) -> {#transport{}, ok | {error, _Reason}}.
|
||||
write(Transport, Data) ->
|
||||
Module = Transport#transport.module,
|
||||
{NewTransData, Result} = Module:write(Transport#transport.data, Data),
|
||||
{Transport#transport{data = NewTransData}, Result}.
|
||||
|
||||
-spec read(#transport{}, non_neg_integer()) -> {#transport{}, {ok, binary()} | {error, _Reason}}.
|
||||
read(Transport, Len) when is_integer(Len) ->
|
||||
Module = Transport#transport.module,
|
||||
{NewTransData, Result} = Module:read(Transport#transport.data, Len),
|
||||
{Transport#transport{data = NewTransData}, Result}.
|
||||
|
||||
-spec flush(#transport{}) -> {#transport{}, ok | {error, _Reason}}.
|
||||
flush(Transport = #transport{module = Module, data = Data}) ->
|
||||
{NewTransData, Result} = Module:flush(Data),
|
||||
{Transport#transport{data = NewTransData}, Result}.
|
||||
|
||||
-spec close(#transport{}) -> {#transport{}, ok | {error, _Reason}}.
|
||||
close(Transport = #transport{module = Module, data = Data}) ->
|
||||
{NewTransData, Result} = Module:close(Data),
|
||||
{Transport#transport{data = NewTransData}, Result}.
|
@ -1,31 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
%% Signature specifications for transport implementations.
|
||||
|
||||
-ifndef(THRIFT_TRANSPORT_BEHAVIOUR_INCLUDED).
|
||||
-define(THRIFT_TRANSPORT_BEHAVIOUR_INCLUDED, true).
|
||||
|
||||
-spec write(state(), iolist() | binary()) -> {state(), ok | {error, _Reason}}.
|
||||
-spec read(state(), non_neg_integer()) -> {state(), {ok, binary()} | {error, _Reason}}.
|
||||
-spec flush(state()) -> {state(), ok | {error, _Reason}}.
|
||||
-spec close(state()) -> {state(), ok | {error, _Reason}}.
|
||||
|
||||
|
||||
-endif.
|
@ -1,117 +0,0 @@
|
||||
%%
|
||||
%% Licensed to the Apache Software Foundation (ASF) under one
|
||||
%% or more contributor license agreements. See the NOTICE file
|
||||
%% distributed with this work for additional information
|
||||
%% regarding copyright ownership. The ASF licenses this file
|
||||
%% to you under the Apache License, Version 2.0 (the
|
||||
%% "License"); you may not use this file except in compliance
|
||||
%% with the License. You may obtain a copy of the License at
|
||||
%%
|
||||
%% http://www.apache.org/licenses/LICENSE-2.0
|
||||
%%
|
||||
%% Unless required by applicable law or agreed to in writing,
|
||||
%% software distributed under the License is distributed on an
|
||||
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
%% KIND, either express or implied. See the License for the
|
||||
%% specific language governing permissions and limitations
|
||||
%% under the License.
|
||||
%%
|
||||
|
||||
-module(thrift_transport_state_test).
|
||||
|
||||
-behaviour(gen_server).
|
||||
-behaviour(thrift_transport).
|
||||
|
||||
%% API
|
||||
-export([new/1]).
|
||||
|
||||
%% gen_server callbacks
|
||||
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
|
||||
terminate/2, code_change/3]).
|
||||
|
||||
%% thrift_transport callbacks
|
||||
-export([write/2, read/2, flush/1, close/1]).
|
||||
|
||||
-record(trans, {wrapped, % #thrift_transport{}
|
||||
version :: integer(),
|
||||
counter :: pid()
|
||||
}).
|
||||
-type state() :: #trans{}.
|
||||
-include("thrift_transport_behaviour.hrl").
|
||||
|
||||
-record(state, {cversion :: integer()}).
|
||||
|
||||
|
||||
new(WrappedTransport) ->
|
||||
case gen_server:start_link(?MODULE, [], []) of
|
||||
{ok, Pid} ->
|
||||
Trans = #trans{wrapped = WrappedTransport,
|
||||
version = 0,
|
||||
counter = Pid},
|
||||
thrift_transport:new(?MODULE, Trans);
|
||||
Else ->
|
||||
Else
|
||||
end.
|
||||
|
||||
%%====================================================================
|
||||
%% thrift_transport callbacks
|
||||
%%====================================================================
|
||||
|
||||
write(Transport0 = #trans{wrapped = Wrapped0}, Data) ->
|
||||
Transport1 = check_version(Transport0),
|
||||
{Wrapped1, Result} = thrift_transport:write(Wrapped0, Data),
|
||||
Transport2 = Transport1#trans{wrapped = Wrapped1},
|
||||
{Transport2, Result}.
|
||||
|
||||
flush(Transport0 = #trans{wrapped = Wrapped0}) ->
|
||||
Transport1 = check_version(Transport0),
|
||||
{Wrapped1, Result} = thrift_transport:flush(Wrapped0),
|
||||
Transport2 = Transport1#trans{wrapped = Wrapped1},
|
||||
{Transport2, Result}.
|
||||
|
||||
close(Transport0 = #trans{wrapped = Wrapped0}) ->
|
||||
Transport1 = check_version(Transport0),
|
||||
shutdown_counter(Transport1),
|
||||
{Wrapped1, Result} = thrift_transport:close(Wrapped0),
|
||||
Transport2 = Transport1#trans{wrapped = Wrapped1},
|
||||
{Transport2, Result}.
|
||||
|
||||
read(Transport0 = #trans{wrapped = Wrapped0}, Len) ->
|
||||
Transport1 = check_version(Transport0),
|
||||
{Wrapped1, Result} = thrift_transport:read(Wrapped0, Len),
|
||||
Transport2 = Transport1#trans{wrapped = Wrapped1},
|
||||
{Transport2, Result}.
|
||||
|
||||
|
||||
%%====================================================================
|
||||
%% gen_server callbacks
|
||||
%%====================================================================
|
||||
|
||||
init([]) ->
|
||||
{ok, #state{cversion = 0}}.
|
||||
|
||||
handle_call(check_version, _From, State = #state{cversion = Version}) ->
|
||||
{reply, Version, State#state{cversion = Version+1}}.
|
||||
|
||||
handle_cast(shutdown, State) ->
|
||||
{stop, normal, State}.
|
||||
|
||||
handle_info(_Info, State) -> {noreply, State}.
|
||||
code_change(_OldVsn, State, _Extra) -> {ok, State}.
|
||||
terminate(_Reason, _State) -> ok.
|
||||
|
||||
%%--------------------------------------------------------------------
|
||||
%% Internal functions
|
||||
%%--------------------------------------------------------------------
|
||||
|
||||
check_version(Transport = #trans{version = Version, counter = Counter}) ->
|
||||
case gen_server:call(Counter, check_version) of
|
||||
Version ->
|
||||
Transport#trans{version = Version+1};
|
||||
_Else ->
|
||||
% State wasn't propagated properly. Die.
|
||||
erlang:error(state_not_propagated)
|
||||
end.
|
||||
|
||||
shutdown_counter(#trans{counter = Counter}) ->
|
||||
gen_server:cast(Counter, shutdown).
|
@ -1,4 +1,3 @@
|
||||
ExUnit.configure(exclude: [pending: true])
|
||||
ExUnit.start()
|
||||
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user