Skip to content

Commit

Permalink
feat(cql): implement decimal CQL type
Browse files Browse the repository at this point in the history
Fix #52
  • Loading branch information
thibaultcha committed Aug 18, 2016
1 parent ee483cf commit 631c357
Show file tree
Hide file tree
Showing 5 changed files with 84 additions and 43 deletions.
104 changes: 61 additions & 43 deletions lib/cassandra/cql.lua
Original file line number Diff line number Diff line change
Expand Up @@ -583,6 +583,24 @@ do
return ldexp(mantissa, exponent - 0x7F)
end

-- Serialize a CQL decimal given as a table {scale = <int>, value = <int>}.
-- Both fields are encoded as 4-byte ints (scale first, then the unscaled
-- value). Raises a descriptive error when either field is not a number.
local function marsh_decimal(t)
  local scale_type = type(t.scale)
  local value_type = type(t.value)
  if scale_type ~= 'number' then
    error('bad scale (number expected, got '..scale_type..')')
  end
  if value_type ~= 'number' then
    error('bad value (number expected, got '..value_type..')')
  end
  return marsh_int(t.scale)..marsh_int(t.value)
end

-- Deserialize a CQL decimal from the buffer: the scale is read first,
-- then the unscaled value, each as a 4-byte int.
-- Returns a table {scale = <int>, value = <int>}.
local function unmarsh_decimal(buffer)
  local scale = buffer:read_int()
  local value = buffer:read_int()
  return { scale = scale, value = value }
end

-------------------
-- Nested CQL types
-------------------
Expand Down Expand Up @@ -705,29 +723,29 @@ do
-- CQL Marshalling
------------------

-- Dispatch table mapping a CQL type id to its marshaller.
-- NOTE(review): the scraped diff interleaved the pre- and post-commit
-- versions of this table (duplicate declaration, missing comma); this is
-- the reconstructed post-commit table including the decimal entry.
local cql_marshallers = {
  -- custom = 0x00,
  [cql_types.ascii]     = marsh_raw,
  [cql_types.bigint]    = marsh_bigint,
  [cql_types.blob]      = marsh_raw,
  [cql_types.boolean]   = marsh_boolean,
  [cql_types.counter]   = marsh_bigint,
  [cql_types.decimal]   = marsh_decimal,
  [cql_types.double]    = marsh_double,
  [cql_types.float]     = marsh_float,
  [cql_types.inet]      = marsh_inet,
  [cql_types.int]       = marsh_int,
  [cql_types.text]      = marsh_raw,
  [cql_types.list]      = marsh_set,
  [cql_types.map]       = marsh_map,
  [cql_types.set]       = marsh_set,
  [cql_types.uuid]      = marsh_uuid,
  [cql_types.timestamp] = marsh_bigint,
  [cql_types.varchar]   = marsh_raw,
  [cql_types.varint]    = marsh_int,
  [cql_types.timeuuid]  = marsh_uuid,
  [cql_types.udt]       = marsh_udt,
  [cql_types.tuple]     = marsh_tuple
}

marsh_cql_value = function(val, version)
Expand Down Expand Up @@ -776,28 +794,28 @@ do
--------------------

-- Dispatch table mapping a CQL type id to its unmarshaller.
-- NOTE(review): the scraped diff interleaved the pre- and post-commit
-- versions of this table (stale duplicate entries, missing comma); this is
-- the reconstructed post-commit table including the decimal entry.
local cql_unmarshallers = {
  -- custom = 0x00,
  [cql_types.ascii]     = unmarsh_raw,
  [cql_types.bigint]    = unmarsh_bigint,
  [cql_types.blob]      = unmarsh_raw,
  [cql_types.boolean]   = unmarsh_boolean,
  [cql_types.counter]   = unmarsh_bigint,
  [cql_types.decimal]   = unmarsh_decimal,
  [cql_types.double]    = unmarsh_double,
  [cql_types.float]     = unmarsh_float,
  [cql_types.inet]      = unmarsh_inet,
  [cql_types.int]       = unmarsh_int,
  [cql_types.text]      = unmarsh_raw,
  [cql_types.list]      = unmarsh_set,
  [cql_types.map]       = unmarsh_map,
  [cql_types.set]       = unmarsh_set,
  [cql_types.uuid]      = unmarsh_uuid,
  [cql_types.timestamp] = unmarsh_bigint,
  [cql_types.varchar]   = unmarsh_raw,
  [cql_types.varint]    = unmarsh_int,
  [cql_types.timeuuid]  = unmarsh_uuid,
  [cql_types.udt]       = unmarsh_udt,
  [cql_types.tuple]     = unmarsh_tuple
}

-- Read a CQL value with a given CQL type
Expand Down
1 change: 1 addition & 0 deletions lib/cassandra/init.lua
Original file line number Diff line number Diff line change
Expand Up @@ -622,6 +622,7 @@ end
-- @field counter CQL counter.
-- cassandra.counter(1)
-- @field decimal CQL decimal.
-- cassandra.decimal({value = 256, scale = 5})
-- @field float CQL float.
-- cassandra.float(1.618033)
-- @field int CQL int.
Expand Down
17 changes: 17 additions & 0 deletions spec/01-unit/03-cql_spec.lua
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,23 @@ for protocol_version = 2, 3 do
end)
end

describe("[decimal]", function()
  -- Marshalling must validate both fields of the decimal table and raise
  -- a descriptive error naming the offending field and the actual type.
  it("errors with invalid value", function()
    assert.has_error(function()
      local arg = cassandra.decimal({value = "", scale = 1})
      Buffer.new(protocol_version):write_cql_value(arg)
    end, "bad value (number expected, got string)")
  end)
  it("errors with invalid scale", function()
    assert.has_error(function()
      local arg = cassandra.decimal({value = 1, scale = ""})
      Buffer.new(protocol_version):write_cql_value(arg)
    end, "bad scale (number expected, got string)")
  end)
end)

it("[list<T>]", function()
local fixtures = helpers.cql_list_fixtures
for i = 1, #fixtures do
Expand Down
1 change: 1 addition & 0 deletions spec/02-integration/01-host_spec.lua
Original file line number Diff line number Diff line change
Expand Up @@ -547,6 +547,7 @@ describe("cassandra (host)", function()
bigint_sample bigint,
blob_sample blob,
boolean_sample boolean,
decimal_sample decimal,
double_sample double,
float_sample float,
int_sample int,
Expand Down
4 changes: 4 additions & 0 deletions spec/helpers.lua
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,10 @@ _M.cql_fixtures = {
bigint = {0, 42, -42, 42000000000},
boolean = {true, false},
-- counter
decimal = {
{value = 256, scale = 5},
{value = 4, scale = 3}
},
double = {0, 1.0000000000000004, -1.0000000000000004},
float = {0, 3.14151, -3.14151},
inet = {
Expand Down

0 comments on commit 631c357

Please sign in to comment.