Remove all Riak code and its dependencies

Closes #75
Greg Karékinian 2018-04-16 11:54:32 +02:00
parent e876f4ba52
commit ac6bec48c4
11 changed files with 5 additions and 2527 deletions

View File

@@ -3,8 +3,6 @@ source "https://rubygems.org"
 gem "sinatra"
 gem "sinatra-contrib"
 gem "activesupport"
-gem "riak-client", git: "https://github.com/5apps/riak-ruby-client", branch: "invalid_uri_error"
-gem "fog-aws"
 gem "rest-client", "~> 2.1.0.rc1" # Fixes a memory leak in Ruby 2.4
 gem "redis"
 # Remove require when we can update to 3.0, which sets the new storage

View File

@@ -1,15 +1,3 @@
-GIT
-  remote: https://github.com/5apps/riak-ruby-client
-  revision: 5f21df86b14339aeb252374851d29ad813cca1dd
-  branch: invalid_uri_error
-  specs:
-    riak-client (1.4.0)
-      beefcake (~> 0.3.7)
-      builder (>= 2.1.2)
-      i18n (>= 0.4.0)
-      innertube (~> 1.0.2)
-      multi_json (~> 1.0)
-
 GEM
   remote: https://rubygems.org/
   specs:
@@ -19,36 +7,15 @@ GEM
       minitest (~> 5.1)
       tzinfo (~> 1.1)
     backports (3.8.0)
-    beefcake (0.3.7)
-    builder (3.2.3)
     concurrent-ruby (1.0.5)
     domain_name (0.5.20170404)
       unf (>= 0.0.5, < 1.0.0)
-    excon (0.57.1)
     faraday (0.12.1)
       multipart-post (>= 1.2, < 3)
-    fog-aws (1.4.0)
-      fog-core (~> 1.38)
-      fog-json (~> 1.0)
-      fog-xml (~> 0.1)
-      ipaddress (~> 0.8)
-    fog-core (1.44.3)
-      builder
-      excon (~> 0.49)
-      formatador (~> 0.2)
-    fog-json (1.0.2)
-      fog-core (~> 1.0)
-      multi_json (~> 1.10)
-    fog-xml (0.1.3)
-      fog-core
-      nokogiri (>= 1.5.11, < 2.0.0)
-    formatador (0.2.5)
     http-accept (1.7.0)
     http-cookie (1.0.3)
       domain_name (~> 0.5)
     i18n (0.8.4)
-    innertube (1.0.2)
-    ipaddress (0.8.3)
     kgio (2.11.0)
     m (1.5.1)
       method_source (>= 0.6.7)
@@ -57,15 +24,12 @@ GEM
     mime-types (3.1)
       mime-types-data (~> 3.2015)
     mime-types-data (3.2016.0521)
-    mini_portile2 (2.2.0)
     minitest (5.10.2)
     minitest-stub_any_instance (1.0.1)
     multi_json (1.12.1)
     multipart-post (2.0.0)
     mustermann (1.0.0)
     netrc (0.11.0)
-    nokogiri (1.8.0)
-      mini_portile2 (~> 2.2.0)
     purdytest (2.0.0)
       minitest (~> 5.5)
     rack (2.0.3)
@@ -115,7 +79,6 @@ PLATFORMS
 
 DEPENDENCIES
   activesupport
-  fog-aws
   m
   mime-types
   minitest-stub_any_instance
@@ -125,10 +88,9 @@ DEPENDENCIES
   rake
   redis
   rest-client (~> 2.1.0.rc1)
-  riak-client!
   sentry-raven
   sinatra
   sinatra-contrib
 
 BUNDLED WITH
-   1.15.1
+   1.16.0

View File

@@ -4,8 +4,8 @@
 
 Liquor Cabinet is where Frank stores all his stuff. It's a
 remoteStorage-compatible storage provider API, based on Sinatra and currently
-using Riak as backend. You can use it on its own, or e.g. mount it from a Rails
-application.
+using Openstack Swift as backend. You can use it on its own, or e.g. mount it
+from a Rails application.
 
 It's merely implementing the storage API, not including the Webfinger and OAuth
 parts of remoteStorage. You have to set the authorization keys/values in the
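Since the README mentions mounting the API from a Rails application, here is a minimal sketch of what that could look like. It is not part of this commit; the `/storage` mount point is an arbitrary choice, and it assumes the `LiquorCabinet` Sinatra class from `liquor-cabinet.rb` has been required and configured:

```ruby
# config/routes.rb in a hypothetical host Rails application
Rails.application.routes.draw do
  # LiquorCabinet is a Sinatra::Base subclass, i.e. a Rack app,
  # so it can be mounted directly under a path prefix.
  mount LiquorCabinet => "/storage"
end
```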

View File

@@ -1,19 +1,6 @@
 development: &defaults
   maintenance: false
-  # riak: &riak_defaults
-  #   host: localhost
-  #   http_port: 8098
-  #   riak_cs:
-  #     credentials_file: "cs_credentials.json"
-  #     endpoint: "http://cs.example.com:8080"
-  #   buckets:
-  #     data: rs_data
-  #     directories: rs_directories
-  #     binaries: rs_binaries
-  #     cs_binaries: rs.binaries
-  #     authorizations: rs_authorizations
-  #     opslog: rs_opslog
-  # # uncomment this section and comment the riak one
+  # # uncomment this section
   # swift: &swift_defaults
   #   host: "https://swift.example.com"
   # # Redis is needed for the swift backend
@@ -23,15 +10,6 @@ development: &defaults
 
 test:
   <<: *defaults
-  # riak:
-  #   <<: *riak_defaults
-  #   buckets:
-  #     data: rs_data_test
-  #     directories: rs_directories_test
-  #     binaries: rs_binaries_test
-  #     cs_binaries: rs.binaries.test
-  #     authorizations: rs_authorizations_test
-  #     opslog: rs_opslog_test
   swift:
     host: "https://swift.example.com"
   redis:
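With the Riak section gone, the example config only documents the swift and redis settings. As a rough sketch of how such a YAML section is consumed, assuming Sinatra's `config_file` extension from sinatra-contrib (which `liquor-cabinet.rb` already requires); the `ExampleApp` class and the `/swift-host` route are made up for illustration:

```ruby
require "sinatra/base"
require "sinatra/config_file"

class ExampleApp < Sinatra::Base
  register Sinatra::ConfigFile

  # Loads the section matching the current environment
  # (development, test, ...) and exposes each top-level key
  # as a setting, e.g. settings.swift and settings.redis.
  config_file "config.yml"

  get "/swift-host" do
    halt 404 unless settings.respond_to?(:swift)
    settings.swift["host"]
  end
end
```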

View File

@@ -1,531 +0,0 @@
require "riak"
require "json"
require "cgi"
require "active_support/core_ext/time/conversions"
require "active_support/core_ext/numeric/time"
module RemoteStorage
class Riak
::Riak.url_decoding = true
attr_accessor :settings, :server, :cs_credentials
def initialize(settings, server)
self.settings = settings
self.server = server
credentials = File.read(settings.riak['riak_cs']['credentials_file'])
self.cs_credentials = JSON.parse(credentials)
end
def authorize_request(user, directory, token, listing=false)
request_method = server.env["REQUEST_METHOD"]
if directory.split("/").first == "public"
return true if ["GET", "HEAD"].include?(request_method) && !listing
end
authorizations = auth_bucket.get("#{user}:#{token}").data
permission = directory_permission(authorizations, directory)
server.halt 401 unless permission
if ["PUT", "DELETE"].include? request_method
server.halt 401 unless permission == "rw"
end
rescue ::Riak::HTTPFailedRequest
server.halt 401
end
def get_head(user, directory, key)
object = data_bucket.get("#{user}:#{directory}:#{key}")
set_object_response_headers(object)
server.halt 200
rescue ::Riak::HTTPFailedRequest
server.halt 404
end
def get_data(user, directory, key)
object = data_bucket.get("#{user}:#{directory}:#{key}")
set_object_response_headers(object)
none_match = (server.env["HTTP_IF_NONE_MATCH"] || "").split(",").map(&:strip)
server.halt 304 if none_match.include? object.etag
if binary_key = object.meta["binary_key"]
object = cs_binary_bucket.files.get(binary_key[0])
case object.content_type[/^[^;\s]+/]
when "application/json"
return object.body.to_json
else
return object.body
end
end
case object.content_type[/^[^;\s]+/]
when "application/json"
return object.data.to_json
else
data = serializer_for(object.content_type) ? object.data : object.raw_data
# Never return nil, always turn data into a string
return data.nil? ? '' : data
end
rescue ::Riak::HTTPFailedRequest
server.halt 404
end
def get_head_directory_listing(user, directory)
directory_object = directory_bucket.get("#{user}:#{directory}")
set_directory_response_headers(directory_object)
server.halt 200
rescue ::Riak::HTTPFailedRequest
server.halt 404
end
def get_directory_listing(user, directory)
directory_object = directory_bucket.get("#{user}:#{directory}")
set_directory_response_headers(directory_object)
none_match = (server.env["HTTP_IF_NONE_MATCH"] || "").split(",").map(&:strip)
server.halt 304 if none_match.include? directory_object.etag
listing = directory_listing(user, directory)
return listing.to_json
rescue ::Riak::HTTPFailedRequest
server.halt 404
end
def put_data(user, directory, key, data, content_type=nil)
server.halt 409 if has_name_collision?(user, directory, key)
object = build_data_object(user, directory, key, data, content_type)
if required_match = server.env["HTTP_IF_MATCH"]
server.halt 412 unless required_match == object.etag
end
object_exists = !object.raw_data.nil? || !object.meta["binary_key"].nil?
existing_object_size = object_size(object)
server.halt 412 if object_exists && server.env["HTTP_IF_NONE_MATCH"] == "*"
timestamp = (Time.now.to_f * 1000).to_i
object.meta["timestamp"] = timestamp
if binary_data?(object.content_type, data)
save_binary_data(object, data) or server.halt 422
new_object_size = data.size
else
set_object_data(object, data) or server.halt 422
new_object_size = object.raw_data.size
end
object.store
log_count = object_exists ? 0 : 1
log_operation(user, directory, log_count, new_object_size, existing_object_size)
update_all_directory_objects(user, directory, timestamp)
server.headers["ETag"] = object.etag
server.halt object_exists ? 200 : 201
rescue ::Riak::HTTPFailedRequest
server.halt 422
end
def delete_data(user, directory, key)
object = data_bucket.get("#{user}:#{directory}:#{key}")
existing_object_size = object_size(object)
etag = object.etag
if required_match = server.env["HTTP_IF_MATCH"]
server.halt 412 unless required_match == etag
end
if binary_key = object.meta["binary_key"]
object = cs_binary_bucket.files.get(binary_key[0])
object.destroy
end
riak_response = data_bucket.delete("#{user}:#{directory}:#{key}")
if riak_response[:code] != 404
log_operation(user, directory, -1, 0, existing_object_size)
end
timestamp = (Time.now.to_f * 1000).to_i
delete_or_update_directory_objects(user, directory, timestamp)
server.halt 200
rescue ::Riak::HTTPFailedRequest
server.halt 404
end
private
def set_object_response_headers(object)
server.headers["Content-Type"] = object.content_type
server.headers["ETag"] = object.etag
server.headers["Content-Length"] = object_size(object).to_s
end
def set_directory_response_headers(directory_object)
server.headers["Content-Type"] = "application/json"
server.headers["ETag"] = directory_object.etag
end
def extract_category(directory)
if directory.match(/^public\//)
"public/#{directory.split('/')[1]}"
else
directory.split('/').first
end
end
def build_data_object(user, directory, key, data, content_type=nil)
object = data_bucket.get_or_new("#{user}:#{directory}:#{key}")
object.content_type = content_type || "text/plain; charset=utf-8"
directory_index = directory == "" ? "/" : directory
object.indexes.merge!({:user_id_bin => [user],
:directory_bin => [directory_index]})
object
end
def log_operation(user, directory, count, new_size=0, old_size=0)
size = (-old_size + new_size)
return if count == 0 && size == 0
log_entry = opslog_bucket.new
log_entry.content_type = "application/json"
log_entry.data = {
"count" => count,
"size" => size,
"category" => extract_category(directory)
}
log_entry.indexes.merge!({:user_id_bin => [user]})
log_entry.store
end
def object_size(object)
if binary_key = object.meta["binary_key"]
response = cs_client.head_object cs_binary_bucket.key, binary_key[0]
response.headers["Content-Length"].to_i
else
object.raw_data.nil? ? 0 : object.raw_data.size
end
end
def escape(string)
::Riak.escaper.escape(string).gsub("+", "%20").gsub('/', "%2F")
end
# A URI object that can be used with HTTP backend methods
def riak_uri(bucket, key)
URI.parse "http://#{settings.riak["host"]}:#{settings.riak["http_port"]}/riak/#{bucket}/#{key}"
end
def serializer_for(content_type)
::Riak::Serializers[content_type[/^[^;\s]+/]]
end
def directory_permission(authorizations, directory)
authorizations = authorizations.map do |auth|
auth.index(":") ? auth.split(":") : [auth, "rw"]
end
authorizations = Hash[*authorizations.flatten]
permission = authorizations[""]
authorizations.each do |key, value|
if directory.match(/^(public\/)?#{key}(\/|$)/)
if permission.nil? || permission == "r"
permission = value
end
return permission if permission == "rw"
end
end
permission
end
def directory_listing(user, directory)
listing = {
"@context" => "http://remotestorage.io/spec/folder-description",
"items" => {}
}
sub_directories(user, directory).each do |entry|
directory_name = entry["name"].split("/").last
etag = entry["etag"]
listing["items"].merge!({ "#{directory_name}/" => { "ETag" => etag }})
end
directory_entries(user, directory).each do |entry|
entry_name = entry["name"]
etag = entry["etag"]
content_type = entry["contentType"]
content_length = entry["contentLength"].to_i
listing["items"].merge!({
entry_name => {
"ETag" => etag,
"Content-Type" => content_type,
"Content-Length" => content_length
}
})
end
listing
end
def directory_entries(user, directory)
all_keys = user_directory_keys(user, directory, data_bucket)
return [] if all_keys.empty?
map_query = <<-EOH
function(v){
var metadata = v.values[0]['metadata'];
var dir_name = metadata['index']['directory_bin'];
if (dir_name === '/') {
dir_name = '';
}
var name = v.key.match(/^[^:]*:(.*)/)[1]; // strip username from key
name = name.replace(dir_name + ':', ''); // strip directory from key
var etag = metadata['X-Riak-VTag'];
var contentType = metadata['content-type'];
var contentLength = metadata['X-Riak-Meta']['X-Riak-Meta-Content_length'] || 0;
return [{
name: name,
etag: etag,
contentType: contentType,
contentLength: contentLength
}];
}
EOH
run_map_reduce(data_bucket, all_keys, map_query)
end
def sub_directories(user, directory)
all_keys = user_directory_keys(user, directory, directory_bucket)
return [] if all_keys.empty?
map_query = <<-EOH
function(v){
var name = v.key.match(/^[^:]*:(.*)/)[1]; // strip username from key
var etag = v.values[0]['metadata']['X-Riak-VTag'];
return [{
name: name,
etag: etag
}];
}
EOH
run_map_reduce(directory_bucket, all_keys, map_query)
end
def user_directory_keys(user, directory, bucket)
directory = "/" if directory == ""
user_keys = bucket.get_index("user_id_bin", user)
directory_keys = bucket.get_index("directory_bin", directory)
user_keys & directory_keys
end
def run_map_reduce(bucket, keys, map_query)
map_reduce = ::Riak::MapReduce.new(client)
keys.each do |key|
map_reduce.add(bucket.name, key)
end
map_reduce.
map(map_query, :keep => true).
run
end
def update_all_directory_objects(user, directory, timestamp)
parent_directories_for(directory).each do |parent_directory|
update_directory_object(user, parent_directory, timestamp)
end
end
def update_directory_object(user, directory, timestamp)
if directory.match(/\//)
parent_directory = directory[0..directory.rindex("/")-1]
elsif directory != ""
parent_directory = "/"
end
directory_object = directory_bucket.new("#{user}:#{directory}")
directory_object.content_type = "text/plain; charset=utf-8"
directory_object.data = timestamp.to_s
directory_object.indexes.merge!({:user_id_bin => [user]})
if parent_directory
directory_object.indexes.merge!({:directory_bin => [parent_directory]})
end
directory_object.store
end
def delete_or_update_directory_objects(user, directory, timestamp)
parent_directories_for(directory).each do |parent_directory|
existing_files = directory_entries(user, parent_directory)
existing_subdirectories = sub_directories(user, parent_directory)
if existing_files.empty? && existing_subdirectories.empty?
directory_bucket.delete "#{user}:#{parent_directory}"
else
update_directory_object(user, parent_directory, timestamp)
end
end
end
def set_object_data(object, data)
if object.content_type[/^[^;\s]+/] == "application/json"
data = "{}" if data.blank?
data = JSON.parse(data)
end
object.meta["content_length"] = data.size
if serializer_for(object.content_type)
object.data = data
else
object.raw_data = data
end
rescue JSON::ParserError
return false
end
def save_binary_data(object, data)
cs_binary_object = cs_binary_bucket.files.create(
:key => object.key,
:body => data,
:content_type => object.content_type
)
object.meta["binary_key"] = cs_binary_object.key
object.meta["content_length"] = cs_binary_object.content_length
object.raw_data = ""
end
def binary_data?(content_type, data)
return true if content_type[/[^;\s]+$/] == "charset=binary"
original_encoding = data.encoding
data.force_encoding("UTF-8")
is_binary = !data.valid_encoding?
data.force_encoding(original_encoding)
is_binary
end
def parent_directories_for(directory)
directories = directory.split("/")
parent_directories = []
while directories.any?
parent_directories << directories.join("/")
directories.pop
end
parent_directories << ""
end
def has_name_collision?(user, directory, key)
parent_directories = parent_directories_for(directory).reverse
parent_directories.shift # remove root dir entry
# check for existing documents with the same name as one of the parent directories
parent_directories.each do |dir|
begin
parts = dir.split("/")
document_key = parts.pop
directory_name = parts.join("/")
data_bucket.get("#{user}:#{directory_name}:#{document_key}")
return true
rescue ::Riak::HTTPFailedRequest
end
end
# check for an existing directory with same name as document
begin
directory_bucket.get("#{user}:#{directory}/#{key}")
return true
rescue ::Riak::HTTPFailedRequest
end
false
end
def client
@client ||= ::Riak::Client.new(:host => settings.riak['host'],
:http_port => settings.riak['http_port'])
end
def data_bucket
@data_bucket ||= begin
bucket = client.bucket(settings.riak['buckets']['data'])
bucket.allow_mult = false
bucket
end
end
def directory_bucket
@directory_bucket ||= begin
bucket = client.bucket(settings.riak['buckets']['directories'])
bucket.allow_mult = false
bucket
end
end
def auth_bucket
@auth_bucket ||= begin
bucket = client.bucket(settings.riak['buckets']['authorizations'])
bucket.allow_mult = false
bucket
end
end
def binary_bucket
@binary_bucket ||= begin
bucket = client.bucket(settings.riak['buckets']['binaries'])
bucket.allow_mult = false
bucket
end
end
def opslog_bucket
@opslog_bucket ||= begin
bucket = client.bucket(settings.riak['buckets']['opslog'])
bucket.allow_mult = false
bucket
end
end
def cs_client
@cs_client ||= Fog::Storage.new({
:provider => 'AWS',
:aws_access_key_id => cs_credentials['key_id'],
:aws_secret_access_key => cs_credentials['key_secret'],
:endpoint => settings.riak['riak_cs']['endpoint']
})
end
def cs_binary_bucket
@cs_binary_bucket ||= cs_client.directories.create(:key => settings.riak['buckets']['cs_binaries'])
end
end
end

View File

@@ -4,7 +4,6 @@ require "json"
 require "sinatra/base"
 require 'sinatra/config_file'
 require "sinatra/reloader"
-require "remote_storage/riak"
 require "remote_storage/swift"
 
 class LiquorCabinet < Sinatra::Base
@@ -129,9 +128,7 @@ class LiquorCabinet < Sinatra::Base
 
   def storage
     @storage ||= begin
-      if settings.respond_to? :riak
-        RemoteStorage::Riak.new(settings, self)
-      elsif settings.respond_to? :swift
+      if settings.respond_to? :swift
         RemoteStorage::Swift.new(settings, self)
       else
         puts <<-EOF
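For readability, this is roughly how the `storage` helper reads after this hunk, assembled from the context and added lines above; the heredoc body following `puts <<-EOF` lies outside the hunk, so a placeholder stands in for it:

```ruby
def storage
  @storage ||= begin
    if settings.respond_to? :swift
      RemoteStorage::Swift.new(settings, self)
    else
      puts <<-EOF
        (configuration hint elided; the heredoc body is outside the hunk)
      EOF
    end
  end
end
```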

View File

@@ -1,15 +0,0 @@
require_relative "../spec_helper"
describe "App" do
include Rack::Test::Methods
def app
LiquorCabinet
end
it "returns 404 on non-existing routes" do
get "/virginmargarita"
last_response.status.must_equal 404
end
end

View File

@@ -1,640 +0,0 @@
require_relative "../spec_helper"
describe "Directories" do
include Rack::Test::Methods
before do
purge_all_buckets
auth = auth_bucket.new("jimmy:123")
auth.data = [":r", "documents:r", "tasks:rw"]
auth.store
header "Authorization", "Bearer 123"
end
describe "HEAD listing" do
before do
put "/jimmy/tasks/foo", "do the laundry"
put "/jimmy/tasks/http%3A%2F%2F5apps.com", "prettify design"
head "/jimmy/tasks/"
end
it "has an empty body" do
last_response.status.must_equal 200
last_response.body.must_equal ""
end
it "has an ETag header set" do
last_response.status.must_equal 200
last_response.headers["ETag"].wont_be_nil
# check that ETag stays the same
etag = last_response.headers["ETag"]
get "/jimmy/tasks/"
last_response.headers["ETag"].must_equal etag
end
it "has CORS headers set" do
last_response.status.must_equal 200
last_response.headers["Access-Control-Allow-Origin"].must_equal "*"
last_response.headers["Access-Control-Allow-Methods"].must_equal "GET, PUT, DELETE"
last_response.headers["Access-Control-Allow-Headers"].must_equal "Authorization, Content-Type, Origin, If-Match, If-None-Match"
last_response.headers["Access-Control-Expose-Headers"].must_equal "ETag, Content-Length"
end
context "for an empty or absent directory" do
it "responds with 404" do
head "/jimmy/documents/"
last_response.status.must_equal 404
end
end
end
describe "GET listing" do
before do
put "/jimmy/tasks/foo", "do the laundry"
put "/jimmy/tasks/http%3A%2F%2F5apps.com", "prettify design"
put "/jimmy/tasks/%3A/foo%3Abar%40foo.org", "hello world"
end
it "lists the objects with version, length and content-type" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.content_type.must_equal "application/json"
foo = data_bucket.get("jimmy:tasks:foo")
content = JSON.parse(last_response.body)
content["items"]["http://5apps.com"].wont_be_nil
content["items"][":/"].wont_be_nil
content["items"]["foo"].wont_be_nil
content["items"]["foo"]["ETag"].must_equal foo.etag.gsub(/"/, "")
content["items"]["foo"]["Content-Type"].must_equal "text/plain"
content["items"]["foo"]["Content-Length"].must_equal 14
end
it "has an ETag header set" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.headers["ETag"].wont_be_nil
# check that ETag stays the same
etag = last_response.headers["ETag"]
get "/jimmy/tasks/"
last_response.headers["ETag"].must_equal etag
end
it "has CORS headers set" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.headers["Access-Control-Allow-Origin"].must_equal "*"
last_response.headers["Access-Control-Allow-Methods"].must_equal "GET, PUT, DELETE"
last_response.headers["Access-Control-Allow-Headers"].must_equal "Authorization, Content-Type, Origin, If-Match, If-None-Match"
last_response.headers["Access-Control-Expose-Headers"].must_equal "ETag, Content-Length"
end
it "has caching headers set" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.headers["Expires"].must_equal "0"
end
it "doesn't choke on colons in the directory name" do
get "/jimmy/tasks/%3A/"
last_response.status.must_equal 200
last_response.content_type.must_equal "application/json"
content = JSON.parse(last_response.body)
content["items"]["foo:bar@foo.org"].wont_be_nil
end
context "when If-None-Match header is set" do
before do
get "/jimmy/tasks/"
@etag = last_response.headers["ETag"]
end
it "responds with 'not modified' when it matches the current ETag" do
header "If-None-Match", @etag
get "/jimmy/tasks/"
last_response.status.must_equal 304
last_response.body.must_be_empty
last_response.headers["ETag"].must_equal @etag
end
it "responds normally when it does not match the current ETag" do
header "If-None-Match", "FOO"
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.body.wont_be_empty
end
end
describe "when If-None-Match header is set with multiple revisions" do
before do
get "/jimmy/tasks/"
@etag = last_response.headers["ETag"]
end
it "responds with 'not modified' when it contains the current ETag" do
header "If-None-Match", "DEADBEEF,#{@etag} ,F00BA4"
get "/jimmy/tasks/"
last_response.status.must_equal 304
last_response.body.must_be_empty
last_response.headers["ETag"].must_equal @etag
end
it "responds normally when it does not contain the current ETag" do
header "If-None-Match", "FOO,BAR"
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.body.wont_be_empty
end
end
context "with sub-directories" do
before do
get "/jimmy/tasks/"
@old_etag = last_response.headers["ETag"]
put "/jimmy/tasks/home/laundry", "do the laundry"
end
it "lists the containing objects as well as the direct sub-directories" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
home = directory_bucket.get("jimmy:tasks/home")
content = JSON.parse(last_response.body)
content["items"]["foo"].wont_be_nil
content["items"]["http://5apps.com"].wont_be_nil
content["items"]["home/"].wont_be_nil
content["items"]["home/"]["ETag"].must_equal home.etag.gsub(/"/, "")
end
it "updates the ETag of the parent directory" do
get "/jimmy/tasks/"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal @old_etag
end
context "for a different user" do
before do
auth = auth_bucket.new("alice:321")
auth.data = [":r", "documents:r", "tasks:rw"]
auth.store
header "Authorization", "Bearer 321"
put "/alice/tasks/homework", "write an essay"
end
it "does not list the directories of jimmy" do
get "/alice/tasks/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["/"].must_be_nil
content["items"]["tasks/"].must_be_nil
content["items"]["home/"].must_be_nil
content["items"]["homework"].wont_be_nil
end
end
context "sub-directories without objects" do
it "lists the direct sub-directories" do
put "/jimmy/tasks/private/projects/world-domination/start", "write a manifesto"
get "/jimmy/tasks/private/"
last_response.status.must_equal 200
projects = directory_bucket.get("jimmy:tasks/private/projects")
content = JSON.parse(last_response.body)
content["items"]["projects/"]["ETag"].must_equal projects.etag.gsub(/"/, "")
end
it "updates the timestamps of the existing directory objects" do
directory = directory_bucket.new("jimmy:tasks")
directory.content_type = "text/plain"
directory.data = (2.seconds.ago.to_f * 1000).to_i
directory.store
put "/jimmy/tasks/private/projects/world-domination/start", "write a manifesto"
object = data_bucket.get("jimmy:tasks/private/projects/world-domination:start")
directory = directory_bucket.get("jimmy:tasks")
directory.data.to_i.must_equal object.meta['timestamp'][0].to_i
end
end
context "with binary data" do
context "charset given in content-type header" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/tasks/jaypeg.jpg", @image
end
it "lists the binary files" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
jaypeg = data_bucket.get("jimmy:tasks:jaypeg.jpg")
content = JSON.parse(last_response.body)
content["items"]["jaypeg.jpg"]["ETag"].must_equal jaypeg.etag.gsub(/"/, "")
content["items"]["jaypeg.jpg"]["Content-Type"].must_equal "image/jpeg"
content["items"]["jaypeg.jpg"]["Content-Length"].must_equal 16044
end
end
context "no charset in content-type header" do
before do
header "Content-Type", "image/jpeg"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/tasks/jaypeg.jpg", @image
end
it "lists the binary files" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
jaypeg = data_bucket.get("jimmy:tasks:jaypeg.jpg")
content = JSON.parse(last_response.body)
content["items"]["jaypeg.jpg"]["ETag"].must_equal jaypeg.etag.gsub(/"/, "")
content["items"]["jaypeg.jpg"]["Content-Type"].must_equal "image/jpeg"
content["items"]["jaypeg.jpg"]["Content-Length"].must_equal 16044
end
end
end
end
context "for a sub-directory" do
before do
put "/jimmy/tasks/home/laundry", "do the laundry"
end
it "lists the objects with timestamp" do
get "/jimmy/tasks/home/"
last_response.status.must_equal 200
laundry = data_bucket.get("jimmy:tasks/home:laundry")
content = JSON.parse(last_response.body)
content["items"]["laundry"]["ETag"].must_equal laundry.etag.gsub(/"/, "")
end
end
context "for an empty or absent directory" do
it "returns an empty listing" do
get "/jimmy/documents/notfound/"
last_response.status.must_equal 404
end
end
context "special characters in directory name" do
before do
put "/jimmy/tasks/foo~bar/task1", "some task"
end
it "lists the directory in the parent directory" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["foo~bar/"].wont_be_nil
end
it "lists the containing objects" do
get "/jimmy/tasks/foo~bar/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["task1"].wont_be_nil
end
it "returns the requested object" do
get "/jimmy/tasks/foo~bar/task1"
last_response.status.must_equal 200
last_response.body.must_equal "some task"
end
end
context "special characters in object name" do
before do
put "/jimmy/tasks/bla~blub", "some task"
end
it "lists the containing object" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["bla~blub"].wont_be_nil
end
end
context "for the root directory" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = [":rw"]
auth.store
put "/jimmy/root-1", "Put my root down"
put "/jimmy/root-2", "Back to the roots"
end
it "lists the containing objects and direct sub-directories" do
get "/jimmy/"
last_response.status.must_equal 200
tasks = directory_bucket.get("jimmy:tasks")
content = JSON.parse(last_response.body)
content["items"]["root-1"].wont_be_nil
content["items"]["root-2"].wont_be_nil
content["items"]["tasks/"].wont_be_nil
content["items"]["tasks/"]["ETag"].must_equal tasks.etag.gsub(/"/, "")
end
it "has an ETag header set" do
get "/jimmy/"
last_response.status.must_equal 200
last_response.headers["ETag"].wont_be_nil
end
end
context "for the public directory" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents:r", "bookmarks:rw"]
auth.store
put "/jimmy/public/bookmarks/5apps", "http://5apps.com"
end
context "when authorized for the category" do
it "lists the files" do
get "/jimmy/public/bookmarks/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["5apps"].wont_be_nil
end
it "has an ETag header set" do
get "/jimmy/public/bookmarks/"
last_response.status.must_equal 200
last_response.headers["ETag"].wont_be_nil
end
end
context "when directly authorized for the public directory" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents:r", "public/bookmarks:rw"]
auth.store
end
it "lists the files" do
get "/jimmy/public/bookmarks/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["5apps"].wont_be_nil
end
end
context "when not authorized" do
before do
auth_bucket.delete("jimmy:123")
end
it "does not allow a directory listing of the public root" do
get "/jimmy/public/"
last_response.status.must_equal 401
end
it "does not allow a directory listing of a sub-directory" do
get "/jimmy/public/bookmarks/"
last_response.status.must_equal 401
end
end
end
end
describe "directory object" do
describe "PUT file" do
context "no existing directory object" do
before do
put "/jimmy/tasks/home/trash", "take out the trash"
end
it "creates a new directory object" do
object = data_bucket.get("jimmy:tasks/home:trash")
directory = directory_bucket.get("jimmy:tasks/home")
directory.data.wont_be_nil
directory.data.to_i.must_equal object.meta['timestamp'][0].to_i
end
it "sets the correct index for the directory object" do
object = directory_bucket.get("jimmy:tasks/home")
object.indexes["directory_bin"].must_include "tasks"
end
it "creates directory objects for the parent directories" do
object = directory_bucket.get("jimmy:tasks")
object.indexes["directory_bin"].must_include "/"
object.data.wont_be_nil
object = directory_bucket.get("jimmy:")
object.indexes["directory_bin"].must_be_empty
object.data.wont_be_nil
end
end
context "existing directory object" do
before do
put "/jimmy/tasks/home/trash", "collect some trash"
end
it "updates the timestamp of the directory" do
put "/jimmy/tasks/home/trash", "take out the trash"
last_response.status.must_equal 200
object = data_bucket.get("jimmy:tasks/home:trash")
directory = directory_bucket.get("jimmy:tasks/home")
directory.data.to_i.must_equal object.meta['timestamp'][0].to_i
end
end
end
end
describe "OPTIONS listing" do
it "has CORS headers set" do
options "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.headers["Access-Control-Allow-Origin"].must_equal "*"
last_response.headers["Access-Control-Allow-Methods"].must_equal "GET, PUT, DELETE"
last_response.headers["Access-Control-Allow-Headers"].must_equal "Authorization, Content-Type, Origin, If-Match, If-None-Match"
last_response.headers["Access-Control-Expose-Headers"].must_equal "ETag, Content-Length"
end
context "sub-directories" do
it "has CORS headers set" do
options "/jimmy/tasks/foo/bar/"
last_response.status.must_equal 200
last_response.headers["Access-Control-Allow-Origin"].must_equal "*"
last_response.headers["Access-Control-Allow-Methods"].must_equal "GET, PUT, DELETE"
last_response.headers["Access-Control-Allow-Headers"].must_equal "Authorization, Content-Type, Origin, If-Match, If-None-Match"
last_response.headers["Access-Control-Expose-Headers"].must_equal "ETag, Content-Length"
end
end
context "root directory" do
it "has CORS headers set" do
options "/jimmy/"
last_response.status.must_equal 200
last_response.headers["Access-Control-Allow-Origin"].must_equal "*"
last_response.headers["Access-Control-Allow-Methods"].must_equal "GET, PUT, DELETE"
last_response.headers["Access-Control-Allow-Headers"].must_equal "Authorization, Content-Type, Origin, If-Match, If-None-Match"
last_response.headers["Access-Control-Expose-Headers"].must_equal "ETag, Content-Length"
end
end
end
describe "DELETE file" do
context "last file in directory" do
before do
put "/jimmy/tasks/home/trash", "take out the trash"
end
it "deletes the directory objects for all empty parent directories" do
delete "/jimmy/tasks/home/trash"
last_response.status.must_equal 200
lambda {
directory_bucket.get("jimmy:tasks/home")
}.must_raise Riak::HTTPFailedRequest
lambda {
directory_bucket.get("jimmy:tasks")
}.must_raise Riak::HTTPFailedRequest
lambda {
directory_bucket.get("jimmy:")
}.must_raise Riak::HTTPFailedRequest
end
end
context "with additional files in directory" do
before do
put "/jimmy/tasks/home/trash", "take out the trash"
put "/jimmy/tasks/home/laundry/washing", "wash the clothes"
end
it "does not delete the directory objects for the parent directories" do
delete "/jimmy/tasks/home/trash"
directory_bucket.get("jimmy:tasks/home").wont_be_nil
directory_bucket.get("jimmy:tasks").wont_be_nil
directory_bucket.get("jimmy:").wont_be_nil
end
it "updates the ETag headers of all parent directories" do
get "/jimmy/tasks/home/"
home_etag = last_response.headers["ETag"]
get "/jimmy/tasks/"
tasks_etag = last_response.headers["ETag"]
get "/jimmy/"
root_etag = last_response.headers["ETag"]
delete "/jimmy/tasks/home/trash"
get "/jimmy/tasks/home/"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal home_etag
get "/jimmy/tasks/"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal tasks_etag
get "/jimmy/"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal root_etag
end
describe "timestamps" do
before do
@old_timestamp = (2.seconds.ago.to_f * 1000).to_i
["tasks/home", "tasks", ""].each do |dir|
directory = directory_bucket.get("jimmy:#{dir}")
directory.data = @old_timestamp.to_s
directory.store
end
end
it "updates the timestamp for the parent directories" do
delete "/jimmy/tasks/home/trash"
directory_bucket.get("jimmy:tasks/home").data.to_i.must_be :>, @old_timestamp
directory_bucket.get("jimmy:tasks").data.to_i.must_be :>, @old_timestamp
directory_bucket.get("jimmy:").data.to_i.must_be :>, @old_timestamp
end
end
end
end
end

View File

@@ -1,424 +0,0 @@
require_relative "../spec_helper"
describe "Permissions" do
include Rack::Test::Methods
before do
purge_all_buckets
end
describe "GET" do
context "public data" do
before do
object = data_bucket.new("jimmy:public:foo")
object.content_type = "text/plain"
object.data = "some text data"
object.store
object = data_bucket.new("jimmy:public/documents:foo")
object.content_type = "text/plain"
object.data = "some text data"
object.store
end
it "returns the value on all get requests" do
get "/jimmy/public/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some text data"
end
it "returns the value from a sub-directory" do
get "/jimmy/public/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some text data"
end
end
context "private data" do
before do
object = data_bucket.new("jimmy:documents:foo")
object.content_type = "text/plain"
object.data = "some private, authorized text data"
object.store
object = data_bucket.new("jimmy:documents/very/interesting:text")
object.content_type = "text/plain"
object.data = "some very interesting writing"
object.store
object = data_bucket.new("jimmy:confidential:bar")
object.content_type = "text/plain"
object.data = "some private, non-authorized text data"
object.store
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents:r", "tasks:rw"]
auth.store
header "Authorization", "Bearer 123"
end
context "when authorized" do
it "returns the value for a key in a top-level directory" do
get "/jimmy/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some private, authorized text data"
end
it "returns the value for a key in a sub-directory" do
get "/jimmy/documents/very/interesting/text"
last_response.status.must_equal 200
last_response.body.must_equal "some very interesting writing"
end
end
context "when not authorized" do
it "returns a 401 for a key in a top-level directory" do
get "/jimmy/confidential/bar"
last_response.status.must_equal 401
end
end
end
end
describe "PUT" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents:r", "contacts:rw", "tasks:r", "tasks/home:rw"]
auth.store
header "Authorization", "Bearer 123"
end
context "to a top-level directory" do
it "saves the value when there are write permissions" do
put "/jimmy/contacts/1", "John Doe"
last_response.status.must_equal 201
data_bucket.get("jimmy:contacts:1").data.must_equal "John Doe"
end
it "returns a 401 when there are read permissions only" do
put "/jimmy/documents/foo", "some text"
last_response.status.must_equal 401
end
end
context "to a sub-directory" do
it "saves the value when there are direct write permissions" do
put "/jimmy/tasks/home/1", "take out the trash"
last_response.status.must_equal 201
data_bucket.get("jimmy:tasks/home:1").data.must_equal "take out the trash"
end
it "saves the value when there are write permissions for a parent directory" do
put "/jimmy/contacts/family/1", "Bobby Brother"
last_response.status.must_equal 201
data_bucket.get("jimmy:contacts/family:1").data.must_equal "Bobby Brother"
end
it "returns a 401 when there are read permissions only" do
put "/jimmy/documents/business/1", "some text"
last_response.status.must_equal 401
end
end
context "to the public directory" do
context "when authorized for the corresponding category" do
it "saves the value" do
put "/jimmy/public/contacts/foo", "Foo Bar"
last_response.status.must_equal 201
data_bucket.get("jimmy:public/contacts:foo").data.must_equal "Foo Bar"
end
it "saves the value to a sub-directory" do
put "/jimmy/public/contacts/family/foo", "Foo Bar"
last_response.status.must_equal 201
data_bucket.get("jimmy:public/contacts/family:foo").data.must_equal "Foo Bar"
end
end
context "when not authorized for the corresponding category" do
it "returns a 401" do
put "/jimmy/public/documents/foo", "Foo Bar"
last_response.status.must_equal 401
end
end
end
end
describe "DELETE" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents:r", "tasks:rw"]
auth.store
header "Authorization", "Bearer 123"
end
context "when authorized" do
before do
object = data_bucket.new("jimmy:tasks:1")
object.content_type = "text/plain"
object.data = "do the laundry"
object.store
object = data_bucket.new("jimmy:tasks/home:1")
object.content_type = "text/plain"
object.data = "take out the trash"
object.store
end
it "removes the key from a top-level directory" do
delete "/jimmy/tasks/1"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:tasks:1")
}.must_raise Riak::HTTPFailedRequest
end
it "removes the key from a top-level directory" do
delete "/jimmy/tasks/home/1"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:tasks/home:1")
}.must_raise Riak::HTTPFailedRequest
end
context "public directory" do
before do
object = data_bucket.new("jimmy:public/tasks:open")
object.content_type = "text/plain"
object.data = "hello world"
object.store
end
it "removes the key" do
delete "/jimmy/public/tasks/open"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:public/tasks:open")
}.must_raise Riak::HTTPFailedRequest
end
end
end
context "when not authorized" do
before do
object = data_bucket.new("jimmy:documents:private")
object.content_type = "text/plain"
object.data = "some private, authorized text data"
object.store
object = data_bucket.new("jimmy:documents/business:foo")
object.content_type = "text/plain"
object.data = "some private, authorized text data"
object.store
end
it "returns a 401 for a key in a top-level directory" do
delete "/jimmy/documents/private"
last_response.status.must_equal 401
end
it "returns a 401 for a key in a sub-directory" do
delete "/jimmy/documents/business/foo"
last_response.status.must_equal 401
end
context "public directory" do
before do
object = data_bucket.new("jimmy:public/documents:foo")
object.content_type = "text/plain"
object.data = "some private, authorized text data"
object.store
end
it "returns a 401" do
delete "/jimmy/public/documents/foo"
last_response.status.must_equal 401
end
end
end
end
describe "global permissions" do
before do
object = data_bucket.new("jimmy:documents/very/interesting:text")
object.content_type = "text/plain"
object.data = "some very interesting writing"
object.store
end
context "write all" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = [":rw", "documents:r"]
auth.store
header "Authorization", "Bearer 123"
end
it "allows GET requests" do
get "/jimmy/documents/very/interesting/text"
last_response.status.must_equal 200
last_response.body.must_equal "some very interesting writing"
end
it "allows PUT requests" do
put "/jimmy/contacts/1", "John Doe"
last_response.status.must_equal 201
data_bucket.get("jimmy:contacts:1").data.must_equal "John Doe"
end
it "allows DELETE requests" do
delete "/jimmy/documents/very/interesting/text"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:documents/very/interesting:text")
}.must_raise Riak::HTTPFailedRequest
end
context "root directory" do
before do
object = data_bucket.new("jimmy::root")
object.content_type = "text/plain"
object.data = "Back to the roots"
object.store
end
it "allows GET requests" do
get "/jimmy/root"
last_response.status.must_equal 200
last_response.body.must_equal "Back to the roots"
end
it "allows PUT requests" do
put "/jimmy/1", "Gonna kick it root down"
last_response.status.must_equal 201
data_bucket.get("jimmy::1").data.must_equal "Gonna kick it root down"
end
it "allows DELETE requests" do
delete "/jimmy/root"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy::root")
}.must_raise Riak::HTTPFailedRequest
end
end
context "public directory" do
before do
object = data_bucket.new("jimmy:public/tasks:hello")
object.content_type = "text/plain"
object.data = "Hello World"
object.store
end
it "allows GET requests" do
get "/jimmy/public/tasks/"
last_response.status.must_equal 404
end
it "allows PUT requests" do
put "/jimmy/public/1", "Hello World"
last_response.status.must_equal 201
data_bucket.get("jimmy:public:1").data.must_equal "Hello World"
end
it "allows DELETE requests" do
delete "/jimmy/public/tasks/hello"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:public/tasks:hello")
}.must_raise Riak::HTTPFailedRequest
end
end
end
context "read all" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = [":r", "contacts:rw"]
auth.store
header "Authorization", "Bearer 123"
end
it "allows GET requests" do
get "/jimmy/documents/very/interesting/text"
last_response.status.must_equal 200
last_response.body.must_equal "some very interesting writing"
end
it "disallows PUT requests" do
put "/jimmy/documents/foo", "some text"
last_response.status.must_equal 401
end
it "disallows DELETE requests" do
delete "/jimmy/documents/very/interesting/text"
last_response.status.must_equal 401
end
context "public directory" do
before do
object = data_bucket.new("jimmy:public/tasks:hello")
object.content_type = "text/plain"
object.data = "Hello World"
object.store
end
it "allows GET requests" do
get "/jimmy/public/tasks/"
last_response.status.must_equal 404
end
it "disallows PUT requests" do
put "/jimmy/public/tasks/foo", "some text"
last_response.status.must_equal 401
end
it "disallows DELETE requests" do
delete "/jimmy/public/tasks/hello"
last_response.status.must_equal 401
end
end
end
end
end

View File

@@ -1,775 +0,0 @@
require_relative "../spec_helper"
describe "App with Riak backend" do
include Rack::Test::Methods
before do
purge_all_buckets
end
describe "HEAD public data" do
before do
object = data_bucket.new("jimmy:public:foo")
object.content_type = "text/plain"
object.data = "some text data"
object.store
head "/jimmy/public/foo"
end
it "works" do
last_response.status.must_equal 200
last_response.body.must_equal ""
last_response.headers["ETag"].wont_be_nil
last_response.headers["Content-Length"].must_equal "14"
end
end
describe "GET public data" do
describe "file with content" do
before do
object = data_bucket.new("jimmy:public:foo")
object.content_type = "text/plain"
object.data = "some text data"
object.store
get "/jimmy/public/foo"
end
it "works" do
last_response.status.must_equal 200
last_response.body.must_equal "some text data"
last_response.headers["ETag"].wont_be_nil
last_response.headers["Content-Length"].must_equal "14"
last_response.headers["Expires"].must_equal "0"
end
describe "empty file" do
before do
object = data_bucket.new("jimmy:public:empty")
object.content_type = "text/plain"
object.data = ""
object.store
get "/jimmy/public/empty"
end
it "returns an empty body" do
last_response.status.must_equal 200
# Rack::MockRequest turns the body into a string. We can't use
# `last_response.body` to check for nil, because:
# >> [nil].join
# => ""
last_response.body.must_equal ''
last_response.headers["Content-Length"].must_equal '0'
end
end
end
describe "GET data with custom content type" do
before do
object = data_bucket.new("jimmy:public:magic")
object.content_type = "text/magic"
object.raw_data = "some text data"
object.store
end
it "returns the value with the correct content type" do
get "/jimmy/public/magic"
last_response.status.must_equal 200
last_response.content_type.must_equal "text/magic"
last_response.body.must_equal "some text data"
end
end
describe "private data" do
before do
object = data_bucket.new("jimmy:documents:foo")
object.content_type = "text/plain"
object.data = "some private text data"
object.store
@etag = object.etag
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents", "public"]
auth.store
end
describe "HEAD" do
before do
header "Authorization", "Bearer 123"
head "/jimmy/documents/foo"
end
it "works" do
last_response.status.must_equal 200
last_response.body.must_equal ""
last_response.headers["ETag"].wont_be_nil
last_response.headers["Content-Length"].must_equal "22"
end
end
describe "HEAD nonexisting key" do
it "returns a 404" do
header "Authorization", "Bearer 123"
head "/jimmy/documents/somestupidkey"
last_response.status.must_equal 404
end
end
describe "GET" do
before do
header "Authorization", "Bearer 123"
end
it "returns the value" do
get "/jimmy/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some private text data"
end
describe "when If-None-Match header is set" do
it "responds with 'not modified' when it matches the current ETag" do
header "If-None-Match", @etag
get "/jimmy/documents/foo"
last_response.status.must_equal 304
last_response.body.must_be_empty
last_response.headers["ETag"].must_equal @etag
end
it "responds normally when it does not match the current ETag" do
header "If-None-Match", "FOO"
get "/jimmy/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some private text data"
end
end
describe "when If-None-Match header is set with multiple revisions" do
it "responds with 'not modified' when it contains the current ETag" do
header "If-None-Match", "DEADBEEF,#{@etag},F00BA4"
get "/jimmy/documents/foo"
last_response.status.must_equal 304
last_response.body.must_be_empty
last_response.headers["ETag"].must_equal @etag
end
it "responds normally when it does not contain the current ETag" do
header "If-None-Match", "FOO,BAR"
get "/jimmy/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some private text data"
end
end
end
describe "GET nonexisting key" do
it "returns a 404" do
header "Authorization", "Bearer 123"
get "/jimmy/documents/somestupidkey"
last_response.status.must_equal 404
end
end
describe "PUT" do
before do
header "Authorization", "Bearer 123"
end
describe "with implicit content type" do
before do
put "/jimmy/documents/bar", "another text"
end
it "saves the value" do
last_response.status.must_equal 201
last_response.body.must_equal ""
data_bucket.get("jimmy:documents:bar").data.must_equal "another text"
end
it "stores the data as plain text with utf-8 encoding" do
data_bucket.get("jimmy:documents:bar").content_type.must_equal "text/plain; charset=utf-8"
end
it "sets the ETag header" do
last_response.headers["ETag"].wont_be_nil
end
it "indexes the data set" do
indexes = data_bucket.get("jimmy:documents:bar").indexes
indexes["user_id_bin"].must_be_kind_of Set
indexes["user_id_bin"].must_include "jimmy"
indexes["directory_bin"].must_include "documents"
end
it "logs the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
log_entry = objects.select{|o| o.data["count"] == 1}.first
log_entry.data["size"].must_equal 12
log_entry.data["category"].must_equal "documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
end
describe "with explicit content type" do
before do
header "Content-Type", "application/json"
put "/jimmy/documents/jason", '{"foo": "bar", "unhosted": 1}'
end
it "saves the value (as JSON)" do
last_response.status.must_equal 201
data_bucket.get("jimmy:documents:jason").data.must_be_kind_of Hash
data_bucket.get("jimmy:documents:jason").data.must_equal({"foo" => "bar", "unhosted" => 1})
end
it "uses the requested content type" do
data_bucket.get("jimmy:documents:jason").content_type.must_equal "application/json"
end
it "delivers the data correctly" do
header "Authorization", "Bearer 123"
get "/jimmy/documents/jason"
last_response.body.must_equal '{"foo":"bar","unhosted":1}'
last_response.content_type.must_equal "application/json"
end
end
describe "with arbitrary content type" do
before do
header "Content-Type", "text/magic"
put "/jimmy/documents/magic", "pure magic"
end
it "saves the value" do
last_response.status.must_equal 201
data_bucket.get("jimmy:documents:magic").raw_data.must_equal "pure magic"
end
it "uses the requested content type" do
data_bucket.get("jimmy:documents:magic").content_type.must_equal "text/magic"
end
it "delivers the data correctly" do
header "Authorization", "Bearer 123"
get "/jimmy/documents/magic"
last_response.body.must_equal "pure magic"
last_response.content_type.must_equal "text/magic"
end
end
describe "with content type containing the encoding" do
before do
header "Content-Type", "application/json; charset=UTF-8"
put "/jimmy/documents/jason", '{"foo": "bar", "unhosted": 1}'
end
it "saves the value (as JSON)" do
last_response.status.must_equal 201
data_bucket.get("jimmy:documents:jason").data.must_be_kind_of Hash
data_bucket.get("jimmy:documents:jason").data.must_equal({"foo" => "bar", "unhosted" => 1})
end
it "uses the requested content type" do
data_bucket.get("jimmy:documents:jason").content_type.must_equal "application/json; charset=UTF-8"
end
it "delivers the data correctly" do
get "/jimmy/documents/jason"
last_response.body.must_equal '{"foo":"bar","unhosted":1}'
last_response.content_type.must_equal "application/json; charset=UTF-8"
end
end
describe "naming collisions between documents and directories" do
before do
put "/jimmy/documents/archive/document", "lorem ipsum"
end
it "responds with 409 when directory with same name already exists" do
put "/jimmy/documents/archive", "some awesome content"
last_response.status.must_equal 409
lambda {
data_bucket.get("jimmy:documents/archive")
}.must_raise Riak::HTTPFailedRequest
end
it "responds with 409 when there is an existing document with same name as one of the directories" do
put "/jimmy/documents/archive/document/subdir/doc", "some awesome content"
last_response.status.must_equal 409
lambda {
data_bucket.get("jimmy:documents/archive/document/subdir/doc")
}.must_raise Riak::HTTPFailedRequest
end
end
describe "with existing content" do
before do
put "/jimmy/documents/archive/foo", "lorem ipsum"
end
it "saves the value" do
put "/jimmy/documents/archive/foo", "some awesome content"
last_response.status.must_equal 200
data_bucket.get("jimmy:documents/archive:foo").data.must_equal "some awesome content"
end
it "logs the operations" do
put "/jimmy/documents/archive/foo", "some awesome content"
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
create_entry = objects.select{|o| o.data["count"] == 1}.first
create_entry.data["size"].must_equal 11
create_entry.data["category"].must_equal "documents"
create_entry.indexes["user_id_bin"].must_include "jimmy"
update_entry = objects.select{|o| o.data["count"] == 0}.first
update_entry.data["size"].must_equal 9
update_entry.data["category"].must_equal "documents"
update_entry.indexes["user_id_bin"].must_include "jimmy"
end
it "changes the ETag header" do
old_etag = last_response.headers["ETag"]
put "/jimmy/documents/archive/foo", "some awesome content"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal old_etag
end
describe "when If-Match header is set" do
it "allows the request if the header matches the current ETag" do
old_etag = last_response.headers["ETag"]
header "If-Match", old_etag
put "/jimmy/documents/archive/foo", "some awesome content"
last_response.status.must_equal 200
get "/jimmy/documents/archive/foo"
last_response.body.must_equal "some awesome content"
end
it "fails the request if the header does not match the current ETag" do
header "If-Match", "WONTMATCH"
put "/jimmy/documents/archive/foo", "some awesome content"
last_response.status.must_equal 412
get "/jimmy/documents/archive/foo"
last_response.body.must_equal "lorem ipsum"
end
end
describe "when If-None-Match header is set" do
before do
header "If-None-Match", "*"
end
it "fails when the document already exists" do
put "/jimmy/documents/archive/foo", "some awesome content"
last_response.status.must_equal 412
get "/jimmy/documents/archive/foo"
last_response.body.must_equal "lorem ipsum"
end
it "succeeds when the document does not exist" do
put "/jimmy/documents/archive/bar", "my little content"
last_response.status.must_equal 201
end
end
end
describe "exsting content without serializer registered for the given content-type" do
before do
header "Content-Type", "text/html; charset=UTF-8"
put "/jimmy/documents/html", '<html></html>'
put "/jimmy/documents/html", '<html><body></body></html>'
end
it "saves the value" do
last_response.status.must_equal 200
data_bucket.get("jimmy:documents:html").raw_data.must_equal "<html><body></body></html>"
end
it "uses the requested content type" do
data_bucket.get("jimmy:documents:html").content_type.must_equal "text/html; charset=UTF-8"
end
end
describe "public data" do
before do
put "/jimmy/public/documents/notes/foo", "note to self"
end
it "saves the value" do
last_response.status.must_equal 201
data_bucket.get("jimmy:public/documents/notes:foo").data.must_equal "note to self"
end
it "logs the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
log_entry = objects.select{|o| o.data["count"] == 1}.first
log_entry.data["size"].must_equal 12
log_entry.data["category"].must_equal "public/documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
end
context "with binary data" do
context "binary charset in content-type header" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
end
it "uses the requested content type" do
get "/jimmy/documents/jaypeg"
last_response.status.must_equal 200
last_response.content_type.must_equal "image/jpeg; charset=binary"
end
it "delivers the data correctly" do
get "/jimmy/documents/jaypeg"
last_response.status.must_equal 200
last_response.body.must_equal @image
end
it "responds with an ETag header" do
last_response.headers["ETag"].wont_be_nil
etag = last_response.headers["ETag"]
get "/jimmy/documents/jaypeg"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].must_equal etag
end
it "responds with a Content-Length header" do
get "/jimmy/documents/jaypeg"
last_response.headers["Content-Length"].must_equal "16044"
end
it "changes the ETag when updating the file" do
old_etag = last_response.headers["ETag"]
put "/jimmy/documents/jaypeg", @image
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal old_etag
end
it "logs the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
log_entry = objects.select{|o| o.data["count"] == 1}.first
log_entry.data["size"].must_equal 16044
log_entry.data["category"].must_equal "documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
context "overwriting existing file with same file" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
end
it "doesn't log the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
objects.size.must_equal 1
end
end
context "overwriting existing file with different file" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image+"foo"
end
it "logs the operation changing only the size" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
objects.size.must_equal 2
log_entry = objects.select{|o| o.data["count"] == 0}.first
log_entry.data["size"].must_equal 3
log_entry.data["category"].must_equal "documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
end
end
context "no binary charset in content-type header" do
before do
header "Content-Type", "image/jpeg"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
end
it "uses the requested content type" do
get "/jimmy/documents/jaypeg"
last_response.status.must_equal 200
last_response.content_type.must_equal "image/jpeg"
end
it "delivers the data correctly" do
get "/jimmy/documents/jaypeg"
last_response.status.must_equal 200
last_response.body.must_equal @image
end
end
end
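# Keys: the user, folder path and document name are joined with ':' into the
# bucket key ("jimmy:documents:foo"), and URL-escaped characters in the
# request path are stored and served back transparently.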
context "with escaped key" do
before do
put "/jimmy/documents/bar%3Abaz/http%3A%2F%2F5apps.com", "super website"
end
it "delivers the data correctly" do
header "Authorization", "Bearer 123"
get "/jimmy/documents/bar%3Abaz/http%3A%2F%2F5apps.com"
last_response.body.must_equal 'super website'
end
end
context "with unescaped key" do
before do
put "/jimmy/documents/bar:baz/john@doe.com", "John Doe"
end
it "lists the document in the directory" do
get "/jimmy/documents/bar:baz/"
content = JSON.parse(last_response.body)
content["items"]["john@doe.com"].wont_be_nil
end
it "delivers the data correctly" do
get "/jimmy/documents/bar:baz/john@doe.com"
last_response.body.must_equal "John Doe"
end
end
context "escaped square brackets in key" do
before do
put "/jimmy/documents/gracehopper%5B1%5D.jpg", "super image"
end
it "delivers the data correctly" do
header "Authorization", "Bearer 123"
get "/jimmy/documents/gracehopper%5B1%5D.jpg"
last_response.body.must_equal "super image"
end
end
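# JSON documents (Content-Type: application/json) run through a registered
# serializer: an empty body is stored as an empty JSON object, while a body
# that doesn't parse is rejected with 422.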
context "invalid JSON" do
context "empty body" do
before do
header "Content-Type", "application/json"
put "/jimmy/documents/jason", ""
end
it "saves an empty JSON object" do
last_response.status.must_equal 201
data_bucket.get("jimmy:documents:jason").data.must_be_kind_of Hash
data_bucket.get("jimmy:documents:jason").data.must_equal({})
end
end
context "unparsable JSON" do
before do
header "Content-Type", "application/json"
put "/jimmy/documents/jason", "foo"
end
it "returns a 422" do
last_response.status.must_equal 422
end
end
end
end
describe "DELETE" do
before do
header "Authorization", "Bearer 123"
end
describe "basics" do
before do
delete "/jimmy/documents/foo"
end
it "removes the key" do
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:documents:foo")
}.must_raise Riak::HTTPFailedRequest
end
it "logs the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
log_entry = objects.select{|o| o.data["count"] == -1}.first
log_entry.data["size"].must_equal(-22)
log_entry.data["category"].must_equal "documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
end
context "non-existing object" do
before do
delete "/jimmy/documents/foozius"
end
it "responds with 404" do
last_response.status.must_equal 404
end
it "doesn't log the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
objects.select{|o| o.data["count"] == -1}.size.must_equal 0
end
end
context "when an If-Match header is given" do
it "allows the request if it matches the current ETag" do
get "/jimmy/documents/foo"
old_etag = last_response.headers["ETag"]
header "If-Match", old_etag
delete "/jimmy/documents/foo"
last_response.status.must_equal 200
get "/jimmy/documents/foo"
last_response.status.must_equal 404
end
it "fails the request if it does not match the current ETag" do
header "If-Match", "WONTMATCH"
delete "/jimmy/documents/foo"
last_response.status.must_equal 412
get "/jimmy/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some private text data"
end
end
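# Deleting a binary document has to clean up both places it was written to:
# the main object in the data bucket and the file in the CS binary bucket;
# the opslog entry records the size as a negative delta.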
context "binary data" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
delete "/jimmy/documents/jaypeg"
end
it "removes the main object" do
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:documents:jaypeg")
}.must_raise Riak::HTTPFailedRequest
end
it "removes the binary object" do
last_response.status.must_equal 200
binary = cs_binary_bucket.files.get("jimmy:documents:jaypeg")
binary.must_be_nil
end
it "logs the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
log_entry = objects.select{|o| o.data["count"] == -1 && o.data["size"] == -16044}.first
log_entry.data["category"].must_equal "documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
end
end
end
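# Authorizations are stored in the auth bucket under "user:token" as an array
# of permitted categories; a request carrying an unknown token gets a 401.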
describe "unauthorized access" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents", "public"]
auth.store
header "Authorization", "Bearer 321"
end
describe "GET" do
it "returns a 401" do
get "/jimmy/documents/foo"
last_response.status.must_equal 401
end
end
describe "PUT" do
it "returns a 401" do
put "/jimmy/documents/foo", "some text"
last_response.status.must_equal 401
end
end
describe "DELETE" do
it "returns a 401" do
delete "/jimmy/documents/foo"
last_response.status.must_equal 401
end
end
end
end
end

View File

@@ -8,7 +8,6 @@ require_relative '../liquor-cabinet'
require 'minitest/autorun' require 'minitest/autorun'
require 'rack/test' require 'rack/test'
require 'purdytest' require 'purdytest'
require 'riak'
require "redis" require "redis"
require "rest_client" require "rest_client"
require "minitest/stub_any_instance" require "minitest/stub_any_instance"
@@ -44,74 +43,3 @@ if app.settings.respond_to? :redis
end end
end end
end end
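# Riak-specific test helpers: a shared client plus lazily memoized buckets
# (data, directories, authorizations, opslog) with allow_mult disabled, and a
# fog-aws connection to Riak CS for binary objects.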
if app.settings.respond_to? :riak
::Riak.disable_list_keys_warnings = true
def client
@client ||= ::Riak::Client.new(:host => app.settings.riak['host'],
:http_port => app.settings.riak['http_port'])
end
def data_bucket
@data_bucket ||= begin
bucket = client.bucket(app.settings.riak['buckets']['data'])
bucket.allow_mult = false
bucket
end
end
def directory_bucket
@directory_bucket ||= begin
bucket = client.bucket(app.settings.riak['buckets']['directories'])
bucket.allow_mult = false
bucket
end
end
def auth_bucket
@auth_bucket ||= begin
bucket = client.bucket(app.settings.riak['buckets']['authorizations'])
bucket.allow_mult = false
bucket
end
end
def opslog_bucket
@opslog_bucket ||= begin
bucket = client.bucket(app.settings.riak['buckets']['opslog'])
bucket.allow_mult = false
bucket
end
end
def cs_credentials
@cs_credentials ||= begin
credentials = File.read(app.settings.riak['riak_cs']['credentials_file'])
JSON.parse(credentials)
end
end
def cs_client
@cs_client ||= Fog::Storage.new({
:provider => 'AWS',
:aws_access_key_id => cs_credentials['key_id'],
:aws_secret_access_key => cs_credentials['key_secret'],
:endpoint => app.settings.riak['riak_cs']['endpoint']
})
end
def cs_binary_bucket
@cs_binary_bucket ||= cs_client.directories.create(:key => app.settings.riak['buckets']['cs_binaries'])
end
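# Wipes all test state: every key in the four Riak buckets and every file in
# the CS binary bucket.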
def purge_all_buckets
[data_bucket, directory_bucket, auth_bucket, opslog_bucket].each do |bucket|
bucket.keys.each {|key| bucket.delete key}
end
cs_binary_bucket.files.each do |file|
file.destroy
end
end
end
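# Hypothetical usage (not part of this diff): a spec run would call
# purge_all_buckets from a setup hook to start from a clean slate, e.g.
#
#   before do
#     purge_all_buckets
#   end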