Merge branch 'master' into stable

This commit is contained in:
Greg Karékinian 2018-05-23 11:14:48 +02:00
commit fa8a829ed9
20 changed files with 1514 additions and 3854 deletions

View File

@ -4,13 +4,10 @@ rvm:
- 2.4.1
services:
- redis-server
before_install:
- sh .travis/install_riakcs.sh
- gem install bundler
before_script:
- cp config.yml.example config.yml
- cp config.yml.example.$BACKEND config.yml
- mkdir -p tmp && echo "swifttoken" > tmp/swift_token.txt
script: ruby spec/swift/*
script: ruby spec/$BACKEND/*
branches:
only:
- master
@ -22,6 +19,8 @@ notifications:
- http://hook-juggler.herokuapp.com/hooks/travis
on_success: always
on_failure: always
# Force legacy Blue Box build for now
sudo: required
group: legacy
env:
- BACKEND=s3
- BACKEND=swift
# Run on Docker infrastructure
sudo: false

View File

@ -3,8 +3,6 @@ source "https://rubygems.org"
gem "sinatra"
gem "sinatra-contrib"
gem "activesupport"
gem "riak-client", git: "https://github.com/5apps/riak-ruby-client", branch: "invalid_uri_error"
gem "fog-aws"
gem "rest-client", "~> 2.1.0.rc1" # Fixes a memory leak in Ruby 2.4
gem "redis"
# Remove require when we can update to 3.0, which sets the new storage
@ -17,6 +15,7 @@ group :test do
gem 'purdytest', :require => false
gem 'm'
gem 'minitest-stub_any_instance'
gem 'webmock'
end
group :staging, :production do

View File

@ -1,121 +1,96 @@
GIT
remote: https://github.com/5apps/riak-ruby-client
revision: 5f21df86b14339aeb252374851d29ad813cca1dd
branch: invalid_uri_error
specs:
riak-client (1.4.0)
beefcake (~> 0.3.7)
builder (>= 2.1.2)
i18n (>= 0.4.0)
innertube (~> 1.0.2)
multi_json (~> 1.0)
GEM
remote: https://rubygems.org/
specs:
activesupport (5.1.2)
activesupport (5.2.0)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (~> 0.7)
i18n (>= 0.7, < 2)
minitest (~> 5.1)
tzinfo (~> 1.1)
backports (3.8.0)
beefcake (0.3.7)
builder (3.2.3)
addressable (2.5.2)
public_suffix (>= 2.0.2, < 4.0)
backports (3.11.2)
concurrent-ruby (1.0.5)
crack (0.4.3)
safe_yaml (~> 1.0.0)
domain_name (0.5.20170404)
unf (>= 0.0.5, < 1.0.0)
excon (0.57.1)
faraday (0.12.1)
faraday (0.14.0)
multipart-post (>= 1.2, < 3)
fog-aws (1.4.0)
fog-core (~> 1.38)
fog-json (~> 1.0)
fog-xml (~> 0.1)
ipaddress (~> 0.8)
fog-core (1.44.3)
builder
excon (~> 0.49)
formatador (~> 0.2)
fog-json (1.0.2)
fog-core (~> 1.0)
multi_json (~> 1.10)
fog-xml (0.1.3)
fog-core
nokogiri (>= 1.5.11, < 2.0.0)
formatador (0.2.5)
hashdiff (0.3.7)
http-accept (1.7.0)
http-cookie (1.0.3)
domain_name (~> 0.5)
i18n (0.8.4)
innertube (1.0.2)
ipaddress (0.8.3)
kgio (2.11.0)
i18n (1.0.0)
concurrent-ruby (~> 1.0)
kgio (2.11.2)
m (1.5.1)
method_source (>= 0.6.7)
rake (>= 0.9.2.2)
method_source (0.8.2)
method_source (0.9.0)
mime-types (3.1)
mime-types-data (~> 3.2015)
mime-types-data (3.2016.0521)
mini_portile2 (2.2.0)
minitest (5.10.2)
minitest-stub_any_instance (1.0.1)
multi_json (1.12.1)
minitest (5.11.3)
minitest-stub_any_instance (1.0.2)
multi_json (1.13.1)
multipart-post (2.0.0)
mustermann (1.0.0)
mustermann (1.0.2)
netrc (0.11.0)
nokogiri (1.8.0)
mini_portile2 (~> 2.2.0)
public_suffix (3.0.2)
purdytest (2.0.0)
minitest (~> 5.5)
rack (2.0.3)
rack-protection (2.0.0)
rack (2.0.4)
rack-protection (2.0.1)
rack
rack-test (0.6.3)
rack (>= 1.0)
rack-test (1.0.0)
rack (>= 1.0, < 3)
rainbows (5.1.1)
kgio (~> 2.5)
rack (>= 1.1, < 3.0)
unicorn (~> 5.1)
raindrops (0.18.0)
rake (12.0.0)
redis (3.3.3)
raindrops (0.19.0)
rake (12.3.1)
redis (4.0.1)
rest-client (2.1.0.rc1)
http-accept (>= 1.7.0, < 2.0)
http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 4.0)
netrc (~> 0.8)
sentry-raven (2.5.3)
safe_yaml (1.0.4)
sentry-raven (2.7.2)
faraday (>= 0.7.6, < 1.0)
sinatra (2.0.0)
sinatra (2.0.1)
mustermann (~> 1.0)
rack (~> 2.0)
rack-protection (= 2.0.0)
rack-protection (= 2.0.1)
tilt (~> 2.0)
sinatra-contrib (2.0.0)
sinatra-contrib (2.0.1)
backports (>= 2.0)
multi_json
mustermann (~> 1.0)
rack-protection (= 2.0.0)
sinatra (= 2.0.0)
rack-protection (= 2.0.1)
sinatra (= 2.0.1)
tilt (>= 1.3, < 3)
thread_safe (0.3.6)
tilt (2.0.7)
tzinfo (1.2.3)
tilt (2.0.8)
tzinfo (1.2.5)
thread_safe (~> 0.1)
unf (0.1.4)
unf_ext
unf_ext (0.0.7.4)
unicorn (5.3.0)
unf_ext (0.0.7.5)
unicorn (5.4.0)
kgio (~> 2.6)
raindrops (~> 0.7)
webmock (3.3.0)
addressable (>= 2.3.6)
crack (>= 0.3.2)
hashdiff
PLATFORMS
ruby
DEPENDENCIES
activesupport
fog-aws
m
mime-types
minitest-stub_any_instance
@ -125,10 +100,10 @@ DEPENDENCIES
rake
redis
rest-client (~> 2.1.0.rc1)
riak-client!
sentry-raven
sinatra
sinatra-contrib
webmock
BUNDLED WITH
1.15.1
1.16.0

View File

@ -3,16 +3,19 @@
# Liquor Cabinet
Liquor Cabinet is where Frank stores all his stuff. It's a
remoteStorage-compatible storage provider API, based on Sinatra and currently
using Riak as backend. You can use it on its own, or e.g. mount it from a Rails
application.
[remoteStorage](https://remotestorage.io) HTTP API, based on Sinatra. The
metadata and OAuth tokens are stored in Redis, and documents can be stored in
anything that supports the storage API of either Openstack Swift or Amazon S3.
It's merely implementing the storage API, not including the Webfinger and OAuth
parts of remoteStorage. You have to set the authorization keys/values in the
database yourself.
Liquor Cabinet only implements the storage API part of the remoteStorage
protocol, but does not include the Webfinger and OAuth parts. It is meant to be
added to existing systems and user accounts, so you will have to add your own
OAuth dialog for remoteStorage authorizations and persist the tokens in Redis.
If you have any questions about this thing, drop by #remotestorage on Freenode, and
we'll happily answer them.
If you have any questions about this program, drop by #remotestorage on
Freenode, or [post to the RS
forums](https://community.remotestorage.io/c/server-development), and we'll
happily answer them.
## Contributing

View File

@ -1,45 +0,0 @@
development: &defaults
maintenance: false
# riak: &riak_defaults
# host: localhost
# http_port: 8098
# riak_cs:
# credentials_file: "cs_credentials.json"
# endpoint: "http://cs.example.com:8080"
# buckets:
# data: rs_data
# directories: rs_directories
# binaries: rs_binaries
# cs_binaries: rs.binaries
# authorizations: rs_authorizations
# opslog: rs_opslog
# # uncomment this section and comment the riak one
# swift: &swift_defaults
# host: "https://swift.example.com"
# # Redis is needed for the swift backend
# redis:
# host: localhost
# port: 6379
test:
<<: *defaults
# riak:
# <<: *riak_defaults
# buckets:
# data: rs_data_test
# directories: rs_directories_test
# binaries: rs_binaries_test
# cs_binaries: rs.binaries.test
# authorizations: rs_authorizations_test
# opslog: rs_opslog_test
swift:
host: "https://swift.example.com"
redis:
host: localhost
port: 6379
staging:
<<: *defaults
production:
<<: *defaults

26
config.yml.example.s3 Normal file
View File

@ -0,0 +1,26 @@
development: &defaults
maintenance: false
s3:
endpoint: "https://some-endpoint"
region: "region"
access_key_id: ""
secret_key_id: ""
bucket: "test-bucket"
redis:
host: localhost
port: 6379
test:
<<: *defaults
s3:
endpoint: "https://some-endpoint"
region: "region"
access_key_id: ""
secret_key_id: ""
bucket: "test-bucket"
staging:
<<: *defaults
production:
<<: *defaults

18
config.yml.example.swift Normal file
View File

@ -0,0 +1,18 @@
development: &defaults
maintenance: false
swift: &swift_defaults
host: "https://swift.example.com"
redis:
host: localhost
port: 6379
test:
<<: *defaults
swift:
host: "https://swift.example.com"
staging:
<<: *defaults
production:
<<: *defaults

View File

@ -0,0 +1,503 @@
require "rest_client"
require "json"
require "cgi"
require "active_support/core_ext/time/conversions"
require "active_support/core_ext/numeric/time"
require "active_support/core_ext/hash"
require "redis"
require "digest/md5"
module RemoteStorage
module RestProvider
attr_accessor :settings, :server
def initialize(settings, server)
@settings = settings
@server = server
end
def authorize_request(user, directory, token, listing=false)
request_method = server.env["REQUEST_METHOD"]
if directory.split("/").first == "public"
return true if ["GET", "HEAD"].include?(request_method) && !listing
end
server.halt 401, "Unauthorized" if token.nil? || token.empty?
authorizations = redis.smembers("authorizations:#{user}:#{token}")
permission = directory_permission(authorizations, directory)
server.halt 401, "Unauthorized" unless permission
if ["PUT", "DELETE"].include? request_method
server.halt 401, "Unauthorized" unless permission == "rw"
end
end
def get_head(user, directory, key)
none_match = (server.env["HTTP_IF_NONE_MATCH"] || "").split(",")
.map(&:strip)
.map { |s| s.gsub(/^"?W\//, "") }
metadata = redis.hgetall redis_metadata_object_key(user, directory, key)
server.halt 404 if metadata.empty?
# Set the response headers for a 304 or 200 response
server.headers["ETag"] = %Q("#{metadata["e"]}")
server.headers["Last-Modified"] = Time.at(metadata["m"].to_i / 1000).httpdate
server.headers["Content-Type"] = metadata["t"]
server.headers["Content-Length"] = metadata["s"]
if none_match.include? %Q("#{metadata["e"]}")
server.halt 304
end
end
def get_data(user, directory, key)
none_match = (server.env["HTTP_IF_NONE_MATCH"] || "").split(",")
.map(&:strip)
.map { |s| s.gsub(/^"?W\//, "") }
metadata = redis.hgetall redis_metadata_object_key(user, directory, key)
if none_match.include? %Q("#{metadata["e"]}")
server.headers["ETag"] = %Q("#{metadata["e"]}")
server.headers["Last-Modified"] = Time.at(metadata["m"].to_i / 1000).httpdate
server.halt 304
end
url = url_for_key(user, directory, key)
res = do_get_request(url)
set_response_headers(res.headers)
return res.body
rescue RestClient::ResourceNotFound
server.halt 404, "Not Found"
end
def get_head_directory_listing(user, directory)
get_directory_listing(user, directory)
"" # just return empty body, headers are set by get_directory_listing
end
def get_directory_listing(user, directory)
etag = redis.hget "rs:m:#{user}:#{directory}/", "e"
server.headers["Content-Type"] = "application/ld+json"
none_match = (server.env["HTTP_IF_NONE_MATCH"] || "").split(",")
.map(&:strip)
.map { |s| s.gsub(/^"?W\//, "") }
if etag
server.halt 304 if none_match.include? %Q("#{etag}")
items = get_directory_listing_from_redis_via_lua(user, directory)
else
etag = etag_for(user, directory)
items = {}
server.halt 304 if none_match.include? %Q("#{etag}")
end
server.headers["ETag"] = %Q("#{etag}")
listing = {
"@context" => "http://remotestorage.io/spec/folder-description",
"items" => items
}
listing.to_json
end
# Stores a document and updates its metadata plus all parent directory
# objects in Redis. Responds 201 for a new document, 200 for an update.
# Halts: 400 (Content-Range not supported), 409 (name collision with an
# existing directory/document), 412 (failed If-Match / If-None-Match
# precondition), 500 (metadata update failed).
def put_data(user, directory, key, data, content_type)
server.halt 400 if server.env["HTTP_CONTENT_RANGE"]
server.halt 409, "Conflict" if has_name_collision?(user, directory, key)
existing_metadata = redis.hgetall redis_metadata_object_key(user, directory, key)
url = url_for_key(user, directory, key)
if required_match = server.env["HTTP_IF_MATCH"]
# Strip a weak-etag marker (W/) before comparing
required_match = required_match.gsub(/^"?W\//, "")
unless required_match == %Q("#{existing_metadata["e"]}")
# get actual metadata and compare in case redis metadata became out of sync
begin
head_res = do_head_request(url)
# The file doesn't exist, return 412
rescue RestClient::ResourceNotFound
server.halt 412, "Precondition Failed"
end
if required_match == format_etag(head_res.headers[:etag])
# log previous size difference that was missed earlier because of redis failure
log_size_difference(user, existing_metadata["s"], head_res.headers[:content_length])
else
server.halt 412, "Precondition Failed"
end
end
end
if server.env["HTTP_IF_NONE_MATCH"] == "*"
# "If-None-Match: *" means "only create, never overwrite"
server.halt 412, "Precondition Failed" unless existing_metadata.empty?
end
etag, timestamp = do_put_request(url, data, content_type)
metadata = {
e: etag,
s: data.size,
t: content_type,
m: timestamp
}
if update_metadata_object(user, directory, key, metadata)
# Only touch parent directory etags/timestamps when listing-relevant
# metadata actually changed
if metadata_changed?(existing_metadata, metadata)
update_dir_objects(user, directory, timestamp, checksum_for(data))
log_size_difference(user, existing_metadata["s"], metadata[:s])
end
server.headers["ETag"] = %Q("#{etag}")
server.halt existing_metadata.empty? ? 201 : 200
else
server.halt 500
end
end
def delete_data(user, directory, key)
url = url_for_key(user, directory, key)
existing_metadata = redis.hgetall "rs:m:#{user}:#{directory}/#{key}"
if required_match = server.env["HTTP_IF_MATCH"]
unless required_match.gsub(/^"?W\//, "") == %Q("#{existing_metadata["e"]}")
server.halt 412, "Precondition Failed"
end
end
found = try_to_delete(url)
log_size_difference(user, existing_metadata["s"], 0)
delete_metadata_objects(user, directory, key)
delete_dir_objects(user, directory)
if found
server.headers["Etag"] = %Q("#{existing_metadata["e"]}")
server.halt 200
else
server.halt 404, "Not Found"
end
end
private
# Implement this method in your class that includes this module. For example
# %Q("#{etag}") if the ETag does not already have quotes around it.
def format_etag(etag)
  # Bug fix: the bare constant was silently returned instead of raised,
  # so a provider that forgot to override this produced a bogus ETag.
  raise NotImplementedError
end
# Base URL of the storage backend. Must be implemented by the including
# class (see `default_headers` for the raising convention).
def base_url
  # Bug fix: raise instead of silently returning the NotImplementedError class.
  raise NotImplementedError
end
# URL of the storage container for +user+. Must be implemented by the
# including class.
def container_url_for(user)
  # Bug fix: raise instead of silently returning the NotImplementedError class.
  raise NotImplementedError
end
# HTTP headers (e.g. authorization) sent with every backend request.
# Must be implemented by the including class.
def default_headers
raise NotImplementedError
end
def set_response_headers(headers)
server.headers["ETag"] = format_etag(headers[:etag])
server.headers["Content-Type"] = headers[:content_type]
server.headers["Content-Length"] = headers[:content_length]
server.headers["Last-Modified"] = headers[:last_modified]
end
# Returns the accounting category for a directory path: "public/<cat>" for
# paths under public/, otherwise the first path segment.
def extract_category(directory)
  segments = directory.split("/")
  return "public/#{segments[1]}" if directory.start_with?("public/")
  segments.first
end
# Resolves the permission ("r" or "rw") that a set of authorization strings
# grants for +directory+; returns nil when no scope applies.
# Each authorization is "<dir>:<perm>" or a bare "<dir>", which implies "rw".
# The empty-string scope ("") acts as the global fallback.
def directory_permission(authorizations, directory)
authorizations = authorizations.map do |auth|
auth.index(":") ? auth.split(":") : [auth, "rw"]
end
authorizations = Hash[*authorizations.flatten]
permission = authorizations[""]
# A matching scope may upgrade nil/"r" to its own value; "rw" wins
# immediately, so statement order here is significant.
authorizations.each do |key, value|
if directory.match(/^(public\/)?#{key}(\/|$)/)
if permission.nil? || permission == "r"
permission = value
end
return permission if permission == "rw"
end
end
permission
end
def has_name_collision?(user, directory, key)
lua_script = <<-EOF
local user = ARGV[1]
local directory = ARGV[2]
local key = ARGV[3]
-- build table with parent directories from remaining arguments
local parent_dir_count = #ARGV - 3
local parent_directories = {}
for i = 4, 4 + parent_dir_count do
table.insert(parent_directories, ARGV[i])
end
-- check for existing directory with the same name as the document
local redis_key = "rs:m:"..user..":"
if directory == "" then
redis_key = redis_key..key.."/"
else
redis_key = redis_key..directory.."/"..key.."/"
end
if redis.call("hget", redis_key, "e") then
return true
end
for index, dir in pairs(parent_directories) do
if redis.call("hget", "rs:m:"..user..":"..dir.."/", "e") then
-- the directory already exists, no need to do further checks
return false
else
-- check for existing document with same name as directory
if redis.call("hget", "rs:m:"..user..":"..dir, "e") then
return true
end
end
end
return false
EOF
parent_directories = parent_directories_for(directory)
redis.eval(lua_script, nil, [user, directory, key, *parent_directories])
end
# True when any listing-relevant attribute (etag, size, mtime, content type)
# differs between the stored (string-keyed) and new (symbol-keyed) metadata.
def metadata_changed?(old_metadata, new_metadata)
  comparisons = {
    "e" => new_metadata[:e],
    "s" => new_metadata[:s].to_s,
    "m" => new_metadata[:m],
    "t" => new_metadata[:t]
  }
  comparisons.any? { |field, new_value| old_metadata[field] != new_value }
end
# Milliseconds since the Unix epoch for the given date string (e.g. an HTTP
# Last-Modified header value).
# NOTE(review): DateTime is a legacy API; Time.httpdate would be the modern
# equivalent — left as-is to avoid changing parse behavior for odd formats.
def timestamp_for(date)
return DateTime.parse(date).strftime("%Q").to_i
end
def log_size_difference(user, old_size, new_size)
delta = new_size.to_i - old_size.to_i
redis.incrby "rs:s:#{user}", delta
end
# MD5 hex digest of the document body; fed into directory etag generation.
def checksum_for(data)
  Digest::MD5.new.hexdigest(data)
end
# All ancestor paths of +directory+ from most to least specific, ending with
# "" for the root. E.g. "a/b" => ["a/b", "a", ""].
def parent_directories_for(directory)
  segments = directory.split("/")
  ancestors = segments.length.downto(1).map { |depth| segments.first(depth).join("/") }
  ancestors << "" # the root directory is always an ancestor
end
# The last path segment of +directory+; the directory itself when it has no
# slash, nil for the root ("").
def top_directory(directory)
  return nil if directory.empty?
  directory.include?("/") ? directory.split("/").last : directory
end
# The parent path of +directory+, including the trailing slash; "/" for
# top-level directories, nil for the root ("").
def parent_directory_for(directory)
  return nil if directory.empty?
  slash_index = directory.rindex("/")
  slash_index ? directory[0..slash_index] : "/"
end
def update_metadata_object(user, directory, key, metadata)
redis_key = redis_metadata_object_key(user, directory, key)
redis.hmset(redis_key, *metadata)
redis.sadd "rs:m:#{user}:#{directory}/:items", key
true
end
def update_dir_objects(user, directory, timestamp, checksum)
parent_directories_for(directory).each do |dir|
etag = etag_for(dir, timestamp, checksum)
key = "rs:m:#{user}:#{dir}/"
metadata = {e: etag, m: timestamp}
redis.hmset(key, *metadata)
redis.sadd "rs:m:#{user}:#{parent_directory_for(dir)}:items", "#{top_directory(dir)}/"
end
end
def delete_metadata_objects(user, directory, key)
redis.del redis_metadata_object_key(user, directory, key)
redis.srem "rs:m:#{user}:#{directory}/:items", key
end
def delete_dir_objects(user, directory)
timestamp = (Time.now.to_f * 1000).to_i
parent_directories_for(directory).each do |dir|
if dir_empty?(user, dir)
redis.del "rs:m:#{user}:#{dir}/"
redis.srem "rs:m:#{user}:#{parent_directory_for(dir)}:items", "#{top_directory(dir)}/"
else
etag = etag_for(dir, timestamp)
metadata = {e: etag, m: timestamp}
redis.hmset("rs:m:#{user}:#{dir}/", *metadata)
end
end
end
def dir_empty?(user, dir)
redis.smembers("rs:m:#{user}:#{dir}/:items").empty?
end
# Redis hash key holding the metadata for a single document.
def redis_metadata_object_key(user, directory, key)
  path = [directory, key].reject(&:empty?).join("/")
  "rs:m:#{user}:#{path}"
end
def url_for_key(user, directory, key)
File.join [container_url_for(user), escape(directory), escape(key)].compact
end
def do_put_request(url, data, content_type)
deal_with_unauthorized_requests do
res = RestClient.put(url, data, default_headers.merge({content_type: content_type}))
return [
res.headers[:etag],
timestamp_for(res.headers[:last_modified])
]
end
end
def do_get_request(url, &block)
deal_with_unauthorized_requests do
RestClient.get(url, default_headers, &block)
end
end
def do_head_request(url, &block)
deal_with_unauthorized_requests do
RestClient.head(url, default_headers, &block)
end
end
def do_delete_request(url)
deal_with_unauthorized_requests do
RestClient.delete(url, default_headers)
end
end
# Percent-encodes +url+ for use in backend object URLs.
def escape(url)
  encoded = CGI::escape(url)
  encoded = encoded.gsub("+", "%20") # spaces as %20, not form-style +
  encoded.gsub("%2F", "/")           # keep slashes as path separators
end
def redis
@redis ||= Redis.new(settings.redis.symbolize_keys)
end
# Deterministic etag derived from the given components, joined with ":".
def etag_for(*args)
  joined = args.map(&:to_s).join(":")
  Digest::MD5.hexdigest(joined)
end
def deal_with_unauthorized_requests(&block)
begin
block.call
rescue RestClient::Unauthorized => ex
Raven.capture_exception(ex)
server.halt 500
end
end
def try_to_delete(url)
found = true
begin
do_delete_request(url)
rescue RestClient::ResourceNotFound
found = false
end
found
end
def get_directory_listing_from_redis_via_lua(user, directory)
lua_script = <<-EOF
local user = ARGV[1]
local directory = ARGV[2]
local items = redis.call("smembers", "rs:m:"..user..":"..directory.."/:items")
local listing = {}
for index, name in pairs(items) do
local redis_key = "rs:m:"..user..":"
if directory == "" then
redis_key = redis_key..name
else
redis_key = redis_key..directory.."/"..name
end
local metadata_values = redis.call("hgetall", redis_key)
local metadata = {}
-- redis returns hashes as a single list of alternating keys and values
-- this collates it into a table
for idx = 1, #metadata_values, 2 do
metadata[metadata_values[idx]] = metadata_values[idx + 1]
end
listing[name] = {["ETag"] = metadata["e"]}
if string.sub(name, -1) ~= "/" then
listing[name]["Content-Type"] = metadata["t"]
listing[name]["Content-Length"] = tonumber(metadata["s"])
listing[name]["Last-Modified"] = tonumber(metadata["m"])
end
end
return cjson.encode(listing)
EOF
items = JSON.parse(redis.eval(lua_script, nil, [user, directory]))
items.reject{|k, _| k.end_with? "/"}.each do |_, v|
v["Last-Modified"] = Time.at(v["Last-Modified"]/1000).httpdate
end
items
end
end
end

View File

@ -1,531 +0,0 @@
require "riak"
require "json"
require "cgi"
require "active_support/core_ext/time/conversions"
require "active_support/core_ext/numeric/time"
module RemoteStorage
class Riak
::Riak.url_decoding = true
attr_accessor :settings, :server, :cs_credentials
def initialize(settings, server)
self.settings = settings
self.server = server
credentials = File.read(settings.riak['riak_cs']['credentials_file'])
self.cs_credentials = JSON.parse(credentials)
end
def authorize_request(user, directory, token, listing=false)
request_method = server.env["REQUEST_METHOD"]
if directory.split("/").first == "public"
return true if ["GET", "HEAD"].include?(request_method) && !listing
end
authorizations = auth_bucket.get("#{user}:#{token}").data
permission = directory_permission(authorizations, directory)
server.halt 401 unless permission
if ["PUT", "DELETE"].include? request_method
server.halt 401 unless permission == "rw"
end
rescue ::Riak::HTTPFailedRequest
server.halt 401
end
def get_head(user, directory, key)
object = data_bucket.get("#{user}:#{directory}:#{key}")
set_object_response_headers(object)
server.halt 200
rescue ::Riak::HTTPFailedRequest
server.halt 404
end
def get_data(user, directory, key)
object = data_bucket.get("#{user}:#{directory}:#{key}")
set_object_response_headers(object)
none_match = (server.env["HTTP_IF_NONE_MATCH"] || "").split(",").map(&:strip)
server.halt 304 if none_match.include? object.etag
if binary_key = object.meta["binary_key"]
object = cs_binary_bucket.files.get(binary_key[0])
case object.content_type[/^[^;\s]+/]
when "application/json"
return object.body.to_json
else
return object.body
end
end
case object.content_type[/^[^;\s]+/]
when "application/json"
return object.data.to_json
else
data = serializer_for(object.content_type) ? object.data : object.raw_data
# Never return nil, always turn data into a string
return data.nil? ? '' : data
end
rescue ::Riak::HTTPFailedRequest
server.halt 404
end
def get_head_directory_listing(user, directory)
directory_object = directory_bucket.get("#{user}:#{directory}")
set_directory_response_headers(directory_object)
server.halt 200
rescue ::Riak::HTTPFailedRequest
server.halt 404
end
def get_directory_listing(user, directory)
directory_object = directory_bucket.get("#{user}:#{directory}")
set_directory_response_headers(directory_object)
none_match = (server.env["HTTP_IF_NONE_MATCH"] || "").split(",").map(&:strip)
server.halt 304 if none_match.include? directory_object.etag
listing = directory_listing(user, directory)
return listing.to_json
rescue ::Riak::HTTPFailedRequest
server.halt 404
end
def put_data(user, directory, key, data, content_type=nil)
server.halt 409 if has_name_collision?(user, directory, key)
object = build_data_object(user, directory, key, data, content_type)
if required_match = server.env["HTTP_IF_MATCH"]
server.halt 412 unless required_match == object.etag
end
object_exists = !object.raw_data.nil? || !object.meta["binary_key"].nil?
existing_object_size = object_size(object)
server.halt 412 if object_exists && server.env["HTTP_IF_NONE_MATCH"] == "*"
timestamp = (Time.now.to_f * 1000).to_i
object.meta["timestamp"] = timestamp
if binary_data?(object.content_type, data)
save_binary_data(object, data) or server.halt 422
new_object_size = data.size
else
set_object_data(object, data) or server.halt 422
new_object_size = object.raw_data.size
end
object.store
log_count = object_exists ? 0 : 1
log_operation(user, directory, log_count, new_object_size, existing_object_size)
update_all_directory_objects(user, directory, timestamp)
server.headers["ETag"] = object.etag
server.halt object_exists ? 200 : 201
rescue ::Riak::HTTPFailedRequest
server.halt 422
end
def delete_data(user, directory, key)
object = data_bucket.get("#{user}:#{directory}:#{key}")
existing_object_size = object_size(object)
etag = object.etag
if required_match = server.env["HTTP_IF_MATCH"]
server.halt 412 unless required_match == etag
end
if binary_key = object.meta["binary_key"]
object = cs_binary_bucket.files.get(binary_key[0])
object.destroy
end
riak_response = data_bucket.delete("#{user}:#{directory}:#{key}")
if riak_response[:code] != 404
log_operation(user, directory, -1, 0, existing_object_size)
end
timestamp = (Time.now.to_f * 1000).to_i
delete_or_update_directory_objects(user, directory, timestamp)
server.halt 200
rescue ::Riak::HTTPFailedRequest
server.halt 404
end
private
def set_object_response_headers(object)
server.headers["Content-Type"] = object.content_type
server.headers["ETag"] = object.etag
server.headers["Content-Length"] = object_size(object).to_s
end
def set_directory_response_headers(directory_object)
server.headers["Content-Type"] = "application/json"
server.headers["ETag"] = directory_object.etag
end
def extract_category(directory)
if directory.match(/^public\//)
"public/#{directory.split('/')[1]}"
else
directory.split('/').first
end
end
def build_data_object(user, directory, key, data, content_type=nil)
object = data_bucket.get_or_new("#{user}:#{directory}:#{key}")
object.content_type = content_type || "text/plain; charset=utf-8"
directory_index = directory == "" ? "/" : directory
object.indexes.merge!({:user_id_bin => [user],
:directory_bin => [directory_index]})
object
end
def log_operation(user, directory, count, new_size=0, old_size=0)
size = (-old_size + new_size)
return if count == 0 && size == 0
log_entry = opslog_bucket.new
log_entry.content_type = "application/json"
log_entry.data = {
"count" => count,
"size" => size,
"category" => extract_category(directory)
}
log_entry.indexes.merge!({:user_id_bin => [user]})
log_entry.store
end
def object_size(object)
if binary_key = object.meta["binary_key"]
response = cs_client.head_object cs_binary_bucket.key, binary_key[0]
response.headers["Content-Length"].to_i
else
object.raw_data.nil? ? 0 : object.raw_data.size
end
end
def escape(string)
::Riak.escaper.escape(string).gsub("+", "%20").gsub('/', "%2F")
end
# A URI object that can be used with HTTP backend methods
def riak_uri(bucket, key)
URI.parse "http://#{settings.riak["host"]}:#{settings.riak["http_port"]}/riak/#{bucket}/#{key}"
end
def serializer_for(content_type)
::Riak::Serializers[content_type[/^[^;\s]+/]]
end
def directory_permission(authorizations, directory)
authorizations = authorizations.map do |auth|
auth.index(":") ? auth.split(":") : [auth, "rw"]
end
authorizations = Hash[*authorizations.flatten]
permission = authorizations[""]
authorizations.each do |key, value|
if directory.match(/^(public\/)?#{key}(\/|$)/)
if permission.nil? || permission == "r"
permission = value
end
return permission if permission == "rw"
end
end
permission
end
def directory_listing(user, directory)
listing = {
"@context" => "http://remotestorage.io/spec/folder-description",
"items" => {}
}
sub_directories(user, directory).each do |entry|
directory_name = entry["name"].split("/").last
etag = entry["etag"]
listing["items"].merge!({ "#{directory_name}/" => { "ETag" => etag }})
end
directory_entries(user, directory).each do |entry|
entry_name = entry["name"]
etag = entry["etag"]
content_type = entry["contentType"]
content_length = entry["contentLength"].to_i
listing["items"].merge!({
entry_name => {
"ETag" => etag,
"Content-Type" => content_type,
"Content-Length" => content_length
}
})
end
listing
end
def directory_entries(user, directory)
all_keys = user_directory_keys(user, directory, data_bucket)
return [] if all_keys.empty?
map_query = <<-EOH
function(v){
var metadata = v.values[0]['metadata'];
var dir_name = metadata['index']['directory_bin'];
if (dir_name === '/') {
dir_name = '';
}
var name = v.key.match(/^[^:]*:(.*)/)[1]; // strip username from key
name = name.replace(dir_name + ':', ''); // strip directory from key
var etag = metadata['X-Riak-VTag'];
var contentType = metadata['content-type'];
var contentLength = metadata['X-Riak-Meta']['X-Riak-Meta-Content_length'] || 0;
return [{
name: name,
etag: etag,
contentType: contentType,
contentLength: contentLength
}];
}
EOH
run_map_reduce(data_bucket, all_keys, map_query)
end
def sub_directories(user, directory)
all_keys = user_directory_keys(user, directory, directory_bucket)
return [] if all_keys.empty?
map_query = <<-EOH
function(v){
var name = v.key.match(/^[^:]*:(.*)/)[1]; // strip username from key
var etag = v.values[0]['metadata']['X-Riak-VTag'];
return [{
name: name,
etag: etag
}];
}
EOH
run_map_reduce(directory_bucket, all_keys, map_query)
end
def user_directory_keys(user, directory, bucket)
directory = "/" if directory == ""
user_keys = bucket.get_index("user_id_bin", user)
directory_keys = bucket.get_index("directory_bin", directory)
user_keys & directory_keys
end
def run_map_reduce(bucket, keys, map_query)
map_reduce = ::Riak::MapReduce.new(client)
keys.each do |key|
map_reduce.add(bucket.name, key)
end
map_reduce.
map(map_query, :keep => true).
run
end
def update_all_directory_objects(user, directory, timestamp)
parent_directories_for(directory).each do |parent_directory|
update_directory_object(user, parent_directory, timestamp)
end
end
def update_directory_object(user, directory, timestamp)
if directory.match(/\//)
parent_directory = directory[0..directory.rindex("/")-1]
elsif directory != ""
parent_directory = "/"
end
directory_object = directory_bucket.new("#{user}:#{directory}")
directory_object.content_type = "text/plain; charset=utf-8"
directory_object.data = timestamp.to_s
directory_object.indexes.merge!({:user_id_bin => [user]})
if parent_directory
directory_object.indexes.merge!({:directory_bin => [parent_directory]})
end
directory_object.store
end
def delete_or_update_directory_objects(user, directory, timestamp)
parent_directories_for(directory).each do |parent_directory|
existing_files = directory_entries(user, parent_directory)
existing_subdirectories = sub_directories(user, parent_directory)
if existing_files.empty? && existing_subdirectories.empty?
directory_bucket.delete "#{user}:#{parent_directory}"
else
update_directory_object(user, parent_directory, timestamp)
end
end
end
def set_object_data(object, data)
if object.content_type[/^[^;\s]+/] == "application/json"
data = "{}" if data.blank?
data = JSON.parse(data)
end
object.meta["content_length"] = data.size
if serializer_for(object.content_type)
object.data = data
else
object.raw_data = data
end
rescue JSON::ParserError
return false
end
def save_binary_data(object, data)
cs_binary_object = cs_binary_bucket.files.create(
:key => object.key,
:body => data,
:content_type => object.content_type
)
object.meta["binary_key"] = cs_binary_object.key
object.meta["content_length"] = cs_binary_object.content_length
object.raw_data = ""
end
def binary_data?(content_type, data)
return true if content_type[/[^;\s]+$/] == "charset=binary"
original_encoding = data.encoding
data.force_encoding("UTF-8")
is_binary = !data.valid_encoding?
data.force_encoding(original_encoding)
is_binary
end
def parent_directories_for(directory)
directories = directory.split("/")
parent_directories = []
while directories.any?
parent_directories << directories.join("/")
directories.pop
end
parent_directories << ""
end
def has_name_collision?(user, directory, key)
parent_directories = parent_directories_for(directory).reverse
parent_directories.shift # remove root dir entry
# check for existing documents with the same name as one of the parent directories
parent_directories.each do |dir|
begin
parts = dir.split("/")
document_key = parts.pop
directory_name = parts.join("/")
data_bucket.get("#{user}:#{directory_name}:#{document_key}")
return true
rescue ::Riak::HTTPFailedRequest
end
end
# check for an existing directory with same name as document
begin
directory_bucket.get("#{user}:#{directory}/#{key}")
return true
rescue ::Riak::HTTPFailedRequest
end
false
end
def client
@client ||= ::Riak::Client.new(:host => settings.riak['host'],
:http_port => settings.riak['http_port'])
end
def data_bucket
@data_bucket ||= begin
bucket = client.bucket(settings.riak['buckets']['data'])
bucket.allow_mult = false
bucket
end
end
def directory_bucket
@directory_bucket ||= begin
bucket = client.bucket(settings.riak['buckets']['directories'])
bucket.allow_mult = false
bucket
end
end
# Riak bucket holding authorization tokens; allow_mult disabled.
def auth_bucket
  @auth_bucket ||= client.bucket(settings.riak['buckets']['authorizations']).tap do |bucket|
    bucket.allow_mult = false
  end
end
# Riak bucket holding binary payloads; allow_mult disabled.
def binary_bucket
  @binary_bucket ||= client.bucket(settings.riak['buckets']['binaries']).tap do |bucket|
    bucket.allow_mult = false
  end
end
# Riak bucket holding the operations log; allow_mult disabled.
def opslog_bucket
  @opslog_bucket ||= client.bucket(settings.riak['buckets']['opslog']).tap do |bucket|
    bucket.allow_mult = false
  end
end
# Memoized Fog storage client talking to the Riak CS (S3-compatible)
# endpoint with the configured CS credentials.
def cs_client
  @cs_client ||= Fog::Storage.new(
    provider: 'AWS',
    aws_access_key_id: cs_credentials['key_id'],
    aws_secret_access_key: cs_credentials['key_secret'],
    endpoint: settings.riak['riak_cs']['endpoint']
  )
end
# Memoized Fog directory (CS bucket) used for binary payloads.
# NOTE(review): assumes directories.create is safe to call when the bucket
# already exists — confirm against fog-aws behavior.
def cs_binary_bucket
@cs_binary_bucket ||= cs_client.directories.create(:key => settings.riak['buckets']['cs_binaries'])
end
end
end

113
lib/remote_storage/s3.rb Normal file
View File

@ -0,0 +1,113 @@
require "remote_storage/rest_provider"
require "digest"
require "base64"
require "openssl"
require "webrick/httputils"
module RemoteStorage
class S3
include RestProvider
private
# S3 already wraps the ETag around quotes
# so, unlike the Swift backend's override, no extra quoting is applied.
def format_etag(etag)
etag
end
# PUT an object with legacy-signature auth headers.
# Returns [etag_without_quotes, last_modified_in_ms].
def do_put_request(url, data, content_type)
deal_with_unauthorized_requests do
# Content-MD5 lets S3 verify payload integrity server-side
md5 = Digest::MD5.base64digest(data)
authorization_headers = authorization_headers_for(
"PUT", url, md5, content_type
).merge({ "Content-Type" => content_type, "Content-Md5" => md5 })
res = RestClient.put(url, data, authorization_headers)
# S3 does not return a Last-Modified response header on PUTs
head_res = do_head_request(url)
return [
res.headers[:etag].delete('"'),
timestamp_for(head_res.headers[:last_modified])
]
end
end
# GET with signed headers; an optional block can stream the response.
def do_get_request(url, &block)
deal_with_unauthorized_requests do
authorization_headers = authorization_headers_for("GET", url)
RestClient.get(url, authorization_headers, &block)
end
end
# HEAD with signed headers; used for existence checks and metadata.
def do_head_request(url, &block)
deal_with_unauthorized_requests do
authorization_headers = authorization_headers_for("HEAD", url)
RestClient.head(url, authorization_headers, &block)
end
end
# DELETE with signed headers.
def do_delete_request(url)
deal_with_unauthorized_requests do
authorization_headers = authorization_headers_for("DELETE", url)
RestClient.delete(url, authorization_headers)
end
end
# Deletes the object only if it currently exists.
# Returns true when a delete was issued, false when nothing was there.
def try_to_delete(url)
  begin
    do_head_request(url)
  rescue RestClient::ResourceNotFound
    return false
  end
  do_delete_request(url)
  true
end
# Builds the "Authorization" and "Date" headers for a single request.
# This is using the legacy S3 authorization scheme, not the newer AWS V4
# Signatures
# (https://s3.amazonaws.com/doc/s3-developer-guide/RESTAuthentication.html)
def authorization_headers_for(http_verb, url, md5 = nil, content_type = nil)
# the signature is computed over the path relative to the endpoint
url = File.join("/", url.gsub(base_url, ""))
date = Time.now.httpdate
signed_data = generate_s3_signature(http_verb, md5, content_type, date, url)
{
"Authorization" => "AWS #{credentials[:access_key_id]}:#{signed_data}",
"Date" => date
}
end
# Memoized S3 credential pair from the config's s3 section.
def credentials
@credentials ||= { access_key_id: settings.s3["access_key_id"], secret_key_id: settings.s3["secret_key_id"] }
end
# Base64-encodes the HMAC-SHA1 signature; strip removes the trailing
# newline that Base64.encode64 appends.
def digest(secret, string_to_sign)
  encoded = Base64.encode64(hmac(secret, string_to_sign))
  encoded.strip
end
# Raw (binary) HMAC-SHA1 of value under key.
def hmac(key, value)
  OpenSSL::HMAC.digest("sha1", key, value)
end
def uri_escape(s)
WEBrick::HTTPUtils.escape(s).gsub('%5B', '[').gsub('%5D', ']')
end
# Builds the canonical string-to-sign of the legacy S3 REST auth scheme,
# signs it with the secret key, and URI-escapes the result.
def generate_s3_signature(http_verb, md5, content_type, date, url)
  string_to_sign = [http_verb, md5, content_type, date, url].join("\n")
  uri_escape(digest(credentials[:secret_key_id], string_to_sign))
end
# Memoized S3 endpoint from the config.
def base_url
@base_url ||= settings.s3["endpoint"]
end
# Base URL of a user's storage area: <endpoint><bucket>/<user>
# NOTE(review): no "/" is inserted between endpoint and bucket — this
# assumes the configured endpoint ends with a slash; confirm the config
# convention.
def container_url_for(user)
"#{base_url}#{settings.s3["bucket"]}/#{user}"
end
end
end

View File

@ -1,459 +1,32 @@
require "rest_client"
require "json"
require "cgi"
require "active_support/core_ext/time/conversions"
require "active_support/core_ext/numeric/time"
require "active_support/core_ext/hash"
require "redis"
require "digest/md5"
require "remote_storage/rest_provider"
module RemoteStorage
class Swift
attr_accessor :settings, :server
# settings: the Sinatra settings object (values from config.yml)
# server: the request context (provides env, halt, headers, logger)
def initialize(settings, server)
@settings = settings
@server = server
end
# Validates the bearer token against the permissions stored in redis and
# halts 401 when the request isn't allowed. Anonymous GET/HEAD of
# documents under "public/" is always permitted — but not listings.
def authorize_request(user, directory, token, listing=false)
request_method = server.env["REQUEST_METHOD"]
if directory.split("/").first == "public"
return true if ["GET", "HEAD"].include?(request_method) && !listing
end
server.halt 401, "Unauthorized" if token.nil? || token.empty?
authorizations = redis.smembers("authorizations:#{user}:#{token}")
permission = directory_permission(authorizations, directory)
server.halt 401, "Unauthorized" unless permission
# writes additionally require the "rw" grant
if ["PUT", "DELETE"].include? request_method
server.halt 401, "Unauthorized" unless permission == "rw"
end
end
# HEAD for a single document: sets the metadata headers, no body; 404
# when the backend object is absent.
def get_head(user, directory, key)
url = url_for_key(user, directory, key)
res = do_head_request(url)
set_response_headers(res)
rescue RestClient::ResourceNotFound
server.halt 404
end
# GET a single document. Honors If-None-Match (304) and returns the body.
def get_data(user, directory, key)
url = url_for_key(user, directory, key)
res = do_get_request(url)
set_response_headers(res)
# If-None-Match may carry several comma-separated ETags; weak-validator
# prefixes (W/) are stripped before comparing
none_match = (server.env["HTTP_IF_NONE_MATCH"] || "").split(",")
.map(&:strip)
.map { |s| s.gsub(/^"?W\//, "") }
server.halt 304 if none_match.include? %Q("#{res.headers[:etag]}")
return res.body
rescue RestClient::ResourceNotFound
server.halt 404, "Not Found"
end
# HEAD on a folder: reuse get_directory_listing for the headers, drop the
# body.
def get_head_directory_listing(user, directory)
get_directory_listing(user, directory)
"" # just return empty body, headers are set by get_directory_listing
end
# Folder listing per the remoteStorage folder-description format. The
# folder's ETag lives in redis; when it's missing, an empty listing with
# a synthetic ETag is served. Honors If-None-Match (304).
def get_directory_listing(user, directory)
etag = redis.hget "rs:m:#{user}:#{directory}/", "e"
server.headers["Content-Type"] = "application/ld+json"
none_match = (server.env["HTTP_IF_NONE_MATCH"] || "").split(",")
.map(&:strip)
.map { |s| s.gsub(/^"?W\//, "") }
if etag
server.halt 304 if none_match.include? %Q("#{etag}")
items = get_directory_listing_from_redis_via_lua(user, directory)
else
# unknown directory: empty listing with a deterministic synthetic ETag
etag = etag_for(user, directory)
items = {}
server.halt 304 if none_match.include? %Q("#{etag}")
end
server.headers["ETag"] = %Q("#{etag}")
listing = {
"@context" => "http://remotestorage.io/spec/folder-description",
"items" => items
}
listing.to_json
end
# Builds the items hash for a folder listing in a single round trip: the
# Lua script runs inside redis, walks the folder's item set, and returns
# the listing serialized as JSON.
def get_directory_listing_from_redis_via_lua(user, directory)
lua_script = <<-EOF
local user = ARGV[1]
local directory = ARGV[2]
local items = redis.call("smembers", "rs:m:"..user..":"..directory.."/:items")
local listing = {}
for index, name in pairs(items) do
local redis_key = "rs:m:"..user..":"
if directory == "" then
redis_key = redis_key..name
else
redis_key = redis_key..directory.."/"..name
end
local metadata_values = redis.call("hgetall", redis_key)
local metadata = {}
-- redis returns hashes as a single list of alternating keys and values
-- this collates it into a table
for idx = 1, #metadata_values, 2 do
metadata[metadata_values[idx]] = metadata_values[idx + 1]
end
listing[name] = {["ETag"] = metadata["e"]}
if string.sub(name, -1) ~= "/" then
listing[name]["Content-Type"] = metadata["t"]
listing[name]["Content-Length"] = tonumber(metadata["s"])
end
end
return cjson.encode(listing)
EOF
JSON.parse(redis.eval(lua_script, nil, [user, directory]))
end
# Stores a document. Enforces:
#  - no Content-Range header (400)
#  - no name collision with an existing directory/document (409)
#  - If-Match / If-None-Match: * preconditions (412)
# On success writes the object, updates metadata and ancestor directory
# entries in redis, and halts 201 (created) or 200 (updated).
def put_data(user, directory, key, data, content_type)
server.halt 400 if server.env["HTTP_CONTENT_RANGE"]
server.halt 409, "Conflict" if has_name_collision?(user, directory, key)
existing_metadata = redis.hgetall redis_metadata_object_key(user, directory, key)
url = url_for_key(user, directory, key)
if required_match = server.env["HTTP_IF_MATCH"]
# strip the weak-validator prefix before comparing ETags
required_match = required_match.gsub(/^"?W\//, "")
unless required_match == %Q("#{existing_metadata["e"]}")
# get actual metadata and compare in case redis metadata became out of sync
begin
head_res = do_head_request(url)
# The file doesn't exist in Orbit, return 412
rescue RestClient::ResourceNotFound
server.halt 412, "Precondition Failed"
end
if required_match == %Q("#{head_res.headers[:etag]}")
# log previous size difference that was missed earlier because of redis failure
log_size_difference(user, existing_metadata["s"], head_res.headers[:content_length])
else
server.halt 412, "Precondition Failed"
end
end
end
if server.env["HTTP_IF_NONE_MATCH"] == "*"
# "*" means: only create, never overwrite
server.halt 412, "Precondition Failed" unless existing_metadata.empty?
end
res = do_put_request(url, data, content_type)
timestamp = timestamp_for(res.headers[:last_modified])
metadata = {
e: res.headers[:etag],
s: data.size,
t: content_type,
m: timestamp
}
if update_metadata_object(user, directory, key, metadata)
# only touch directory ETags when listing-relevant fields changed
if metadata_changed?(existing_metadata, metadata)
update_dir_objects(user, directory, timestamp, checksum_for(data))
log_size_difference(user, existing_metadata["s"], metadata[:s])
end
server.headers["ETag"] = %Q("#{res.headers[:etag]}")
server.halt existing_metadata.empty? ? 201 : 200
else
server.halt 500
end
end
# Atomically adjusts the user's stored-bytes counter by the size delta.
# to_i maps nil (no previous object) to 0.
def log_size_difference(user, old_size, new_size)
  redis.incrby "rs:s:#{user}", new_size.to_i - old_size.to_i
end
# Hex MD5 of the raw request body; feeds into directory ETag computation.
def checksum_for(data)
  Digest::MD5.new.hexdigest(data)
end
# Deletes a document. Honors If-Match (412 on mismatch). Metadata and
# ancestor directory entries are cleaned up even when the backend object
# was already gone; then halts 404 or 200 accordingly.
def delete_data(user, directory, key)
url = url_for_key(user, directory, key)
not_found = false
existing_metadata = redis.hgetall "rs:m:#{user}:#{directory}/#{key}"
if required_match = server.env["HTTP_IF_MATCH"]
unless required_match.gsub(/^"?W\//, "") == %Q("#{existing_metadata["e"]}")
server.halt 412, "Precondition Failed"
end
end
begin
do_delete_request(url)
rescue RestClient::ResourceNotFound
not_found = true
end
# reclaim the document's size from the user's usage counter
log_size_difference(user, existing_metadata["s"], 0)
delete_metadata_objects(user, directory, key)
delete_dir_objects(user, directory)
if not_found
server.halt 404, "Not Found"
else
server.headers["Etag"] = %Q("#{existing_metadata["e"]}")
server.halt 200
end
end
include RestProvider
private
# Copies the storage backend's response headers onto our HTTP response.
# The ETag is wrapped in double quotes, as the remoteStorage API serves
# quoted ETags.
def set_response_headers(response)
  headers = response.headers
  server.headers["ETag"] = %Q("#{headers[:etag]}")
  server.headers["Content-Type"] = headers[:content_type]
  server.headers["Content-Length"] = headers[:content_length]
  server.headers["Last-Modified"] = headers[:last_modified]
end
# The permission category of a path: "public/<category>/..." keeps its
# "public/" prefix, otherwise the category is the first path segment.
def extract_category(directory)
  segments = directory.split("/")
  if directory.start_with?("public/")
    "public/#{segments[1]}"
  else
    segments.first
  end
end
# Resolves the effective permission ("r" or "rw", nil when none) for a
# directory from the token's authorization entries. Entries have the form
# "category:permission"; a bare entry implies "rw" (":r" splits into the
# "" root category with "r"). The widest matching grant wins, and "rw"
# short-circuits.
def directory_permission(authorizations, directory)
authorizations = authorizations.map do |auth|
auth.index(":") ? auth.split(":") : [auth, "rw"]
end
authorizations = Hash[*authorizations.flatten]
permission = authorizations[""]
authorizations.each do |key, value|
# a category grant also covers its "public/" counterpart
if directory.match(/^(public\/)?#{key}(\/|$)/)
if permission.nil? || permission == "r"
permission = value
end
return permission if permission == "rw"
end
end
permission
end
# Documents and directories may not share a name. Runs as a Lua script so
# all existence checks happen in a single atomic round trip inside redis.
def has_name_collision?(user, directory, key)
lua_script = <<-EOF
local user = ARGV[1]
local directory = ARGV[2]
local key = ARGV[3]
-- build table with parent directories from remaining arguments
local parent_dir_count = #ARGV - 3
local parent_directories = {}
for i = 4, 4 + parent_dir_count do
table.insert(parent_directories, ARGV[i])
end
-- check for existing directory with the same name as the document
local redis_key = "rs:m:"..user..":"
if directory == "" then
redis_key = redis_key..key.."/"
else
redis_key = redis_key..directory.."/"..key.."/"
end
if redis.call("hget", redis_key, "e") then
return true
end
for index, dir in pairs(parent_directories) do
if redis.call("hget", "rs:m:"..user..":"..dir.."/", "e") then
-- the directory already exists, no need to do further checks
return false
else
-- check for existing document with same name as directory
if redis.call("hget", "rs:m:"..user..":"..dir, "e") then
return true
end
end
end
return false
EOF
parent_directories = parent_directories_for(directory)
redis.eval(lua_script, nil, [user, directory, key, *parent_directories])
end
# Compares only the fields surfaced in directory listings (etag, size,
# content type); the timestamp (m) is deliberately ignored because the
# listing doesn't use it.
def metadata_changed?(old_metadata, new_metadata)
  [old_metadata["e"], old_metadata["s"], old_metadata["t"]] !=
    [new_metadata[:e], new_metadata[:s].to_s, new_metadata[:t]]
end
# Parses an HTTP date string into milliseconds since the Unix epoch
# (%Q is the millisecond epoch format directive).
def timestamp_for(date)
  DateTime.parse(date).strftime("%Q").to_i
end
# Ancestor paths of the directory, deepest first, terminated by "" for
# the root, e.g. "a/b/c" => ["a/b/c", "a/b", "a", ""].
def parent_directories_for(directory)
  ancestors = []
  remaining = directory.split("/")
  until remaining.empty?
    ancestors << remaining.join("/")
    remaining.pop
  end
  ancestors << ""
end
# The last path segment of a directory ("a/b" => "b", "a" => "a");
# nil for the root directory ("").
def top_directory(directory)
  return directory.split("/").last if directory.include?("/")
  directory unless directory.empty?
end
# The parent path including trailing slash ("a/b" => "a/"); "/" for a
# top-level directory; nil for the root ("").
def parent_directory_for(directory)
  slash_pos = directory.rindex("/")
  return directory[0..slash_pos] if slash_pos
  "/" unless directory.empty?
end
# Writes the document's metadata hash and registers the document in its
# parent directory's item set.
# NOTE(review): unconditionally returns true (redis failures raise rather
# than return false), so the caller's 500 branch is effectively dead —
# confirm intent.
def update_metadata_object(user, directory, key, metadata)
redis_key = redis_metadata_object_key(user, directory, key)
redis.hmset(redis_key, *metadata)
redis.sadd "rs:m:#{user}:#{directory}/:items", key
true
end
# Refreshes the ETag and timestamp of every ancestor directory of a
# changed document, and links each directory into its parent's item set.
def update_dir_objects(user, directory, timestamp, checksum)
parent_directories_for(directory).each do |dir|
etag = etag_for(dir, timestamp, checksum)
key = "rs:m:#{user}:#{dir}/"
metadata = {e: etag, m: timestamp}
redis.hmset(key, *metadata)
redis.sadd "rs:m:#{user}:#{parent_directory_for(dir)}:items", "#{top_directory(dir)}/"
end
end
# Drops the document's metadata hash and removes it from its parent
# directory's item set.
def delete_metadata_objects(user, directory, key)
  redis.del(redis_metadata_object_key(user, directory, key))
  redis.srem("rs:m:#{user}:#{directory}/:items", key)
end
# After a document delete: removes ancestor directories that became
# empty, and refreshes the ETag/timestamp of the ones that did not.
def delete_dir_objects(user, directory)
timestamp = (Time.now.to_f * 1000).to_i
parent_directories_for(directory).each do |dir|
if dir_empty?(user, dir)
redis.del "rs:m:#{user}:#{dir}/"
redis.srem "rs:m:#{user}:#{parent_directory_for(dir)}:items", "#{top_directory(dir)}/"
else
etag = etag_for(dir, timestamp)
metadata = {e: etag, m: timestamp}
redis.hmset("rs:m:#{user}:#{dir}/", *metadata)
end
end
end
# True when the directory's item set has no members.
def dir_empty?(user, dir)
  redis.smembers("rs:m:#{user}:#{dir}/:items").none?
end
# Redis key of a document's metadata hash; an empty directory (root)
# collapses to "rs:m:<user>:<key>" with no leading slash.
def redis_metadata_object_key(user, directory, key)
  path = [directory, key].reject(&:empty?).join("/")
  "rs:m:#{user}:#{path}"
end
def container_url_for(user)
"#{base_url}/rs:documents:#{settings.environment.to_s}/#{user}"
end
def url_for_key(user, directory, key)
File.join [container_url_for(user), escape(directory), escape(key)].compact
# Add quotes around the ETag
def format_etag(etag)
%Q("#{etag}")
end
# Memoized Swift host from the config.
def base_url
@base_url ||= settings.swift["host"]
end
# Swift container URL for a user: <host>/rs:documents:<environment>/<user>
def container_url_for(user)
"#{base_url}/rs:documents:#{settings.environment.to_s}/#{user}"
end
# Every Swift request authenticates via the cached auth token header.
def default_headers
{"x-auth-token" => swift_token}
end
# PUT the object body to Swift; returns the RestClient response.
def do_put_request(url, data, content_type)
deal_with_unauthorized_requests do
RestClient.put(url, data, default_headers.merge({content_type: content_type}))
end
end
# GET from Swift; an optional block can stream the response.
def do_get_request(url, &block)
deal_with_unauthorized_requests do
RestClient.get(url, default_headers, &block)
end
end
# HEAD against Swift; used for existence checks and metadata.
def do_head_request(url, &block)
deal_with_unauthorized_requests do
RestClient.head(url, default_headers, &block)
end
end
# DELETE against Swift.
def do_delete_request(url)
deal_with_unauthorized_requests do
RestClient.delete(url, default_headers)
end
end
# URL-escapes a path component so that spaces become %20 (not "+") and
# slashes stay literal slashes.
def escape(url)
  CGI.escape(url).gsub(/\+|%2F/) { |match| match == "+" ? "%20" : "/" }
end
# Memoized redis connection built from the config's redis section.
def redis
@redis ||= Redis.new(settings.redis.symbolize_keys)
end
# Deterministic ETag: hex MD5 over the colon-joined argument list.
def etag_for(*parts)
  Digest::MD5.hexdigest(parts.join(":"))
end
def reload_swift_token
server.logger.debug "Reloading swift token. Old token: #{settings.swift_token}"
# Remove the line break from the token file. The line break that the

View File

@ -4,8 +4,8 @@ require "json"
require "sinatra/base"
require 'sinatra/config_file'
require "sinatra/reloader"
require "remote_storage/riak"
require "remote_storage/swift"
require "remote_storage/s3"
class LiquorCabinet < Sinatra::Base
@ -129,10 +129,10 @@ class LiquorCabinet < Sinatra::Base
def storage
@storage ||= begin
if settings.respond_to? :riak
RemoteStorage::Riak.new(settings, self)
elsif settings.respond_to? :swift
if settings.respond_to? :swift
RemoteStorage::Swift.new(settings, self)
elsif settings.respond_to? :s3
RemoteStorage::S3.new(settings, self)
else
puts <<-EOF
You need to set one storage backend in your config.yml file.

View File

@ -1,15 +0,0 @@
require_relative "../spec_helper"
describe "App" do
include Rack::Test::Methods
def app
LiquorCabinet
end
it "returns 404 on non-existing routes" do
get "/virginmargarita"
last_response.status.must_equal 404
end
end

View File

@ -1,640 +0,0 @@
require_relative "../spec_helper"
describe "Directories" do
include Rack::Test::Methods
before do
purge_all_buckets
auth = auth_bucket.new("jimmy:123")
auth.data = [":r", "documents:r", "tasks:rw"]
auth.store
header "Authorization", "Bearer 123"
end
describe "HEAD listing" do
before do
put "/jimmy/tasks/foo", "do the laundry"
put "/jimmy/tasks/http%3A%2F%2F5apps.com", "prettify design"
head "/jimmy/tasks/"
end
it "has an empty body" do
last_response.status.must_equal 200
last_response.body.must_equal ""
end
it "has an ETag header set" do
last_response.status.must_equal 200
last_response.headers["ETag"].wont_be_nil
# check that ETag stays the same
etag = last_response.headers["ETag"]
get "/jimmy/tasks/"
last_response.headers["ETag"].must_equal etag
end
it "has CORS headers set" do
last_response.status.must_equal 200
last_response.headers["Access-Control-Allow-Origin"].must_equal "*"
last_response.headers["Access-Control-Allow-Methods"].must_equal "GET, PUT, DELETE"
last_response.headers["Access-Control-Allow-Headers"].must_equal "Authorization, Content-Type, Origin, If-Match, If-None-Match"
last_response.headers["Access-Control-Expose-Headers"].must_equal "ETag, Content-Length"
end
context "for an empty or absent directory" do
it "responds with 404" do
head "/jimmy/documents/"
last_response.status.must_equal 404
end
end
end
describe "GET listing" do
before do
put "/jimmy/tasks/foo", "do the laundry"
put "/jimmy/tasks/http%3A%2F%2F5apps.com", "prettify design"
put "/jimmy/tasks/%3A/foo%3Abar%40foo.org", "hello world"
end
it "lists the objects with version, length and content-type" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.content_type.must_equal "application/json"
foo = data_bucket.get("jimmy:tasks:foo")
content = JSON.parse(last_response.body)
content["items"]["http://5apps.com"].wont_be_nil
content["items"][":/"].wont_be_nil
content["items"]["foo"].wont_be_nil
content["items"]["foo"]["ETag"].must_equal foo.etag.gsub(/"/, "")
content["items"]["foo"]["Content-Type"].must_equal "text/plain"
content["items"]["foo"]["Content-Length"].must_equal 14
end
it "has an ETag header set" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.headers["ETag"].wont_be_nil
# check that ETag stays the same
etag = last_response.headers["ETag"]
get "/jimmy/tasks/"
last_response.headers["ETag"].must_equal etag
end
it "has CORS headers set" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.headers["Access-Control-Allow-Origin"].must_equal "*"
last_response.headers["Access-Control-Allow-Methods"].must_equal "GET, PUT, DELETE"
last_response.headers["Access-Control-Allow-Headers"].must_equal "Authorization, Content-Type, Origin, If-Match, If-None-Match"
last_response.headers["Access-Control-Expose-Headers"].must_equal "ETag, Content-Length"
end
it "has caching headers set" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.headers["Expires"].must_equal "0"
end
it "doesn't choke on colons in the directory name" do
get "/jimmy/tasks/%3A/"
last_response.status.must_equal 200
last_response.content_type.must_equal "application/json"
content = JSON.parse(last_response.body)
content["items"]["foo:bar@foo.org"].wont_be_nil
end
context "when If-None-Match header is set" do
before do
get "/jimmy/tasks/"
@etag = last_response.headers["ETag"]
end
it "responds with 'not modified' when it matches the current ETag" do
header "If-None-Match", @etag
get "/jimmy/tasks/"
last_response.status.must_equal 304
last_response.body.must_be_empty
last_response.headers["ETag"].must_equal @etag
end
it "responds normally when it does not match the current ETag" do
header "If-None-Match", "FOO"
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.body.wont_be_empty
end
end
describe "when If-None-Match header is set with multiple revisions" do
before do
get "/jimmy/tasks/"
@etag = last_response.headers["ETag"]
end
it "responds with 'not modified' when it contains the current ETag" do
header "If-None-Match", "DEADBEEF,#{@etag} ,F00BA4"
get "/jimmy/tasks/"
last_response.status.must_equal 304
last_response.body.must_be_empty
last_response.headers["ETag"].must_equal @etag
end
it "responds normally when it does not contain the current ETag" do
header "If-None-Match", "FOO,BAR"
get "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.body.wont_be_empty
end
end
context "with sub-directories" do
before do
get "/jimmy/tasks/"
@old_etag = last_response.headers["ETag"]
put "/jimmy/tasks/home/laundry", "do the laundry"
end
it "lists the containing objects as well as the direct sub-directories" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
home = directory_bucket.get("jimmy:tasks/home")
content = JSON.parse(last_response.body)
content["items"]["foo"].wont_be_nil
content["items"]["http://5apps.com"].wont_be_nil
content["items"]["home/"].wont_be_nil
content["items"]["home/"]["ETag"].must_equal home.etag.gsub(/"/, "")
end
it "updates the ETag of the parent directory" do
get "/jimmy/tasks/"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal @old_etag
end
context "for a different user" do
before do
auth = auth_bucket.new("alice:321")
auth.data = [":r", "documents:r", "tasks:rw"]
auth.store
header "Authorization", "Bearer 321"
put "/alice/tasks/homework", "write an essay"
end
it "does not list the directories of jimmy" do
get "/alice/tasks/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["/"].must_be_nil
content["items"]["tasks/"].must_be_nil
content["items"]["home/"].must_be_nil
content["items"]["homework"].wont_be_nil
end
end
context "sub-directories without objects" do
it "lists the direct sub-directories" do
put "/jimmy/tasks/private/projects/world-domination/start", "write a manifesto"
get "/jimmy/tasks/private/"
last_response.status.must_equal 200
projects = directory_bucket.get("jimmy:tasks/private/projects")
content = JSON.parse(last_response.body)
content["items"]["projects/"]["ETag"].must_equal projects.etag.gsub(/"/, "")
end
it "updates the timestamps of the existing directory objects" do
directory = directory_bucket.new("jimmy:tasks")
directory.content_type = "text/plain"
directory.data = (2.seconds.ago.to_f * 1000).to_i
directory.store
put "/jimmy/tasks/private/projects/world-domination/start", "write a manifesto"
object = data_bucket.get("jimmy:tasks/private/projects/world-domination:start")
directory = directory_bucket.get("jimmy:tasks")
directory.data.to_i.must_equal object.meta['timestamp'][0].to_i
end
end
context "with binary data" do
context "charset given in content-type header" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/tasks/jaypeg.jpg", @image
end
it "lists the binary files" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
jaypeg = data_bucket.get("jimmy:tasks:jaypeg.jpg")
content = JSON.parse(last_response.body)
content["items"]["jaypeg.jpg"]["ETag"].must_equal jaypeg.etag.gsub(/"/, "")
content["items"]["jaypeg.jpg"]["Content-Type"].must_equal "image/jpeg"
content["items"]["jaypeg.jpg"]["Content-Length"].must_equal 16044
end
end
context "no charset in content-type header" do
before do
header "Content-Type", "image/jpeg"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/tasks/jaypeg.jpg", @image
end
it "lists the binary files" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
jaypeg = data_bucket.get("jimmy:tasks:jaypeg.jpg")
content = JSON.parse(last_response.body)
content["items"]["jaypeg.jpg"]["ETag"].must_equal jaypeg.etag.gsub(/"/, "")
content["items"]["jaypeg.jpg"]["Content-Type"].must_equal "image/jpeg"
content["items"]["jaypeg.jpg"]["Content-Length"].must_equal 16044
end
end
end
end
context "for a sub-directory" do
before do
put "/jimmy/tasks/home/laundry", "do the laundry"
end
it "lists the objects with timestamp" do
get "/jimmy/tasks/home/"
last_response.status.must_equal 200
laundry = data_bucket.get("jimmy:tasks/home:laundry")
content = JSON.parse(last_response.body)
content["items"]["laundry"]["ETag"].must_equal laundry.etag.gsub(/"/, "")
end
end
context "for an empty or absent directory" do
it "returns an empty listing" do
get "/jimmy/documents/notfound/"
last_response.status.must_equal 404
end
end
context "special characters in directory name" do
before do
put "/jimmy/tasks/foo~bar/task1", "some task"
end
it "lists the directory in the parent directory" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["foo~bar/"].wont_be_nil
end
it "lists the containing objects" do
get "/jimmy/tasks/foo~bar/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["task1"].wont_be_nil
end
it "returns the requested object" do
get "/jimmy/tasks/foo~bar/task1"
last_response.status.must_equal 200
last_response.body.must_equal "some task"
end
end
context "special characters in object name" do
before do
put "/jimmy/tasks/bla~blub", "some task"
end
it "lists the containing object" do
get "/jimmy/tasks/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["bla~blub"].wont_be_nil
end
end
context "for the root directory" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = [":rw"]
auth.store
put "/jimmy/root-1", "Put my root down"
put "/jimmy/root-2", "Back to the roots"
end
it "lists the containing objects and direct sub-directories" do
get "/jimmy/"
last_response.status.must_equal 200
tasks = directory_bucket.get("jimmy:tasks")
content = JSON.parse(last_response.body)
content["items"]["root-1"].wont_be_nil
content["items"]["root-2"].wont_be_nil
content["items"]["tasks/"].wont_be_nil
content["items"]["tasks/"]["ETag"].must_equal tasks.etag.gsub(/"/, "")
end
it "has an ETag header set" do
get "/jimmy/"
last_response.status.must_equal 200
last_response.headers["ETag"].wont_be_nil
end
end
context "for the public directory" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents:r", "bookmarks:rw"]
auth.store
put "/jimmy/public/bookmarks/5apps", "http://5apps.com"
end
context "when authorized for the category" do
it "lists the files" do
get "/jimmy/public/bookmarks/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["5apps"].wont_be_nil
end
it "has an ETag header set" do
get "/jimmy/public/bookmarks/"
last_response.status.must_equal 200
last_response.headers["ETag"].wont_be_nil
end
end
context "when directly authorized for the public directory" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents:r", "public/bookmarks:rw"]
auth.store
end
it "lists the files" do
get "/jimmy/public/bookmarks/"
last_response.status.must_equal 200
content = JSON.parse(last_response.body)
content["items"]["5apps"].wont_be_nil
end
end
context "when not authorized" do
before do
auth_bucket.delete("jimmy:123")
end
it "does not allow a directory listing of the public root" do
get "/jimmy/public/"
last_response.status.must_equal 401
end
it "does not allow a directory listing of a sub-directory" do
get "/jimmy/public/bookmarks/"
last_response.status.must_equal 401
end
end
end
end
describe "directory object" do
describe "PUT file" do
context "no existing directory object" do
before do
put "/jimmy/tasks/home/trash", "take out the trash"
end
it "creates a new directory object" do
object = data_bucket.get("jimmy:tasks/home:trash")
directory = directory_bucket.get("jimmy:tasks/home")
directory.data.wont_be_nil
directory.data.to_i.must_equal object.meta['timestamp'][0].to_i
end
it "sets the correct index for the directory object" do
object = directory_bucket.get("jimmy:tasks/home")
object.indexes["directory_bin"].must_include "tasks"
end
it "creates directory objects for the parent directories" do
object = directory_bucket.get("jimmy:tasks")
object.indexes["directory_bin"].must_include "/"
object.data.wont_be_nil
object = directory_bucket.get("jimmy:")
object.indexes["directory_bin"].must_be_empty
object.data.wont_be_nil
end
end
context "existing directory object" do
before do
put "/jimmy/tasks/home/trash", "collect some trash"
end
it "updates the timestamp of the directory" do
put "/jimmy/tasks/home/trash", "take out the trash"
last_response.status.must_equal 200
object = data_bucket.get("jimmy:tasks/home:trash")
directory = directory_bucket.get("jimmy:tasks/home")
directory.data.to_i.must_equal object.meta['timestamp'][0].to_i
end
end
end
end
describe "OPTIONS listing" do
it "has CORS headers set" do
options "/jimmy/tasks/"
last_response.status.must_equal 200
last_response.headers["Access-Control-Allow-Origin"].must_equal "*"
last_response.headers["Access-Control-Allow-Methods"].must_equal "GET, PUT, DELETE"
last_response.headers["Access-Control-Allow-Headers"].must_equal "Authorization, Content-Type, Origin, If-Match, If-None-Match"
last_response.headers["Access-Control-Expose-Headers"].must_equal "ETag, Content-Length"
end
context "sub-directories" do
it "has CORS headers set" do
options "/jimmy/tasks/foo/bar/"
last_response.status.must_equal 200
last_response.headers["Access-Control-Allow-Origin"].must_equal "*"
last_response.headers["Access-Control-Allow-Methods"].must_equal "GET, PUT, DELETE"
last_response.headers["Access-Control-Allow-Headers"].must_equal "Authorization, Content-Type, Origin, If-Match, If-None-Match"
last_response.headers["Access-Control-Expose-Headers"].must_equal "ETag, Content-Length"
end
end
context "root directory" do
it "has CORS headers set" do
options "/jimmy/"
last_response.status.must_equal 200
last_response.headers["Access-Control-Allow-Origin"].must_equal "*"
last_response.headers["Access-Control-Allow-Methods"].must_equal "GET, PUT, DELETE"
last_response.headers["Access-Control-Allow-Headers"].must_equal "Authorization, Content-Type, Origin, If-Match, If-None-Match"
last_response.headers["Access-Control-Expose-Headers"].must_equal "ETag, Content-Length"
end
end
end
# DELETE semantics for the Riak backend: removing the last document of a
# directory must also clean up the now-empty parent directory objects,
# while directories that still contain documents must only get their
# ETag / timestamp refreshed.
describe "DELETE file" do
context "last file in directory" do
before do
put "/jimmy/tasks/home/trash", "take out the trash"
end
it "deletes the directory objects for all empty parent directories" do
delete "/jimmy/tasks/home/trash"
last_response.status.must_equal 200
# Fetching a deleted/missing object is expected to raise
# Riak::HTTPFailedRequest; "jimmy:" is the user's root directory object.
lambda {
directory_bucket.get("jimmy:tasks/home")
}.must_raise Riak::HTTPFailedRequest
lambda {
directory_bucket.get("jimmy:tasks")
}.must_raise Riak::HTTPFailedRequest
lambda {
directory_bucket.get("jimmy:")
}.must_raise Riak::HTTPFailedRequest
end
end
context "with additional files in directory" do
before do
put "/jimmy/tasks/home/trash", "take out the trash"
put "/jimmy/tasks/home/laundry/washing", "wash the clothes"
end
it "does not delete the directory objects for the parent directories" do
delete "/jimmy/tasks/home/trash"
directory_bucket.get("jimmy:tasks/home").wont_be_nil
directory_bucket.get("jimmy:tasks").wont_be_nil
directory_bucket.get("jimmy:").wont_be_nil
end
it "updates the ETag headers of all parent directories" do
# Capture the ETags of every parent listing before the delete...
get "/jimmy/tasks/home/"
home_etag = last_response.headers["ETag"]
get "/jimmy/tasks/"
tasks_etag = last_response.headers["ETag"]
get "/jimmy/"
root_etag = last_response.headers["ETag"]
delete "/jimmy/tasks/home/trash"
# ...then verify each one changed (but is still present) afterwards.
get "/jimmy/tasks/home/"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal home_etag
get "/jimmy/tasks/"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal tasks_etag
get "/jimmy/"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal root_etag
end
describe "timestamps" do
before do
# Back-date every parent directory object to 2 seconds ago
# (milliseconds since epoch), so any refresh is strictly greater.
@old_timestamp = (2.seconds.ago.to_f * 1000).to_i
["tasks/home", "tasks", ""].each do |dir|
directory = directory_bucket.get("jimmy:#{dir}")
directory.data = @old_timestamp.to_s
directory.store
end
end
it "updates the timestamp for the parent directories" do
delete "/jimmy/tasks/home/trash"
directory_bucket.get("jimmy:tasks/home").data.to_i.must_be :>, @old_timestamp
directory_bucket.get("jimmy:tasks").data.to_i.must_be :>, @old_timestamp
directory_bucket.get("jimmy:").data.to_i.must_be :>, @old_timestamp
end
end
end
end
end

View File

@ -1,424 +0,0 @@
require_relative "../spec_helper"
# Authorization-scope specs for the Riak backend. Tokens live in the auth
# bucket under "<user>:<token>"; their data is a list of scopes such as
# "documents:r" / "tasks:rw". Riak data keys follow the pattern
# "<user>:<directory>:<document>" (e.g. "jimmy:public/documents:foo"
# maps to the path /jimmy/public/documents/foo).
describe "Permissions" do
include Rack::Test::Methods
before do
# Start every example from a clean backend (helper from spec_helper).
purge_all_buckets
end
describe "GET" do
context "public data" do
before do
object = data_bucket.new("jimmy:public:foo")
object.content_type = "text/plain"
object.data = "some text data"
object.store
object = data_bucket.new("jimmy:public/documents:foo")
object.content_type = "text/plain"
object.data = "some text data"
object.store
end
# Public documents require no Authorization header at all.
it "returns the value on all get requests" do
get "/jimmy/public/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some text data"
end
it "returns the value from a sub-directory" do
get "/jimmy/public/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some text data"
end
end
context "private data" do
before do
object = data_bucket.new("jimmy:documents:foo")
object.content_type = "text/plain"
object.data = "some private, authorized text data"
object.store
object = data_bucket.new("jimmy:documents/very/interesting:text")
object.content_type = "text/plain"
object.data = "some very interesting writing"
object.store
object = data_bucket.new("jimmy:confidential:bar")
object.content_type = "text/plain"
object.data = "some private, non-authorized text data"
object.store
# Token "123" may read "documents" and read/write "tasks", but has
# no scope at all for "confidential".
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents:r", "tasks:rw"]
auth.store
header "Authorization", "Bearer 123"
end
context "when authorized" do
it "returns the value for a key in a top-level directory" do
get "/jimmy/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some private, authorized text data"
end
it "returns the value for a key in a sub-directory" do
get "/jimmy/documents/very/interesting/text"
last_response.status.must_equal 200
last_response.body.must_equal "some very interesting writing"
end
end
context "when not authorized" do
it "returns a 401 for a key in a top-level directory" do
get "/jimmy/confidential/bar"
last_response.status.must_equal 401
end
end
end
end
describe "PUT" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents:r", "contacts:rw", "tasks:r", "tasks/home:rw"]
auth.store
header "Authorization", "Bearer 123"
end
context "to a top-level directory" do
it "saves the value when there are write permissions" do
put "/jimmy/contacts/1", "John Doe"
last_response.status.must_equal 201
data_bucket.get("jimmy:contacts:1").data.must_equal "John Doe"
end
it "returns a 401 when there are read permissions only" do
put "/jimmy/documents/foo", "some text"
last_response.status.must_equal 401
end
end
context "to a sub-directory" do
# "tasks" itself is read-only, but "tasks/home" is explicitly :rw.
it "saves the value when there are direct write permissions" do
put "/jimmy/tasks/home/1", "take out the trash"
last_response.status.must_equal 201
data_bucket.get("jimmy:tasks/home:1").data.must_equal "take out the trash"
end
# "contacts:rw" implies write access to any directory below it.
it "saves the value when there are write permissions for a parent directory" do
put "/jimmy/contacts/family/1", "Bobby Brother"
last_response.status.must_equal 201
data_bucket.get("jimmy:contacts/family:1").data.must_equal "Bobby Brother"
end
it "returns a 401 when there are read permissions only" do
put "/jimmy/documents/business/1", "some text"
last_response.status.must_equal 401
end
end
context "to the public directory" do
# Writing under /public/<category>/ reuses the scope of <category>.
context "when authorized for the corresponding category" do
it "saves the value" do
put "/jimmy/public/contacts/foo", "Foo Bar"
last_response.status.must_equal 201
data_bucket.get("jimmy:public/contacts:foo").data.must_equal "Foo Bar"
end
it "saves the value to a sub-directory" do
put "/jimmy/public/contacts/family/foo", "Foo Bar"
last_response.status.must_equal 201
data_bucket.get("jimmy:public/contacts/family:foo").data.must_equal "Foo Bar"
end
end
context "when not authorized for the corresponding category" do
it "returns a 401" do
put "/jimmy/public/documents/foo", "Foo Bar"
last_response.status.must_equal 401
end
end
end
end
describe "DELETE" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents:r", "tasks:rw"]
auth.store
header "Authorization", "Bearer 123"
end
context "when authorized" do
before do
object = data_bucket.new("jimmy:tasks:1")
object.content_type = "text/plain"
object.data = "do the laundry"
object.store
object = data_bucket.new("jimmy:tasks/home:1")
object.content_type = "text/plain"
object.data = "take out the trash"
object.store
end
it "removes the key from a top-level directory" do
delete "/jimmy/tasks/1"
last_response.status.must_equal 200
# A fetch of the deleted object is expected to raise.
lambda {
data_bucket.get("jimmy:tasks:1")
}.must_raise Riak::HTTPFailedRequest
end
# Deleting a document nested one level deeper ("tasks/home"). The original
# description was a copy-paste of the previous example ("top-level
# directory"), which made the two examples indistinguishable in test
# output; this one exercises the sub-directory path.
it "removes the key from a sub-directory" do
delete "/jimmy/tasks/home/1"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:tasks/home:1")
}.must_raise Riak::HTTPFailedRequest
end
# DELETE from a public directory: the "tasks:rw" scope also covers
# /public/tasks/.
context "public directory" do
before do
object = data_bucket.new("jimmy:public/tasks:open")
object.content_type = "text/plain"
object.data = "hello world"
object.store
end
it "removes the key" do
delete "/jimmy/public/tasks/open"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:public/tasks:open")
}.must_raise Riak::HTTPFailedRequest
end
end
end
context "when not authorized" do
before do
object = data_bucket.new("jimmy:documents:private")
object.content_type = "text/plain"
object.data = "some private, authorized text data"
object.store
object = data_bucket.new("jimmy:documents/business:foo")
object.content_type = "text/plain"
object.data = "some private, authorized text data"
object.store
end
# "documents" is read-only for this token, so deletes are rejected.
it "returns a 401 for a key in a top-level directory" do
delete "/jimmy/documents/private"
last_response.status.must_equal 401
end
it "returns a 401 for a key in a sub-directory" do
delete "/jimmy/documents/business/foo"
last_response.status.must_equal 401
end
context "public directory" do
before do
object = data_bucket.new("jimmy:public/documents:foo")
object.content_type = "text/plain"
object.data = "some private, authorized text data"
object.store
end
it "returns a 401" do
delete "/jimmy/public/documents/foo"
last_response.status.must_equal 401
end
end
end
end
# Global scopes: ":rw" / ":r" apply to every category, including the
# root and public directories.
describe "global permissions" do
before do
object = data_bucket.new("jimmy:documents/very/interesting:text")
object.content_type = "text/plain"
object.data = "some very interesting writing"
object.store
end
context "write all" do
before do
# The more specific, narrower "documents:r" must not override the
# global ":rw".
auth = auth_bucket.new("jimmy:123")
auth.data = [":rw", "documents:r"]
auth.store
header "Authorization", "Bearer 123"
end
it "allows GET requests" do
get "/jimmy/documents/very/interesting/text"
last_response.status.must_equal 200
last_response.body.must_equal "some very interesting writing"
end
it "allows PUT requests" do
put "/jimmy/contacts/1", "John Doe"
last_response.status.must_equal 201
data_bucket.get("jimmy:contacts:1").data.must_equal "John Doe"
end
it "allows DELETE requests" do
delete "/jimmy/documents/very/interesting/text"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:documents/very/interesting:text")
}.must_raise Riak::HTTPFailedRequest
end
# Documents directly below /jimmy/ (key pattern "jimmy::<name>").
context "root directory" do
before do
object = data_bucket.new("jimmy::root")
object.content_type = "text/plain"
object.data = "Back to the roots"
object.store
end
it "allows GET requests" do
get "/jimmy/root"
last_response.status.must_equal 200
last_response.body.must_equal "Back to the roots"
end
it "allows PUT requests" do
put "/jimmy/1", "Gonna kick it root down"
last_response.status.must_equal 201
data_bucket.get("jimmy::1").data.must_equal "Gonna kick it root down"
end
it "allows DELETE requests" do
delete "/jimmy/root"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy::root")
}.must_raise Riak::HTTPFailedRequest
end
end
context "public directory" do
before do
object = data_bucket.new("jimmy:public/tasks:hello")
object.content_type = "text/plain"
object.data = "Hello World"
object.store
end
it "allows GET requests" do
get "/jimmy/public/tasks/"
# NOTE(review): asserts 404, not 200 — "allows" here apparently means
# "not rejected with 401"; listing a public folder presumably isn't
# supported. Confirm against the app's directory-listing behavior.
last_response.status.must_equal 404
end
it "allows PUT requests" do
put "/jimmy/public/1", "Hello World"
last_response.status.must_equal 201
data_bucket.get("jimmy:public:1").data.must_equal "Hello World"
end
it "allows DELETE requests" do
delete "/jimmy/public/tasks/hello"
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:public/tasks:hello")
}.must_raise Riak::HTTPFailedRequest
end
end
end
context "read all" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = [":r", "contacts:rw"]
auth.store
header "Authorization", "Bearer 123"
end
it "allows GET requests" do
get "/jimmy/documents/very/interesting/text"
last_response.status.must_equal 200
last_response.body.must_equal "some very interesting writing"
end
it "disallows PUT requests" do
put "/jimmy/documents/foo", "some text"
last_response.status.must_equal 401
end
it "disallows DELETE requests" do
delete "/jimmy/documents/very/interesting/text"
last_response.status.must_equal 401
end
context "public directory" do
before do
object = data_bucket.new("jimmy:public/tasks:hello")
object.content_type = "text/plain"
object.data = "Hello World"
object.store
end
it "allows GET requests" do
get "/jimmy/public/tasks/"
# NOTE(review): see the matching 404 assertion under "write all".
last_response.status.must_equal 404
end
it "disallows PUT requests" do
put "/jimmy/public/tasks/foo", "some text"
last_response.status.must_equal 401
end
it "disallows DELETE requests" do
delete "/jimmy/public/tasks/hello"
last_response.status.must_equal 401
end
end
end
end
end

View File

@ -1,775 +0,0 @@
require_relative "../spec_helper"
# End-to-end specs for the Riak-backed remoteStorage app: HEAD/GET/PUT for
# public and private documents, content-type handling, conditional
# requests (If-Match / If-None-Match), Riak secondary indexes and the
# operations log (opslog bucket). Opslog "count" semantics, as asserted
# throughout: +1 = create, 0 = overwrite ("size" is the byte delta),
# -1 = delete.
describe "App with Riak backend" do
include Rack::Test::Methods
before do
purge_all_buckets
end
describe "HEAD public data" do
before do
object = data_bucket.new("jimmy:public:foo")
object.content_type = "text/plain"
object.data = "some text data"
object.store
head "/jimmy/public/foo"
end
it "works" do
last_response.status.must_equal 200
# HEAD carries no body but keeps the entity headers of the GET.
last_response.body.must_equal ""
last_response.headers["ETag"].wont_be_nil
last_response.headers["Content-Length"].must_equal "14"
end
end
describe "GET public data" do
describe "file with content" do
before do
object = data_bucket.new("jimmy:public:foo")
object.content_type = "text/plain"
object.data = "some text data"
object.store
get "/jimmy/public/foo"
end
it "works" do
last_response.status.must_equal 200
last_response.body.must_equal "some text data"
last_response.headers["ETag"].wont_be_nil
last_response.headers["Content-Length"].must_equal "14"
last_response.headers["Expires"].must_equal "0"
end
describe "empty file" do
before do
object = data_bucket.new("jimmy:public:empty")
object.content_type = "text/plain"
object.data = ""
object.store
get "/jimmy/public/empty"
end
it "returns an empty body" do
last_response.status.must_equal 200
# Rack::MockRequest turns the body into a string. We can't use
# `last_response.body` to check for nil, because:
# >> [nil].join
# => ""
last_response.body.must_equal ''
last_response.headers["Content-Length"].must_equal '0'
end
end
end
describe "GET data with custom content type" do
before do
object = data_bucket.new("jimmy:public:magic")
object.content_type = "text/magic"
# raw_data bypasses Riak's serializers (no registered one for text/magic).
object.raw_data = "some text data"
object.store
end
it "returns the value with the correct content type" do
get "/jimmy/public/magic"
last_response.status.must_equal 200
last_response.content_type.must_equal "text/magic"
last_response.body.must_equal "some text data"
end
end
describe "private data" do
before do
object = data_bucket.new("jimmy:documents:foo")
object.content_type = "text/plain"
object.data = "some private text data"
object.store
# Remember the stored object's ETag for the conditional-request specs.
@etag = object.etag
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents", "public"]
auth.store
end
describe "HEAD" do
before do
header "Authorization", "Bearer 123"
head "/jimmy/documents/foo"
end
it "works" do
last_response.status.must_equal 200
last_response.body.must_equal ""
last_response.headers["ETag"].wont_be_nil
last_response.headers["Content-Length"].must_equal "22"
end
end
describe "HEAD nonexisting key" do
it "returns a 404" do
header "Authorization", "Bearer 123"
head "/jimmy/documents/somestupidkey"
last_response.status.must_equal 404
end
end
describe "GET" do
before do
header "Authorization", "Bearer 123"
end
it "returns the value" do
get "/jimmy/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some private text data"
end
describe "when If-None-Match header is set" do
it "responds with 'not modified' when it matches the current ETag" do
header "If-None-Match", @etag
get "/jimmy/documents/foo"
last_response.status.must_equal 304
last_response.body.must_be_empty
last_response.headers["ETag"].must_equal @etag
end
it "responds normally when it does not match the current ETag" do
header "If-None-Match", "FOO"
get "/jimmy/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some private text data"
end
end
# If-None-Match may carry a comma-separated list of revisions.
describe "when If-None-Match header is set with multiple revisions" do
it "responds with 'not modified' when it contains the current ETag" do
header "If-None-Match", "DEADBEEF,#{@etag},F00BA4"
get "/jimmy/documents/foo"
last_response.status.must_equal 304
last_response.body.must_be_empty
last_response.headers["ETag"].must_equal @etag
end
it "responds normally when it does not contain the current ETag" do
header "If-None-Match", "FOO,BAR"
get "/jimmy/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some private text data"
end
end
end
describe "GET nonexisting key" do
it "returns a 404" do
header "Authorization", "Bearer 123"
get "/jimmy/documents/somestupidkey"
last_response.status.must_equal 404
end
end
describe "PUT" do
before do
header "Authorization", "Bearer 123"
end
describe "with implicit content type" do
before do
put "/jimmy/documents/bar", "another text"
end
it "saves the value" do
last_response.status.must_equal 201
last_response.body.must_equal ""
data_bucket.get("jimmy:documents:bar").data.must_equal "another text"
end
it "stores the data as plain text with utf-8 encoding" do
data_bucket.get("jimmy:documents:bar").content_type.must_equal "text/plain; charset=utf-8"
end
it "sets the ETag header" do
last_response.headers["ETag"].wont_be_nil
end
# Riak secondary indexes used for per-user and per-directory queries.
it "indexes the data set" do
indexes = data_bucket.get("jimmy:documents:bar").indexes
indexes["user_id_bin"].must_be_kind_of Set
indexes["user_id_bin"].must_include "jimmy"
indexes["directory_bin"].must_include "documents"
end
it "logs the operation" do
objects = []
# Keys may vanish between listing and fetch, hence the rescue nil.
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
log_entry = objects.select{|o| o.data["count"] == 1}.first
log_entry.data["size"].must_equal 12
log_entry.data["category"].must_equal "documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
end
describe "with explicit content type" do
before do
header "Content-Type", "application/json"
put "/jimmy/documents/jason", '{"foo": "bar", "unhosted": 1}'
end
it "saves the value (as JSON)" do
last_response.status.must_equal 201
data_bucket.get("jimmy:documents:jason").data.must_be_kind_of Hash
data_bucket.get("jimmy:documents:jason").data.must_equal({"foo" => "bar", "unhosted" => 1})
end
it "uses the requested content type" do
data_bucket.get("jimmy:documents:jason").content_type.must_equal "application/json"
end
it "delivers the data correctly" do
header "Authorization", "Bearer 123"
get "/jimmy/documents/jason"
# Whitespace is not preserved: the JSON is re-serialized on delivery.
last_response.body.must_equal '{"foo":"bar","unhosted":1}'
last_response.content_type.must_equal "application/json"
end
end
describe "with arbitrary content type" do
before do
header "Content-Type", "text/magic"
put "/jimmy/documents/magic", "pure magic"
end
it "saves the value" do
last_response.status.must_equal 201
data_bucket.get("jimmy:documents:magic").raw_data.must_equal "pure magic"
end
it "uses the requested content type" do
data_bucket.get("jimmy:documents:magic").content_type.must_equal "text/magic"
end
it "delivers the data correctly" do
header "Authorization", "Bearer 123"
get "/jimmy/documents/magic"
last_response.body.must_equal "pure magic"
last_response.content_type.must_equal "text/magic"
end
end
describe "with content type containing the encoding" do
before do
header "Content-Type", "application/json; charset=UTF-8"
put "/jimmy/documents/jason", '{"foo": "bar", "unhosted": 1}'
end
it "saves the value (as JSON)" do
last_response.status.must_equal 201
data_bucket.get("jimmy:documents:jason").data.must_be_kind_of Hash
data_bucket.get("jimmy:documents:jason").data.must_equal({"foo" => "bar", "unhosted" => 1})
end
it "uses the requested content type" do
data_bucket.get("jimmy:documents:jason").content_type.must_equal "application/json; charset=UTF-8"
end
it "delivers the data correctly" do
get "/jimmy/documents/jason"
last_response.body.must_equal '{"foo":"bar","unhosted":1}'
last_response.content_type.must_equal "application/json; charset=UTF-8"
end
end
# A path segment cannot be both a document and a directory.
describe "naming collisions between documents and directories" do
before do
put "/jimmy/documents/archive/document", "lorem ipsum"
end
it "responds with 409 when directory with same name already exists" do
put "/jimmy/documents/archive", "some awesome content"
last_response.status.must_equal 409
lambda {
data_bucket.get("jimmy:documents/archive")
}.must_raise Riak::HTTPFailedRequest
end
it "responds with 409 when there is an existing document with same name as one of the directories" do
put "/jimmy/documents/archive/document/subdir/doc", "some awesome content"
last_response.status.must_equal 409
lambda {
data_bucket.get("jimmy:documents/archive/document/subdir/doc")
}.must_raise Riak::HTTPFailedRequest
end
end
describe "with existing content" do
before do
put "/jimmy/documents/archive/foo", "lorem ipsum"
end
# Overwrites answer 200 (creates answer 201, see above).
it "saves the value" do
put "/jimmy/documents/archive/foo", "some awesome content"
last_response.status.must_equal 200
data_bucket.get("jimmy:documents/archive:foo").data.must_equal "some awesome content"
end
it "logs the operations" do
put "/jimmy/documents/archive/foo", "some awesome content"
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
# Create entry: count 1, full size of "lorem ipsum".
create_entry = objects.select{|o| o.data["count"] == 1}.first
create_entry.data["size"].must_equal 11
create_entry.data["category"].must_equal "documents"
create_entry.indexes["user_id_bin"].must_include "jimmy"
# Update entry: count 0, size is the delta (20 - 11 = 9 bytes).
update_entry = objects.select{|o| o.data["count"] == 0}.first
update_entry.data["size"].must_equal 9
update_entry.data["category"].must_equal "documents"
update_entry.indexes["user_id_bin"].must_include "jimmy"
end
it "changes the ETag header" do
old_etag = last_response.headers["ETag"]
put "/jimmy/documents/archive/foo", "some awesome content"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal old_etag
end
describe "when If-Match header is set" do
it "allows the request if the header matches the current ETag" do
old_etag = last_response.headers["ETag"]
header "If-Match", old_etag
put "/jimmy/documents/archive/foo", "some awesome content"
last_response.status.must_equal 200
get "/jimmy/documents/archive/foo"
last_response.body.must_equal "some awesome content"
end
it "fails the request if the header does not match the current ETag" do
header "If-Match", "WONTMATCH"
put "/jimmy/documents/archive/foo", "some awesome content"
last_response.status.must_equal 412
get "/jimmy/documents/archive/foo"
last_response.body.must_equal "lorem ipsum"
end
end
# "If-None-Match: *" means "only create, never overwrite".
describe "when If-None-Match header is set" do
before do
header "If-None-Match", "*"
end
it "fails when the document already exists" do
put "/jimmy/documents/archive/foo", "some awesome content"
last_response.status.must_equal 412
get "/jimmy/documents/archive/foo"
last_response.body.must_equal "lorem ipsum"
end
it "succeeds when the document does not exist" do
put "/jimmy/documents/archive/bar", "my little content"
last_response.status.must_equal 201
end
end
end
# Overwriting a document whose content type has no registered Riak
# serializer (raw HTML): the raw payload must be stored untouched and the
# client-supplied content type preserved. Fixes the description typo
# "exsting" -> "existing".
describe "existing content without serializer registered for the given content-type" do
before do
header "Content-Type", "text/html; charset=UTF-8"
put "/jimmy/documents/html", '<html></html>'
put "/jimmy/documents/html", '<html><body></body></html>'
end
# The second PUT overwrites the first, so the update path (200, not 201)
# is the one under test.
it "saves the value" do
last_response.status.must_equal 200
data_bucket.get("jimmy:documents:html").raw_data.must_equal "<html><body></body></html>"
end
it "uses the requested content type" do
data_bucket.get("jimmy:documents:html").content_type.must_equal "text/html; charset=UTF-8"
end
end
# PUT to public paths, binary uploads (stored in a separate cloud-storage
# bucket), escaped/unescaped keys, invalid JSON handling, DELETE
# behaviour and the 401 path for unknown tokens.
describe "public data" do
before do
put "/jimmy/public/documents/notes/foo", "note to self"
end
it "saves the value" do
last_response.status.must_equal 201
data_bucket.get("jimmy:public/documents/notes:foo").data.must_equal "note to self"
end
it "logs the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
log_entry = objects.select{|o| o.data["count"] == 1}.first
log_entry.data["size"].must_equal 12
# Public writes are logged under the "public/<category>" category.
log_entry.data["category"].must_equal "public/documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
end
context "with binary data" do
context "binary charset in content-type header" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
end
it "uses the requested content type" do
get "/jimmy/documents/jaypeg"
last_response.status.must_equal 200
last_response.content_type.must_equal "image/jpeg; charset=binary"
end
it "delivers the data correctly" do
get "/jimmy/documents/jaypeg"
last_response.status.must_equal 200
last_response.body.must_equal @image
end
it "responds with an ETag header" do
last_response.headers["ETag"].wont_be_nil
# The GET ETag must match the one returned by the PUT.
etag = last_response.headers["ETag"]
get "/jimmy/documents/jaypeg"
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].must_equal etag
end
it "responds with a Content-Length header" do
get "/jimmy/documents/jaypeg"
last_response.headers["Content-Length"].must_equal "16044"
end
it "changes the ETag when updating the file" do
old_etag = last_response.headers["ETag"]
put "/jimmy/documents/jaypeg", @image
last_response.headers["ETag"].wont_be_nil
last_response.headers["ETag"].wont_equal old_etag
end
it "logs the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
log_entry = objects.select{|o| o.data["count"] == 1}.first
log_entry.data["size"].must_equal 16044
log_entry.data["category"].must_equal "documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
# Re-uploading identical content must not create a second log entry.
context "overwriting existing file with same file" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
end
it "doesn't log the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
objects.size.must_equal 1
end
end
context "overwriting existing file with different file" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image+"foo"
end
it "logs the operation changing only the size" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
objects.size.must_equal 2
# Overwrite entry: count 0, size is the 3-byte delta ("foo").
log_entry = objects.select{|o| o.data["count"] == 0}.first
log_entry.data["size"].must_equal 3
log_entry.data["category"].must_equal "documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
end
end
context "no binary charset in content-type header" do
before do
header "Content-Type", "image/jpeg"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
end
it "uses the requested content type" do
get "/jimmy/documents/jaypeg"
last_response.status.must_equal 200
last_response.content_type.must_equal "image/jpeg"
end
it "delivers the data correctly" do
get "/jimmy/documents/jaypeg"
last_response.status.must_equal 200
last_response.body.must_equal @image
end
end
end
# Percent-encoded characters in document names must round-trip.
context "with escaped key" do
before do
put "/jimmy/documents/bar%3Abaz/http%3A%2F%2F5apps.com", "super website"
end
it "delivers the data correctly" do
header "Authorization", "Bearer 123"
get "/jimmy/documents/bar%3Abaz/http%3A%2F%2F5apps.com"
last_response.body.must_equal 'super website'
end
end
context "with unescaped key" do
before do
put "/jimmy/documents/bar:baz/john@doe.com", "John Doe"
end
it "lists the document in the directory" do
get "/jimmy/documents/bar:baz/"
content = JSON.parse(last_response.body)
content["items"]["john@doe.com"].wont_be_nil
end
it "delivers the data correctly" do
get "/jimmy/documents/bar:baz/john@doe.com"
last_response.body.must_equal "John Doe"
end
end
context "escaped square brackets in key" do
before do
put "/jimmy/documents/gracehopper%5B1%5D.jpg", "super image"
end
it "delivers the data correctly" do
header "Authorization", "Bearer 123"
get "/jimmy/documents/gracehopper%5B1%5D.jpg"
last_response.body.must_equal "super image"
end
end
context "invalid JSON" do
# An empty application/json body is normalized to an empty object...
context "empty body" do
before do
header "Content-Type", "application/json"
put "/jimmy/documents/jason", ""
end
it "saves an empty JSON object" do
last_response.status.must_equal 201
data_bucket.get("jimmy:documents:jason").data.must_be_kind_of Hash
data_bucket.get("jimmy:documents:jason").data.must_equal({})
end
end
# ...while a non-empty unparsable one is rejected outright.
context "unparsable JSON" do
before do
header "Content-Type", "application/json"
put "/jimmy/documents/jason", "foo"
end
it "returns a 422" do
last_response.status.must_equal 422
end
end
end
end
describe "DELETE" do
before do
header "Authorization", "Bearer 123"
end
describe "basics" do
before do
delete "/jimmy/documents/foo"
end
it "removes the key" do
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:documents:foo")
}.must_raise Riak::HTTPFailedRequest
end
it "logs the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
# Delete entry: count -1, size is the negated document size.
log_entry = objects.select{|o| o.data["count"] == -1}.first
log_entry.data["size"].must_equal(-22)
log_entry.data["category"].must_equal "documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
end
context "non-existing object" do
before do
delete "/jimmy/documents/foozius"
end
it "responds with 404" do
last_response.status.must_equal 404
end
it "doesn't log the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
objects.select{|o| o.data["count"] == -1}.size.must_equal 0
end
end
context "when an If-Match header is given" do
it "allows the request if it matches the current ETag" do
get "/jimmy/documents/foo"
old_etag = last_response.headers["ETag"]
header "If-Match", old_etag
delete "/jimmy/documents/foo"
last_response.status.must_equal 200
get "/jimmy/documents/foo"
last_response.status.must_equal 404
end
it "fails the request if it does not match the current ETag" do
header "If-Match", "WONTMATCH"
delete "/jimmy/documents/foo"
last_response.status.must_equal 412
get "/jimmy/documents/foo"
last_response.status.must_equal 200
last_response.body.must_equal "some private text data"
end
end
context "binary data" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
delete "/jimmy/documents/jaypeg"
end
it "removes the main object" do
last_response.status.must_equal 200
lambda {
data_bucket.get("jimmy:documents:jaypeg")
}.must_raise Riak::HTTPFailedRequest
end
# Binary payloads live in a separate (cloud-storage) bucket; deleting
# the document must remove that object as well.
it "removes the binary object" do
last_response.status.must_equal 200
binary = cs_binary_bucket.files.get("jimmy:documents:jaypeg")
binary.must_be_nil
end
it "logs the operation" do
objects = []
opslog_bucket.keys.each { |k| objects << opslog_bucket.get(k) rescue nil }
log_entry = objects.select{|o| o.data["count"] == -1 && o.data["size"] == -16044}.first
log_entry.data["category"].must_equal "documents"
log_entry.indexes["user_id_bin"].must_include "jimmy"
end
end
end
end
# A Bearer token that does not exist in the auth bucket is rejected on
# every verb.
describe "unauthorized access" do
before do
auth = auth_bucket.new("jimmy:123")
auth.data = ["documents", "public"]
auth.store
header "Authorization", "Bearer 321"
end
describe "GET" do
it "returns a 401" do
get "/jimmy/documents/foo"
last_response.status.must_equal 401
end
end
describe "PUT" do
it "returns a 401" do
put "/jimmy/documents/foo", "some text"
last_response.status.must_equal 401
end
end
describe "DELETE" do
it "returns a 401" do
delete "/jimmy/documents/foo"
last_response.status.must_equal 401
end
end
end
end

64
spec/s3/app_spec.rb Normal file
View File

@ -0,0 +1,64 @@
require_relative "../spec_helper"
# S3 backend specs: all S3 HTTP traffic is stubbed with WebMock, then the
# backend-agnostic shared examples ("a REST adapter") are run against the
# stubs. The stubbed paths/bodies/ETags must stay in sync with the
# requests the shared examples issue.
describe "S3 provider" do
def container_url_for(user)
# NOTE(review): endpoint and bucket are concatenated without a "/" —
# presumably the configured endpoint ends with one; confirm against
# config.yml.example.s3.
"#{app.settings.s3["endpoint"]}#{app.settings.s3["bucket"]}/#{user}"
end
def storage_class
RemoteStorage::S3
end
before do
stub_request(:put, "#{container_url_for("phil")}/food/aguacate").
to_return(status: 200, headers: { etag: '"0815etag"' })
# Write new content with an If-Match header (a new Etag is returned)
# (WebMock picks the more recently registered matching stub, so this
# body-specific stub takes precedence for body "aye".)
stub_request(:put, "#{container_url_for("phil")}/food/aguacate").
with(body: "aye").
to_return(status: 200, headers: { etag: '"0915etag"' })
stub_request(:head, "#{container_url_for("phil")}/food/aguacate").
to_return(status: 200, headers: { last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })
stub_request(:get, "#{container_url_for("phil")}/food/aguacate").
to_return(status: 200, body: "rootbody", headers: { etag: '"0817etag"', content_type: "text/plain; charset=utf-8" })
stub_request(:delete, "#{container_url_for("phil")}/food/aguacate").
to_return(status: 200, headers: { etag: '"0815etag"' })
# Write new content to check the metadata in Redis
stub_request(:put, "#{container_url_for("phil")}/food/banano").
with(body: "si").
to_return(status: 200, headers: { etag: '"0815etag"' })
stub_request(:put, "#{container_url_for("phil")}/food/banano").
with(body: "oh, no").
to_return(status: 200, headers: { etag: '"0817etag"' })
stub_request(:head, "#{container_url_for("phil")}/food/banano").
to_return(status: 200, headers: { last_modified: "Fri, 04 Mar 2016 12:20:20 GMT" })
stub_request(:put, "#{container_url_for("phil")}/food/camaron").
to_return(status: 200, headers: { etag: '"0816etag"' })
stub_request(:head, "#{container_url_for("phil")}/food/camaron").
to_return(status: 200, headers: { last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })
stub_request(:delete, "#{container_url_for("phil")}/food/camaron").
to_return(status: 200, headers: { etag: '"0816etag"' })
stub_request(:put, "#{container_url_for("phil")}/food/desayunos/bolon").
to_return(status: 200, headers: { etag: '"0817etag"' })
stub_request(:head, "#{container_url_for("phil")}/food/desayunos/bolon").
to_return(status: 200, headers: { last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })
stub_request(:delete, "#{container_url_for("phil")}/food/desayunos/bolon").
to_return(status: 200, headers: { etag: '"0817etag"' })
# objects in root dir
stub_request(:put, "#{container_url_for("phil")}/bamboo.txt").
to_return(status: 200, headers: { etag: '"0818etag"' })
stub_request(:head, "#{container_url_for("phil")}/bamboo.txt").
to_return(status: 200, headers: { last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })
# 404
stub_request(:head, "#{container_url_for("phil")}/food/steak").
to_return(status: 404)
stub_request(:get, "#{container_url_for("phil")}/food/steak").
to_return(status: 404)
end
it_behaves_like 'a REST adapter'
end

645
spec/shared_examples.rb Normal file
View File

@ -0,0 +1,645 @@
require_relative "./spec_helper"
shared_examples_for 'a REST adapter' do
include Rack::Test::Methods
# Abstract hook: returns the backend container URL for +user+'s storage.
# Each adapter spec (e.g. the Swift spec) must override this before
# running the shared examples.
def container_url_for(user)
raise NotImplementedError
end
# Abstract hook: returns the RemoteStorage backend class under test
# (used with stub_any_instance in the shared examples). Adapter specs
# must override this.
def storage_class
raise NotImplementedError
end
# Any path outside the /:user/:category routes must 404.
it "returns 404 on non-existing routes" do
get "/virginmargarita"
last_response.status.must_equal 404
end
# PUT behavior shared by all storage adapters: metadata bookkeeping in
# Redis, directory-listing maintenance, usage-size accounting, name
# collision rules, and conditional-request (If-Match / If-None-Match)
# handling. Expected ETags/timestamps come from the webmock stubs set
# up by the including adapter spec.
describe "PUT requests" do
before do
purge_redis
end
context "authorized" do
before do
# Grant read/write scope and authenticate the test client.
redis.sadd "authorizations:phil:amarillo", [":rw"]
header "Authorization", "Bearer amarillo"
end
it "creates the metadata object in redis" do
put "/phil/food/aguacate", "si"
# Metadata hash fields: s=size, t=content type, e=etag, m=modified (ms).
metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata["s"].must_equal "2"
metadata["t"].must_equal "text/plain; charset=utf-8"
metadata["e"].must_equal "0815etag"
# millisecond epoch timestamp is 13 digits
metadata["m"].length.must_equal 13
end
it "updates the metadata object in redis when it changes" do
put "/phil/food/banano", "si"
put "/phil/food/banano", "oh, no"
metadata = redis.hgetall "rs:m:phil:food/banano"
metadata["s"].must_equal "6"
metadata["t"].must_equal "text/plain; charset=utf-8"
metadata["e"].must_equal "0817etag"
# matches the stubbed Last-Modified "Fri, 04 Mar 2016 12:20:20 GMT"
metadata["m"].must_equal "1457094020000"
end
it "creates the directory objects metadata in redis" do
put "/phil/food/aguacate", "si"
put "/phil/food/camaron", "yummi"
# Root and subdirectory entries get their own metadata hashes.
metadata = redis.hgetall "rs:m:phil:/"
metadata["e"].must_equal "fe2976909daaf074660981ab563fe65d"
metadata["m"].length.must_equal 13
metadata = redis.hgetall "rs:m:phil:food/"
metadata["e"].must_equal "926f98ff820f2f9764fd3c60a22865ad"
metadata["m"].length.must_equal 13
# Directory listings are tracked as Redis sets of item names.
food_items = redis.smembers "rs:m:phil:food/:items"
food_items.each do |food_item|
["camaron", "aguacate"].must_include food_item
end
root_items = redis.smembers "rs:m:phil:/:items"
root_items.must_equal ["food/"]
end
context "response code" do
it "is 201 for newly created objects" do
put "/phil/food/aguacate", "ci"
last_response.status.must_equal 201
end
it "is 200 for updated objects" do
put "/phil/food/aguacate", "deliciosa"
put "/phil/food/aguacate", "muy deliciosa"
last_response.status.must_equal 200
end
end
context "logging usage size" do
# Per-user storage usage is tracked under the rs:s:<user> key.
it "logs the complete size when creating new objects" do
put "/phil/food/aguacate", "1234567890"
size_log = redis.get "rs:s:phil"
size_log.must_equal "10"
end
it "logs the size difference when updating existing objects" do
put "/phil/food/camaron", "1234567890"
put "/phil/food/aguacate", "1234567890"
put "/phil/food/aguacate", "123"
# 10 (camaron) + 10 (aguacate) - 7 (shrunk to 3 bytes) = 13
size_log = redis.get "rs:s:phil"
size_log.must_equal "13"
end
end
describe "objects in root dir" do
before do
put "/phil/bamboo.txt", "shir kan"
end
it "are listed in the directory listing with all metadata" do
get "phil/"
last_response.status.must_equal 200
last_response.content_type.must_equal "application/ld+json"
content = JSON.parse(last_response.body)
content["items"]["bamboo.txt"].wont_be_nil
content["items"]["bamboo.txt"]["ETag"].must_equal "0818etag"
content["items"]["bamboo.txt"]["Content-Type"].must_equal "text/plain; charset=utf-8"
content["items"]["bamboo.txt"]["Content-Length"].must_equal 8
content["items"]["bamboo.txt"]["Last-Modified"].must_equal "Fri, 04 Mar 2016 12:20:18 GMT"
end
end
describe "name collision checks" do
it "is successful when there is no name collision" do
put "/phil/food/aguacate", "si"
last_response.status.must_equal 201
metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata["s"].must_equal "2"
end
it "conflicts when there is a directory with same name as document" do
put "/phil/food/aguacate", "si"
# "food" already exists as a directory, so a document named "food" must 409.
put "/phil/food", "wontwork"
last_response.status.must_equal 409
last_response.body.must_equal "Conflict"
metadata = redis.hgetall "rs:m:phil:food"
metadata.must_be_empty
end
it "conflicts when there is a document with same name as directory" do
put "/phil/food/aguacate", "si"
# "aguacate" already exists as a document, so it cannot become a directory.
put "/phil/food/aguacate/empanado", "wontwork"
last_response.status.must_equal 409
metadata = redis.hgetall "rs:m:phil:food/aguacate/empanado"
metadata.must_be_empty
end
it "returns 400 when a Content-Range header is sent" do
# Partial uploads are not allowed by the remoteStorage spec.
header "Content-Range", "bytes 0-3/3"
put "/phil/food/aguacate", "si"
last_response.status.must_equal 400
end
end
describe "If-Match header" do
before do
put "/phil/food/aguacate", "si"
end
it "allows the request if the header matches the current ETag" do
header "If-Match", "\"0815etag\""
put "/phil/food/aguacate", "aye"
last_response.status.must_equal 200
last_response.headers["Etag"].must_equal "\"0915etag\""
end
it "allows the request if the header contains a weak ETAG matching the current ETag" do
# Weak validators (W/ prefix) are accepted as equivalent.
header "If-Match", "W/\"0815etag\""
put "/phil/food/aguacate", "aye"
last_response.status.must_equal 200
last_response.headers["Etag"].must_equal "\"0915etag\""
end
it "allows the request if the header contains a weak ETAG with leading quote matching the current ETag" do
header "If-Match", "\"W/\"0815etag\""
put "/phil/food/aguacate", "aye"
last_response.status.must_equal 200
last_response.headers["Etag"].must_equal "\"0915etag\""
end
it "fails the request if the header does not match the current ETag" do
header "If-Match", "someotheretag"
put "/phil/food/aguacate", "aye"
last_response.status.must_equal 412
last_response.body.must_equal "Precondition Failed"
end
it "allows the request if redis metadata became out of sync" do
# The backend's HEAD response is the source of truth when Redis
# metadata has drifted; the matching backend ETag lets the PUT through.
header "If-Match", "\"0815etag\""
put "/phil/food/aguacate", "aye"
last_response.status.must_equal 200
end
end
describe "If-None-Match header set to '*'" do
it "succeeds when the document doesn't exist yet" do
header "If-None-Match", "*"
put "/phil/food/aguacate", "si"
last_response.status.must_equal 201
end
it "fails the request if the document already exists" do
put "/phil/food/aguacate", "si"
header "If-None-Match", "*"
put "/phil/food/aguacate", "si"
last_response.status.must_equal 412
last_response.body.must_equal "Precondition Failed"
end
end
end
end
# DELETE behavior shared by all storage adapters: authorization checks,
# Redis metadata/size-log cleanup, directory-listing pruning, ETag
# propagation, and If-Match preconditions.
describe "DELETE requests" do
before do
purge_redis
end
context "not authorized" do
describe "with no token" do
it "says it's not authorized" do
delete "/phil/food/aguacate"
last_response.status.must_equal 401
last_response.body.must_equal "Unauthorized"
end
end
describe "with empty token" do
it "says it's not authorized" do
header "Authorization", "Bearer "
delete "/phil/food/aguacate"
last_response.status.must_equal 401
last_response.body.must_equal "Unauthorized"
end
end
describe "with wrong token" do
it "says it's not authorized" do
header "Authorization", "Bearer wrongtoken"
delete "/phil/food/aguacate"
last_response.status.must_equal 401
last_response.body.must_equal "Unauthorized"
end
end
end
context "authorized" do
before do
redis.sadd "authorizations:phil:amarillo", [":rw"]
header "Authorization", "Bearer amarillo"
# Seed three documents (2 + 5 + 3 = 10 bytes total).
put "/phil/food/aguacate", "si"
put "/phil/food/camaron", "yummi"
put "/phil/food/desayunos/bolon", "wow"
end
it "decreases the size log by size of deleted object" do
delete "/phil/food/aguacate"
# 10 bytes total minus the 2-byte "si" document leaves 8.
size_log = redis.get "rs:s:phil"
size_log.must_equal "8"
end
it "deletes the metadata object in redis" do
delete "/phil/food/aguacate"
metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata.must_be_empty
end
it "deletes the directory objects metadata in redis" do
old_metadata = redis.hgetall "rs:m:phil:food/"
# Stub the recomputed directory ETag so we can assert it was refreshed.
storage_class.stub_any_instance :etag_for, "newetag" do
delete "/phil/food/aguacate"
end
metadata = redis.hgetall "rs:m:phil:food/"
metadata["e"].must_equal "newetag"
metadata["m"].length.must_equal 13
metadata["m"].wont_equal old_metadata["m"]
food_items = redis.smembers "rs:m:phil:food/:items"
food_items.sort.must_equal ["camaron", "desayunos/"]
root_items = redis.smembers "rs:m:phil:/:items"
root_items.must_equal ["food/"]
end
it "deletes the parent directory objects metadata when deleting all items" do
delete "/phil/food/aguacate"
delete "/phil/food/camaron"
delete "/phil/food/desayunos/bolon"
# NOTE(review): this key lacks the trailing slash used by every other
# directory-items key ("rs:m:phil:food/desayunos/:items"); as written it
# likely queries a key that is never created, so the assertion may pass
# vacuously — confirm the intended key name.
redis.smembers("rs:m:phil:food/desayunos:items").must_be_empty
redis.hgetall("rs:m:phil:food/desayunos/").must_be_empty
redis.smembers("rs:m:phil:food/:items").must_be_empty
redis.hgetall("rs:m:phil:food/").must_be_empty
redis.smembers("rs:m:phil:/:items").must_be_empty
end
it "responds with the ETag of the deleted item in the header" do
delete "/phil/food/aguacate"
last_response.headers["ETag"].must_equal "\"0815etag\""
end
context "when item doesn't exist" do
before do
purge_redis
delete "/phil/food/steak"
end
it "returns a 404" do
last_response.status.must_equal 404
last_response.body.must_equal "Not Found"
end
it "deletes any metadata that might still exist" do
delete "/phil/food/steak"
metadata = redis.hgetall "rs:m:phil:food/steak"
metadata.must_be_empty
redis.smembers("rs:m:phil:food/:items").must_be_empty
redis.hgetall("rs:m:phil:food/").must_be_empty
redis.smembers("rs:m:phil:/:items").must_be_empty
end
end
describe "If-Match header" do
it "succeeds when the header matches the current ETag" do
header "If-Match", "\"0815etag\""
delete "/phil/food/aguacate"
last_response.status.must_equal 200
end
it "succeeds when the header contains a weak ETAG matching the current ETag" do
header "If-Match", "W/\"0815etag\""
delete "/phil/food/aguacate"
last_response.status.must_equal 200
end
it "fails the request if it does not match the current ETag" do
header "If-Match", "someotheretag"
delete "/phil/food/aguacate"
last_response.status.must_equal 412
last_response.body.must_equal "Precondition Failed"
end
end
end
end
# GET behavior shared by all storage adapters: authorization checks,
# document retrieval with caching headers, conditional GET (304), and
# application/ld+json directory listings.
# Fix: corrected typo in a test description ("liting" -> "listing").
describe "GET requests" do
before do
purge_redis
end
context "not authorized" do
describe "without token" do
it "says it's not authorized" do
get "/phil/food/"
last_response.status.must_equal 401
last_response.body.must_equal "Unauthorized"
end
end
describe "with wrong token" do
it "says it's not authorized" do
header "Authorization", "Bearer wrongtoken"
get "/phil/food/"
last_response.status.must_equal 401
last_response.body.must_equal "Unauthorized"
end
end
end
context "authorized" do
before do
redis.sadd "authorizations:phil:amarillo", [":rw"]
header "Authorization", "Bearer amarillo"
# Seed documents; expected ETags come from the adapter's webmock stubs.
put "/phil/food/aguacate", "si"
put "/phil/food/camaron", "yummi"
put "/phil/food/desayunos/bolon", "wow"
end
describe "documents" do
it "returns the required response headers" do
get "/phil/food/aguacate"
last_response.status.must_equal 200
last_response.headers["ETag"].must_equal "\"0817etag\""
last_response.headers["Cache-Control"].must_equal "no-cache"
last_response.headers["Content-Type"].must_equal "text/plain; charset=utf-8"
end
it "returns a 404 when data doesn't exist" do
get "/phil/food/steak"
last_response.status.must_equal 404
last_response.body.must_equal "Not Found"
end
it "responds with 304 when IF_NONE_MATCH header contains the ETag" do
header "If-None-Match", "\"0815etag\""
get "/phil/food/aguacate"
last_response.status.must_equal 304
# 304 responses still carry validator headers.
last_response.headers["ETag"].must_equal "\"0815etag\""
last_response.headers["Last-Modified"].must_equal "Fri, 04 Mar 2016 12:20:18 GMT"
end
it "responds with 304 when IF_NONE_MATCH header contains weak ETAG matching the current ETag" do
header "If-None-Match", "W/\"0815etag\""
get "/phil/food/aguacate"
last_response.status.must_equal 304
last_response.headers["ETag"].must_equal "\"0815etag\""
last_response.headers["Last-Modified"].must_equal "Fri, 04 Mar 2016 12:20:18 GMT"
end
end
describe "directory listings" do
it "returns the correct ETag header" do
get "/phil/food/"
last_response.status.must_equal 200
last_response.headers["ETag"].must_equal "\"f9f85fbf5aa1fa378fd79ac8aa0a457d\""
end
it "returns a Cache-Control header with value 'no-cache'" do
get "/phil/food/"
last_response.status.must_equal 200
last_response.headers["Cache-Control"].must_equal "no-cache"
end
it "responds with 304 when IF_NONE_MATCH header contains the ETag" do
header "If-None-Match", "\"f9f85fbf5aa1fa378fd79ac8aa0a457d\""
get "/phil/food/"
last_response.status.must_equal 304
end
it "responds with 304 when IF_NONE_MATCH header contains weak ETAG matching the ETag" do
header "If-None-Match", "W/\"f9f85fbf5aa1fa378fd79ac8aa0a457d\""
get "/phil/food/"
last_response.status.must_equal 304
end
it "contains all items in the directory" do
get "/phil/food/"
last_response.status.must_equal 200
last_response.content_type.must_equal "application/ld+json"
content = JSON.parse(last_response.body)
# Folder description format per the remoteStorage spec.
content["@context"].must_equal "http://remotestorage.io/spec/folder-description"
content["items"]["aguacate"].wont_be_nil
content["items"]["aguacate"]["Content-Type"].must_equal "text/plain; charset=utf-8"
content["items"]["aguacate"]["Content-Length"].must_equal 2
content["items"]["aguacate"]["ETag"].must_equal "0815etag"
content["items"]["camaron"].wont_be_nil
content["items"]["camaron"]["Content-Type"].must_equal "text/plain; charset=utf-8"
content["items"]["camaron"]["Content-Length"].must_equal 5
content["items"]["camaron"]["ETag"].must_equal "0816etag"
content["items"]["desayunos/"].wont_be_nil
content["items"]["desayunos/"]["ETag"].must_equal "dd36e3cfe52b5f33421150b289a7d48d"
end
it "contains all items in the root directory" do
get "phil/"
last_response.status.must_equal 200
last_response.content_type.must_equal "application/ld+json"
content = JSON.parse(last_response.body)
content["items"]["food/"].wont_be_nil
content["items"]["food/"]["ETag"].must_equal "f9f85fbf5aa1fa378fd79ac8aa0a457d"
end
it "responds with an empty directory listing when directory doesn't exist" do
get "phil/some-non-existing-dir/"
last_response.status.must_equal 200
last_response.content_type.must_equal "application/ld+json"
content = JSON.parse(last_response.body)
content["items"].must_equal({})
end
end
end
end
# HEAD behavior shared by all storage adapters: identical headers to GET
# but with empty bodies, plus conditional-request (304) handling.
describe "HEAD requests" do
before do
purge_redis
end
context "not authorized" do
describe "without token" do
it "says it's not authorized" do
head "/phil/food/camarones"
last_response.status.must_equal 401
# Unlike GET, HEAD must not return a body even on errors.
last_response.body.must_be_empty
end
end
describe "with wrong token" do
it "says it's not authorized" do
header "Authorization", "Bearer wrongtoken"
head "/phil/food/camarones"
last_response.status.must_equal 401
last_response.body.must_be_empty
end
end
end
context "authorized" do
before do
redis.sadd "authorizations:phil:amarillo", [":rw"]
header "Authorization", "Bearer amarillo"
put "/phil/food/aguacate", "si"
put "/phil/food/camaron", "yummi"
put "/phil/food/desayunos/bolon", "wow"
end
describe "directory listings" do
it "returns the correct header information" do
# NOTE(review): this example issues GET, not HEAD, despite living in
# the HEAD describe block — confirm whether `head "/phil/food/"` was
# intended here.
get "/phil/food/"
last_response.status.must_equal 200
last_response.content_type.must_equal "application/ld+json"
last_response.headers["ETag"].must_equal "\"f9f85fbf5aa1fa378fd79ac8aa0a457d\""
end
end
describe "documents" do
context "when the document doesn't exist" do
it "returns a 404" do
head "/phil/food/steak"
last_response.status.must_equal 404
last_response.body.must_be_empty
end
end
context "when the document exists" do
it "returns the required response headers" do
head "/phil/food/aguacate"
last_response.status.must_equal 200
last_response.headers["ETag"].must_equal "\"0815etag\""
last_response.headers["Cache-Control"].must_equal "no-cache"
last_response.headers["Last-Modified"].must_equal "Fri, 04 Mar 2016 12:20:18 GMT"
last_response.headers["Content-Type"].must_equal "text/plain; charset=utf-8"
# Content-Length reflects the document size even though no body is sent.
last_response.headers["Content-Length"].must_equal "2"
end
it "responds with 304 when IF_NONE_MATCH header contains the ETag" do
header "If-None-Match", "\"0815etag\""
head "/phil/food/aguacate"
last_response.status.must_equal 304
last_response.headers["ETag"].must_equal "\"0815etag\""
last_response.headers["Last-Modified"].must_equal "Fri, 04 Mar 2016 12:20:18 GMT"
end
it "responds with 304 when IF_NONE_MATCH header contains weak ETAG matching the current ETag" do
header "If-None-Match", "W/\"0815etag\""
head "/phil/food/aguacate"
last_response.status.must_equal 304
last_response.headers["ETag"].must_equal "\"0815etag\""
last_response.headers["Last-Modified"].must_equal "Fri, 04 Mar 2016 12:20:18 GMT"
end
end
end
end
end
end

View File

@ -6,13 +6,15 @@ Bundler.require
require_relative '../liquor-cabinet'
require 'minitest/autorun'
require "minitest/stub_any_instance"
require 'rack/test'
require 'purdytest'
require 'riak'
require "redis"
require "rest_client"
require "minitest/stub_any_instance"
require "ostruct"
require 'webmock/minitest'
WebMock.disable_net_connect!
def app
LiquorCabinet
@ -20,22 +22,11 @@ end
app.set :environment, :test
def wait_a_second
now = Time.now.to_i
while Time.now.to_i == now; end
end
def write_last_response_to_file(filename = "last_response.html")
File.open(filename, "w") do |f|
f.write last_response.body
end
end
alias context describe
if app.settings.respond_to? :redis
def redis
@redis ||= Redis.new(host: app.settings.redis["host"], port: app.settings.redis["port"])
@redis ||= Redis.new(app.settings.redis.symbolize_keys)
end
def purge_redis
@ -45,73 +36,22 @@ if app.settings.respond_to? :redis
end
end
if app.settings.respond_to? :riak
::Riak.disable_list_keys_warnings = true
def client
@client ||= ::Riak::Client.new(:host => app.settings.riak['host'],
:http_port => app.settings.riak['http_port'])
end
def data_bucket
@data_bucket ||= begin
bucket = client.bucket(app.settings.riak['buckets']['data'])
bucket.allow_mult = false
bucket
end
end
def directory_bucket
@directory_bucket ||= begin
bucket = client.bucket(app.settings.riak['buckets']['directories'])
bucket.allow_mult = false
bucket
end
end
def auth_bucket
@auth_bucket ||= begin
bucket = client.bucket(app.settings.riak['buckets']['authorizations'])
bucket.allow_mult = false
bucket
end
end
def opslog_bucket
@opslog_bucket ||= begin
bucket = client.bucket(app.settings.riak['buckets']['opslog'])
bucket.allow_mult = false
bucket
end
end
def cs_credentials
@cs_credentials ||= begin
credentials = File.read(app.settings.riak['riak_cs']['credentials_file'])
JSON.parse(credentials)
end
end
def cs_client
@cs_client ||= Fog::Storage.new({
:provider => 'AWS',
:aws_access_key_id => cs_credentials['key_id'],
:aws_secret_access_key => cs_credentials['key_secret'],
:endpoint => app.settings.riak['riak_cs']['endpoint']
})
end
def cs_binary_bucket
@cs_binary_bucket ||= cs_client.directories.create(:key => app.settings.riak['buckets']['cs_binaries'])
end
def purge_all_buckets
[data_bucket, directory_bucket, auth_bucket, opslog_bucket].each do |bucket|
bucket.keys.each {|key| bucket.delete key}
end
cs_binary_bucket.files.each do |file|
file.destroy
end
MiniTest::Spec.class_eval do
def self.shared_examples
@shared_examples ||= {}
end
end
module MiniTest::Spec::SharedExamples
def shared_examples_for(desc, &block)
MiniTest::Spec.shared_examples[desc] = block
end
def it_behaves_like(desc)
self.instance_eval(&MiniTest::Spec.shared_examples[desc])
end
end
Object.class_eval { include(MiniTest::Spec::SharedExamples) }
require_relative 'shared_examples'

View File

@ -1,826 +1,58 @@
require_relative "../spec_helper"
describe "App" do
include Rack::Test::Methods
def app
LiquorCabinet
describe "Swift provider" do
# Swift-adapter override of the shared-example hook: builds the Swift
# container URL for +user+ from the configured swift host.
def container_url_for(user)
"#{app.settings.swift["host"]}/rs:documents:test/#{user}"
end
it "returns 404 on non-existing routes" do
get "/virginmargarita"
last_response.status.must_equal 404
# Swift-adapter override of the shared-example hook: the backend class
# whose instances get stubbed (e.g. :etag_for) in the shared examples.
def storage_class
RemoteStorage::Swift
end
describe "PUT requests" do
before do
purge_redis
end
context "authorized" do
before do
redis.sadd "authorizations:phil:amarillo", [":rw"]
header "Authorization", "Bearer amarillo"
end
it "creates the metadata object in redis" do
put_stub = OpenStruct.new(headers: {
etag: "bla",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
end
metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata["s"].must_equal "2"
metadata["t"].must_equal "text/plain; charset=utf-8"
metadata["e"].must_equal "bla"
metadata["m"].length.must_equal 13
end
it "creates the directory objects metadata in redis" do
put_stub = OpenStruct.new(headers: {
etag: "bla",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
get_stub = OpenStruct.new(body: "rootbody")
RestClient.stub :put, put_stub do
RestClient.stub :get, get_stub do
RemoteStorage::Swift.stub_any_instance :etag_for, "newetag" do
put "/phil/food/aguacate", "si"
put "/phil/food/camaron", "yummi"
end
end
end
metadata = redis.hgetall "rs:m:phil:/"
metadata["e"].must_equal "newetag"
metadata["m"].length.must_equal 13
metadata = redis.hgetall "rs:m:phil:food/"
metadata["e"].must_equal "newetag"
metadata["m"].length.must_equal 13
food_items = redis.smembers "rs:m:phil:food/:items"
food_items.each do |food_item|
["camaron", "aguacate"].must_include food_item
end
root_items = redis.smembers "rs:m:phil:/:items"
root_items.must_equal ["food/"]
end
context "response code" do
before do
@put_stub = OpenStruct.new(headers: {
etag: "bla",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
end
it "is 201 for newly created objects" do
RestClient.stub :put, @put_stub do
put "/phil/food/aguacate", "muy deliciosa"
end
last_response.status.must_equal 201
end
it "is 200 for updated objects" do
RestClient.stub :put, @put_stub do
put "/phil/food/aguacate", "deliciosa"
put "/phil/food/aguacate", "muy deliciosa"
end
last_response.status.must_equal 200
end
end
context "logging usage size" do
before do
@put_stub = OpenStruct.new(headers: {
etag: "bla",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
end
it "logs the complete size when creating new objects" do
RestClient.stub :put, @put_stub do
put "/phil/food/aguacate", "1234567890"
end
size_log = redis.get "rs:s:phil"
size_log.must_equal "10"
end
it "logs the size difference when updating existing objects" do
RestClient.stub :put, @put_stub do
put "/phil/food/camaron", "1234567890"
put "/phil/food/aguacate", "1234567890"
put "/phil/food/aguacate", "123"
end
size_log = redis.get "rs:s:phil"
size_log.must_equal "13"
end
end
describe "objects in root dir" do
before do
put_stub = OpenStruct.new(headers: {
etag: "bla",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/bamboo.txt", "shir kan"
end
end
it "are listed in the directory listing with all metadata" do
get "phil/"
last_response.status.must_equal 200
last_response.content_type.must_equal "application/ld+json"
content = JSON.parse(last_response.body)
content["items"]["bamboo.txt"].wont_be_nil
content["items"]["bamboo.txt"]["ETag"].must_equal "bla"
content["items"]["bamboo.txt"]["Content-Type"].must_equal "text/plain; charset=utf-8"
content["items"]["bamboo.txt"]["Content-Length"].must_equal 8
end
end
describe "name collision checks" do
it "is successful when there is no name collision" do
put_stub = OpenStruct.new(headers: {
etag: "bla",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
get_stub = OpenStruct.new(body: "rootbody")
RestClient.stub :put, put_stub do
RestClient.stub :get, get_stub do
RemoteStorage::Swift.stub_any_instance :etag_for, "rootetag" do
put "/phil/food/aguacate", "si"
end
end
end
last_response.status.must_equal 201
metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata["s"].must_equal "2"
end
it "conflicts when there is a directory with same name as document" do
put_stub = OpenStruct.new(headers: {
etag: "bla",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
put "/phil/food", "wontwork"
end
last_response.status.must_equal 409
last_response.body.must_equal "Conflict"
metadata = redis.hgetall "rs:m:phil:food"
metadata.must_be_empty
end
it "conflicts when there is a document with same name as directory" do
put_stub = OpenStruct.new(headers: {
etag: "bla",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
put "/phil/food/aguacate/empanado", "wontwork"
end
last_response.status.must_equal 409
metadata = redis.hgetall "rs:m:phil:food/aguacate/empanado"
metadata.must_be_empty
end
it "returns 400 when a Content-Range header is sent" do
header "Content-Range", "bytes 0-3/3"
put "/phil/food/aguacate", "si"
last_response.status.must_equal 400
end
end
describe "If-Match header" do
before do
put_stub = OpenStruct.new(headers: {
etag: "oldetag",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
end
end
it "allows the request if the header matches the current ETag" do
header "If-Match", "\"oldetag\""
put_stub = OpenStruct.new(headers: {
etag: "newetag",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "aye"
end
last_response.status.must_equal 200
last_response.headers["Etag"].must_equal "\"newetag\""
end
it "allows the request if the header contains a weak ETAG matching the current ETag" do
header "If-Match", "W/\"oldetag\""
put_stub = OpenStruct.new(headers: {
etag: "newetag",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "aye"
end
last_response.status.must_equal 200
last_response.headers["Etag"].must_equal "\"newetag\""
end
it "allows the request if the header contains a weak ETAG with leading quote matching the current ETag" do
header "If-Match", "\"W/\"oldetag\""
put_stub = OpenStruct.new(headers: {
etag: "newetag",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "aye"
end
last_response.status.must_equal 200
last_response.headers["Etag"].must_equal "\"newetag\""
end
it "fails the request if the header does not match the current ETag" do
header "If-Match", "someotheretag"
head_stub = OpenStruct.new(headers: {
etag: "oldetag",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT",
content_type: "text/plain",
content_length: 23
})
RestClient.stub :head, head_stub do
put "/phil/food/aguacate", "aye"
end
last_response.status.must_equal 412
last_response.body.must_equal "Precondition Failed"
end
it "allows the request if redis metadata became out of sync" do
header "If-Match", "\"existingetag\""
head_stub = OpenStruct.new(headers: {
etag: "existingetag",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT",
content_type: "text/plain",
content_length: 23
})
put_stub = OpenStruct.new(headers: {
etag: "newetag",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :head, head_stub do
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "aye"
end
end
last_response.status.must_equal 200
end
end
describe "If-None-Match header set to '*'" do
it "succeeds when the document doesn't exist yet" do
put_stub = OpenStruct.new(headers: {
etag: "someetag",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
header "If-None-Match", "*"
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
end
last_response.status.must_equal 201
end
it "fails the request if the document already exists" do
put_stub = OpenStruct.new(headers: {
etag: "someetag",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
end
header "If-None-Match", "*"
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
end
last_response.status.must_equal 412
last_response.body.must_equal "Precondition Failed"
end
end
end
end
describe "DELETE requests" do
before do
purge_redis
end
context "not authorized" do
describe "with no token" do
it "says it's not authorized" do
delete "/phil/food/aguacate"
last_response.status.must_equal 401
last_response.body.must_equal "Unauthorized"
end
end
describe "with empty token" do
it "says it's not authorized" do
header "Authorization", "Bearer "
delete "/phil/food/aguacate"
last_response.status.must_equal 401
last_response.body.must_equal "Unauthorized"
end
end
describe "with wrong token" do
it "says it's not authorized" do
header "Authorization", "Bearer wrongtoken"
delete "/phil/food/aguacate"
last_response.status.must_equal 401
last_response.body.must_equal "Unauthorized"
end
end
end
context "authorized" do
before do
redis.sadd "authorizations:phil:amarillo", [":rw"]
header "Authorization", "Bearer amarillo"
put_stub = OpenStruct.new(headers: {
etag: "bla",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
put "/phil/food/camaron", "yummi"
put "/phil/food/desayunos/bolon", "wow"
end
end
it "decreases the size log by size of deleted object" do
RestClient.stub :delete, "" do
RemoteStorage::Swift.stub_any_instance :etag_for, "rootetag" do
delete "/phil/food/aguacate"
end
end
size_log = redis.get "rs:s:phil"
size_log.must_equal "8"
end
it "deletes the metadata object in redis" do
RestClient.stub :delete, "" do
RemoteStorage::Swift.stub_any_instance :etag_for, "rootetag" do
delete "/phil/food/aguacate"
end
end
metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata.must_be_empty
end
it "deletes the directory objects metadata in redis" do
old_metadata = redis.hgetall "rs:m:phil:food/"
RestClient.stub :delete, "" do
RemoteStorage::Swift.stub_any_instance :etag_for, "newetag" do
delete "/phil/food/aguacate"
end
end
metadata = redis.hgetall "rs:m:phil:food/"
metadata["e"].must_equal "newetag"
metadata["m"].length.must_equal 13
metadata["m"].wont_equal old_metadata["m"]
food_items = redis.smembers "rs:m:phil:food/:items"
food_items.sort.must_equal ["camaron", "desayunos/"]
root_items = redis.smembers "rs:m:phil:/:items"
root_items.must_equal ["food/"]
end
it "deletes the parent directory objects metadata when deleting all items" do
RestClient.stub :delete, "" do
RemoteStorage::Swift.stub_any_instance :etag_for, "rootetag" do
delete "/phil/food/aguacate"
delete "/phil/food/camaron"
delete "/phil/food/desayunos/bolon"
end
end
redis.smembers("rs:m:phil:food/desayunos:items").must_be_empty
redis.hgetall("rs:m:phil:food/desayunos/").must_be_empty
redis.smembers("rs:m:phil:food/:items").must_be_empty
redis.hgetall("rs:m:phil:food/").must_be_empty
redis.smembers("rs:m:phil:/:items").must_be_empty
end
it "responds with the ETag of the deleted item in the header" do
RestClient.stub :delete, "" do
delete "/phil/food/aguacate"
end
last_response.headers["ETag"].must_equal "\"bla\""
end
context "when item doesn't exist" do
before do
purge_redis
put_stub = OpenStruct.new(headers: {
etag: "bla",
last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
})
RestClient.stub :put, put_stub do
put "/phil/food/steak", "si"
end
raises_exception = ->(url, headers) { raise RestClient::ResourceNotFound.new }
RestClient.stub :delete, raises_exception do
delete "/phil/food/steak"
end
end
it "returns a 404" do
last_response.status.must_equal 404
last_response.body.must_equal "Not Found"
end
it "deletes any metadata that might still exist" do
raises_exception = ->(url, headers) { raise RestClient::ResourceNotFound.new }
RestClient.stub :delete, raises_exception do
delete "/phil/food/steak"
end
metadata = redis.hgetall "rs:m:phil:food/steak"
metadata.must_be_empty
redis.smembers("rs:m:phil:food/:items").must_be_empty
redis.hgetall("rs:m:phil:food/").must_be_empty
redis.smembers("rs:m:phil:/:items").must_be_empty
end
end
describe "If-Match header" do
it "succeeds when the header matches the current ETag" do
header "If-Match", "\"bla\""
RestClient.stub :delete, "" do
delete "/phil/food/aguacate"
end
last_response.status.must_equal 200
end
it "succeeds when the header contains a weak ETAG matching the current ETag" do
header "If-Match", "W/\"bla\""
RestClient.stub :delete, "" do
delete "/phil/food/aguacate"
end
last_response.status.must_equal 200
end
it "fails the request if it does not match the current ETag" do
header "If-Match", "someotheretag"
delete "/phil/food/aguacate"
last_response.status.must_equal 412
last_response.body.must_equal "Precondition Failed"
end
end
end
end
describe "GET requests" do
before do
purge_redis
end
context "not authorized" do
describe "without token" do
it "says it's not authorized" do
get "/phil/food/"
last_response.status.must_equal 401
last_response.body.must_equal "Unauthorized"
end
end
describe "with wrong token" do
it "says it's not authorized" do
header "Authorization", "Bearer wrongtoken"
get "/phil/food/"
last_response.status.must_equal 401
last_response.body.must_equal "Unauthorized"
end
end
end
context "authorized" do
before do
  redis.sadd "authorizations:phil:amarillo", [":rw"]
  header "Authorization", "Bearer amarillo"

  # Seed a few documents; the backend PUT is stubbed out, so only the
  # app's own bookkeeping (Redis metadata) is exercised.
  stored = OpenStruct.new(headers: {
    etag: "bla",
    last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
  })

  RestClient.stub :put, stored do
    put "/phil/food/aguacate", "si"
    put "/phil/food/camaron", "yummi"
    put "/phil/food/desayunos/bolon", "wow"
  end
end
describe "documents" do
  it "returns the required response headers" do
    response_stub = OpenStruct.new(body: "si", headers: {
      etag: "0815etag",
      last_modified: "Fri, 04 Mar 2016 12:20:18 GMT",
      content_type: "text/plain; charset=utf-8",
      content_length: 2
    })

    RestClient.stub :get, response_stub do
      get "/phil/food/aguacate"
    end

    last_response.status.must_equal 200
    last_response.headers["ETag"].must_equal '"0815etag"'
    last_response.headers["Cache-Control"].must_equal "no-cache"
    last_response.headers["Content-Type"].must_equal "text/plain; charset=utf-8"
  end

  it "returns a 404 when data doesn't exist" do
    not_found = ->(url, headers) { raise RestClient::ResourceNotFound.new }

    RestClient.stub :get, not_found do
      get "/phil/food/steak"
    end

    last_response.status.must_equal 404
    last_response.body.must_equal "Not Found"
  end

  it "responds with 304 when IF_NONE_MATCH header contains the ETag" do
    header "If-None-Match", '"0815etag"'

    response_stub = OpenStruct.new(body: "si", headers: {
      etag: "0815etag",
      last_modified: "Fri, 04 Mar 2016 12:20:18 GMT",
      content_type: "text/plain; charset=utf-8",
      content_length: 2
    })

    RestClient.stub :get, response_stub do
      get "/phil/food/aguacate"
    end

    last_response.status.must_equal 304
  end

  it "responds with 304 when IF_NONE_MATCH header contains weak ETAG matching the current ETag" do
    header "If-None-Match", 'W/"0815etag"'

    response_stub = OpenStruct.new(body: "si", headers: {
      etag: "0815etag",
      last_modified: "Fri, 04 Mar 2016 12:20:18 GMT",
      content_type: "text/plain; charset=utf-8",
      content_length: 2
    })

    RestClient.stub :get, response_stub do
      get "/phil/food/aguacate"
    end

    last_response.status.must_equal 304
  end
end
describe "directory listings" do
it "returns the correct ETag header" do
  get "/phil/food/"

  last_response.status.must_equal 200
  last_response.headers["ETag"].must_equal '"f9f85fbf5aa1fa378fd79ac8aa0a457d"'
end
it "returns a Cache-Control header with value 'no-cache'" do
  get "/phil/food/"

  last_response.status.must_equal 200
  last_response.headers["Cache-Control"].must_equal "no-cache"
end
it "responds with 304 when IF_NONE_MATCH header contains the ETag" do
  header "If-None-Match", '"f9f85fbf5aa1fa378fd79ac8aa0a457d"'

  get "/phil/food/"

  last_response.status.must_equal 304
end
it "responds with 304 when IF_NONE_MATCH header contains weak ETAG matching the ETag" do
  header "If-None-Match", 'W/"f9f85fbf5aa1fa378fd79ac8aa0a457d"'

  get "/phil/food/"

  last_response.status.must_equal 304
end
it "contains all items in the directory" do
  get "/phil/food/"

  last_response.status.must_equal 200
  last_response.content_type.must_equal "application/ld+json"

  listing = JSON.parse(last_response.body)
  listing["@context"].must_equal "http://remotestorage.io/spec/folder-description"

  aguacate = listing["items"]["aguacate"]
  aguacate.wont_be_nil
  aguacate["Content-Type"].must_equal "text/plain; charset=utf-8"
  aguacate["Content-Length"].must_equal 2
  aguacate["ETag"].must_equal "bla"

  camaron = listing["items"]["camaron"]
  camaron.wont_be_nil
  camaron["Content-Type"].must_equal "text/plain; charset=utf-8"
  camaron["Content-Length"].must_equal 5
  camaron["ETag"].must_equal "bla"

  desayunos = listing["items"]["desayunos/"]
  desayunos.wont_be_nil
  desayunos["ETag"].must_equal "dd36e3cfe52b5f33421150b289a7d48d"
end
it "contains all items in the root directory" do
  # NOTE(review): path lacks the leading slash used everywhere else in this
  # file ("/phil/..."); it appears to work under Rack::Test — confirm before
  # normalizing, since the request path is behavior here.
  get "phil/"
  last_response.status.must_equal 200
  last_response.content_type.must_equal "application/ld+json"
  content = JSON.parse(last_response.body)
  content["items"]["food/"].wont_be_nil
  content["items"]["food/"]["ETag"].must_equal "f9f85fbf5aa1fa378fd79ac8aa0a457d"
end
# Fixed typo in the example description: "liting" -> "listing"
it "responds with an empty directory listing when directory doesn't exist" do
  get "phil/some-non-existing-dir/"

  last_response.status.must_equal 200
  last_response.content_type.must_equal "application/ld+json"

  content = JSON.parse(last_response.body)
  content["items"].must_equal({})
end
end
end
end
describe "HEAD requests" do
# Start every example from a clean Redis state
before { purge_redis }
context "not authorized" do
  describe "without token" do
    it "says it's not authorized" do
      head "/phil/food/camarones"

      last_response.status.must_equal 401
      last_response.body.must_be_empty
    end
  end

  describe "with wrong token" do
    it "says it's not authorized" do
      header "Authorization", "Bearer wrongtoken"

      head "/phil/food/camarones"

      last_response.status.must_equal 401
      last_response.body.must_be_empty
    end
  end
end
context "authorized" do
  before do
    redis.sadd "authorizations:phil:amarillo", [":rw"]
    header "Authorization", "Bearer amarillo"

    # Seed documents through the stubbed backend PUT
    stored = OpenStruct.new(headers: {
      etag: "bla",
      last_modified: "Fri, 04 Mar 2016 12:20:18 GMT"
    })

    RestClient.stub :put, stored do
      put "/phil/food/aguacate", "si"
      put "/phil/food/camaron", "yummi"
      put "/phil/food/desayunos/bolon", "wow"
    end
  end

  describe "directory listings" do
    it "returns the correct header information" do
      get "/phil/food/"

      last_response.status.must_equal 200
      last_response.content_type.must_equal "application/ld+json"
      last_response.headers["ETag"].must_equal '"f9f85fbf5aa1fa378fd79ac8aa0a457d"'
    end
  end

  describe "documents" do
    it "returns a 404 when the document doesn't exist" do
      not_found = ->(url, headers) { raise RestClient::ResourceNotFound.new }

      RestClient.stub :head, not_found do
        head "/phil/food/steak"
      end

      last_response.status.must_equal 404
      last_response.body.must_be_empty
    end
  end
end
before do
  base = container_url_for("phil")

  stub_request(:put, "#{base}/food/aguacate").
    to_return(status: 200, headers: { etag: "0815etag", last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })
  # Write new content with an If-Match header (a new Etag is returned)
  stub_request(:put, "#{base}/food/aguacate").
    with(body: "aye").
    to_return(status: 200, headers: { etag: "0915etag", last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })
  stub_request(:head, "#{base}/food/aguacate").
    to_return(status: 200, headers: { last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })
  stub_request(:get, "#{base}/food/aguacate").
    to_return(status: 200, body: "rootbody", headers: { etag: "0817etag", content_type: "text/plain; charset=utf-8" })
  stub_request(:delete, "#{base}/food/aguacate").
    to_return(status: 200, headers: { etag: "0815etag" })

  # Write new content to check the metadata in Redis
  stub_request(:put, "#{base}/food/banano").
    with(body: "si").
    to_return(status: 200, headers: { etag: "0815etag", last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })
  stub_request(:put, "#{base}/food/banano").
    with(body: "oh, no").
    to_return(status: 200, headers: { etag: "0817etag", last_modified: "Fri, 04 Mar 2016 12:20:20 GMT" })

  stub_request(:put, "#{base}/food/camaron").
    to_return(status: 200, headers: { etag: "0816etag", last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })
  stub_request(:delete, "#{base}/food/camaron").
    to_return(status: 200, headers: { etag: "0816etag" })

  stub_request(:put, "#{base}/food/desayunos/bolon").
    to_return(status: 200, headers: { etag: "0817etag", last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })
  stub_request(:delete, "#{base}/food/desayunos/bolon").
    to_return(status: 200, headers: { etag: "0817etag" })

  # objects in root dir
  stub_request(:put, "#{base}/bamboo.txt").
    to_return(status: 200, headers: { etag: "0818etag", last_modified: "Fri, 04 Mar 2016 12:20:18 GMT" })

  # 404 for a document that does not exist on the backend
  %i[head get delete].each do |verb|
    stub_request(verb, "#{base}/food/steak").to_return(status: 404)
  end
end
it_behaves_like 'a REST adapter'
end