Merge branch 'master' into stable
commit 396a102755
@@ -1,11 +1,16 @@
language: ruby
cache: bundler
rvm:
- 2.2
- 2.2.4
services:
- redis-server
before_install:
- sh .travis/install_riakcs.sh
- gem install bundler
before_script:
- cp config.yml.example config.yml
script: bundle exec rake test
- mkdir -p tmp && echo "swifttoken" > tmp/swift_token.txt
script: ruby spec/swift/*
branches:
only:
- master
Gemfile (9 changed lines)
@@ -2,17 +2,20 @@ source "https://rubygems.org"

gem "sinatra", '~> 1.4'
gem "sinatra-contrib"
gem "activesupport", '~> 4.2'
gem "activesupport"
gem "riak-client", :github => "5apps/riak-ruby-client", :branch => "invalid_uri_error"
gem "fog"
gem "fog-aws"
gem "rest-client"
gem "redis"
gem "mime-types", "~> 2.6.1", require: 'mime/types/columnar'
# Remove require when we can update to 3.0, which sets the new storage
# format to columnar by default. Increases performance
gem "mime-types", "~> 2.99", require: 'mime/types/columnar'

group :test do
gem 'rake'
gem 'purdytest', :require => false
gem 'm'
gem 'minitest-stub_any_instance'
end

group :staging, :production do
Gemfile.lock (88 changed lines)
@@ -13,50 +13,56 @@ GIT
GEM
remote: https://rubygems.org/
specs:
activesupport (4.2.2)
activesupport (4.2.5.1)
i18n (~> 0.7)
json (~> 1.7, >= 1.7.7)
minitest (~> 5.1)
thread_safe (~> 0.3, >= 0.3.4)
tzinfo (~> 1.1)
backports (3.6.4)
backports (3.6.8)
beefcake (0.3.7)
builder (3.2.2)
domain_name (0.5.24)
domain_name (0.5.20160216)
unf (>= 0.0.5, < 1.0.0)
excon (0.16.10)
faraday (0.9.1)
excon (0.45.4)
faraday (0.9.2)
multipart-post (>= 1.2, < 3)
fog (1.7.0)
fog-aws (0.8.1)
fog-core (~> 1.27)
fog-json (~> 1.0)
fog-xml (~> 0.1)
ipaddress (~> 0.8)
fog-core (1.36.0)
builder
excon (~> 0.14)
formatador (~> 0.2.0)
mime-types
multi_json (~> 1.0)
net-scp (~> 1.0.4)
net-ssh (>= 2.1.3)
nokogiri (~> 1.5.0)
ruby-hmac
excon (~> 0.45)
formatador (~> 0.2)
fog-json (1.0.2)
fog-core (~> 1.0)
multi_json (~> 1.10)
fog-xml (0.1.2)
fog-core
nokogiri (~> 1.5, >= 1.5.11)
formatador (0.2.5)
http-cookie (1.0.2)
domain_name (~> 0.5)
i18n (0.7.0)
innertube (1.0.2)
ipaddress (0.8.3)
json (1.8.3)
kgio (2.9.3)
m (1.3.4)
kgio (2.10.0)
m (1.4.2)
method_source (>= 0.6.7)
rake (>= 0.9.2.2)
method_source (0.8.2)
mime-types (2.6.1)
minitest (5.7.0)
multi_json (1.11.1)
mime-types (2.99.1)
mini_portile2 (2.0.0)
minitest (5.8.4)
minitest-stub_any_instance (1.0.1)
multi_json (1.11.2)
multipart-post (2.0.0)
net-scp (1.0.4)
net-ssh (>= 1.99.1)
net-ssh (2.6.7)
netrc (0.10.3)
nokogiri (1.5.11)
netrc (0.11.0)
nokogiri (1.6.7.2)
mini_portile2 (~> 2.0.0.rc2)
purdytest (2.0.0)
minitest (~> 5.5)
rack (1.6.4)
@@ -64,25 +70,24 @@ GEM
rack
rack-test (0.6.3)
rack (>= 1.0)
rainbows (4.6.2)
rainbows (5.0.0)
kgio (~> 2.5)
rack (~> 1.1)
unicorn (~> 4.8)
raindrops (0.13.0)
rake (10.4.2)
redis (3.2.1)
unicorn (~> 5.0)
raindrops (0.15.0)
rake (10.5.0)
redis (3.2.2)
rest-client (1.8.0)
http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 3.0)
netrc (~> 0.7)
ruby-hmac (0.4.0)
sentry-raven (0.15.2)
sentry-raven (0.15.6)
faraday (>= 0.7.6)
sinatra (1.4.6)
rack (~> 1.4)
sinatra (1.4.7)
rack (~> 1.5)
rack-protection (~> 1.4)
tilt (>= 1.3, < 3)
sinatra-contrib (1.4.4)
sinatra-contrib (1.4.6)
backports (>= 2.0)
multi_json
rack-protection
@@ -90,13 +95,13 @@ GEM
sinatra (~> 1.4.0)
tilt (>= 1.3, < 3)
thread_safe (0.3.5)
tilt (2.0.1)
tilt (2.0.2)
tzinfo (1.2.2)
thread_safe (~> 0.1)
unf (0.1.4)
unf_ext
unf_ext (0.0.7.1)
unicorn (4.9.0)
unf_ext (0.0.7.2)
unicorn (5.0.1)
kgio (~> 2.6)
rack
raindrops (~> 0.7)
@@ -105,10 +110,11 @@ PLATFORMS
ruby

DEPENDENCIES
activesupport (~> 4.2)
fog
activesupport
fog-aws
m
mime-types (~> 2.6.1)
mime-types (~> 2.99)
minitest-stub_any_instance
purdytest
rainbows
rake
@@ -120,4 +126,4 @@ DEPENDENCIES
sinatra-contrib

BUNDLED WITH
1.10.6
1.11.2
@@ -1,18 +1,18 @@
development: &defaults
maintenance: false
riak: &riak_defaults
host: localhost
http_port: 8098
riak_cs:
credentials_file: "cs_credentials.json"
endpoint: "http://cs.example.com:8080"
buckets:
data: rs_data
directories: rs_directories
binaries: rs_binaries
cs_binaries: rs.binaries
authorizations: rs_authorizations
opslog: rs_opslog
# riak: &riak_defaults
# host: localhost
# http_port: 8098
# riak_cs:
# credentials_file: "cs_credentials.json"
# endpoint: "http://cs.example.com:8080"
# buckets:
# data: rs_data
# directories: rs_directories
# binaries: rs_binaries
# cs_binaries: rs.binaries
# authorizations: rs_authorizations
# opslog: rs_opslog
# # uncomment this section and comment the riak one
# swift: &swift_defaults
# host: "https://swift.example.com"
@@ -23,15 +23,20 @@ development: &defaults

test:
<<: *defaults
riak:
<<: *riak_defaults
buckets:
data: rs_data_test
directories: rs_directories_test
binaries: rs_binaries_test
cs_binaries: rs.binaries.test
authorizations: rs_authorizations_test
opslog: rs_opslog_test
# riak:
# <<: *riak_defaults
# buckets:
# data: rs_data_test
# directories: rs_directories_test
# binaries: rs_binaries_test
# cs_binaries: rs.binaries.test
# authorizations: rs_authorizations_test
# opslog: rs_opslog_test
swift:
host: "https://swift.example.com"
redis:
host: localhost
port: 6379

staging:
<<: *defaults
@@ -3,6 +3,7 @@ require "json"
require "cgi"
require "active_support/core_ext/time/conversions"
require "active_support/core_ext/numeric/time"
require "active_support/core_ext/hash"
require "redis"
require "digest/md5"
@@ -28,6 +29,7 @@ module RemoteStorage

server.halt 401 unless permission
if ["PUT", "DELETE"].include? request_method
server.halt 503 if directory_backend(user).match(/locked/)
server.halt 401 unless permission == "rw"
end
end
@@ -75,6 +77,68 @@ module RemoteStorage
end

def get_directory_listing(user, directory)
if directory_backend(user).match(/new/)
get_directory_listing_from_redis(user, directory)
else
get_directory_listing_from_swift(user, directory)
end
end

def get_directory_listing_from_redis_via_lua(user, directory)
lua_script = <<-EOF
local user = ARGV[1]
local directory = ARGV[2]
local items = redis.call("smembers", "rs:m:"..user..":"..directory.."/:items")
local listing = {}

for index, name in pairs(items) do
local redis_key = "rs:m:"..user..":"
if directory == "" then
redis_key = redis_key..name
else
redis_key = redis_key..directory.."/"..name
end

local metadata_values = redis.call("hgetall", redis_key)
local metadata = {}

-- redis returns hashes as a single list of alternating keys and values
-- this collates it into a table
for idx = 1, #metadata_values, 2 do
metadata[metadata_values[idx]] = metadata_values[idx + 1]
end

listing[name] = {["ETag"] = metadata["e"]}
if string.sub(name, -1) ~= "/" then
listing[name]["Content-Type"] = metadata["t"]
listing[name]["Content-Length"] = tonumber(metadata["s"])
end
end

return cjson.encode(listing)
EOF

JSON.parse(redis.eval(lua_script, nil, [user, directory]))
end

def get_directory_listing_from_redis(user, directory)
etag = redis.hget "rs:m:#{user}:#{directory}/", "e"

none_match = (server.env["HTTP_IF_NONE_MATCH"] || "").split(",").map(&:strip)
server.halt 304 if none_match.include? etag

server.headers["Content-Type"] = "application/json"
server.headers["ETag"] = %Q("#{etag}")

listing = {
"@context" => "http://remotestorage.io/spec/folder-description",
"items" => get_directory_listing_from_redis_via_lua(user, directory)
}

listing.to_json
end

def get_directory_listing_from_swift(user, directory)
is_root_listing = directory.empty?

server.headers["Content-Type"] = "application/json"
@@ -125,7 +189,18 @@ module RemoteStorage

res = do_put_request(url, data, content_type)

if update_dir_objects(user, directory)
# TODO use actual last modified time from the document put request
timestamp = (Time.now.to_f * 1000).to_i

metadata = {
e: res.headers[:etag],
s: data.size,
t: content_type,
m: timestamp
}

if update_metadata_object(user, directory, key, metadata) &&
update_dir_objects(user, directory, timestamp)
server.headers["ETag"] = %Q("#{res.headers[:etag]}")
server.halt 200
else
@@ -143,6 +218,7 @@ module RemoteStorage
end

do_delete_request(url)
delete_metadata_objects(user, directory, key)
delete_dir_objects(user, directory)

server.halt 200
@@ -217,6 +293,58 @@ module RemoteStorage
end

def has_name_collision?(user, directory, key)
if directory_backend(user).match(/new/)
has_name_collision_via_redis?(user, directory, key)
else
has_name_collision_via_swift?(user, directory, key)
end
end

def has_name_collision_via_redis?(user, directory, key)
lua_script = <<-EOF
local user = ARGV[1]
local directory = ARGV[2]
local key = ARGV[3]

-- build table with parent directories from remaining arguments
local parent_dir_count = #ARGV - 3
local parent_directories = {}
for i = 4, 4 + parent_dir_count do
table.insert(parent_directories, ARGV[i])
end

-- check for existing directory with the same name as the document
local redis_key = "rs:m:"..user..":"
if directory == "" then
redis_key = redis_key..key.."/"
else
redis_key = redis_key..directory.."/"..key.."/"
end
if redis.call("hget", redis_key, "e") then
return true
end

for index, dir in pairs(parent_directories) do
if redis.call("hget", "rs:m:"..user..":"..dir.."/", "e") then
-- the directory already exists, no need to do further checks
return false
else
-- check for existing document with same name as directory
if redis.call("hget", "rs:m:"..user..":"..dir, "e") then
return true
end
end
end

return false
EOF

parent_directories = parent_directories_for(directory)

redis.eval(lua_script, nil, [user, directory, key, *parent_directories])
end

def has_name_collision_via_swift?(user, directory, key)
# check for existing directory with the same name as the document
url = url_for_key(user, directory, key)
do_head_request("#{url}/") do |res|
@@ -252,39 +380,99 @@ module RemoteStorage
directories.pop
end

parent_directories << "" # add empty string for the root directory

parent_directories
end

def update_dir_objects(user, directory)
timestamp = (Time.now.to_f * 1000).to_i
def top_directory(directory)
if directory.match(/\//)
directory.split("/").last
elsif directory != ""
return directory
end
end

def parent_directory_for(directory)
if directory.match(/\//)
return directory[0..directory.rindex("/")]
elsif directory != ""
return "/"
end
end

def update_metadata_object(user, directory, key, metadata)
redis_key = "rs:m:#{user}:#{directory}/#{key}"
redis.hmset(redis_key, *metadata)
redis.sadd "rs:m:#{user}:#{directory}/:items", key

true
end

def update_dir_objects(user, directory, timestamp)
parent_directories_for(directory).each do |dir|
do_put_request("#{url_for_directory(user, dir)}/", timestamp.to_s, "text/plain")
unless dir == ""
res = do_put_request("#{url_for_directory(user, dir)}/", timestamp.to_s, "text/plain")
etag = res.headers[:etag]
else
get_response = do_get_request("#{container_url_for(user)}/?format=json&path=")
etag = etag_for(get_response.body)
end

key = "rs:m:#{user}:#{dir}/"
metadata = {e: etag, m: timestamp}
redis.hmset(key, *metadata)
redis.sadd "rs:m:#{user}:#{parent_directory_for(dir)}:items", "#{top_directory(dir)}/"
end

true
rescue
parent_directories_for(directory).each do |dir|
do_delete_request("#{url_for_directory(user, dir)}/") rescue false
unless dir == ""
do_delete_request("#{url_for_directory(user, dir)}/") rescue false
end
end

false
end

def delete_metadata_objects(user, directory, key)
redis_key = "rs:m:#{user}:#{directory}/#{key}"
redis.del(redis_key)
redis.srem "rs:m:#{user}:#{directory}/:items", key
end

def delete_dir_objects(user, directory)
timestamp = (Time.now.to_f * 1000).to_i

parent_directories_for(directory).each do |dir|
if dir_empty?(user, dir)
do_delete_request("#{url_for_directory(user, dir)}/")
unless dir == ""
do_delete_request("#{url_for_directory(user, dir)}/")
end
redis.del "rs:m:#{user}:#{directory}/"
redis.srem "rs:m:#{user}:#{parent_directory_for(dir)}:items", "#{dir}/"
else
timestamp = (Time.now.to_f * 1000).to_i
do_put_request("#{url_for_directory(user, dir)}/", timestamp.to_s, "text/plain")
unless dir == ""
res = do_put_request("#{url_for_directory(user, dir)}/", timestamp.to_s, "text/plain")
etag = res.headers[:etag]
else
get_response = do_get_request("#{container_url_for(user)}/?format=json&path=")
etag = etag_for(get_response.body)
end
metadata = {e: etag, m: timestamp}
redis.hmset("rs:m:#{user}:#{dir}/", *metadata)
end
end
end

def dir_empty?(user, dir)
do_get_request("#{container_url_for(user)}/?format=plain&limit=1&path=#{escape(dir)}/") do |res|
return res.headers[:content_length] == "0"
if directory_backend(user).match(/new/)
redis.smembers("rs:m:#{user}:#{dir}/:items").empty?
else
do_get_request("#{container_url_for(user)}/?format=plain&limit=1&path=#{escape(dir)}/") do |res|
return res.headers[:content_length] == "0"
end
end
end

@@ -346,7 +534,11 @@ module RemoteStorage
end

def redis
@redis ||= Redis.new(settings.redis)
@redis ||= Redis.new(settings.redis.symbolize_keys)
end

def directory_backend(user)
@directory_backend ||= redis.get("rsc:db:#{user}") || "legacy"
end

def etag_for(body)
migrate_metadata_to_redis.rb (new executable file, 248 lines)
@@ -0,0 +1,248 @@
#!/usr/bin/env ruby

require "rubygems"
require "bundler/setup"
require "rest_client"
require "redis"
require "yaml"
require "logger"
require "active_support/core_ext/hash"

class Migrator

attr_accessor :username, :base_url, :swift_host, :swift_token,
:environment, :dry_run, :settings, :logger

def initialize(username)
@username = username

@environment = ENV["ENVIRONMENT"] || "staging"
@settings = YAML.load(File.read('config.yml'))[@environment]

@swift_host = @settings["swift"]["host"]
@swift_token = File.read("tmp/swift_token.txt").strip

@dry_run = ENV["DRYRUN"] || false # disables writing anything to Redis when true

@logger = Logger.new("log/migrate_metadata_to_redis.log")
log_level = ENV["LOGLEVEL"] || "INFO"
logger.level = Kernel.const_get "Logger::#{log_level}"
logger.progname = username
end

def root_url
"#{@base_url}/#{@username}"
end

def is_dir?(name)
name[-1] == "/"
end

def url_for(directory, parent_directory="")
"#{root_url}#{parent_directory}#{directory}"
end

def migrate
logger.info "Starting migration for '#{username}'"
set_directory_backend("legacy_locked")
begin
work_on_dir("", "")
rescue Exception => ex
logger.error "Error migrating metadata for '#{username}': #{ex}"
set_directory_backend("legacy")
# write username to file for later reference
File.open('log/failed_migration.log', 'a') { |f| f.puts username }
exit 1
end
set_directory_backend("new")
logger.info "Finished migration for '#{username}'"
end

def set_directory_backend(backend)
redis.set("rsc:db:#{username}", backend) unless dry_run
end

def work_on_dir(directory, parent_directory)
logger.debug "Retrieving listing for '#{parent_directory}#{directory}'"

listing = get_directory_listing_from_swift("#{parent_directory}#{directory}")

timestamp = (Time.now.to_f * 1000).to_i

if listing["items"].any?
items = listing["items"]
items.each do |item, data|
if is_dir? item
save_directory_data("#{parent_directory}#{directory}", item, data, timestamp)

# get dir listing and repeat
work_on_dir(item, "#{parent_directory}#{directory}")
else
save_document_data("#{parent_directory}#{directory}", item, data)
end

add_item_to_parent_dir("#{parent_directory}#{directory}", item)
end
end
end

def add_item_to_parent_dir(dir, item)
key = "rs:m:#{username}:#{parent_directory_for(dir)}:items"
logger.debug "Adding item #{item} to #{key}"
redis.sadd(key, item) unless dry_run
end

def save_directory_data(dir, item, data, timestamp)
key = "rs:m:#{username}:#{dir.gsub(/^\//, "")}#{item}"
metadata = {
e: data["ETag"],
m: timestamp_for(data["Last-Modified"])
}

logger.debug "Metadata for dir #{key}: #{metadata}"
redis.hmset(key, *metadata) unless dry_run
end

def save_document_data(dir, item, data)
key = "rs:m:#{username}:#{dir.gsub(/^\//, "")}#{item}"
metadata = {
e: data["ETag"],
s: data["Content-Length"],
t: data["Content-Type"],
m: timestamp_for(data["Last-Modified"])
}
logger.debug "Metadata for document #{key}: #{metadata}"
redis.hmset(key, *metadata) unless dry_run
end

def parent_directory_for(directory)
if directory.match(/\//)
return directory[0..directory.rindex("/")]
else
return "/"
end
end

def timestamp_for(date)
return DateTime.parse(date).strftime("%Q").to_i
end

def redis
@redis ||= Redis.new(@settings["redis"].symbolize_keys)
end

def get_directory_listing_from_swift(directory)
is_root_listing = directory.empty?

get_response = nil

do_head_request("#{url_for_directory(@username, directory)}") do |response|
return directory_listing([]) if response.code == 404

if is_root_listing
get_response = do_get_request("#{container_url_for(@username)}/?format=json&path=")
else
get_response = do_get_request("#{container_url_for(@username)}/?format=json&path=#{escape(directory)}")
end
end

if body = JSON.parse(get_response.body)
listing = directory_listing(body)
else
puts "listing not JSON"
end

listing
end

def directory_listing(res_body)
listing = {
"@context" => "http://remotestorage.io/spec/folder-description",
"items" => {}
}

res_body.each do |entry|
name = entry["name"]
name.sub!("#{File.dirname(entry["name"])}/", '')
if name[-1] == "/" # It's a directory
listing["items"].merge!({
name => {
"ETag" => entry["hash"],
"Last-Modified" => entry["last_modified"]
}
})
else # It's a file
listing["items"].merge!({
name => {
"ETag" => entry["hash"],
"Content-Type" => entry["content_type"],
"Content-Length" => entry["bytes"],
"Last-Modified" => entry["last_modified"]
}
})
end
end

listing
end

def etag_for(body)
objects = JSON.parse(body)

if objects.empty?
Digest::MD5.hexdigest ""
else
Digest::MD5.hexdigest objects.map { |o| o["hash"] }.join
end
end

def do_head_request(url, &block)
RestClient.head(url, default_headers, &block)
end

def do_get_request(url, &block)
RestClient.get(url, default_headers, &block)
end

def default_headers
{"x-auth-token" => @swift_token}
end

def url_for_directory(user, directory)
if directory.empty?
container_url_for(user)
else
"#{container_url_for(user)}/#{escape(directory)}"
end
end

def container_url_for(user)
"#{base_url}/#{container_for(user)}"
end

def base_url
@base_url ||= @swift_host
end

def container_for(user)
"rs:#{environment.to_s.chars.first}:#{user}"
end

def escape(url)
# We want spaces to turn into %20 and slashes to stay slashes
CGI::escape(url).gsub('+', '%20').gsub('%2F', '/')
end
end

username = ARGV[0]

unless username
puts "No username given."
puts "Usage:"
puts "ENVIRONMENT=staging ./migrate_metadata_to_redis.rb <username>"
exit 1
end

migrator = Migrator.new username
migrator.migrate
@@ -1,4 +1,4 @@
require_relative "spec_helper"
require_relative "../spec_helper"

describe "App" do
include Rack::Test::Methods
@@ -11,4 +11,5 @@ describe "App" do
get "/virginmargarita"
last_response.status.must_equal 404
end

end
@@ -1,4 +1,4 @@
require_relative "spec_helper"
require_relative "../spec_helper"

describe "Directories" do
include Rack::Test::Methods
@@ -251,7 +251,7 @@ describe "Directories" do
context "charset given in content-type header" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "fixtures", "rockrule.jpeg")
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/tasks/jaypeg.jpg", @image
end
@@ -273,7 +273,7 @@ describe "Directories" do
context "no charset in content-type header" do
before do
header "Content-Type", "image/jpeg"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "fixtures", "rockrule.jpeg")
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/tasks/jaypeg.jpg", @image
end
@@ -1,4 +1,4 @@
require_relative "spec_helper"
require_relative "../spec_helper"

describe "Permissions" do
include Rack::Test::Methods
@@ -1,4 +1,4 @@
require_relative "spec_helper"
require_relative "../spec_helper"

describe "App with Riak backend" do
include Rack::Test::Methods
@@ -446,7 +446,7 @@ describe "App with Riak backend" do
context "binary charset in content-type header" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "fixtures", "rockrule.jpeg")
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
end
@@ -502,7 +502,7 @@ describe "App with Riak backend" do
context "overwriting existing file with same file" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "fixtures", "rockrule.jpeg")
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
end
@@ -518,7 +518,7 @@ describe "App with Riak backend" do
context "overwriting existing file with different file" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "fixtures", "rockrule.jpeg")
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image+"foo"
end
@@ -540,7 +540,7 @@ describe "App with Riak backend" do
context "no binary charset in content-type header" do
before do
header "Content-Type", "image/jpeg"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "fixtures", "rockrule.jpeg")
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image
end
@@ -705,7 +705,7 @@ describe "App with Riak backend" do
context "binary data" do
before do
header "Content-Type", "image/jpeg; charset=binary"
filename = File.join(File.expand_path(File.dirname(__FILE__)), "fixtures", "rockrule.jpeg")
filename = File.join(File.expand_path(File.dirname(__FILE__)), "..", "fixtures", "rockrule.jpeg")
@image = File.open(filename, "r").read
put "/jimmy/documents/jaypeg", @image

@@ -9,6 +9,10 @@ require 'minitest/autorun'
require 'rack/test'
require 'purdytest'
require 'riak'
require "redis"
require "rest_client"
require "minitest/stub_any_instance"
require "ostruct"

def app
LiquorCabinet
@@ -28,6 +32,19 @@ def write_last_response_to_file(filename = "last_response.html")
end

alias context describe

if app.settings.respond_to? :redis
def redis
@redis ||= Redis.new(host: app.settings.redis["host"], port: app.settings.redis["port"])
end

def purge_redis
redis.keys("rs*").each do |key|
redis.del key
end
end
end

if app.settings.respond_to? :riak
::Riak.disable_list_keys_warnings = true
spec/swift/app_spec.rb (new file, 337 lines)
@@ -0,0 +1,337 @@
require_relative "../spec_helper"

describe "App" do
include Rack::Test::Methods

def app
LiquorCabinet
end

it "returns 404 on non-existing routes" do
get "/virginmargarita"
last_response.status.must_equal 404
end

describe "PUT requests" do

before do
purge_redis
redis.set "rsc:db:phil", "new"
end

context "authorized" do
before do
redis.sadd "authorizations:phil:amarillo", [":rw"]
header "Authorization", "Bearer amarillo"
end

it "creates the metadata object in redis" do
put_stub = OpenStruct.new(headers: {etag: "bla"})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
end

metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata["s"].must_equal "2"
metadata["t"].must_equal "text/plain; charset=utf-8"
metadata["e"].must_equal "bla"
metadata["m"].length.must_equal 13
end

it "creates the directory objects metadata in redis" do
put_stub = OpenStruct.new(headers: {etag: "bla"})
get_stub = OpenStruct.new(body: "rootbody")
RestClient.stub :put, put_stub do
RestClient.stub :get, get_stub do
RemoteStorage::Swift.stub_any_instance :etag_for, "rootetag" do
put "/phil/food/aguacate", "si"
put "/phil/food/camaron", "yummi"
end
end
end

metadata = redis.hgetall "rs:m:phil:/"
metadata["e"].must_equal "rootetag"
metadata["m"].length.must_equal 13

metadata = redis.hgetall "rs:m:phil:food/"
metadata["e"].must_equal "bla"
metadata["m"].length.must_equal 13

food_items = redis.smembers "rs:m:phil:food/:items"
food_items.each do |food_item|
["camaron", "aguacate"].must_include food_item
end

root_items = redis.smembers "rs:m:phil:/:items"
root_items.must_equal ["food/"]
end

describe "name collision checks" do
it "is successful when there is no name collision" do
put_stub = OpenStruct.new(headers: {etag: "bla"})
get_stub = OpenStruct.new(body: "rootbody")
RestClient.stub :put, put_stub do
RestClient.stub :get, get_stub do
RemoteStorage::Swift.stub_any_instance :etag_for, "rootetag" do
put "/phil/food/aguacate", "si"
end
end
end

last_response.status.must_equal 200

metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata["s"].must_equal "2"
end

it "conflicts when there is a directory with same name as document" do
put_stub = OpenStruct.new(headers: {etag: "bla"})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
put "/phil/food", "wontwork"
end

last_response.status.must_equal 409

metadata = redis.hgetall "rs:m:phil:food"
metadata.must_be_empty
end

it "conflicts when there is a document with same name as directory" do
put_stub = OpenStruct.new(headers: {etag: "bla"})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
put "/phil/food/aguacate/empanado", "wontwork"
end

last_response.status.must_equal 409

metadata = redis.hgetall "rs:m:phil:food/aguacate/empanado"
metadata.must_be_empty
end
end

describe "directory backend configuration" do
context "locked new backed" do
before do
redis.set "rsc:db:phil", "new-locked"
end

it "responds with 503" do
put "/phil/food/aguacate", "si"

last_response.status.must_equal 503

metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata.must_be_empty
end
end

context "locked legacy backend" do
before do
redis.set "rsc:db:phil", "legacy-locked"
end

it "responds with 503" do
put "/phil/food/aguacate", "si"

last_response.status.must_equal 503

metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata.must_be_empty
end
end
end
end
end

describe "DELETE requests" do

before do
purge_redis
redis.set "rsc:db:phil", "new"
end

context "authorized" do
before do
redis.sadd "authorizations:phil:amarillo", [":rw"]
header "Authorization", "Bearer amarillo"

put_stub = OpenStruct.new(headers: {etag: "bla"})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
put "/phil/food/camaron", "yummi"
end
end

it "deletes the metadata object in redis" do
put_stub = OpenStruct.new(headers: {etag: "bla"})
get_stub = OpenStruct.new(body: "rootbody")
RestClient.stub :put, put_stub do
RestClient.stub :delete, "" do
RestClient.stub :get, get_stub do
RemoteStorage::Swift.stub_any_instance :etag_for, "rootetag" do
delete "/phil/food/aguacate"
end
end
end
end

metadata = redis.hgetall "rs:m:phil:food/aguacate"
metadata.must_be_empty
end

it "deletes the directory objects metadata in redis" do
old_metadata = redis.hgetall "rs:m:phil:food/"

put_stub = OpenStruct.new(headers: {etag: "newetag"})
get_stub = OpenStruct.new(body: "rootbody")
RestClient.stub :put, put_stub do
RestClient.stub :delete, "" do
RestClient.stub :get, get_stub do
RemoteStorage::Swift.stub_any_instance :etag_for, "rootetag" do
delete "/phil/food/aguacate"
end
end
end
end

metadata = redis.hgetall "rs:m:phil:food/"
metadata["e"].must_equal "newetag"
metadata["m"].length.must_equal 13
metadata["m"].wont_equal old_metadata["m"]

food_items = redis.smembers "rs:m:phil:food/:items"
food_items.must_equal ["camaron"]

root_items = redis.smembers "rs:m:phil:/:items"
root_items.must_equal ["food/"]
end

it "deletes the parent directory objects metadata when deleting all items" do
put_stub = OpenStruct.new(headers: {etag: "bla"})
get_stub = OpenStruct.new(body: "rootbody")
RestClient.stub :put, put_stub do
RestClient.stub :delete, "" do
RestClient.stub :get, get_stub do
RemoteStorage::Swift.stub_any_instance :etag_for, "rootetag" do
delete "/phil/food/aguacate"
delete "/phil/food/camaron"
end
end
end
end

metadata = redis.hgetall "rs:m:phil:food/"
metadata.must_be_empty

food_items = redis.smembers "rs:m:phil:food/:items"
food_items.must_be_empty

root_items = redis.smembers "rs:m:phil:/:items"
root_items.must_be_empty
end
end
end

describe "GET requests" do

before do
purge_redis
redis.set "rsc:db:phil", "new"
end

context "authorized" do

before do
redis.sadd "authorizations:phil:amarillo", [":rw"]
header "Authorization", "Bearer amarillo"

put_stub = OpenStruct.new(headers: {etag: "bla"})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
put "/phil/food/camaron", "yummi"
put "/phil/food/desunyos/bolon", "wow"
end
end

describe "directory listings" do

it "has an ETag in the header" do
get "/phil/food/"

last_response.status.must_equal 200
last_response.headers["ETag"].must_equal "\"bla\""
end

it "responds with 304 when IF_NONE_MATCH header contains the ETag" do
header "If-None-Match", "bla"
get "/phil/food/"

last_response.status.must_equal 304
end

it "contains all items in the directory" do
get "/phil/food/"

last_response.status.must_equal 200
last_response.content_type.must_equal "application/json"

content = JSON.parse(last_response.body)
content["@context"].must_equal "http://remotestorage.io/spec/folder-description"
content["items"]["aguacate"].wont_be_nil
content["items"]["aguacate"]["Content-Type"].must_equal "text/plain; charset=utf-8"
content["items"]["aguacate"]["Content-Length"].must_equal 2
content["items"]["aguacate"]["ETag"].must_equal "bla"
content["items"]["camaron"].wont_be_nil
content["items"]["camaron"]["Content-Type"].must_equal "text/plain; charset=utf-8"
content["items"]["camaron"]["Content-Length"].must_equal 5
content["items"]["camaron"]["ETag"].must_equal "bla"
content["items"]["desunyos/"].wont_be_nil
content["items"]["desunyos/"]["ETag"].must_equal "bla"
end

it "contains all items in the root directory" do
get "phil/"

last_response.status.must_equal 200
last_response.content_type.must_equal "application/json"

content = JSON.parse(last_response.body)
content["items"]["food/"].wont_be_nil
content["items"]["food/"]["ETag"].must_equal "bla"
end

end
end

context "with legacy directory backend" do

before do
redis.sadd "authorizations:phil:amarillo", [":rw"]
header "Authorization", "Bearer amarillo"

put_stub = OpenStruct.new(headers: {etag: "bla"})
RestClient.stub :put, put_stub do
put "/phil/food/aguacate", "si"
put "/phil/food/camaron", "yummi"
end

redis.set "rsc:db:phil", "legacy"
end

it "serves directory listing from Swift backend" do
RemoteStorage::Swift.stub_any_instance :get_directory_listing_from_swift, "directory listing" do
get "/phil/food/"
end

last_response.status.must_equal 200
last_response.body.must_equal "directory listing"
end

end

end
end
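Note: the Redis metadata layout used throughout this change can be exercised directly with the redis-rb client. Below is a minimal illustrative sketch, not part of the commit; the user "phil" and the values are hypothetical, while the key names ("rs:m:<user>:<path>" hashes with fields e/s/t/m and the ":items" sets) follow the code and specs above.

#!/usr/bin/env ruby
require "redis"

redis = Redis.new(host: "localhost", port: 6379)

# Document metadata: one hash per document (e = ETag, s = size,
# t = content type, m = last-modified timestamp in milliseconds)
redis.hmset "rs:m:phil:food/aguacate",
            :e, "documentetag",
            :s, 2,
            :t, "text/plain",
            :m, (Time.now.to_f * 1000).to_i

# Directory metadata: one hash per directory, plus a set of its items
redis.hmset "rs:m:phil:food/", :e, "directoryetag", :m, (Time.now.to_f * 1000).to_i
redis.sadd  "rs:m:phil:food/:items", "aguacate"
redis.sadd  "rs:m:phil:/:items", "food/"

# A directory listing is then one SMEMBERS plus one HGETALL per item
redis.smembers("rs:m:phil:food/:items").each do |item|
  puts "#{item}: #{redis.hgetall("rs:m:phil:food/#{item}").inspect}"
end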