Commits

Michael Granger committed d9c28ba

Fixed up metadata handling to incorporate related resources and other processor-generated stuff.
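
For context, a processor can now attach extra metadata and related resources to the request, and the handler stores them alongside the uploaded object, linking each related resource back to its parent with a 'relation' key. A rough sketch of a processor written against the new request API (the class name and values are illustrative; the #on_request hook follows the test processor in the handler spec below):

require 'stringio'
require 'thingfish'
require 'thingfish/processor'

# Hypothetical processor: keys passed to #add_metadata are normalized and merged
# into the uploaded object's metadata, and each IO handed to #add_related_resource
# is stored as its own resource. Related metadata must include 'format' and
# 'relationship'; an 'extent' is filled in from the IO's size.
class Thingfish::Processor::Annotator < Thingfish::Processor
	handled_types 'text/plain'

	def on_request( request )
		request.add_metadata( 'annotator:note' => 'processed' )

		io = StringIO.new( 'derived content' )
		request.add_related_resource( io,
			'format'       => 'text/plain',
			'relationship' => 'annotation' )
	end
end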

Comments (0)

Files changed (21)

 	require 'thingfish'
 	require 'thingfish/handler'
 
+	if File.exist?( 'etc/thingfish.conf' )
+		$stderr.puts 'Loading the config from etc/thingfish.conf...'
+		Strelka.load_config( 'etc/thingfish.conf' )
+	end
+
 	Loggability.level = :debug
 	Loggability.format_with( :color )
 
+.env
 ChangeLog
 History.rdoc
 LICENSE
 Manifest.txt
 NEW.md
+Processors.rdoc
+Procfile
 README.rdoc
 Rakefile
+bin/tfprocessord
 bin/thingfish
 bin/thingfishd
-example/.env
-example/Procfile
-example/m2-config.rb
-example/thingfish.conf
+etc/m2-config.rb
+etc/thingfish.conf
+etc/thingfish.conf.example
+lib/strelka/app/metadata.rb
+lib/strelka/httprequest/metadata.rb
 lib/thingfish.rb
 lib/thingfish/datastore.rb
 lib/thingfish/datastore/memory.rb
+lib/thingfish/handler.rb
 lib/thingfish/metastore.rb
 lib/thingfish/metastore/memory.rb
 lib/thingfish/mixins.rb
+lib/thingfish/processor.rb
+lib/thingfish/processor/mp3.rb
+lib/thingfish/processordaemon.rb
 spec/constants.rb
+spec/data/APIC-1-image.mp3
+spec/data/APIC-2-images.mp3
+spec/data/PIC-1-image.mp3
+spec/data/PIC-2-images.mp3
 spec/helpers.rb
 spec/spec.opts
 spec/thingfish/datastore/memory_spec.rb
 spec/thingfish/datastore_spec.rb
+spec/thingfish/handler_spec.rb
 spec/thingfish/metastore/memory_spec.rb
 spec/thingfish/metastore_spec.rb
 spec/thingfish/mixins_spec.rb
+spec/thingfish/processor/mp3_spec.rb
+spec/thingfish/processor_spec.rb
 spec/thingfish_spec.rb
+tmtags

lib/strelka/app/metadata.rb

+# -*- ruby -*-
+#encoding: utf-8
+
+require 'strelka'
+require 'strelka/plugins'
+require 'strelka/httprequest/metadata'
+
+require 'thingfish'
+
+
+# A Strelka plugin that sets up requests so they can carry Thingfish metadata
+# with them.
+module Strelka::App::Metadata
+	extend Strelka::Plugin
+
+
+	run_outside :routing, :filters
+	run_inside :templating, :parameters
+
+
+	### Extension callback -- extend the HTTPRequest classes with Metadata
+	### support when this plugin is loaded.
+	def self::included( object )
+		self.log.debug "Extending Request with Metadata mixins"
+		Strelka::HTTPRequest.class_eval { include Strelka::HTTPRequest::Metadata }
+		super
+	end
+
+
+	### Attach Thingfish metadata to the request as it passes through the handler.
+	def handle_request( request )
+		self.log.debug "[:metadata] Attaching Thingfish metadata to request."
+		super
+	end
+
+
+end # module Strelka::App::Metadata
+
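
A minimal sketch of how an app opts in (not part of this commit; the app class and route are made up, and the Thingfish handler below does the equivalent with plugin :metadata):

require 'strelka'
require 'strelka/app/metadata'

# Hypothetical app: loading the plugin mixes Strelka::HTTPRequest::Metadata into
# every request, so routes can call #metadata, #add_metadata, and
# #add_related_resource.
class MetadataEcho < Strelka::App
	plugin :routing
	plugin :metadata

	post do |req|
		req.add_metadata( 'useragent' => req.headers.user_agent )
		res = req.response
		res.puts( req.metadata.inspect )
		res
	end
end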

lib/strelka/app/thingfish.rb

-# -*- ruby -*-
-#encoding: utf-8
-
-require 'strelka'
-require 'strelka/plugins'
-require 'strelka/httprequest/metadata'
-
-require 'thingfish'
-
-
-# A Strelka plugin for setting up requests to be able to carry Thingfish metadata
-# with it.
-module Strelka::App::Thingfish
-	extend Strelka::Plugin
-
-
-	run_outside :routing, :filters
-	run_inside :templating, :parameters
-
-
-	### Extension callback -- extend the HTTPRequest classes with Metadata
-	### support when this plugin is loaded.
-	def self::included( object )
-		self.log.debug "Extending Request with Metadata mixins"
-		Strelka::HTTPRequest.class_eval { include Strelka::HTTPRequest::Metadata }
-		super
-	end
-
-
-	### Start content-negotiation when the response has returned.
-	def handle_request( request )
-		self.log.debug "[:metadata] Attaching Thingfish metadata to request."
-		super
-	end
-
-
-end # module Strelka::App::Thingfish
-

lib/strelka/httprequest/metadata.rb

 #   request.add_metadata
 #
 module Strelka::HTTPRequest::Metadata
-	include Strelka::Constants
+	include Strelka::Constants,
+	        Thingfish::Normalization
 
 
 	### Set up some data structures for metadata.
 	### Merge the metadata in the given +metadata+ hash into the request's current
 	### metadata.
 	def add_metadata( metadata )
+		self.log.debug "Adding metadata to the request: %p" % [ metadata ]
+		metadata = normalize_keys( metadata )
 		self.metadata.merge!( metadata )
 	end
 
 
 	### Add a resource that's related to the one in the request.
 	def add_related_resource( io, metadata )
+		metadata = normalize_keys( metadata )
+		metadata.merge!( self.extract_related_metadata(io) )
+		self.log.debug "Adding related resource: %p %p" % [ io, metadata ]
 		self.related_resources[ io ] = metadata
 	end
 
+
+	### Extract some default metadata from related resources.
+	def extract_related_metadata( io )
+		metadata = {}
+
+		metadata['extent'] = io.size
+
+		return metadata
+	end
+
 end
 

lib/thingfish/datastore.rb

 # blobs.
 class Thingfish::Datastore
 	extend Pluggability,
-	       Thingfish::AbstractClass
+	       Strelka::AbstractClass
 	include Enumerable,
-	        Thingfish::OIDUtilities
+	        Thingfish::Normalization
 
 
 	# Pluggability API -- set the prefix for implementations of Datastore

lib/thingfish/datastore/memory.rb

 	### Save the +data+ read from the specified +io+ and return an ID that can be
 	### used to fetch it later.
 	def save( io )
-		oid = self.make_object_id
+		oid = make_object_id()
 		offset = io.pos
 		data = io.read.dup
 
 	def replace( oid, io )
 		offset = io.pos
 		data = io.read.dup
-		oid = self.normalize_oid( oid )
+		oid = normalize_oid( oid )
 
 		self.log.debug "Replacing data under OID %s with %d bytes" % [ oid, data.bytesize ]
 		@storage[ oid ] = data
 
 	### Fetch the data corresponding to the given +oid+ as an IOish object.
 	def fetch( oid )
-		oid = self.normalize_oid( oid )
+		oid = normalize_oid( oid )
 		self.log.debug "Fetching data for OID %s" % [ oid ]
 		data = @storage[ oid ] or return nil
 		return StringIO.new( data )
 
 	### Remove the data associated with +oid+ from the Datastore.
 	def remove( oid )
-		oid = self.normalize_oid( oid )
+		oid = normalize_oid( oid )
 		@storage.delete( oid )
 	end
 
 
 	### Return +true+ if the datastore has data associated with the specified +oid+.
 	def include?( oid )
-		oid = self.normalize_oid( oid )
+		oid = normalize_oid( oid )
 		return @storage.include?( oid )
 	end
 

lib/thingfish/handler.rb

 		uploadaddress
 	]
 
+	# Metadata keys that must be provided by processors for related resources
+	REQUIRED_RELATED_METADATA_KEYS = %w[
+		relationship
+		format
+	]
+
 
 	require 'thingfish/mixins'
 	require 'thingfish/datastore'
 	require 'thingfish/metastore'
-	extend Thingfish::MethodUtilities
+	extend Strelka::MethodUtilities
 
 
 	##
 	### Load the Thingfish::Processors in the given +processor_list+ and return an instance
 	### of each one.
 	def self::load_processors( processor_list )
+		self.log.info "Loading processors"
 		processors = []
 
 		processor_list.each do |processor_type|
 			begin
 				processors << Thingfish::Processor.create( processor_type )
+				self.log.debug "  loaded %s: %p" % [ processor_type, processors.last ]
 			rescue LoadError => err
 				self.log.error "%p: %s while loading the %s processor" %
 					[ err.class, err.message, processor_type ]
 	#
 	# Strelka plugin for Thingfish metadata
 	#
-	plugin :thingfish
+	plugin :metadata
 
 
 	#
-	# Global parmas
+	# Global params
 	#
 	plugin :parameters
 	param :uuid
 	# POST /
 	# Upload a new object.
 	post do |req|
-		metadata = self.extract_header_metadata( req )
-		metadata.merge!( self.extract_default_metadata(req) )
-
-		uuid = self.datastore.save( req.body )
-		self.metastore.save( uuid, metadata )
+		uuid, metadata = self.save_resource( req )
 		self.send_event( :created, :uuid => uuid )
 
 		url = req.base_uri.dup
 
 		res = req.response
 		res.headers.location = url
+		res.headers.x_thingfish_uuid = uuid
 		res.status = HTTP::CREATED
 
 		res.for( :text, :json, :yaml ) { metadata }
 	# PUT /«uuid»
 	# Replace the data associated with +uuid+.
 	put ':uuid' do |req|
-		metadata = self.extract_default_metadata( req )
-
 		uuid = req.params[:uuid]
-		object = self.datastore.fetch( uuid ) or
+		self.datastore.include?( uuid ) or
 			finish_with HTTP::NOT_FOUND, "No such object."
 
-		self.datastore.replace( uuid, req.body )
-		self.metastore.merge( uuid, metadata )
+		self.remove_related_resources( uuid )
+		self.save_resource( req, uuid )
+		self.send_event( :replaced, :uuid => uuid )
 
 		res = req.response
 		res.status = HTTP::NO_CONTENT
 
 		self.datastore.remove( uuid ) or finish_with( HTTP::NOT_FOUND, "No such object." )
 		metadata = self.metastore.remove( uuid )
+		self.remove_related_resources( uuid )
+		self.send_event( :deleted, :uuid => uuid )
 
 		res = req.response
 		res.status = HTTP::OK
 			self.metastore.fetch_value( uuid, key ).nil?
 
 		self.metastore.merge( uuid, key => req.body.read )
+		self.send_event( :metadata_updated, :uuid => uuid, :key => key )
 
 		res = req.response
 		res.headers.location = req.uri.to_s
 		previous_value = self.metastore.fetch( uuid, key )
 
 		self.metastore.merge( uuid, key => req.body.read )
+		self.send_event( :metadata_replaced, :uuid => uuid, :key => key )
 
 		res = req.response
 		res.body = nil
 		op_metadata = self.metastore.fetch( uuid, *OPERATIONAL_METADATA_KEYS )
 		new_metadata = self.extract_metadata( req )
 		self.metastore.save( uuid, new_metadata.merge(op_metadata) )
+		self.send_event( :metadata_replaced, :uuid => uuid )
 
 		res = req.response
 		res.status = HTTP::NO_CONTENT
 
 		new_metadata = self.extract_metadata( req )
 		self.metastore.merge( uuid, new_metadata )
+		self.send_event( :metadata_updated, :uuid => uuid )
 
 		res = req.response
 		res.status = HTTP::NO_CONTENT
 		finish_with( HTTP::NOT_FOUND, "No such object." ) unless self.metastore.include?( uuid )
 
 		self.metastore.remove_except( uuid, *OPERATIONAL_METADATA_KEYS )
+		self.send_event( :metadata_deleted, :uuid => uuid )
 
 		res = req.response
 		res.status = HTTP::NO_CONTENT
 			OPERATIONAL_METADATA_KEYS.include?( key )
 
 		self.metastore.remove( uuid, key )
+		self.send_event( :metadata_deleted, :uuid => uuid, :key => key )
 
 		res = req.response
 		res.status = HTTP::NO_CONTENT
 	#########
 
 
+	### Save the resource in the given +request+'s body along with any associated
+	### metadata and related resources.
+	def save_resource( request, uuid=nil )
+		metadata = request.metadata
+		metadata.merge!( self.extract_header_metadata(request) )
+		metadata.merge!( self.extract_default_metadata(request) )
+
+		if uuid
+			self.log.info "Replacing resource %s" % [ uuid ]
+			self.datastore.replace( uuid, request.body )
+			self.metastore.merge( uuid, metadata )
+		else
+			self.log.info "Saving new resource."
+			uuid = self.datastore.save( request.body )
+			self.metastore.save( uuid, metadata )
+		end
+
+		self.save_related_resources( request, uuid )
+
+		return uuid, metadata
+	end
+
+
+	### Save any related resources in the given +request+ with a relationship to the
+	### resource with the given +uuid+.
+	def save_related_resources( request, uuid )
+		request.related_resources.each do |io, metadata|
+			self.log.debug "Saving a resource related to %s: %p" % [ uuid, metadata ]
+			next unless self.check_related_metadata( metadata )
+			self.log.debug "  related metadata checks passed; storing it."
+			r_uuid = self.datastore.save( io )
+			metadata['relation'] = uuid
+			self.metastore.save( r_uuid, metadata )
+		end
+	end
+
+
+	### Remove any resources that are related to the one with the specified +uuid+.
+	def remove_related_resources( uuid )
+		self.metastore.search( :criteria => {'relation' => uuid} ).each do |r_uuid|
+			self.datastore.remove( r_uuid )
+			self.metastore.remove( r_uuid )
+			self.log.info "Removed related resource %s for %s." % [ r_uuid, uuid ]
+		end
+	end
+
+
+	### Do some consistency checks on the given +metadata+ for a related resource,
+	### returning +true+ if it meets the requirements.
+	def check_related_metadata( metadata )
+		REQUIRED_RELATED_METADATA_KEYS.each do |attribute|
+			unless metadata[ attribute ]
+				self.log.error "Metadata for a related resource must include a '#{attribute}' attribute!"
+				return false
+			end
+		end
+		return true
+	end
+
+
 	### Overridden from the base handler class to allow spooled uploads.
 	def handle_async_upload_start( request )
 		self.log.info "Starting asynchronous upload: %s" %

lib/thingfish/metastore.rb

 # blobs.
 class Thingfish::Metastore
 	extend Pluggability,
-	       Thingfish::AbstractClass
-	include Thingfish::OIDUtilities
+	       Strelka::AbstractClass
+	include Thingfish::Normalization
 
 
 	# Pluggability API -- set the prefix for implementations of Metastore
 	             :search,
 	             :fetch,
 	             :fetch_value,
+	             :fetch_related_uuids,
 	             :merge,
 	             :include?,
 	             :remove,
 	             :remove_except,
-				 :size
+	             :size
 
 	### Return a representation of the object as a String suitable for debugging.
 	def inspect

lib/thingfish/metastore/memory.rb

 # An in-memory metastore for testing and tryout purposes.
 class Thingfish::MemoryMetastore < Thingfish::Metastore
 	extend Loggability
+	include Thingfish::Normalization
 
 	# Loggability API -- log to the :thingfish logger
 	log_to :thingfish
 
 	### Save the +metadata+ Hash for the specified +oid+.
 	def save( oid, metadata )
-		oid = self.normalize_oid( oid )
+		oid = normalize_oid( oid )
 		@storage[ oid ] = metadata.dup
 	end
 
 
 	### Fetch the data corresponding to the given +oid+ as a Hash-ish object.
 	def fetch( oid, *keys )
-		oid = self.normalize_oid( oid )
+		oid = normalize_oid( oid )
 		metadata = @storage[ oid ] or return nil
 
 		if keys.empty?
 			return metadata.dup
 		else
 			self.log.debug "Fetching metadata for %p for OID %s" % [ keys, oid ]
+			keys = normalize_keys( keys )
 			values = metadata.values_at( *keys )
 			return Hash[ [keys, values].transpose ]
 		end
 	### Fetch the value of the metadata associated with the given +key+ for the
 	### specified +oid+.
 	def fetch_value( oid, key )
-		oid = self.normalize_oid( oid )
+		oid = normalize_oid( oid )
+		key = normalize_key( key )
 		data = @storage[ oid ] or return nil
 
 		return data[ key ]
 	end
 
 
+	### Fetch UUIDs related to the given +oid+.
+	def fetch_related_uuids( oid )
+		oid = normalize_oid( oid )
+		self.log.debug "Fetching UUIDs of resources related to %s" % [ oid ]
+		return self.search( :criteria => {:relation => oid} )
+	end
+
+
 	### Search the metastore for UUIDs which match the specified +criteria+ and
 	### return them as an iterator.
-	def search( criteria={} )
+	def search( options={} )
 		ds = @storage.each_key
+		self.log.debug "Starting search with %p" % [ ds ]
 
-		if order_fields = criteria[:order]
+		if criteria = options[:criteria]
+			criteria.each do |field, value|
+				self.log.debug "  applying criteria: %p => %p" % [ field.to_s, value ]
+				ds = ds.select {|uuid| @storage[uuid][field.to_s] == value }
+			end
+		end
+
+		if order_fields = options[:order]
 			fields = order_fields.split( /\s*,\s*/ )
+			self.log.debug "  applying order by fields: %p" % [ fields ]
 			ds = ds.to_a.sort_by {|uuid| @storage[uuid].values_at(*fields) }
 		end
 
-		ds = ds.reverse if criteria[:direction] && criteria[:direction] == 'desc'
+		ds = ds.reverse if options[:direction] && options[:direction] == 'desc'
 
-		if (( limit = criteria[:limit] ))
-			offset = criteria[:offset] || 0
+		if (( limit = options[:limit] ))
+			self.log.debug "  limiting to %s results" % [ limit ]
+			offset = options[:offset] || 0
 			ds = ds.to_a.slice( offset, limit )
 		end
 
 
 	### Update the metadata for the given +oid+ with the specified +values+ hash.
 	def merge( oid, values )
-		oid = self.normalize_oid( oid )
+		oid = normalize_oid( oid )
+		values = normalize_keys( values )
 		@storage[ oid ].merge!( values )
 	end
 
 
 	### Remove all metadata associated with +oid+ from the Metastore.
 	def remove( oid, *keys )
-		oid = self.normalize_oid( oid )
+		oid = normalize_oid( oid )
 		if keys.empty?
 			@storage.delete( oid )
 		else
-			keys = keys.map( &:to_s )
+			keys = normalize_keys( keys )
 			@storage[ oid ].delete_if {|key, _| keys.include?(key) }
 		end
 	end
 
 	### Remove all metadata associated with +oid+ except for the specified +keys+.
 	def remove_except( oid, *keys )
-		oid = self.normalize_oid( oid )
-		keys = keys.map( &:to_s )
+		oid = normalize_oid( oid )
+		keys = normalize_keys( keys )
 		@storage[ oid ].keep_if {|key,_| keys.include?(key) }
 	end
 
 
 	### Returns +true+ if the metastore has metadata associated with the specified +oid+.
 	def include?( oid )
-		oid = self.normalize_oid( oid )
+		oid = normalize_oid( oid )
 		return @storage.include?( oid )
 	end
 

lib/thingfish/mixins.rb

 # vim: set nosta noet ts=4 sw=4:
 # encoding: utf-8
 
+require 'securerandom'
+require 'strelka/mixins'
+
 require 'thingfish' unless defined?( Thingfish )
 
-
 module Thingfish
 
-	# Hides your class's ::new method and adds a +pure_virtual+ method generator for
-	# defining API methods. If subclasses of your class don't provide implementations of
-	# "pure_virtual" methods, NotImplementedErrors will be raised if they are called.
-	#
-	#   # AbstractClass
-	#   class MyBaseClass
-	#       extend Thingfish::AbstractClass
-	#
-	#       # Define a method that will raise a NotImplementedError if called
-	#       pure_virtual :api_method
-	#   end
-	#
-	module AbstractClass
 
-		### Extension callback -- mark the extended object's .new as private
-		def self::extended( mod )
-			super
-			mod.class_eval { private_class_method :new }
-		end
-
-
-		### Define one or more "virtual" methods which will raise
-		### NotImplementedErrors when called via a concrete subclass.
-		def pure_virtual( *syms )
-			syms.each do |sym|
-				define_method( sym ) do |*args|
-					raise ::NotImplementedError,
-					"%p does not provide an implementation of #%s" % [ self.class, sym ],
-					caller(1)
-				end
-			end
-		end
-
-
-		### Inheritance callback -- Turn subclasses' .new methods back to public.
-		def inherited( subclass )
-			subclass.module_eval { public_class_method :new }
-			super
-		end
-
-	end # module AbstractClass
-
-
-	# A collection of methods for declaring other methods.
-	#
-	#   class MyClass
-	#       extend Thingfish::MethodUtilities
-	#
-	#       singleton_attr_accessor :types
-	#       singleton_method_alias :kinds, :types
-	#   end
-	#
-	#   MyClass.types = [ :pheno, :proto, :stereo ]
-	#   MyClass.kinds # => [:pheno, :proto, :stereo]
-	#
-	module MethodUtilities
-
-		### Creates instance variables and corresponding methods that return their
-		### values for each of the specified +symbols+ in the singleton of the
-		### declaring object (e.g., class instance variables and methods if declared
-		### in a Class).
-		def singleton_attr_reader( *symbols )
-			symbols.each do |sym|
-				singleton_class.__send__( :attr_reader, sym )
-			end
-		end
-
-		### Creates methods that allow assignment to the attributes of the singleton
-		### of the declaring object that correspond to the specified +symbols+.
-		def singleton_attr_writer( *symbols )
-			symbols.each do |sym|
-				singleton_class.__send__( :attr_writer, sym )
-			end
-		end
-
-		### Creates readers and writers that allow assignment to the attributes of
-		### the singleton of the declaring object that correspond to the specified
-		### +symbols+.
-		def singleton_attr_accessor( *symbols )
-			symbols.each do |sym|
-				singleton_class.__send__( :attr_accessor, sym )
-			end
-		end
-
-		### Creates an alias for the +original+ method named +newname+.
-		def singleton_method_alias( newname, original )
-			singleton_class.__send__( :alias_method, newname, original )
-		end
-
-
-		### Create a reader in the form of a predicate for the given +attrname+.
-		def attr_predicate( attrname )
-			attrname = attrname.to_s.chomp( '?' )
-			define_method( "#{attrname}?" ) do
-				instance_variable_get( "@#{attrname}" ) ? true : false
-			end
-		end
-
-
-		### Create a reader in the form of a predicate for the given +attrname+
-		### as well as a regular writer method.
-		def attr_predicate_accessor( attrname )
-			attrname = attrname.to_s.chomp( '?' )
-			attr_writer( attrname )
-			attr_predicate( attrname )
-		end
-
-	end # module MethodUtilities
-
-
-	# A collection of data-manipulation functions.
-	module DataUtilities
+	# A collection of functions for normalizing object IDs and metadata keys.
+	module Normalization
 
 		###############
 		module_function
 		###############
 
-		### Recursively copy the specified +obj+ and return the result.
-		def deep_copy( obj )
-
-			# Handle mocks during testing
-			return obj if obj.class.name == 'RSpec::Mocks::Mock'
-
-			return case obj
-				when NilClass, Numeric, TrueClass, FalseClass, Symbol, Module, Encoding
-					obj
-
-				when Array
-					obj.map {|o| deep_copy(o) }
-
-				when Hash
-					newhash = {}
-					newhash.default_proc = obj.default_proc if obj.default_proc
-					obj.each do |k,v|
-						newhash[ deep_copy(k) ] = deep_copy( v )
-					end
-					newhash
-
-				else
-					obj.clone
-				end
-		end
-
-	end # module DataUtilities
-
-
-	# A collection of functions for dealing with object IDs.
-	module OIDUtilities
-
-		#########
-		protected
-		#########
-
 		### Generate a new object ID.
 		def make_object_id
 			return normalize_oid( SecureRandom.uuid )
 			return oid.to_s.downcase
 		end
 
-	end # module OIDUtilities
+
+		### Return a copy of the given +collection+ after being normalized.
+		def normalize_keys( collection )
+			if collection.respond_to?( :keys )
+				return collection.each_with_object({}) do |(key,val),new_hash|
+					n_key = normalize_key( key )
+					new_hash[ n_key ] = val
+				end
+
+			elsif collection.respond_to?( :map )
+				return collection.map {|key| normalize_key(key) }
+			end
+
+			return nil
+		end
+
+
+		### Return a normalized copy of +key+.
+		def normalize_key( key )
+			return key.to_s.downcase.gsub( /[^\w:]+/, '_' )
+		end
+
+	end # module Normalization
 
 
 end # module Thingfish
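
The normalization helpers downcase OIDs and collapse metadata keys to lowercase word characters, preserving colons for namespaced keys; for example:

require 'thingfish'
require 'thingfish/mixins'

Thingfish::Normalization.normalize_key( 'Exif:ImageWidth' )    # => "exif:imagewidth"
Thingfish::Normalization.normalize_key( 'User Agent!' )        # => "user_agent_"
Thingfish::Normalization.normalize_keys( :format => 'mp3' )    # => {"format" => "mp3"}
Thingfish::Normalization.normalize_keys( [:format, :extent] )  # => ["format", "extent"]
Thingfish::Normalization.make_object_id                        # => a new lowercased UUID string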

lib/thingfish/processor.rb

 
 	### Get/set the list of media types this processor can handle.
 	def self::handled_types( *mediatypes )
-		unless mediatypes.empty?
+		if mediatypes.empty?
+			@handled_types ||= []
+		else
 			@handled_types = mediatypes.collect {|type| Strelka::HTTPRequest::MediaType.parse(type) }
 		end
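
With this change the accessor is safe to call before any types have been declared; a tiny sketch (the processor class names are made up):

require 'thingfish'
require 'thingfish/processor'

class Thingfish::Processor::Passthrough < Thingfish::Processor; end

class Thingfish::Processor::TextOnly < Thingfish::Processor
	handled_types 'text/plain', 'text/html'
end

Thingfish::Processor::Passthrough.handled_types      # => [] rather than nil
Thingfish::Processor::TextOnly.handled_types.length  # => 2 parsed media types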
 

lib/thingfish/processor/mp3.rb

 			images.each do |img|
 				blob, mime = img.unpack( APIC_FORMAT ).values_at( 4, 1 )
 				yield( StringIO.new(blob),
-					:format   => mime,
-					:extent   => blob.length,
-					:relation => 'album-art' )
+					:format       => mime,
+					:extent       => blob.length,
+					:relationship => 'album-art' )
 			end
 
 		elsif mp3info.tag2.PIC
 				blob, type = img.unpack( PIC_FORMAT ).values_at( 4, 1 )
 				mime = Mongrel2::Config.mimetypes[ ".#{type.downcase}" ] or next
 				yield( StringIO.new(blob),
-					:format   => mime,
-					:extent   => blob.length,
-					:relation => 'album-art' )
+					:format       => mime,
+					:extent       => blob.length,
+					:relationship => 'album-art' )
 			end
 
 		else

lib/thingfish/processordaemon.rb

 require 'thingfish' unless defined?( Thingfish )
 
 
-# 
+# Currently just a placeholder for what will eventually be the runner for
+# async processors.
 class Thingfish::ProcessorDaemon
-
-	
-
 end # class Thingfish::ProcessorDaemon
 
 
 	c.include( Strelka::Constants )
 	c.include( Strelka::Testing )
 	c.include( Thingfish::SpecHelpers )
-
-	c.before( :all ) { setup_logging() }
 end
 
 # vim: set nosta noet ts=4 sw=4:

spec/thingfish/handler_spec.rb

 describe Thingfish::Handler do
 
 	before( :all ) do
-		setup_logging()
 		Thingfish::Handler.configure
+		Thingfish::Handler.install_plugins
 	end
 
 	before( :each ) do
 			expect( res.status_line ).to match( /201 created/i )
 			expect( res.headers.location.to_s ).to match( %r:/#{UUID_PATTERN}$: )
 
-			uuid = res.headers.location.to_s[ %r:/(?<uuid>#{UUID_PATTERN})$:, :uuid ]
+			uuid = res.headers.x_thingfish_uuid
 			expect( @handler.metastore.fetch_value(uuid, 'title') ).
 				to eq( 'Muffin the Panda Goes To School' )
 			expect( @handler.metastore.fetch_value(uuid, 'tags') ).to eq( 'rapper,ukraine,potap' )
 		end
 
 
-		it "doesn't case about the case of the UUID when replacing content via PUT" do
+		it "doesn't care about the case of the UUID when replacing content via PUT" do
 			uuid = @handler.datastore.save( @text_io )
 			@handler.metastore.save( uuid, {'format' => 'text/plain'} )
 
 
 	context "processors" do
 
-		after( :each ) do
+		before( :all ) do
+			@original_filters = described_class.filters.dup
+			described_class.filters.replace({ :request => [], :response => [], :both => [] })
+		end
+
+		after( :all ) do
+			described_class.filters.replace( @original_filters )
+		end
+
+		before( :each ) do
 			described_class.processors.clear
+			described_class.filters.values.each( &:clear )
 		end
 
+
 		let( :factory ) do
 			Mongrel2::RequestFactory.new(
 				:route => '/',
 				:headers => {:accept => '*/*'})
 		end
 
-		let!( :test_filter ) do
+		let!( :test_processor ) do
 			klass = Class.new( Thingfish::Processor ) do
+				extend Loggability
+				log_to :thingfish
+
+				handled_types 'text/plain'
+
 				def self::name; 'Thingfish::Processor::Test'; end
-				def initialize
-					@requests = []
-					@responses = []
+				def on_request( request )
+					self.log.debug "Adding a comment to a request."
+					request.add_metadata( 'test:comment' => "Yo, it totally worked." )
+
+					io = StringIO.new( "Chunkers!" )
+					io.rewind
+					related_metadata = { 'format' => 'text/plain', 'relationship' => 'comment' }
+					request.add_related_resource( io, related_metadata )
 				end
-				attr_accessor :requests, :responses
-				def process_request( request )
-					self.requests << request
-				end
-				def process_response( response )
-					self.responses << response
+				def on_response( response )
+					content = response.body.read
+					response.body.rewind
+					response.body.print( content.reverse )
 				end
 			end
 			# Re-call inherited so it associates the processor plugin with its name
 
 
 		it "loads configured processors when it is instantiated" do
+			logger = Loggability[ described_class ]
+			logger.debug( "*** %p" % described_class.filters )
+			logger.debug( "*** %p" % @original_filters )
+
 			described_class.configure( :processors => %w[test] )
 
 			expect( described_class.processors ).to be_an( Array )
 
 			processor = described_class.processors.first
-			expect( processor ).to be_an_instance_of( test_filter )
+			expect( processor ).to be_an_instance_of( test_processor )
 		end
 
 
-		it "processes requests and responses" do
+		it "processes requests", :logging => :debug do
 			described_class.configure( :processors => %w[test] )
 
 			req = factory.post( '/', TEST_TEXT_DATA, content_type: 'text/plain' )
 			res = @handler.handle( req )
+			uuid = res.headers.x_thingfish_uuid
 
-			processor = described_class.processors.first
-			expect( processor.requests ).to eq([ req ])
-			expect( processor.responses ).to eq([ res ])
+			Thingfish.logger.debug "Metastore contains: %p" % [ @handler.metastore.storage ]
+
+			expect( @handler.metastore.fetch(uuid) ).
+				to include( 'test:comment' => 'Yo, it totally worked.')
+			related_uuids = @handler.metastore.fetch_related_uuids( uuid )
+			expect( related_uuids ).to have( 1 ).member
+
+			r_uuid = related_uuids.first.downcase
+			expect( @handler.metastore.fetch_value(r_uuid, 'relation') ).to eq( uuid )
+			expect( @handler.metastore.fetch_value(r_uuid, 'format') ).to eq( 'text/plain' )
+			expect( @handler.metastore.fetch_value(r_uuid, 'extent') ).to eq( 9 )
+			expect( @handler.metastore.fetch_value(r_uuid, 'relationship') ).to eq( 'comment' )
+
+			expect( @handler.datastore.fetch(r_uuid).read ).to eq( 'Chunkers!' )
 		end
 
 	end

spec/thingfish/metastore/memory_spec.rb

 
 require_relative '../../helpers'
 
+require 'securerandom'
 require 'rspec'
 require 'thingfish/metastore'
 
 
 	it "can fetch a single metadata value for a given oid" do
 		@store.save( TEST_UUID, TEST_METADATA.first )
-		expect( @store.fetch_value(TEST_UUID, :format) ).to eq( TEST_METADATA.first[:format] )
-		expect( @store.fetch_value(TEST_UUID, :extent) ).to eq( TEST_METADATA.first[:extent] )
+		expect( @store.fetch_value(TEST_UUID, :format) ).to eq( TEST_METADATA.first['format'] )
+		expect( @store.fetch_value(TEST_UUID, :extent) ).to eq( TEST_METADATA.first['extent'] )
 	end
 
 
 	it "can fetch a slice of data for a given oid" do
 		@store.save( TEST_UUID, TEST_METADATA.first )
 		expect( @store.fetch(TEST_UUID, :format, :extent) ).to eq({
-			:format => TEST_METADATA.first[:format],
-			:extent => TEST_METADATA.first[:extent],
+			'format' => TEST_METADATA.first['format'],
+			'extent' => TEST_METADATA.first['extent'],
 		})
 	end
 
 
 	it "doesn't care about the case of the UUID when fetching data" do
 		@store.save( TEST_UUID, TEST_METADATA.first )
-		expect( @store.fetch_value(TEST_UUID.downcase, :format) ).to eq( TEST_METADATA.first[:format] )
+		expect( @store.fetch_value(TEST_UUID.downcase, :format) ).to eq( TEST_METADATA.first['format'] )
 	end
 
 
 	end
 
 
+	it "knows how to fetch UUIDs for related resources" do
+		rel_uuid1 = SecureRandom.uuid
+		rel_uuid2 = SecureRandom.uuid
+		unrel_uuid = SecureRandom.uuid
+
+		@store.save( rel_uuid1, TEST_METADATA[0].merge('relation' => TEST_UUID.downcase) )
+		@store.save( rel_uuid2, TEST_METADATA[1].merge('relation' => TEST_UUID.downcase) )
+		@store.save( unrel_uuid, TEST_METADATA[2] )
+
+		uuids = @store.fetch_related_uuids( TEST_UUID )
+
+		expect( uuids ).to include( rel_uuid1, rel_uuid2 )
+		expect( uuids ).to_not include( unrel_uuid )
+	end
+
+
 	context "with some uploaded metadata" do
 
 		before( :each ) do
 			expect( @store.search.to_a ).to eq( @store.keys )
 		end
 
+		it "can apply criteria to searches" do
+			results = @store.search( :criteria => {'format' => 'audio/mp3'} )
+			expect( results ).to have( 2 ).matches
+			results.each do |uuid|
+				expect( @store.fetch_value(uuid, 'format') ).to eq( 'audio/mp3' )
+			end
+		end
+
 		it "can limit the number of results returned from a search" do
 			expect( @store.search( limit: 2 ).to_a ).to eq( @store.keys[0,2] )
 		end

spec/thingfish/metastore_spec.rb

 			expect { store.size }.to raise_error( NotImplementedError, /size/ )
 		end
 
+		it "raises an error if it doesn't implement #fetch_related_uuids" do
+			expect {
+				store.fetch_related_uuids( TEST_UUID )
+			}.to raise_error( NotImplementedError, /fetch_related_uuids/i )
+		end
+
 		it "provides a transactional block method" do
 			expect {|block| store.transaction(&block) }.to yield_with_no_args
 		end

spec/thingfish/mixins_spec.rb

 require 'thingfish/mixins'
 
 
-describe Thingfish, "mixins" do
+describe Thingfish, 'mixins' do
 
-	describe Thingfish::AbstractClass do
+	# Specs for the object ID and metadata key normalization functions.
+	describe 'Normalization' do
 
-		context "mixed into a class" do
-			it "will cause the including class to hide its ::new method" do
-				testclass = Class.new { extend Thingfish::AbstractClass }
-
-				expect {
-					testclass.new
-				}.to raise_error( NoMethodError, /private/ )
-			end
-
+		it 'can generate a new object ID' do
+			expect( Thingfish::Normalization.make_object_id ).to match( UUID_PATTERN )
 		end
 
-
-		context "mixed into a superclass" do
-
-			let( :testclass ) do
-				Class.new do
-					extend Thingfish::AbstractClass
-					pure_virtual :test_method
-				end
-			end
-
-			let( :subclass ) do
-				Class.new( testclass )
-			end
-
-			let( :instance ) do
-				subclass.new
-			end
-
-
-			it "raises a NotImplementedError when unimplemented API methods are called" do
-				expect {
-					instance.test_method
-				}.to raise_error( NotImplementedError, /does not provide an implementation of/ )
-			end
-
-			it "declares the virtual methods so that they can be used with arguments under Ruby 1.9" do
-				expect {
-					instance.test_method( :some, :arguments )
-				}.to raise_error( NotImplementedError, /does not provide an implementation of/ )
-			end
-
+		it 'can normalize an object ID' do
+			expect(
+				Thingfish::Normalization.normalize_oid( TEST_UUID.upcase )
+			).to_not match( /[A-Z]/ )
 		end
 
-	end
-
-
-	describe Thingfish::DataUtilities do
-
-		it "doesn't try to dup immediate objects" do
-			Thingfish::DataUtilities.deep_copy( nil ).should be( nil )
-			Thingfish::DataUtilities.deep_copy( 112 ).should be( 112 )
-			Thingfish::DataUtilities.deep_copy( true ).should be( true )
-			Thingfish::DataUtilities.deep_copy( false ).should be( false )
-			Thingfish::DataUtilities.deep_copy( :a_symbol ).should be( :a_symbol )
+		it 'can normalize Hash metadata keys' do
+			metadata = { :pork => 1, :sausaged => 2 }
+			expect( Thingfish::Normalization.normalize_keys(metadata) ).
+				to eq({ 'pork' => 1, 'sausaged' => 2 })
 		end
 
-		it "doesn't try to dup modules/classes" do
-			klass = Class.new
-			Thingfish::DataUtilities.deep_copy( klass ).should be( klass )
+		it 'can normalize an Array of metadata keys' do
+			values = [ :pork, :sausaged ]
+			expect( Thingfish::Normalization.normalize_keys(values) ).
+				to eq([ 'pork', 'sausaged' ])
+			expect( values.first ).to be( :pork )
 		end
 
-		it "makes distinct copies of arrays and their members" do
-			original = [ 'foom', Set.new([ 1,2 ]), :a_symbol ]
+		it "won't modify the original array of metadata keys" do
+			values = [ :pork, :sausaged ]
+			normalized = Thingfish::Normalization.normalize_keys( values )
 
-			copy = Thingfish::DataUtilities.deep_copy( original )
-
-			copy.should == original
-			copy.should_not be( original )
-			copy[0].should == original[0]
-			copy[0].should_not be( original[0] )
-			copy[1].should == original[1]
-			copy[1].should_not be( original[1] )
-			copy[2].should == original[2]
-			copy[2].should be( original[2] ) # Immediate
+			expect( values.first ).to be( :pork )
+			expect( normalized ).to_not be( values )
 		end
 
-		it "makes recursive copies of deeply-nested Arrays" do
-			original = [ 1, [ 2, 3, [4], 5], 6, [7, [8, 9], 0] ]
-
-			copy = Thingfish::DataUtilities.deep_copy( original )
-
-			copy.should == original
-			copy.should_not be( original )
-			copy[1].should_not be( original[1] )
-			copy[1][2].should_not be( original[1][2] )
-			copy[3].should_not be( original[3] )
-			copy[3][1].should_not be( original[3][1] )
+		it "replaces characters that aren't allowed in metadata keys with underscores" do
+			expect( Thingfish::Normalization::normalize_key('Sausaged!') ).to eq( 'sausaged_' )
+			expect( Thingfish::Normalization::normalize_key('SO sausaged') ).to eq( 'so_sausaged' )
+			expect( Thingfish::Normalization::normalize_key('*/porky+-') ).to eq( '_porky_' )
 		end
 
-		it "makes distinct copies of Hashes and their members" do
-			original = {
-				:a => 1,
-				'b' => 2,
-				3 => 'c',
-			}
-
-			copy = Thingfish::DataUtilities.deep_copy( original )
-
-			copy.should == original
-			copy.should_not be( original )
-			copy[:a].should == 1
-			copy.key( 2 ).should == 'b'
-			copy.key( 2 ).should_not be( original.key(2) )
-			copy[3].should == 'c'
-			copy[3].should_not be( original[3] )
+		it "preserves colons in metadata keys" do
+			expect( Thingfish::Normalization::normalize_key('pork:sausaged') ).
+				to eq( 'pork:sausaged' )
 		end
 
-		it "makes distinct copies of deeply-nested Hashes" do
-			original = {
-				:a => {
-					:b => {
-						:c => 'd',
-						:e => 'f',
-					},
-					:g => 'h',
-				},
-				:i => 'j',
-			}
 
-			copy = Thingfish::DataUtilities.deep_copy( original )
+	end # describe Normalization
 
-			copy.should == original
-			copy[:a][:b][:c].should == 'd'
-			copy[:a][:b][:c].should_not be( original[:a][:b][:c] )
-			copy[:a][:b][:e].should == 'f'
-			copy[:a][:b][:e].should_not be( original[:a][:b][:e] )
-			copy[:a][:g].should == 'h'
-			copy[:a][:g].should_not be( original[:a][:g] )
-			copy[:i].should == 'j'
-			copy[:i].should_not be( original[:i] )
-		end
-
-		it "copies the default proc of copied Hashes" do
-			original = Hash.new {|h,k| h[ k ] = Set.new }
-
-			copy = Thingfish::DataUtilities.deep_copy( original )
-
-			copy.default_proc.should == original.default_proc
-		end
-
-		it "preserves taintedness of copied objects" do
-			original = Object.new
-			original.taint
-
-			copy = Thingfish::DataUtilities.deep_copy( original )
-
-			copy.should_not be( original )
-			copy.should be_tainted()
-		end
-
-		it "preserves frozen-ness of copied objects" do
-			original = Object.new
-			original.freeze
-
-			copy = Thingfish::DataUtilities.deep_copy( original )
-
-			copy.should_not be( original )
-			copy.should be_frozen()
-		end
-	end
 
 end
 

spec/thingfish/processor/mp3_spec.rb

 		processor.process_request( req )
 
 		related = req.related_resources
-		expect( related ).to have( 1 ).image
+		expect( related.size ).to eq( 1 )
 		expect( related.values.first ).
-			to include( :format => 'image/jpeg', :extent => 7369, :relation => 'album-art' )
+			to include( 'format' => 'image/jpeg', 'extent' => 7369, 'relationship' => 'album-art' )
 		expect( related.keys.first ).to respond_to( :read )
 	end
 

spec/thingfish_spec.rb

+#!/usr/bin/env ruby
+
+require_relative 'helpers'
+
+require 'rspec'
+require 'thingfish'
+
+
+describe Thingfish do
+
+	it "returns a version string if asked" do
+		expect( described_class.version_string ).to match( /\w+ [\d.]+/ )
+	end
+
+
+	it "returns a version string with a build number if asked" do
+		expect( described_class.version_string(true) ).
+			to match(/\w+ [\d.]+ \(build [[:xdigit:]]+\)/)
+	end
+
+end
+
+# vim: set nosta noet ts=4 sw=4 ft=rspec: