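//
// QueryResponsePacker
//
// Packs query evaluation results into network byte order and writes them,
// one result list at a time, to a NetworkMessageStream.
//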
#ifndef INDRI_QUERYRESPONSEPACKER_HPP
#define INDRI_QUERYRESPONSEPACKER_HPP

#include "indri/InferenceNetwork.hpp"
#include "indri/NetworkMessageStream.hpp"
#include "lemur-compat.hpp"

class QueryResponsePacker {
private:
  InferenceNetwork::MAllResults& _results;

public:
  QueryResponsePacker( InferenceNetwork::MAllResults& results ) :
    _results(results)
  {
  }

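  // Writes every result list in _results to the stream.  Each list is sent
  // under the name "<nodeName>:<listName>" in chunks of at most 100
  // ScoredExtentResult entries, with each field converted to network byte
  // order.  A final replyDone() marks the end of the response.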
  void write( NetworkMessageStream* stream ) {
    InferenceNetwork::MAllResults::iterator iter;
    EvaluatorNode::MResults::iterator nodeIter;

    for( iter = _results.begin(); iter != _results.end(); iter++ ) {
      const std::string& nodeName = iter->first;

      for( nodeIter = iter->second.begin(); nodeIter != iter->second.end(); nodeIter++ ) {
        const std::string& listName = nodeIter->first;
        std::string resultName = nodeName + ":" + listName;
        const std::vector<ScoredExtentResult>& resultList = nodeIter->second;

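        // Fixed-size staging buffer: entries are copied into it in network
        // byte order, at most 100 per reply message.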
        ScoredExtentResult networkResults[100];
        size_t resultsSent = 0;

        while( resultList.size() > resultsSent ) {
          size_t sendChunk = lemur_compat::min<size_t>( resultList.size() - resultsSent, 100 );

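          // Convert each entry to network byte order: htonl for the integer
          // fields, lemur_compat::htond for the double-precision score.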
          for( size_t i=0; i<sendChunk; i++ ) {
            networkResults[i].begin = htonl( resultList[i + resultsSent].begin );
            networkResults[i].end = htonl( resultList[i + resultsSent].end );
            networkResults[i].score = lemur_compat::htond( resultList[i + resultsSent].score );
            networkResults[i].document = htonl( resultList[i + resultsSent].document );
          }

          stream->reply( resultName, networkResults, int(sendChunk * sizeof(ScoredExtentResult)) );
          resultsSent += sendChunk;
        }
      }
    }

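    // Signal that all result lists have been transmitted.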
    stream->replyDone();
  }
};

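// Sketch of typical use (assumes `results` holds the output of an
// InferenceNetwork evaluation and `stream` is a connected NetworkMessageStream):
//
//   QueryResponsePacker packer( results );
//   packer.write( &stream );
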
#endif // INDRI_QUERYRESPONSEPACKER_HPP