/*------------------------------------------------------------------------------
* Copyright (C) 2003-2006 Ben van Klinken and the CLucene Team
*
* Distributable under the terms of either the Apache License (Version 2.0) or
* the GNU Lesser General Public License, as specified in the COPYING file.
------------------------------------------------------------------------------*/
#include <CLucene.h>
#include <CLucene/util/CLStreams.h>
#include <CLucene/analysis/Analyzers.h>
#include <CLucene/analysis/standard/StandardTokenizer.h>
#include <CLucene/analysis/standard/StandardFilter.h>
#include "SnowballAnalyzer.h"
#include "SnowballFilter.h"
extern "C" {
#include "lib.h"
#include "buffer.h"
#include "unichar.h"
#include "lucene-wrapper.h"
};

CL_NS_USE(analysis)
CL_NS_USE2(analysis,standard)
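
/* Overview: SnowballAnalyzer builds a CLucene token stream
 * (StandardTokenizer -> StandardFilter -> LowerCaseFilter -> optional
 * StopFilter) and terminates it with a SnowballFilter, which stems each
 * token with libstemmer's Snowball stemmer for the configured language
 * and optionally passes the result through a normalizer callback. */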
/** Builds the named analyzer with no stop words. */
SnowballAnalyzer::SnowballAnalyzer(normalizer_func_t *normalizer,
				   const char *language)
{
	this->language = i_strdup(language);
	this->normalizer = normalizer;
	stopSet = NULL;
	prevstream = NULL;
}

SnowballAnalyzer::~SnowballAnalyzer()
{
	if (prevstream)
		_CLDELETE(prevstream);
	i_free(language);
	if (stopSet != NULL)
		_CLDELETE(stopSet);
}
/** Builds the named analyzer with the given stop words.
*/
SnowballAnalyzer::SnowballAnalyzer(normalizer_func_t *normalizer,
				   const char *language,
				   const TCHAR **stopWords)
{
	this->language = i_strdup(language);
	this->normalizer = normalizer;
	prevstream = NULL;

	stopSet = _CLNEW CLTCSetList(true);
	StopFilter::fillStopTable(stopSet, stopWords);
}

TokenStream* SnowballAnalyzer::tokenStream(const TCHAR* fieldName, CL_NS(util)::Reader* reader) {
	return this->tokenStream(fieldName, reader, false);
}
/** Constructs a {@link StandardTokenizer} filtered by a {@link
    StandardFilter}, a {@link LowerCaseFilter}, an optional {@link StopFilter}
    and finally a {@link SnowballFilter}. */
TokenStream* SnowballAnalyzer::tokenStream(const TCHAR* fieldName, CL_NS(util)::Reader* reader, bool deleteReader) {
	CL_NS(util)::BufferedReader* bufferedReader = reader->__asBufferedReader();
	TokenStream* result;

	/* StandardTokenizer needs a BufferedReader; wrap the plain Reader
	   if necessary. */
	if ( bufferedReader == NULL )
		result = _CLNEW StandardTokenizer(
			_CLNEW CL_NS(util)::FilteredBufferedReader(reader, deleteReader), true);
	else
		result = _CLNEW StandardTokenizer(bufferedReader, deleteReader);

	/* each 'true' flag makes the wrapper own, and later delete,
	   the stream it wraps */
	result = _CLNEW StandardFilter(result, true);
	result = _CLNEW LowerCaseFilter(result, true);
	if (stopSet != NULL)
		result = _CLNEW StopFilter(result, true, stopSet);
	result = _CLNEW SnowballFilter(result, normalizer, language, true);
	return result;
}
TokenStream* SnowballAnalyzer::reusableTokenStream(const TCHAR* fieldName, CL_NS(util)::Reader* reader) {
	if (prevstream)
		_CLDELETE(prevstream);
	prevstream = this->tokenStream(fieldName, reader);
	return prevstream;
}
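
/* Usage sketch, for illustration only; the field name, sample text and
 * direct-consumption style below are assumptions, not taken from callers
 * in this tree:
 *
 *   SnowballAnalyzer an(NULL, "english");
 *   CL_NS(util)::StringReader reader(_T("running quickly"));
 *   TokenStream *ts = an.tokenStream(_T("body"), &reader, false);
 *   Token t;
 *   while (ts->next(&t) != NULL)
 *	   ;	// t.termBuffer() now holds e.g. "run", then "quick"
 *   _CLDELETE(ts);
 */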
/** Construct the named stemming filter.
*
* @param in the input tokens to stem
 * @param normalizer optional normalizer applied to the stemmed UTF-8 text
 * @param language the name of the Snowball stemmer (language) to use
 * @param deleteTS whether this filter owns and deletes the input stream
 */
SnowballFilter::SnowballFilter(TokenStream* in, normalizer_func_t *normalizer, const char* language, bool deleteTS):
	TokenFilter(in, deleteTS)
{
	/* NULL charenc selects UTF-8 input for libstemmer */
	stemmer = sb_stemmer_new(language, NULL);
	this->normalizer = normalizer;

	if (stemmer == NULL) {
		_CLTHROWA(CL_ERR_IllegalArgument,
			  "language not available for stemming");
	}
}

SnowballFilter::~SnowballFilter()
{
	sb_stemmer_delete(stemmer);
}
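
/* For reference, the libstemmer lifecycle used above, as a standalone
 * sketch (not part of this file's logic):
 *
 *   struct sb_stemmer *st = sb_stemmer_new("english", NULL); // NULL = UTF-8
 *   const sb_symbol *out = sb_stemmer_stem(st, (const sb_symbol *)"running", 7);
 *   int n = sb_stemmer_length(st);	// length of the stemmed result
 *   sb_stemmer_delete(st);
 */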
/** Returns the next input Token, after being stemmed */
Token* SnowballFilter::next(Token* token) {
	if (input->next(token) == NULL)
		return NULL;

	/* NOTE: the UTF-8 <-> TCHAR conversion helpers below are assumed to
	   come from lucene-wrapper.h; their exact names are reconstructed,
	   not verified. */
	unsigned char utf8text[LUCENE_MAX_WORD_LEN * 5 + 1];
	unsigned int len = I_MIN(LUCENE_MAX_WORD_LEN, token->termLength());

	i_assert(sizeof(wchar_t) == sizeof(TCHAR));
	lucene_utf16_to_utf8((const wchar_t *)token->termBuffer(), len,
			     utf8text, sizeof(utf8text));

	const sb_symbol *stemmed = sb_stemmer_stem(stemmer, utf8text,
		strlen((const char *)utf8text));
	if (stemmed == NULL)
		_CLTHROWA(CL_ERR_Runtime, "Out of memory");
	int stemmedLen = sb_stemmer_length(stemmer);

	TCHAR tchartext[LUCENE_MAX_WORD_LEN + 1];
	if (normalizer == NULL) {
		unsigned int tchartext_size =
			lucene_utf8_n_to_utf16(stemmed, stemmedLen,
					       tchartext, N_ELEMENTS(tchartext));
		tchartext[tchartext_size] = 0;
	} else T_BEGIN {
		/* normalize the stemmed UTF-8 text before converting it
		   back to TCHARs */
		buffer_t *norm_buf = t_buffer_create(stemmedLen + 1);
		normalizer(stemmed, stemmedLen, norm_buf);
		unsigned int tchartext_size =
			lucene_utf8_n_to_utf16((const unsigned char *)norm_buf->data,
					       norm_buf->used,
					       tchartext, N_ELEMENTS(tchartext));
		tchartext[tchartext_size] = 0;
	} T_END;
	token->set(tchartext, token->startOffset(), token->endOffset(),
		   token->type());
	return token;
}
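
/* The normalizer callback used in next() is assumed to follow Dovecot's
 * normalizer_func_t shape, i.e. it appends a normalized copy of the UTF-8
 * input to a buffer_t. A minimal (identity) sketch of such a callback:
 *
 *   static int dummy_normalizer(const void *data, size_t size, buffer_t *dest)
 *   {
 *	   buffer_append(dest, data, size);
 *	   return 0;
 *   }
 */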