
Searched refs:ngram_size (Results 1 – 10 of 10) sorted by relevance

/external/tensorflow/tensorflow/contrib/lite/kernels/
skip_gram.cc
    62  return size <= params->ngram_size;  in ShouldIncludeCurrentNgram()
    64  return size == params->ngram_size;  in ShouldIncludeCurrentNgram()
    72  if (stack_idx < params->ngram_size && stack[stack_idx] + 1 < num_words) {  in ShouldStepInRecursion()
   109  if (words.size() < params->ngram_size) {  in Eval()
   116  std::vector<int> stack(params->ngram_size, 0);  in Eval()
   128  if (stack_idx < params->ngram_size) {  in Eval()
skip_gram_test.cc
    34  SkipGramOp(int ngram_size, int max_skip_size, bool include_all_ngrams) {  in SkipGramOp() (argument)
    39  CreateSkipGramOptions(builder_, ngram_size, max_skip_size,  in SkipGramOp()
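
The skip_gram.cc hits above are the kernel's size check: with include_all_ngrams set, any n-gram of up to ngram_size words is emitted (line 62); otherwise only n-grams of exactly ngram_size words pass (line 64). A minimal standalone sketch of that rule, with a hypothetical SkipGramParams struct standing in for the op's real parameter block (field names follow the hits and the test signature above):

    // Sketch only, not the TFLite kernel. SkipGramParams is a hypothetical
    // stand-in for the parameter block the real code reaches via params->.
    struct SkipGramParams {
      int ngram_size;
      bool include_all_ngrams;
    };

    // Mirrors the rule suggested by skip_gram.cc lines 62/64.
    bool ShouldIncludeCurrentNgram(const SkipGramParams& params, int size) {
      if (size <= 0) return false;
      if (params.include_all_ngrams) {
        return size <= params.ngram_size;  // any n-gram up to ngram_size words
      }
      return size == params.ngram_size;    // only exact-length n-grams
    }
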
/external/tensorflow/tensorflow/contrib/lite/
builtin_op_data.h
   168  int ngram_size;  (member)
model.cc
   496  params->ngram_size = skip_gram_params->ngram_size();  in ParseOpData()
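
builtin_op_data.h declares ngram_size as a plain int member of the skip-gram parameter struct, and model.cc (line 496) copies it out of the flatbuffer-backed options when the model is parsed. A hedged sketch of that hand-off; the fields beyond ngram_size and the SkipGramOptionsView class are assumptions for illustration, since the real parser reads the FlatBuffers-generated SkipGramOptions:

    // Simplified parameter struct; ngram_size matches the hit at line 168,
    // the other fields are assumed from the test signature above.
    struct SkipGramParamsSketch {
      int ngram_size;
      int max_skip_size;
      bool include_all_ngrams;
    };

    // Hypothetical stand-in for the generated flatbuffer accessor.
    class SkipGramOptionsView {
     public:
      explicit SkipGramOptionsView(int ngram_size) : ngram_size_(ngram_size) {}
      int ngram_size() const { return ngram_size_; }
     private:
      int ngram_size_;
    };

    // Mirrors the copy performed in model.cc line 496.
    void ParseSkipGramOptions(const SkipGramOptionsView& skip_gram_params,
                              SkipGramParamsSketch* params) {
      params->ngram_size = skip_gram_params.ngram_size();
    }
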
/external/tensorflow/tensorflow/contrib/lite/schema/
schema_v1.fbs
   228  ngram_size: int;
schema_v2.fbs
   236  ngram_size: int;
schema_v3.fbs
   246  ngram_size: int;
schema.fbs
   307  ngram_size: int;
schema_generated.h
  2833  int32_t ngram_size;
  2837  : ngram_size(0),
  2850  int32_t ngram_size() const {
  2874  void add_ngram_size(int32_t ngram_size) {
  2875  fbb_.AddElement<int32_t>(SkipGramOptions::VT_NGRAM_SIZE, ngram_size, 0);
  2897  int32_t ngram_size = 0,
  2902  builder_.add_ngram_size(ngram_size);
  5080  { auto _e = ngram_size(); _o->ngram_size = _e; };
  5093  auto _ngram_size = _o->ngram_size;
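
The generated header exposes the usual FlatBuffers surface for the field: an ngram_size() accessor on the table (line 2850), add_ngram_size() on the builder (line 2874), and a CreateSkipGramOptions() convenience function whose ngram_size argument defaults to 0 (lines 2897/2902). A sketch of writing and reading the field through that API; the argument order and the tflite namespace are assumptions based on the hits and the test call above:

    #include "flatbuffers/flatbuffers.h"
    #include "tensorflow/contrib/lite/schema/schema_generated.h"

    // Builds a SkipGramOptions table with ngram_size = 3 and reads it back.
    int ReadBackNgramSize() {
      flatbuffers::FlatBufferBuilder builder;
      auto options = tflite::CreateSkipGramOptions(
          builder, /*ngram_size=*/3, /*max_skip_size=*/2,
          /*include_all_ngrams=*/false);
      builder.Finish(options);
      auto* parsed = flatbuffers::GetRoot<tflite::SkipGramOptions>(
          builder.GetBufferPointer());
      return parsed->ngram_size();  // 3
    }
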
/external/tensorflow/tensorflow/contrib/lite/models/smartreply/g3doc/
README.md
   117  skip grams. The configurable parameters are `ngram_size` and
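
The README snippet is cut off by the search view, but together with the test signature above it indicates the skip-gram extraction is driven by ngram_size (words per n-gram) and a skip bound such as max_skip_size. A toy sketch, not the smartreply implementation, that enumerates 2-word skip grams with at most one skipped word:

    #include <iostream>
    #include <string>
    #include <vector>

    // Toy illustration only: ngram_size = 2, max_skip_size = 1.
    int main() {
      const std::vector<std::string> words = {"the", "quick", "brown", "fox"};
      const std::size_t max_skip_size = 1;
      for (std::size_t i = 0; i < words.size(); ++i) {
        for (std::size_t j = i + 1;
             j < words.size() && j <= i + 1 + max_skip_size; ++j) {
          std::cout << words[i] << " " << words[j] << "\n";
        }
      }
      // Prints: the quick, the brown, quick brown, quick fox, brown fox.
      return 0;
    }
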