Lines matching refs:start_
(symbol cross-reference, apparently from libwebp's backward-references encoder: the leading number is the source line; the trailing name is the enclosing function, or "member" for a struct field)
90 PixOrCopy* start_; // data start member
127 c.cur_pos = c.cur_block_->start_; in VP8LRefsCursorInit()
138 c->cur_pos = (b == NULL) ? NULL : b->start_; in VP8LRefsCursorNextBlock()
139 c->last_pos_ = (b == NULL) ? NULL : b->start_ + b->size_; in VP8LRefsCursorNextBlock()
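Lines 127-139 show the cursor that walks a chain of PixOrCopy blocks: cur_pos points into the current block's start_ array and last_pos_ marks one past its used end, so the walk hops to the next block's start_ when that boundary is hit. A minimal sketch of that pattern; every field not shown in the listing (next_, the PixOrCopy payload) is an assumption, not the verbatim libwebp definition:

    #include <stddef.h>

    /* Stand-in payload type; the real PixOrCopy has more fields. */
    typedef struct { unsigned int argb_or_distance; } PixOrCopy;

    typedef struct PixOrCopyBlock PixOrCopyBlock;
    struct PixOrCopyBlock {
      PixOrCopyBlock* next_;   /* assumed: next block in the chain */
      PixOrCopy* start_;       /* data start (line 90) */
      int size_;               /* used entries in start_[] */
    };

    typedef struct {
      PixOrCopy* cur_pos;           /* NULL once the walk is done */
      PixOrCopyBlock* cur_block_;
      const PixOrCopy* last_pos_;   /* one past the current block's data */
    } RefsCursor;

    /* Mirrors lines 138-139: hop to the next block, or finish. */
    static void CursorNextBlock(RefsCursor* const c) {
      PixOrCopyBlock* const b = c->cur_block_->next_;
      c->cur_pos = (b == NULL) ? NULL : b->start_;
      c->last_pos_ = (b == NULL) ? NULL : b->start_ + b->size_;
      c->cur_block_ = b;
    }

    /* Typical walk: step inside the block, hop at last_pos_. */
    static void WalkRefs(RefsCursor* const c) {
      while (c->cur_pos != NULL) {
        /* ... consume *c->cur_pos here ... */
        if (++c->cur_pos == c->last_pos_) CursorNextBlock(c);
      }
    }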
148 sizeof(*b) + refs->block_size_ * sizeof(*b->start_); in BackwardRefsNewBlock()
154 b->start_ = (PixOrCopy*)((uint8_t*)b + sizeof(*b)); // not always aligned in BackwardRefsNewBlock()
173 b->start_[b->size_++] = v; in BackwardRefsCursorAdd()
184 memcpy(new_b->start_, b->start_, b->size_ * sizeof(*b->start_)); in VP8LBackwardRefsCopy()
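Lines 148-184 show why start_ is computed rather than allocated separately: each block is a single malloc holding the header plus a trailing PixOrCopy array, so start_ is just the address past the header (and, as the original comment on line 154 warns, not necessarily aligned for PixOrCopy). A sketch of that layout, reusing the types above; block_size stands in for refs->block_size_:

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    static PixOrCopyBlock* NewBlock(int block_size) {
      /* line 148: one size covers header and payload. */
      const size_t total_size =
          sizeof(PixOrCopyBlock) + block_size * sizeof(PixOrCopy);
      PixOrCopyBlock* const b = (PixOrCopyBlock*)malloc(total_size);
      if (b == NULL) return NULL;
      b->next_ = NULL;
      b->size_ = 0;
      /* line 154: payload starts right after the header. */
      b->start_ = (PixOrCopy*)((uint8_t*)b + sizeof(*b));
      return b;
    }

    /* line 173: appending is a plain store into the trailing array
     * (capacity checks elided). */
    static void BlockAdd(PixOrCopyBlock* const b, const PixOrCopy v) {
      b->start_[b->size_++] = v;
    }

    /* line 184: copying a block is one memcpy of the used prefix. */
    static void BlockCopy(PixOrCopyBlock* const dst,
                          const PixOrCopyBlock* const src) {
      memcpy(dst->start_, src->start_, src->size_ * sizeof(*src->start_));
      dst->size_ = src->size_;
    }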
695 int start_; member
710 int start_; member
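The two bare `int start_;` members at lines 695 and 710 belong to two different interval types in the cost model: a live, doubly-linked interval (the one whose previous_/next_ fields appear from line 1047 on) and a precomputed cost-cache interval (the plain array indexed at lines 1142-1145). A plausible reconstruction; every field other than start_ is inferred from the surrounding lines or assumed:

    /* line 695: a live interval, kept in a list sorted by start_. */
    typedef struct CostInterval CostInterval;
    struct CostInterval {
      float cost_;              /* assumed payload */
      int start_;
      int end_;
      CostInterval* previous_;  /* names from lines 1047-1051 */
      CostInterval* next_;
    };

    /* line 710: one run of constant cost in the precomputed cache. */
    typedef struct {
      float cost_;              /* assumed payload */
      int start_;
      int end_;                 /* exclusive */
    } CostCacheInterval;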
870 IsCostCacheIntervalWritable(cur->start_, cur->end_); in CostManagerInit()
888 cur->start_ = i; in CostManagerInit()
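Lines 870-888 come from the initialization pass that run-length encodes the precomputed cost cache into CostCacheInterval entries: cur->start_ = i opens a new interval each time the cached cost changes. A sketch of that grouping, assuming a cost_cache input array and enough room in out:

    /* Group consecutive equal costs into half-open intervals and
     * return how many were written; cost_cache and n are hypothetical
     * inputs (the real code reads the manager's cache). */
    static int BuildCostCacheIntervals(const float* cost_cache, int n,
                                       CostCacheInterval* out) {
      int i, count = 0;
      for (i = 0; i < n; ++i) {
        if (count == 0 || cost_cache[i] != out[count - 1].cost_) {
          /* line 888: a new interval starts at i. */
          out[count].cost_ = cost_cache[i];
          out[count].start_ = i;
          out[count].end_ = i + 1;      /* exclusive end */
          ++count;
        } else {
          out[count - 1].end_ = i + 1;  /* extend the current run */
        }
      }
      return count;
    }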
1025 while (current != NULL && current->start_ <= i) { in UpdateCostPerIndex()
1047 while (previous != NULL && current->start_ < previous->start_) { in PositionOrphanInterval()
1051 previous->next_->start_ < current->start_) { in PositionOrphanInterval()
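Lines 1025-1051 show how the live list stays sorted by start_: lookups walk forward while current->start_ <= i (line 1025), and PositionOrphanInterval re-seats a detached interval with two scans, backward past predecessors that start later, then forward past successors that still start earlier. A sketch of that re-seating built from the two conditions shown, with the list head passed explicitly instead of through the manager:

    static void PositionOrphan(CostInterval** head, CostInterval* const current,
                               CostInterval* previous) {
      if (previous == NULL) previous = *head;
      /* line 1047: back up past intervals that start after current. */
      while (previous != NULL && current->start_ < previous->start_) {
        previous = previous->previous_;
      }
      /* lines 1050-1051: move forward while successors start earlier. */
      while (previous != NULL && previous->next_ != NULL &&
             previous->next_->start_ < current->start_) {
        previous = previous->next_;
      }
      /* Splice current in after previous (or at the head). */
      if (previous != NULL) {
        current->previous_ = previous;
        current->next_ = previous->next_;
        if (previous->next_ != NULL) previous->next_->previous_ = current;
        previous->next_ = current;
      } else {
        current->previous_ = NULL;
        current->next_ = *head;
        if (*head != NULL) (*head)->previous_ = current;
        *head = current;
      }
    }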
1097 interval_new->start_ = start; in InsertInterval()
1108 if (IsCostCacheIntervalWritable(interval->start_, interval->end_)) { in RepositionInterval()
1110 UpdateCostPerInterval(manager, interval->start_, interval->end_, in RepositionInterval()
1118 interval->previous_->start_ <= interval->start_) && in RepositionInterval()
1120 interval->start_ <= interval->next_->start_)) { in RepositionInterval()
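Lines 1108-1121 are RepositionInterval's two cheap exits: a "writable" (short enough) interval has its costs flushed into the per-index array and is dropped from the list, and an interval still ordered between its neighbors is left alone; only otherwise is it detached and re-seated. A sketch of that flow, reusing CostInterval and PositionOrphan from above; the writability threshold and the flush are hypothetical stand-ins:

    /* Hypothetical stand-ins for the manager's real helpers. */
    static int IsWritableLen(int start, int end) {
      return (end - start) <= 16;   /* assumed threshold */
    }
    static void FlushCosts(int start, int end) {
      (void)start; (void)end;       /* would write costs per index */
    }

    /* Unlink an interval, keeping its neighbors consistent. */
    static void Unlink(CostInterval** head, CostInterval* const i) {
      if (i->previous_ != NULL) i->previous_->next_ = i->next_;
      else *head = i->next_;
      if (i->next_ != NULL) i->next_->previous_ = i->previous_;
    }

    static void Reposition(CostInterval** head, CostInterval* const interval) {
      /* lines 1108-1110: writable: flush and remove. */
      if (IsWritableLen(interval->start_, interval->end_)) {
        FlushCosts(interval->start_, interval->end_);
        Unlink(head, interval);
        return;
      }
      /* lines 1117-1121: still sorted between its neighbors: keep. */
      if ((interval->previous_ == NULL ||
           interval->previous_->start_ <= interval->start_) &&
          (interval->next_ == NULL ||
           interval->start_ <= interval->next_->start_)) {
        return;
      }
      /* Out of order: detach, then re-seat near its old position. */
      {
        CostInterval* const prev = interval->previous_;
        Unlink(head, interval);
        PositionOrphan(head, interval, prev);
      }
    }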
1142 cost_cache_intervals[i].start_ < last; in PushInterval()
1145 int start = index + cost_cache_intervals[i].start_; in PushInterval()
1159 for (; interval != NULL && interval->start_ < end && start < end; in PushInterval()
1181 index, start, interval->start_); in PushInterval()
1198 } else if (start <= interval->start_ && interval->end_ <= end) { in PushInterval()
1206 if (interval->start_ <= start && end <= interval->end_) { in PushInterval()
1217 } else if (interval->start_ < start) { in PushInterval()
1228 interval->start_ = end; in PushInterval()
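The PushInterval lines (1142-1228) enumerate the overlap cases between a newly pushed half-open range [start, end) and each existing interval: the new range can cover an interval entirely (line 1198), sit wholly inside one (line 1206), or clip one partially, in which case a head-clipped survivor has its start_ bumped to end (lines 1217-1228). A skeleton of that case analysis, reusing CostInterval from above; cost comparisons and node creation/removal are elided, only the classification visible in the listing is shown:

    static void ClassifyOverlaps(CostInterval* head, int start, int end) {
      CostInterval* interval;
      /* line 1159: stop once intervals begin at or past end. */
      for (interval = head; interval != NULL && interval->start_ < end;
           interval = interval->next_) {
        if (interval->end_ <= start) continue;        /* disjoint */
        if (start <= interval->start_ && interval->end_ <= end) {
          /* line 1198: old interval fully covered by the new range. */
        } else if (interval->start_ <= start && end <= interval->end_) {
          /* line 1206: new range sits inside the old interval. */
        } else if (interval->start_ < start) {
          /* line 1217: overlap on the old interval's tail. */
        } else {
          /* line 1228: overlap on the old interval's head: clip it by
           * moving start_ up to end. */
          interval->start_ = end;
        }
      }
    }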