/external/grpc-grpc/src/core/lib/security/credentials/jwt/ |
D | jwt_credentials.cc |
     34  GRPC_MDELEM_UNREF(c->cached.jwt_md);  in jwt_reset_cache()
     35  c->cached.jwt_md = GRPC_MDNULL;  in jwt_reset_cache()
     36  if (c->cached.service_url != nullptr) {  in jwt_reset_cache()
     37  gpr_free(c->cached.service_url);  in jwt_reset_cache()
     38  c->cached.service_url = nullptr;  in jwt_reset_cache()
     40  c->cached.jwt_expiration = gpr_inf_past(GPR_CLOCK_REALTIME);  in jwt_reset_cache()
     66  if (c->cached.service_url != nullptr &&  in jwt_get_request_metadata()
     67  strcmp(c->cached.service_url, context.service_url) == 0 &&  in jwt_get_request_metadata()
     68  !GRPC_MDISNULL(c->cached.jwt_md) &&  in jwt_get_request_metadata()
     69  (gpr_time_cmp(gpr_time_sub(c->cached.jwt_expiration,  in jwt_get_request_metadata()
     [all …]
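
The jwt_credentials.cc excerpt caches one self-signed JWT per service URL and resets the cache when the URL changes or the token nears expiration. A minimal Python sketch of that pattern follows; it is not the gRPC C-core API, and sign_jwt is a hypothetical callback standing in for the real signer.

    import time

    class JwtCache:
        def __init__(self, lifetime_s=3600, refresh_skew_s=60):
            self.lifetime_s = lifetime_s
            self.refresh_skew_s = refresh_skew_s
            self._reset()

        def _reset(self):
            # Mirrors jwt_reset_cache(): drop the token, the URL and the expiration.
            self.jwt = None
            self.service_url = None
            self.expiration = float("-inf")

        def get(self, service_url, sign_jwt):
            now = time.time()
            # Reuse the cached token only for the same service URL and only while
            # it still has comfortably more than refresh_skew_s of lifetime left.
            if (self.service_url == service_url and self.jwt is not None
                    and self.expiration - now > self.refresh_skew_s):
                return self.jwt
            self._reset()
            self.jwt = sign_jwt(service_url, self.lifetime_s)
            self.service_url = service_url
            self.expiration = now + self.lifetime_s
            return self.jwt
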
|
/external/python/cpython3/Lib/test/test_importlib/ |
D | test_spec.py |
     70  self.cached = self.util.cache_from_source(self.path)
     85  self.assertIs(spec.cached, None)
     96  self.assertIs(spec.cached, None)
    108  self.assertIs(spec.cached, None)
    120  self.assertIs(spec.cached, None)
    136  cached=None,
    147  cached=self.cached,
    158  cached=None,
    184  before = self.spec.cached
    185  self.spec.cached = 'there'
    [all …]
|
/external/grpc-grpc-java/core/src/main/java/io/grpc/internal/ |
D | SharedResourceHolder.java |
    119  final Instance cached = instances.get(resource);  in releaseInternal() local
    120  if (cached == null) {  in releaseInternal()
    123  Preconditions.checkArgument(instance == cached.payload, "Releasing the wrong instance");  in releaseInternal()
    124  Preconditions.checkState(cached.refcount > 0, "Refcount has already reached zero");  in releaseInternal()
    125  cached.refcount--;  in releaseInternal()
    126  if (cached.refcount == 0) {  in releaseInternal()
    134  Preconditions.checkState(cached.destroyTask == null, "Destroy task already scheduled");  in releaseInternal()
    139  cached.destroyTask = destroyer.schedule(new LogExceptionRunnable(new Runnable() {  in releaseInternal()
    144  if (cached.refcount == 0) {  in releaseInternal()
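
releaseInternal() above decrements a per-resource refcount and, at zero, schedules a delayed destroy task that a later re-acquire can still cancel. A rough Python equivalent of the pattern, not the grpc-java API; threading.Timer stands in for the scheduled destroyer.

    import threading

    DESTROY_DELAY_S = 1.0
    _instances = {}          # resource key -> [payload, refcount, pending_timer]
    _lock = threading.Lock()

    def acquire(key, create):
        with _lock:
            entry = _instances.get(key)
            if entry is None:
                entry = [create(), 0, None]
                _instances[key] = entry
            if entry[2] is not None:        # cancel a pending delayed destroy
                entry[2].cancel()
                entry[2] = None
            entry[1] += 1
            return entry[0]

    def release(key, payload):
        with _lock:
            entry = _instances.get(key)
            assert entry is not None and entry[0] is payload, "Releasing the wrong instance"
            assert entry[1] > 0, "Refcount has already reached zero"
            entry[1] -= 1
            if entry[1] == 0:
                assert entry[2] is None, "Destroy task already scheduled"
                def destroy():
                    with _lock:
                        if entry[1] == 0:   # nobody re-acquired in the meantime
                            del _instances[key]
                entry[2] = threading.Timer(DESTROY_DELAY_S, destroy)
                entry[2].start()
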
|
/external/ltp/testcases/kernel/syscalls/readahead/ |
D | readahead02.c |
    162  unsigned long *cached)  in read_testfile() argument
    184  *cached = get_cached_size();  in read_testfile()
    185  if (*cached > cached_start) {  in read_testfile()
    187  (*cached - cached_start));  in read_testfile()
    199  *cached = get_cached_size();  in read_testfile()
    223  *cached = get_cached_size();  in read_testfile()
    240  unsigned long cached_high, cached_low, cached, cached_ra;  in test_readahead() local
    256  &cached);  in test_readahead()
    265  &cached);  in test_readahead()
    266  if (cached > cached_low)  in test_readahead()
    [all …]
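
read_testfile() compares the page-cache size before and after reading the test file and reports the growth. Below is a Linux-only Python sketch of the same measurement, using the "Cached" field of /proc/meminfo as the page-cache figure.

    import os

    def get_cached_kb():
        with open("/proc/meminfo") as f:
            for line in f:
                if line.startswith("Cached:"):
                    return int(line.split()[1])      # value is reported in kB
        raise RuntimeError("no Cached line in /proc/meminfo")

    def read_and_measure(path):
        cached_start = get_cached_kb()
        with open(path, "rb") as f:
            while f.read(1 << 20):                   # read in 1 MiB chunks
                pass
        cached = get_cached_kb()
        return max(0, cached - cached_start)         # growth roughly attributable to this read

    # Example: print(read_and_measure("/tmp/testfile"), "kB added to the page cache")
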
|
/external/perfetto/src/traced/probes/ps/ |
D | process_stats_data_source.cc |
    394  CachedProcessStats& cached = process_stats_cache_[pid];  in WriteAllProcessStats() local
    396  if (counter != cached.oom_score_adj) {  in WriteAllProcessStats()
    398  cached.oom_score_adj = counter;  in WriteAllProcessStats()
    417  CachedProcessStats& cached = process_stats_cache_[pid];  in WriteMemCounters() local
    444  if (counter != cached.vm_size_kb) {  in WriteMemCounters()
    446  cached.vm_size_kb = counter;  in WriteMemCounters()
    450  if (counter != cached.vm_locked_kb) {  in WriteMemCounters()
    452  cached.vm_locked_kb = counter;  in WriteMemCounters()
    456  if (counter != cached.vm_hvm_kb) {  in WriteMemCounters()
    458  cached.vm_hvm_kb = counter;  in WriteMemCounters()
    [all …]
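
WriteMemCounters() only writes a counter when it differs from the value cached for that pid, which keeps repeated identical samples out of the trace. A small Python sketch of the same emit-on-change idea; emit is a stand-in for the real trace writer.

    process_stats_cache = {}   # pid -> {counter name: last emitted value}

    def write_mem_counters(pid, counters, emit):
        cached = process_stats_cache.setdefault(pid, {})
        for name, value in counters.items():          # e.g. {"vm_size_kb": 1000, ...}
            if cached.get(name) != value:
                emit(pid, name, value)                # write the changed counter
                cached[name] = value

    # Example:
    # write_mem_counters(42, {"vm_size_kb": 1000, "vm_locked_kb": 0}, print)  # both emitted
    # write_mem_counters(42, {"vm_size_kb": 1000, "vm_locked_kb": 4}, print)  # only vm_locked_kb emitted
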
|
/external/skia/src/gpu/vk/ |
D | GrVkPipelineStateBuilder.cpp |
    111  int GrVkPipelineStateBuilder::loadShadersFromCache(const SkData& cached,  in loadShadersFromCache() argument
    129  shader_size vertSize = *((shader_size*) ((char*) cached.data() + offset));  in loadShadersFromCache()
    131  SkSL::String vert((char*) cached.data() + offset, vertSize);  in loadShadersFromCache()
    134  memcpy(&vertInputs, (char*) cached.data() + offset, sizeof(vertInputs));  in loadShadersFromCache()
    138  shader_size fragSize = *((shader_size*) ((char*) cached.data() + offset));  in loadShadersFromCache()
    140  SkSL::String frag((char*) cached.data() + offset, fragSize);  in loadShadersFromCache()
    143  memcpy(&fragInputs, (char*) cached.data() + offset, sizeof(fragInputs));  in loadShadersFromCache()
    147  shader_size geomSize = *((shader_size*) ((char*) cached.data() + offset));  in loadShadersFromCache()
    149  SkSL::String geom((char*) cached.data() + offset, geomSize);  in loadShadersFromCache()
    152  memcpy(&geomInputs, (char*) cached.data() + offset, sizeof(geomInputs));  in loadShadersFromCache()
    [all …]
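
loadShadersFromCache() walks a cached SkData blob, reading a size, then that many bytes of shader source, then the stage's input description, for each stage in turn. The Python sketch below only illustrates the length-prefixed walk; it does not reproduce Skia's actual cache layout, field sizes, or the inputs structs.

    import struct

    def parse_shader_cache(blob):
        shaders = []
        offset = 0
        while offset < len(blob):
            (size,) = struct.unpack_from("<I", blob, offset)   # 4-byte length prefix
            offset += 4
            source = blob[offset:offset + size].decode("utf-8")
            offset += size
            shaders.append(source)
        return shaders

    # Example round-trip:
    # blob = b"".join(struct.pack("<I", len(s)) + s for s in (b"// vert", b"// frag"))
    # assert parse_shader_cache(blob) == ["// vert", "// frag"]
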
|
/external/skqp/src/gpu/vk/ |
D | GrVkPipelineStateBuilder.cpp |
    111  int GrVkPipelineStateBuilder::loadShadersFromCache(const SkData& cached,  in loadShadersFromCache() argument
    129  shader_size vertSize = *((shader_size*) ((char*) cached.data() + offset));  in loadShadersFromCache()
    131  SkSL::String vert((char*) cached.data() + offset, vertSize);  in loadShadersFromCache()
    134  memcpy(&vertInputs, (char*) cached.data() + offset, sizeof(vertInputs));  in loadShadersFromCache()
    138  shader_size fragSize = *((shader_size*) ((char*) cached.data() + offset));  in loadShadersFromCache()
    140  SkSL::String frag((char*) cached.data() + offset, fragSize);  in loadShadersFromCache()
    143  memcpy(&fragInputs, (char*) cached.data() + offset, sizeof(fragInputs));  in loadShadersFromCache()
    147  shader_size geomSize = *((shader_size*) ((char*) cached.data() + offset));  in loadShadersFromCache()
    149  SkSL::String geom((char*) cached.data() + offset, geomSize);  in loadShadersFromCache()
    152  memcpy(&geomInputs, (char*) cached.data() + offset, sizeof(geomInputs));  in loadShadersFromCache()
    [all …]
|
/external/tensorflow/tensorflow/core/api_def/base_api/ |
D | api_def_BoostedTreesTrainingPredict.pbtxt |
      7  Rank 1 Tensor containing cached tree ids which is the starting
     14  Rank 1 Tensor containing cached node id which is the starting
     28  Rank 2 Tensor containing logits update (with respect to cached
     59  computes the update to cached logits. It is designed to be used during training.
     60  It traverses the trees starting from cached tree id and cached node id and
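
The description above says the op resumes traversal from a cached tree id and node id and returns an update relative to the cached logits rather than recomputing the full prediction. A toy Python sketch of that idea follows; it is not the TensorFlow op, it ignores the cached node id, and it treats each tree as a plain callable.

    def logits_update(trees, example, cached_tree_id, cached_logits):
        delta = 0.0
        for tree in trees[cached_tree_id:]:       # only trees added since the cache point
            delta += tree(example)                # each tree maps an example to a leaf value
        return cached_logits + delta, len(trees)  # updated logits and the new cached tree id

    # Example with two stump "trees":
    # trees = [lambda x: 0.5 if x > 0 else -0.5, lambda x: 0.25]
    # print(logits_update(trees, 1.0, cached_tree_id=1, cached_logits=0.5))  # (0.75, 2)
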
|
/external/ltp/testcases/cve/ |
D | meltdown.c |
    239  long cached, uncached, i;  in set_cache_hit_threshold() local
    241  for (cached = 0, i = 0; i < ESTIMATE_CYCLES; i++)  in set_cache_hit_threshold()
    242  cached += get_access_time(target_array);  in set_cache_hit_threshold()
    244  for (cached = 0, i = 0; i < ESTIMATE_CYCLES; i++)  in set_cache_hit_threshold()
    245  cached += get_access_time(target_array);  in set_cache_hit_threshold()
    252  cached /= ESTIMATE_CYCLES;  in set_cache_hit_threshold()
    255  cache_hit_threshold = mysqrt(cached * uncached);  in set_cache_hit_threshold()
    259  cached, uncached, cache_hit_threshold);  in set_cache_hit_threshold()
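
set_cache_hit_threshold() averages the access time of cache-hot and flushed reads over ESTIMATE_CYCLES iterations and takes the geometric mean of the two averages as the hit/miss cut-off (the mysqrt(cached * uncached) line). The same calculation in a few lines of Python, assuming the timing samples have already been collected:

    import math

    def pick_threshold(hot_samples, cold_samples):
        cached = sum(hot_samples) / len(hot_samples)        # average hit latency
        uncached = sum(cold_samples) / len(cold_samples)    # average miss latency
        return int(math.sqrt(cached * uncached))            # geometric mean sits between the two

    # Example: pick_threshold([40, 42, 38], [300, 280, 310]) -> 108
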
|
/external/glide/library/src/main/java/com/bumptech/glide/load/engine/ |
D | Engine.java |
    151  EngineResource<?> cached = getFromCache(key);  in load() local
    152  if (cached != null) {  in load()
    153  cached.acquire();  in load()
    154  … activeResources.put(key, new ResourceWeakReference(key, cached, resourceReferenceQueue));  in load()
    155  cb.onResourceReady(cached);  in load()
    206  Resource<?> cached = cache.remove(key);  in getFromCache() local
    209  if (cached == null) {  in getFromCache()
    211  } else if (cached instanceof EngineResource) {  in getFromCache()
    213  result = (EngineResource) cached;  in getFromCache()
    215  result = new EngineResource(cached, true /*isCacheable*/);  in getFromCache()
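
On a cache hit, load() above acquires the resource, promotes it from the memory cache into the set of active resources (held via weak references), and hands it to the callback. A condensed Python sketch of that promotion, not the Glide API:

    import weakref

    memory_cache = {}                                   # key -> resource not currently in use
    active_resources = weakref.WeakValueDictionary()    # key -> resource currently in use

    class Resource:
        def __init__(self, payload):
            self.payload = payload
            self.acquired = 0
        def acquire(self):
            self.acquired += 1

    def load(key, on_ready):
        cached = memory_cache.pop(key, None)            # remove from the memory cache
        if cached is not None:
            cached.acquire()
            active_resources[key] = cached              # now tracked only weakly
            on_ready(cached)
            return True                                 # served from cache
        return False                                    # caller must start a real load
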
|
/external/guice/core/src/com/google/inject/internal/util/ |
D | StackTraceElements.java |
    143  InMemoryStackTraceElement cached = elementCache.get(inMemoryStackTraceElement);  in weakIntern() local
    144  if (cached != null) {  in weakIntern()
    145  return cached;  in weakIntern()
    157  String cached = stringCache.get(s);  in weakIntern() local
    158  if (cached != null) {  in weakIntern()
    159  return cached;  in weakIntern()
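
weakIntern() returns the previously cached element when an equal one exists, so equal stack-trace elements and strings collapse to a single instance. The Python sketch below shows the same interning idea with a plain dict rather than Guice's weak-valued cache:

    _intern_cache = {}

    def intern_value(value):
        cached = _intern_cache.get(value)
        if cached is not None:
            return cached            # reuse the object that compares equal
        _intern_cache[value] = value
        return value

    # Example: two equal tuples collapse to one object.
    # a, b = intern_value((1, "x")), intern_value((1, "x"))
    # assert a is b
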
|
/external/subsampling-scale-image-view/library/src/main/java/com/davemorrissey/labs/subscaleview/ |
D | ImageSource.java |
     31  private boolean cached;  field in ImageSource
     33  private ImageSource(Bitmap bitmap, boolean cached) {  in ImageSource() argument
     40  this.cached = cached;
    242  return cached;  in isCached()
|
/external/libunwind/doc/ |
D | unw_flush_cache.tex |
      8  …David Mosberger-Tang}{Programming Library}{unw\_flush\_cache}unw\_flush\_cache -- flush cached info
     19  The \Func{unw\_flush\_cache}() routine flushes all cached info as it
     21  target address-space \Var{as}. In addition, all info cached for
     24  list is not tied to a code-range and its cached value (if any) is
     32  information cached on behalf of address space \Var{as} is flushed.
|
/external/skqp/src/core/ |
D | SkXfermode.cpp |
     95  static SkXfermode* cached[COUNT_BLENDMODES];  in Make() local
     99  cached[(int)mode] = xfermode;  in Make()
    101  cached[(int)mode] = new SkProcCoeffXfermode(mode);  in Make()
    104  return sk_ref_sp(cached[(int)mode]);  in Make()
|
/external/skia/src/core/ |
D | SkXfermode.cpp |
     95  static SkXfermode* cached[COUNT_BLENDMODES];  in Make() local
     99  cached[(int)mode] = xfermode;  in Make()
    101  cached[(int)mode] = new SkProcCoeffXfermode(mode);  in Make()
    104  return sk_ref_sp(cached[(int)mode]);  in Make()
|
/external/vixl/tools/ |
D | lint.py |
    238  def RunLinter(files, jobs=1, progress_prefix='', cached=True):  argument
    239  results = {} if not cached else ReadCachedResults()
    260  cached = not args.no_cache  variable
    261  retcode = RunLinter(files, jobs=args.jobs, cached=cached)
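
RunLinter() seeds its results from a cache unless caching is disabled, so previously linted files are not re-checked. A rough Python sketch of that flow, not the vixl script itself; the cache file name and the per-file lint_one callback are made up for the example.

    import json
    import os

    CACHE_FILE = ".lint_results.json"        # hypothetical cache location

    def read_cached_results():
        if os.path.exists(CACHE_FILE):
            with open(CACHE_FILE) as f:
                return json.load(f)
        return {}

    def run_linter(files, lint_one, cached=True):
        results = read_cached_results() if cached else {}
        for path in files:
            if path not in results:
                results[path] = lint_one(path)   # e.g. number of lint errors for this file
        with open(CACHE_FILE, "w") as f:
            json.dump(results, f)                # persist the merged results for next time
        return sum(results[p] for p in files)
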
|
/external/python/cpython3/Lib/importlib/ |
D | _bootstrap.py |
    398  self.cached == other.cached and
    404  def cached(self):  member in ModuleSpec
    412  @cached.setter
    413  def cached(self, cached):  member in ModuleSpec
    414  self._cached = cached
    489  cached = module.__cached__
    491  cached = None
    499  spec.cached = cached
    568  if spec.cached is not None:
    570  module.__cached__ = spec.cached
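
As the excerpt shows, ModuleSpec.cached is computed lazily from the spec's origin and is copied into the module's __cached__ attribute. The same plumbing through the public importlib.util helpers; the pkg/mod.py path is only illustrative, and the file does not have to exist for the spec to be created.

    import importlib.util

    # Expected bytecode location for a source file:
    print(importlib.util.cache_from_source("pkg/mod.py"))
    # -> pkg/__pycache__/mod.cpython-3XX.pyc (tag depends on the interpreter)

    spec = importlib.util.spec_from_file_location("mod", "pkg/mod.py")
    print(spec.cached)          # matching __pycache__ path, derived lazily from spec.origin
    module = importlib.util.module_from_spec(spec)
    print(module.__cached__)    # module_from_spec copies spec.cached into __cached__
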
|
/external/iproute2/lib/ |
D | names.c |
    162  if (db->cached && strcmp(db->cached->name, name) == 0) {  in name_to_id()
    163  *id = db->cached->id;  in name_to_id()
    173  db->cached = entry;  in name_to_id()
|
/external/lzma/CPP/7zip/Crypto/ |
D | 7zAes.cpp |
     83  const CKeyInfo &cached = Keys[i];  in GetKey() local
     84  if (key.IsEqualTo(cached))  in GetKey()
     87  key.Key[j] = cached.Key[j];  in GetKey()
    100  const CKeyInfo &cached = Keys[i];  in FindAndAdd() local
    101  if (key.IsEqualTo(cached))  in FindAndAdd()
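
GetKey() and FindAndAdd() above reuse an already-derived key when a cached CKeyInfo compares equal, because the key derivation is expensive. A Python sketch of caching derived keys by password, salt and iteration count; hashlib.pbkdf2_hmac is only a stand-in KDF here, not 7-Zip's actual iterated-SHA-256 scheme.

    import hashlib

    _key_cache = {}

    def derive_key(password: bytes, salt: bytes, iterations: int) -> bytes:
        cache_key = (password, salt, iterations)
        cached = _key_cache.get(cache_key)
        if cached is not None:
            return cached                      # skip the expensive derivation
        key = hashlib.pbkdf2_hmac("sha256", password, salt, iterations, dklen=32)
        _key_cache[cache_key] = key
        return key
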
|
/external/python/cpython2/Lib/test/ |
D | test_linecache.py |
     79  cached = []
     82  cached.append(filename)
     86  cached_empty = [fn for fn in cached if fn not in linecache.cache]
     91  cached_empty = [fn for fn in cached if fn in linecache.cache]
|
/external/python/cpython3/Lib/test/ |
D | test_linecache.py |
    134  cached = []
    137  cached.append(filename)
    141  self.assertNotEqual(cached, [])
    142  cached_empty = [fn for fn in cached if fn not in linecache.cache]
    147  cached_empty = [fn for fn in cached if fn in linecache.cache]
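
The test above records which filenames end up in linecache.cache and which are dropped again. In brief, the public API it exercises:

    import linecache

    path = linecache.__file__                 # any readable source file will do
    first_line = linecache.getline(path, 1)   # reads the file and caches its lines
    assert path in linecache.cache

    linecache.checkcache(path)                # drop the entry if the file changed on disk
    linecache.clearcache()                    # or empty the whole cache
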
|
/external/bcc/tools/ |
D | cachestat.py |
    165  cached = int(mem["Cached"]) / 1024  variable
    171  (total, misses, hits, mbd, buff, cached))
    180  cached = 0  variable
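
cachestat.py converts the kB values it reads from /proc/meminfo to MB before printing, as the mem["Cached"] / 1024 line shows. A small stand-alone version of that parse (Linux only):

    def get_meminfo():
        mem = {}
        with open("/proc/meminfo") as f:
            for line in f:
                name, value = line.split(":", 1)
                mem[name] = int(value.split()[0])    # values are reported in kB
        return mem

    mem = get_meminfo()
    cached_mb = mem["Cached"] / 1024
    buffers_mb = mem["Buffers"] / 1024
    print("cached: %.0f MB, buffers: %.0f MB" % (cached_mb, buffers_mb))
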
|
/external/skqp/src/gpu/gl/builders/ |
D | GrGLProgramBuilder.cpp |
    244  bool cached = fCached.get() != nullptr;  in finalize() local
    246  if (cached) {  in finalize()
    262  cached = this->checkLinkStatus(programID);  in finalize()
    264  if (cached) {  in finalize()
    269  cached = false;  in finalize()
    276  if (!cached || !fGpu->glCaps().programBinarySupport()) {  in finalize()
    379  if (!cached) {  in finalize()
|
/external/python/httplib2/ |
D | README.md |
     49  we have already cached. This implements Section 3.2 of
    100  The first request will be cached and since this is a request
    101  to bitworking.org it will be set to be cached for two hours,
    108  that the cached copy must not be used when handling this request.
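
The README passages above describe the on-disk cache and the effect of a no-cache request header. In httplib2 itself that looks like this (the example URL follows the README):

    import httplib2

    h = httplib2.Http(".cache")                                   # responses stored in ./.cache
    response, content = h.request("http://bitworking.org/")       # cached per its Cache-Control
    response, content = h.request("http://bitworking.org/",
                                  headers={"cache-control": "no-cache"})  # bypass the cached copy
    print(response.fromcache)   # True only when the response was served from the cache
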
|
/external/swiftshader/third_party/llvm-7.0/llvm/test/tools/gold/X86/ |
D | cache.ll |
     26  ; Two cached objects, plus a timestamp file
     40  ; Two cached objects, plus a timestamp file and "foo", minus the file we removed.
     64  ; With save-temps we can confirm that the cached files were copied into temp
     65  ; files to avoid a race condition with the cached files being pruned, since the
|