Searched refs:PGSIZE (Results 1 – 14 of 14) sorted by relevance
/sys/kern/
palloc.c
     97  if ((start % PGSIZE) != 0)  in PAlloc_AddRegion()
     99  if ((len % PGSIZE) != 0)  in PAlloc_AddRegion()
    115  pageInfoLength = ROUNDUP(end / PGSIZE * sizeof(PageInfo), PGSIZE);  in PAlloc_AddRegion()
    121  for (i = 0; i < (base / PGSIZE); i++) {  in PAlloc_AddRegion()
    124  for (i = (base / PGSIZE); i < (end / PGSIZE); i++) {  in PAlloc_AddRegion()
    127  for (i = 0; i < (pageInfoLength / PGSIZE); i++) {  in PAlloc_AddRegion()
    128  pageInfoTable[i + (base / PGSIZE)].refCount = 1;  in PAlloc_AddRegion()
    141  uintptr_t newLength = ROUNDUP(end / PGSIZE * sizeof(PageInfo), PGSIZE);  in PAlloc_AddRegion()
    147  for (i = (base / PGSIZE); i < (end / PGSIZE); i++) {  in PAlloc_AddRegion()
    153  for (i = 0; i < len; i += PGSIZE)  in PAlloc_AddRegion()
    [all …]
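These matches show PAlloc_AddRegion requiring page-aligned region bounds and sizing a PageInfo array to cover every frame below end, with the table itself rounded up to whole pages. A minimal sketch of that sizing arithmetic, assuming a ROUNDUP macro like the one named above; the PageInfo body here is illustrative, the real struct may hold more fields.

    #include <stddef.h>
    #include <stdint.h>

    #define PGSIZE          4096
    #define ROUNDUP(x, y)   ((((x) + (y) - 1) / (y)) * (y))

    /* Illustrative layout; only refCount is visible in the matches. */
    typedef struct PageInfo {
        uint32_t refCount;
    } PageInfo;

    /* Bytes needed for one PageInfo per frame in [0, end), rounded up to a
     * whole number of pages so the table can itself be page-allocated. */
    static uintptr_t
    PageInfoTableBytes(uintptr_t end)
    {
        return ROUNDUP(end / PGSIZE * sizeof(PageInfo), PGSIZE);
    }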
 
loader.c
     66  if ((vaddr % PGSIZE) != 0) {  in LoaderLoadSegment()
     67  uintptr_t maxlen = PGSIZE - (vaddr % PGSIZE);  in LoaderLoadSegment()
     77  while (len > PGSIZE) {  in LoaderLoadSegment()
     79  VFS_Read(vn, raddr, offset, PGSIZE);  in LoaderLoadSegment()
     80  vaddr += PGSIZE;  in LoaderLoadSegment()
     81  offset += PGSIZE;  in LoaderLoadSegment()
     82  len -= PGSIZE;  in LoaderLoadSegment()
    102  if ((vaddr % PGSIZE) != 0) {  in LoaderZeroSegment()
    103  uintptr_t maxlen = PGSIZE - (vaddr % PGSIZE);  in LoaderZeroSegment()
    112  while (len > PGSIZE) {  in LoaderZeroSegment()
    [all …]
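The loader walks each segment one page at a time: an unaligned head is handled only up to the next page boundary, then whole pages, then the tail. A sketch of that loop shape under the assumption of a generic read callback; the real code reads through VFS_Read into a translated kernel address, which is elided here.

    #include <stdint.h>

    #define PGSIZE 4096

    /* Hypothetical reader: copy 'len' bytes at file 'offset' to 'dst'. */
    typedef void (*read_fn)(void *dst, uint64_t offset, uintptr_t len);

    static void
    copy_segment(read_fn rd, uintptr_t vaddr, uint64_t offset, uintptr_t len)
    {
        if ((vaddr % PGSIZE) != 0) {
            /* Head: only up to the next page boundary. */
            uintptr_t maxlen = PGSIZE - (vaddr % PGSIZE);
            if (maxlen > len)
                maxlen = len;
            rd((void *)vaddr, offset, maxlen);
            vaddr += maxlen;
            offset += maxlen;
            len -= maxlen;
        }
        while (len > PGSIZE) {
            /* Body: one full page per iteration. */
            rd((void *)vaddr, offset, PGSIZE);
            vaddr += PGSIZE;
            offset += PGSIZE;
            len -= PGSIZE;
        }
        if (len > 0)
            rd((void *)vaddr, offset, len);   /* Tail. */
    }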
 
slab.c
     70  inc = ROUNDUP(realObjSz * 64, PGSIZE);  in SlabExtend()
     71  if (inc < 4 * PGSIZE) {  in SlabExtend()
     72  inc = 4 * PGSIZE;  in SlabExtend()
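SlabExtend grows a slab by roughly 64 objects' worth of memory, rounded up to whole pages, with a floor of four pages. A sketch of that sizing rule, assuming the same ROUNDUP convention as above:

    #include <stdint.h>

    #define PGSIZE          4096
    #define ROUNDUP(x, y)   ((((x) + (y) - 1) / (y)) * (y))

    /* Growth step for a slab: space for ~64 objects, never less than 4 pages. */
    static uintptr_t
    slab_increment(uintptr_t realObjSz)
    {
        uintptr_t inc = ROUNDUP(realObjSz * 64, PGSIZE);
        if (inc < 4 * PGSIZE)
            inc = 4 * PGSIZE;
        return inc;
    }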
 
syscall.c
    152  Thread_SetupUThread(thr, proc->entrypoint, MEM_USERSPACE_STKTOP - PGSIZE);  in Syscall_Spawn()
    155  argstart = (char *)DMPA2VA(PMap_Translate(thr->space, MEM_USERSPACE_STKTOP - PGSIZE));  in Syscall_Spawn()
    199  for (p = 0; p < len; p += PGSIZE)  in Syscall_MUnmap()
    204  PMap_Unmap(cur->space, addr, len /= PGSIZE);  in Syscall_MUnmap()
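Syscall_MUnmap walks the requested range a page at a time and hands PMap_Unmap a page count rather than a byte length. A sketch of those two steps; whether the real code rounds the length up or expects it pre-aligned is not visible from these excerpts, so the round-up below is an assumption.

    #include <stdint.h>

    #define PGSIZE 4096

    /* Pages spanned by a byte length, rounded up (assumption, see above). */
    static uint64_t
    bytes_to_pages(uint64_t len)
    {
        return (len + PGSIZE - 1) / PGSIZE;
    }

    /* Visit each page of a mapping, as the loop at syscall.c:199 does. */
    static void
    for_each_page(uint64_t addr, uint64_t len, void (*visit)(uint64_t va))
    {
        for (uint64_t p = 0; p < len; p += PGSIZE)
            visit(addr + p);
    }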
 
/sys/amd64/
xmem.c
     68  for (off = 0; off < xmem->length; off += PGSIZE) {  in XMem_Destroy()
     69  PMap_SystemLookup(xmem->base + off, &entry, PGSIZE);  in XMem_Destroy()
    104  for (off = xmem->length; off < length; off += PGSIZE) {  in XMem_Allocate()
    111  xmem->length += PGSIZE;  in XMem_Allocate()
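XMem_Allocate grows a region from its current length to the requested length one page per iteration, bumping xmem->length as each page is backed. A sketch of that loop, with a hypothetical map_one_page helper standing in for the real allocate-and-map step and an illustrative XMem layout:

    #include <stdbool.h>
    #include <stdint.h>

    #define PGSIZE 4096

    typedef struct XMem {
        uintptr_t base;     /* start of the virtual region */
        uintptr_t length;   /* bytes currently backed by pages */
    } XMem;

    /* Hypothetical helper: allocate a physical page and map it at 'va'. */
    extern bool map_one_page(uintptr_t va);

    static bool
    xmem_grow(XMem *xmem, uintptr_t length)
    {
        for (uintptr_t off = xmem->length; off < length; off += PGSIZE) {
            if (!map_one_page(xmem->base + off))
                return false;     /* length still reflects what succeeded */
            xmem->length += PGSIZE;
        }
        return true;
    }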
 
pmap.c
    335  ASSERT(size == PGSIZE);  in PMapLookupEntry()
    361  uint64_t va = virt + PGSIZE * i;  in PMap_Map()
    362  PMapLookupEntry(as, va, &entry, PGSIZE);  in PMap_Map()
    368  *entry = (phys + PGSIZE * i) | PTE_P | PTE_W | PTE_U | flags;  in PMap_Map()
    393  uint64_t vai = va + PGSIZE * i;  in PMap_Unmap()
    394  PMapLookupEntry(as, vai, &entry, PGSIZE);  in PMap_Unmap()
    426  uint64_t pages = (len + PGSIZE - 1) / PGSIZE;  in PMap_AllocMap()
    432  uint64_t va = virt + PGSIZE * i;  in PMap_AllocMap()
    433  PMapLookupEntry(as, va, &entry, PGSIZE);  in PMap_AllocMap()
    520  uint64_t va = virt + PGSIZE * i;  in PMap_SystemMap()
    [all …]
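PMap_Map computes the i-th page's virtual and physical addresses by adding PGSIZE * i to the range's base addresses, looks up the leaf entry, and writes the physical address OR'd with the present/write/user bits plus the caller's flags. A sketch of just that per-page arithmetic (the page-table walk is elided); the PTE bit values are the standard x86-64 ones.

    #include <stdint.h>

    #define PGSIZE  4096
    #define PTE_P   0x001ULL   /* present */
    #define PTE_W   0x002ULL   /* writable */
    #define PTE_U   0x004ULL   /* user-accessible */

    /* Leaf entry for the i-th page of a mapping that starts at 'phys'. */
    static uint64_t
    make_pte(uint64_t phys, uint64_t i, uint64_t flags)
    {
        return (phys + PGSIZE * i) | PTE_P | PTE_W | PTE_U | flags;
    }

    /* Pages covered by a byte length, as PMap_AllocMap computes at line 426. */
    static uint64_t
    page_count(uint64_t len)
    {
        return (len + PGSIZE - 1) / PGSIZE;
    }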
 
thread.c
     30  uint64_t stacktop = thr->kstack + PGSIZE;  in Thread_SetupKThread()
     66  tf.rsp = (uint64_t)arg2 + MEM_USERSPACE_STKLEN - PGSIZE;  in ThreadEnterUserLevelCB()
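Both matches compute an initial stack pointer: x86-64 stacks grow downward, so the kernel thread starts at the top of its single stack page, and the user thread's initial rsp sits one page below the top of its stack region, as the line 66 computation shows. A sketch, with MEM_USERSPACE_STKLEN given an illustrative value:

    #include <stdint.h>

    #define PGSIZE               4096
    #define MEM_USERSPACE_STKLEN (4 * 1024 * 1024)   /* illustrative value */

    /* Top of a one-page kernel stack. */
    static inline uint64_t kstack_top(uint64_t kstack)     { return kstack + PGSIZE; }

    /* Initial user rsp: one page below the top of the stack region. */
    static inline uint64_t ustack_rsp(uint64_t stack_base) { return stack_base + MEM_USERSPACE_STKLEN - PGSIZE; }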
 
mp.c
     66  args[1] = PGSIZE + (uint64_t)PAlloc_AllocPage();  in MPBootAP()
     82  PAlloc_Release((void *)(args[1] - PGSIZE));  in MPBootAP()
 
mbentry.c
     21  #define PAGE_ALIGN __attribute__((aligned(PGSIZE)))
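PAGE_ALIGN wraps GCC/Clang's aligned attribute so statically allocated objects land on a page boundary, which early-boot structures such as page tables and stacks require. A short usage sketch; the array name is illustrative, not taken from mbentry.c:

    #include <stdint.h>

    #define PGSIZE      4096
    #define PAGE_ALIGN  __attribute__((aligned(PGSIZE)))

    /* A statically allocated, page-aligned page of scratch memory. */
    static uint8_t boot_scratch[PGSIZE] PAGE_ALIGN;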
 
/sys/amd64/include/
pmap.h
     45  #define PPN2DMVA(ppn) (((ppn) << PGSIZE) + MEM_DIRECTMAP_BASE)
     46  #define DMVA2PPN(dmva) (((dmva) - MEM_DIRECTMAP_BASE) >> PGSIZE)
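Converting between a physical page number and a direct-map virtual address is arithmetically a shift by PGSHIFT (the base-2 log of the page size), since a PPN counts pages rather than bytes; note that the quoted macros shift by PGSIZE itself. A sketch of the conventional form, with an illustrative MEM_DIRECTMAP_BASE value:

    #include <stdint.h>

    #define PGSHIFT             12
    #define PGSIZE              (1ULL << PGSHIFT)
    #define MEM_DIRECTMAP_BASE  0xFFFF800000000000ULL   /* illustrative value */

    /* PPN <-> direct-map VA: shift by PGSHIFT, then offset by the direct-map base. */
    #define PPN2DMVA(ppn)   (((uint64_t)(ppn) << PGSHIFT) + MEM_DIRECTMAP_BASE)
    #define DMVA2PPN(dmva)  (((uint64_t)(dmva) - MEM_DIRECTMAP_BASE) >> PGSHIFT)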
 
amd64.h
     21  #define PGSIZE (1 << PGSHIFT)  macro
     22  #define PGMASK (PGSIZE - 1)
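PGSIZE is derived from PGSHIFT, and PGMASK selects the offset bits within a page, which is what the usual round-down/round-up helpers are built from. A sketch of those helpers; the helper names are illustrative, not taken from amd64.h:

    #include <stdint.h>

    #define PGSHIFT 12
    #define PGSIZE  (1 << PGSHIFT)
    #define PGMASK  (PGSIZE - 1)

    #define PGOFF(a)        ((uintptr_t)(a) & PGMASK)                 /* offset within the page */
    #define PGROUNDDOWN(a)  ((uintptr_t)(a) & ~(uintptr_t)PGMASK)     /* start of the page */
    #define PGROUNDUP(a)    (((uintptr_t)(a) + PGMASK) & ~(uintptr_t)PGMASK)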
 
/lib/libc/
malloc.c
     21  #define PGSIZE 4096  macro
     22  #define HEAP_INCREMENT (PGSIZE / 64)
 
/lib/libc/posix/
mman.c
     21  pagesize[0] = PGSIZE;  in getpagesizes()
 
/sys/dev/
ahci.c
    203  #define PGSIZE 4096  in AHCI_Init() macro
    204  ASSERT(sizeof(AHCI) <= PGSIZE);  in AHCI_Init()
    205  ASSERT(sizeof(AHCICommandList) <= PGSIZE);  in AHCI_Init()
    206  ASSERT(sizeof(AHCIRecvFIS) <= PGSIZE);  in AHCI_Init()
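The AHCI driver defines a local PGSIZE and asserts at init time that each of its DMA structures fits within a single page, so one page allocation can back each of them. A compile-time version of the same invariant, with illustrative structure sizes standing in for the real layouts:

    #include <assert.h>   /* static_assert (C11) */

    #define PGSIZE 4096

    /* Illustrative stand-ins; only the sizes matter here. */
    typedef struct { char raw[1024]; } AHCICommandList;   /* e.g. 32 slots x 32 bytes */
    typedef struct { char raw[256];  } AHCIRecvFIS;

    static_assert(sizeof(AHCICommandList) <= PGSIZE, "command list must fit in one page");
    static_assert(sizeof(AHCIRecvFIS)     <= PGSIZE, "receive FIS must fit in one page");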