GitHub Repository: att/ast
Path: blob/master/src/lib/libast/vmalloc/vmlast.c
/***********************************************************************
*                                                                      *
*               This software is part of the ast package               *
*          Copyright (c) 1985-2012 AT&T Intellectual Property          *
*                      and is licensed under the                       *
*                  Eclipse Public License, Version 1.0                 *
*                    by AT&T Intellectual Property                     *
*                                                                      *
*                A copy of the License is available at                 *
*           http://www.eclipse.org/org/documents/epl-v10.html          *
*         (with md5 checksum b35adb5213ca9657e911e9befb180842)         *
*                                                                      *
*              Information and Software Systems Research               *
*                            AT&T Research                             *
*                           Florham Park NJ                            *
*                                                                      *
*                   Glenn Fowler <[email protected]>                   *
*                    David Korn <[email protected]>                    *
*                     Phong Vo <[email protected]>                     *
*                                                                      *
***********************************************************************/
#if defined(_UWIN) && defined(_BLD_ast)

void _STUB_vmlast(){}

#else

#include    "vmhdr.h"

/* Allocation with freeing and reallocing of last allocated block only.
**
** Written by Kiem-Phong Vo, [email protected], 01/16/94.
*/
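
/* A minimal usage sketch (assumes the public vmalloc interface from
** <vmalloc.h>: vmopen, vmalloc, vmresize, vmfree, vmclose, and the Vmdcheap
** discipline; the function name vmlast_example is hypothetical).  A region
** opened with the Vmlast method hands out blocks sequentially; only the
** most recently allocated block may be freed or resized, which keeps the
** bookkeeping minimal for stack-like allocation patterns.
*/
#if 0
#include    <vmalloc.h>

static void vmlast_example(void)
{
    Vmalloc_t*  vm;
    char*       buf;

    if(!(vm = vmopen(Vmdcheap, Vmlast, 0)) )
        return;

    buf = (char*)vmalloc(vm, 64);                   /* most recent block           */
    buf = (char*)vmresize(vm, buf, 256, VM_RSCOPY); /* ok: it is the last block    */
    (void)vmfree(vm, buf);                          /* ok: freeing the last block  */

    vmclose(vm);                                    /* release the whole region    */
}
#endif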

#if __STD_C
static Void_t* lastalloc(Vmalloc_t* vm, size_t size, int local)
#else
static Void_t* lastalloc(vm, size, local)
Vmalloc_t*  vm;
size_t      size;
int         local;
#endif
{
    Block_t     *tp, *next;
    Seg_t       *seg, *last;
    size_t      s;
    Vmdata_t    *vd = vm->data;
    size_t      orgsize = size;

    SETLOCK(vm, local);

    size = size < ALIGN ? ALIGN : ROUND(size,ALIGN);
    for(last = NIL(Seg_t*), seg = vd->seg; seg; last = seg, seg = seg->next)
    {   if(!(tp = seg->free) || (SIZE(tp)+sizeof(Head_t)) < size)
            continue;
        if(last)
        {   last->next = seg->next;
            seg->next = vd->seg;
            vd->seg = seg;
        }
        goto got_block;
    }

    /* there is no usable free space in region, try extending */
    if((tp = (*_Vmextend)(vm,size,NIL(Vmsearch_f))) )
    {   seg = SEG(tp);
        goto got_block;
    }
    else    goto done;

got_block:
    if((s = SIZE(tp)) >= size)
    {   next = (Block_t*)((Vmuchar_t*)tp+size);
        SIZE(next) = s - size;
        SEG(next) = seg;
        seg->free = next;
    }
    else    seg->free = NIL(Block_t*);

    vd->free = seg->last = tp;

    if(!local && (vd->mode&VM_TRACE) && _Vmtrace)
        (*_Vmtrace)(vm, NIL(Vmuchar_t*), (Vmuchar_t*)tp, orgsize, 0);

done:
    CLRLOCK(vm, local);

    return (Void_t*)tp;
}
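
/* Free a block: only the most recently allocated block (vd->free) may be
** freed.  A NIL argument is a no-op returning 0; any other address that is
** not the last block is rejected and -1 is returned.
*/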

#if __STD_C
static int lastfree(Vmalloc_t* vm, reg Void_t* data, int local )
#else
static int lastfree(vm, data, local)
Vmalloc_t*  vm;
Void_t*     data;
int         local;
#endif
{
    Seg_t       *seg;
    Block_t     *fp;
    size_t      s;
    Vmdata_t    *vd = vm->data;

    if(!data)
        return 0;

    SETLOCK(vm, local);

    if(data != (Void_t*)vd->free)
        data = NIL(Void_t*); /* signaling an error */
    else
    {   seg = vd->seg;
        if(!local && (vd->mode&VM_TRACE) && _Vmtrace)
        {   if(seg->free )
                s = (Vmuchar_t*)(seg->free) - (Vmuchar_t*)data;
            else    s = (Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data;
            (*_Vmtrace)(vm, (Vmuchar_t*)data, NIL(Vmuchar_t*), s, 0);
        }

        vd->free = NIL(Block_t*);
        fp = (Block_t*)data;
        SEG(fp) = seg;
        SIZE(fp) = ((Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data) - sizeof(Head_t);
        seg->free = fp;
        seg->last = NIL(Block_t*);
    }

    CLRLOCK(vm, local);

    return data ? 0 : -1;
}
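
/* Resize a block: a NIL data pointer degenerates to an allocation and a
** size of 0 to a free.  The last block is grown in place when its segment
** can be extended; otherwise, and for any earlier block, a fresh block is
** allocated only if VM_RSMOVE or VM_RSCOPY is set (VM_RSCOPY also copies
** the old contents).  VM_RSZERO clears any newly added space.
*/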

#if __STD_C
static Void_t* lastresize(Vmalloc_t* vm, reg Void_t* data, size_t size, int type, int local)
#else
static Void_t* lastresize(vm, data, size, type, local )
Vmalloc_t*  vm;
reg Void_t* data;
size_t      size;
int         type;
int         local;
#endif
{
    Block_t     *tp;
    Seg_t       *seg;
    ssize_t     s, ds;
    Void_t      *addr;
    size_t      oldsize = 0;
    Void_t      *orgdata = data;
    size_t      orgsize = size;
    Vmdata_t    *vd = vm->data;

    if(!data)
    {   data = lastalloc(vm, size, local);
        if(data && (type&VM_RSZERO) )
            memset(data, 0, size);
        return data;
    }
    if(size <= 0)
    {   (void)lastfree(vm, data, local);
        return NIL(Void_t*);
    }

    SETLOCK(vm, local);

    if(data == (Void_t*)vd->free)
        seg = vd->seg;
    else
    {   /* see if it was one of ours */
        for(seg = vd->seg; seg; seg = seg->next)
            if(data >= seg->addr && data < (Void_t*)seg->baddr)
                break;
        if(!seg || (VLONG(data)%ALIGN) != 0 ||
           (seg->last && (Vmuchar_t*)data > (Vmuchar_t*)seg->last) )
        {   data = NIL(Void_t*);
            goto done;
        }
    }

    /* set 's' to be the current available space */
    if(data != seg->last)
    {   if(seg->last && (Vmuchar_t*)data < (Vmuchar_t*)seg->last)
            oldsize = (Vmuchar_t*)seg->last - (Vmuchar_t*)data;
        else    oldsize = (Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data;
        s = -1;
    }
    else
    {   s = (Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data;
        if(!(tp = seg->free) )
            oldsize = s;
        else
        {   oldsize = (Vmuchar_t*)tp - (Vmuchar_t*)data;
            seg->free = NIL(Block_t*);
        }
    }

    size = size < ALIGN ? ALIGN : ROUND(size,ALIGN);
    if(s < 0 || (ssize_t)size > s)
    {   if(s >= 0) /* amount to extend */
        {   ds = size-s; ds = ROUND(ds,vd->incr);
            addr = (*vm->disc->memoryf)(vm, seg->addr, seg->extent,
                                        seg->extent+ds, vm->disc);
            if(addr == seg->addr)
            {   s += ds;
                seg->size += ds;
                seg->extent += ds;
                seg->baddr += ds;
                SIZE(BLOCK(seg->baddr)) = BUSY;
            }
            else    goto do_alloc;
        }
        else
        { do_alloc:
            if(!(type&(VM_RSMOVE|VM_RSCOPY)) )
                data = NIL(Void_t*);
            else
            {   tp = vd->free;
                if(!(addr = KPVALLOC(vm,size,lastalloc)) )
                {   vd->free = tp;
                    data = NIL(Void_t*);
                }
                else
                {   if(type&VM_RSCOPY)
                    {   ds = oldsize < size ? oldsize : size;
                        memcpy(addr, data, ds);
                    }

                    if(s >= 0 && seg != vd->seg)
                    {   tp = (Block_t*)data;
                        SEG(tp) = seg;
                        SIZE(tp) = s - sizeof(Head_t);
                        seg->free = tp;
                    }

                    /* new block and size */
                    data = addr;
                    seg = vd->seg;
                    s = (Vmuchar_t*)BLOCK(seg->baddr) -
                        (Vmuchar_t*)data;
                    seg->free = NIL(Block_t*);
                }
            }
        }
    }

    if(data)
    {   if(s >= (ssize_t)(size+sizeof(Head_t)) )
        {   tp = (Block_t*)((Vmuchar_t*)data + size);
            SEG(tp) = seg;
            SIZE(tp) = (s - size) - sizeof(Head_t);
            seg->free = tp;
        }

        vd->free = seg->last = (Block_t*)data;

        if(!local && (vd->mode&VM_TRACE) && _Vmtrace)
            (*_Vmtrace)(vm,(Vmuchar_t*)orgdata,(Vmuchar_t*)data,orgsize,0);

        if((type&VM_RSZERO) && size > oldsize)
            memset((Void_t*)((Vmuchar_t*)data + oldsize), 0, size-oldsize);
    }

done:
    CLRLOCK(vm, local);

    return data;
}
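
/* Address check: returns the byte offset of addr from the start of the
** last allocated block if addr lies between that block and the end of its
** segment, and -1L otherwise.
*/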

#if __STD_C
static long lastaddr(Vmalloc_t* vm, Void_t* addr, int local)
#else
static long lastaddr(vm, addr, local)
Vmalloc_t*  vm;
Void_t*     addr;
int         local;
#endif
{
    long        offset;
    Vmdata_t    *vd = vm->data;

    SETLOCK(vm, local);

    if(!vd->free || addr < (Void_t*)vd->free || addr >= (Void_t*)vd->seg->baddr)
        offset = -1L;
    else    offset = (long)((Vmuchar_t*)addr - (Vmuchar_t*)vd->free);

    CLRLOCK(vm, local);

    return offset;
}
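
/* Size query: returns the size of the last allocated block when addr is
** exactly its start address, and -1L for any other address.
*/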

#if __STD_C
static long lastsize(Vmalloc_t* vm, Void_t* addr, int local)
#else
static long lastsize(vm, addr, local)
Vmalloc_t*  vm;
Void_t*     addr;
int         local;
#endif
{
    long        size;
    Vmdata_t    *vd = vm->data;

    SETLOCK(vm, local);

    if(!vd->free || addr != (Void_t*)vd->free )
        size = -1L;
    else if(vd->seg->free)
        size = (long)((Vmuchar_t*)vd->seg->free - (Vmuchar_t*)addr);
    else    size = (long)((Vmuchar_t*)vd->seg->baddr - (Vmuchar_t*)addr - sizeof(Head_t));

    CLRLOCK(vm, local);

    return size;
}
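
/* Compaction: for each segment whose tail is free, attempt to return that
** space to the discipline via _Vmtruncate.
*/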

#if __STD_C
static int lastcompact(Vmalloc_t* vm, int local)
#else
static int lastcompact(vm, local)
Vmalloc_t*  vm;
int         local;
#endif
{
    ssize_t     s;
    Block_t     *fp;
    Seg_t       *seg, *next;
    Vmdata_t    *vd = vm->data;

    SETLOCK(vm, local);

    for(seg = vd->seg; seg; seg = next)
    {   next = seg->next;

        if(!(fp = seg->free))
            continue;

        seg->free = NIL(Block_t*);
        if(seg->size == (s = SIZE(fp)&~BITS))
            s = seg->extent;
        else    s += sizeof(Head_t);

        if((*_Vmtruncate)(vm,seg,s,1) == s)
            seg->free = fp;
    }

    if((vd->mode&VM_TRACE) && _Vmtrace)
        (*_Vmtrace)(vm,(Vmuchar_t*)0,(Vmuchar_t*)0,0,0);

    CLRLOCK(vm, local);
    return 0;
}
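
/* Aligned allocation: over-allocate by the requested alignment, advance the
** result to the next multiple of align, and hand the unused tail back to the
** segment as free space.
*/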

#if __STD_C
static Void_t* lastalign(Vmalloc_t* vm, size_t size, size_t align, int local)
#else
static Void_t* lastalign(vm, size, align, local)
Vmalloc_t*  vm;
size_t      size;
size_t      align;
int         local;
#endif
{
    Vmuchar_t   *data;
    Seg_t       *seg;
    Block_t     *next;
    size_t      s, orgsize = size, orgalign = align;
    Vmdata_t    *vd = vm->data;

    if(size <= 0 || align <= 0)
        return NIL(Void_t*);

    SETLOCK(vm, local);

    size = size <= TINYSIZE ? TINYSIZE : ROUND(size,ALIGN);
    align = MULTIPLE(align,ALIGN);

    s = size + align;
    if(!(data = (Vmuchar_t*)KPVALLOC(vm,s,lastalloc)) )
        goto done;

    /* find the segment containing this block */
    for(seg = vd->seg; seg; seg = seg->next)
        if(seg->last == (Block_t*)data)
            break;
    /**/ASSERT(seg);

    /* get a suitably aligned address */
    if((s = (size_t)(VLONG(data)%align)) != 0)
        data += align-s; /**/ASSERT((VLONG(data)%align) == 0);

    /* free the unused tail */
    next = (Block_t*)(data+size);
    if((s = (seg->baddr - (Vmuchar_t*)next)) >= sizeof(Block_t))
    {   SEG(next) = seg;
        SIZE(next) = s - sizeof(Head_t);
        seg->free = next;
    }

    vd->free = seg->last = (Block_t*)data;

    if(!local && (vd->mode&VM_TRACE) && _Vmtrace)
        (*_Vmtrace)(vm,NIL(Vmuchar_t*),data,orgsize,orgalign);

done:
    CLRLOCK(vm, local);

    return (Void_t*)data;
}
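
/* A minimal sketch of aligned allocation through the public vmalign()
** entry point (the function name page_buffer and the sizes are hypothetical).
*/
#if 0
#include    <vmalloc.h>

static Void_t* page_buffer(Vmalloc_t* vm)
{
    return vmalign(vm, 8192, 4096);  /* 8K block aligned to a 4K boundary */
}
#endif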

/* Public method for free-1 allocation */
static Vmethod_t _Vmlast =
{
    lastalloc,
    lastresize,
    lastfree,
    lastaddr,
    lastsize,
    lastcompact,
    lastalign,
    VM_MTLAST
};

__DEFINE__(Vmethod_t*,Vmlast,&_Vmlast);

#ifdef NoF
NoF(vmlast)
#endif

#endif