xemacs-beta: comparison of src/ralloc.c @ 2500:3d8143fc88e1
[xemacs-hg @ 2005-01-24 23:33:30 by ben]
get working with VC7
config.inc.samp: Declare OPTIONAL_LIBRARY_DIR as root of library directories.
Redo all graphics library defaults to mirror the versions and
directories in the current binary aux distribution on the xemacs
web site. Enable TIFF and COMPFACE by default since you can
now compile with them and binary libs are provided.
xemacs.mak: Put our own directories first in case of conflict (e.g. config.h
in compface).
xemacs.mak: Use MSVCRT to avoid link problems.
s/windowsnt.h:
bytecode.c, print.c: Add casts to avoid warnings.
compiler.h: Add MSC_VERSION and include definitions of DOESNT_RETURN and
friends here, like for GCC. Need different definitions for VC7
and VC6.
s/windowsnt.h: Remove stuff moved to compiler.h. Disable warning 4646 ("function
declared with __declspec(noreturn) has non-void return type")
on VC7 since lots of Lisp primitives trigger this and there is
no easy way to kludge around the warning.
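
As a rough illustration of the compiler-version plumbing described in the two entries above, here is a minimal sketch; MSC_VERSION and DOESNT_RETURN are named in the entries, but the exact spellings, the split between compiler.h and s/windowsnt.h, and the version cutoff are assumptions, not the real XEmacs definitions.

```c
/* Hypothetical sketch only.  Map the compiler's own version macro to
   MSC_VERSION (VC6 reports _MSC_VER as 1200, VC7 as 1300) and key the
   noreturn declarations and the warning suppression off it. */
#ifdef _MSC_VER
# define MSC_VERSION _MSC_VER
#endif

#ifdef MSC_VERSION
/* Assumed spelling of the noreturn annotation, analogous to GCC's
   __attribute__ ((noreturn)). */
# define DOESNT_RETURN __declspec (noreturn) void
# if MSC_VERSION >= 1300
/* VC7 emits warning C4646 ("function declared with __declspec(noreturn)
   has non-void return type") for lots of Lisp primitives, so silence it. */
#  pragma warning (disable: 4646)
# endif
#endif
```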
glyphs-eimage.c: Some really nasty hacks to allow TIFF and JPEG to both be compiled.
#### The better solution is to move the TIFF and JPEG code to
different files.
glyphs-msw.c: Define __STDC__ to avoid problems with compface.h.
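
A minimal sketch of that trick, assuming compface.h keys its ANSI prototypes off __STDC__, which classic MSVC only predefines under /Za; the actual guard in glyphs-msw.c may differ.

```c
/* Hypothetical sketch: make compface.h see an ANSI compiler by supplying
   __STDC__ by hand before the include. */
#if defined (_MSC_VER) && !defined (__STDC__)
# define __STDC__ 1
#endif
#include <compface.h>
```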
intl-auto-encap-win32.c, intl-auto-encap-win32.h, intl-encap-win32.c, syswindows.h: Those wankers at Microsoft cannot leave well enough alone.
Various functions change parameter types semi-randomly between
VC6 and VC7, so we need to include our own versions that
can handle both kinds with appropriate casting.
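
A purely hypothetical illustration of that wrapper-with-cast idea (the names below are invented, not real Win32 APIs): suppose a parameter is declared char * by the VC6 headers but const char * by the VC7 headers; one local version with an explicit cast lets every caller compile against either set of headers.

```c
/* Invented stand-in for a system prototype; imagine the VC6 headers
   declared the parameter as plain "char *buf" instead. */
void sys_frob (const char *buf);

/* Our own version: accept the stricter type and cast, so the call is
   valid whichever prototype the headers actually provide. */
static void
qxe_frob (const char *buf)
{
  sys_frob ((char *) buf);
}
```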
EmacsFrame.c, EmacsShell-sub.c, EmacsShell.c, alloc.c, alloca.c, buffer.c, bytecode.c, charset.h, chartab.c, cm.c, console-stream.c, console.c, data.c, debug.h, device-msw.c, device-tty.c, device-x.c, doprnt.c, dumper.c, dynarr.c, elhash.c, emacs.c, eval.c, event-Xt.c, event-gtk.c, event-msw.c, event-stream.c, events.c, extents.c, faces.c, file-coding.c, fileio.c, fns.c, font-lock.c, frame-gtk.c, frame-x.c, frame.c, free-hook.c, gccache-gtk.c, glyphs-eimage.c, glyphs-gtk.c, glyphs-msw.c, glyphs-x.c, glyphs.c, gtk-glue.c, gutter.c, input-method-xlib.c, insdel.c, intl-win32.c, keymap.c, lisp.h, lread.c, lstream.c, macros.c, malloc.c, menubar-gtk.c, menubar-msw.c, menubar-x.c, mule-coding.c, native-gtk-toolbar.c, number.c, objects-msw.c, objects.c, print.c, process-nt.c, process-unix.c, process.c, ralloc.c, rangetab.c, redisplay-gtk.c, redisplay-msw.c, redisplay-output.c, redisplay-tty.c, redisplay-x.c, redisplay.c, regex.c, scrollbar-gtk.c, scrollbar-x.c, search.c, select-x.c, signal.c, specifier.c, specifier.h, strftime.c, sunplay.c, symbols.c, sysdep.c, sysproc.h, text.c, text.h, toolbar-common.c, toolbar-msw.c, toolbar.c, ui-gtk.c, unexnt.c, unicode.c, win32.c, window.c, xgccache.c, s/windowsnt.h: abort() -> ABORT(). Eliminate preprocessor games with abort()
since they create huge problems in VC7, solvable only by including
massive numbers of files in every compile (and not worth it).
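
The side-by-side diff below shows the resulting mechanical abort() -> ABORT() substitution in ralloc.c. As a rough sketch of what the macro buys, assuming an internal fatal-error helper roughly like this (the real definition in the XEmacs headers may differ):

```c
#include <stdlib.h>

/* Hypothetical prototype for the assumed fatal-error helper. */
extern void assert_failed (const char *file, int line, const char *expr);

/* Because ABORT() is our own macro rather than a redefinition of abort(),
   no preprocessor games with the C library's name are needed, which is
   exactly what broke under the VC7 headers. */
#ifdef DEBUG_XEMACS
# define ABORT() assert_failed (__FILE__, __LINE__, "ABORT()")
#else
# define ABORT() abort ()
#endif
```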
author | ben |
---|---|
date | Mon, 24 Jan 2005 23:34:34 +0000 |
parents | 19c89a2e24b6 |
children | d3bdda4872ac |
2499:4c5ee4d2e921 | 2500:3d8143fc88e1 |
---|---|
254 if (heap->start <= address && address <= heap->end) | 254 if (heap->start <= address && address <= heap->end) |
255 break; | 255 break; |
256 } | 256 } |
257 | 257 |
258 if (! heap) | 258 if (! heap) |
259 abort (); | 259 ABORT (); |
260 | 260 |
261 /* If we can't fit SIZE bytes in that heap, | 261 /* If we can't fit SIZE bytes in that heap, |
262 try successive later heaps. */ | 262 try successive later heaps. */ |
263 while (heap && address + size > heap->end) | 263 while (heap && address + size > heap->end) |
264 { | 264 { |
360 if ((char *)last_heap->end - (char *)last_heap->bloc_start <= excess) | 360 if ((char *)last_heap->end - (char *)last_heap->bloc_start <= excess) |
361 { | 361 { |
362 /* This heap should have no blocs in it. */ | 362 /* This heap should have no blocs in it. */ |
363 if (last_heap->first_bloc != NIL_BLOC | 363 if (last_heap->first_bloc != NIL_BLOC |
364 || last_heap->last_bloc != NIL_BLOC) | 364 || last_heap->last_bloc != NIL_BLOC) |
365 abort (); | 365 ABORT (); |
366 | 366 |
367 /* Return the last heap, with its header, to the system. */ | 367 /* Return the last heap, with its header, to the system. */ |
368 excess = (char *)last_heap->end - (char *)last_heap->start; | 368 excess = (char *)last_heap->end - (char *)last_heap->start; |
369 last_heap = last_heap->prev; | 369 last_heap = last_heap->prev; |
370 last_heap->next = NIL_HEAP; | 370 last_heap->next = NIL_HEAP; |
375 - (char *) ROUNDUP ((char *)last_heap->end - excess); | 375 - (char *) ROUNDUP ((char *)last_heap->end - excess); |
376 last_heap->end -= excess; | 376 last_heap->end -= excess; |
377 } | 377 } |
378 | 378 |
379 if ((*real_morecore) (- excess) == 0) | 379 if ((*real_morecore) (- excess) == 0) |
380 abort (); | 380 ABORT (); |
381 } | 381 } |
382 } | 382 } |
383 | 383 |
384 /* Return the total size in use by relocating allocator, | 384 /* Return the total size in use by relocating allocator, |
385 above where malloc gets space. */ | 385 above where malloc gets space. */ |
478 { | 478 { |
479 register bloc_ptr b = bloc; | 479 register bloc_ptr b = bloc; |
480 | 480 |
481 /* No need to ever call this if arena is frozen, bug somewhere! */ | 481 /* No need to ever call this if arena is frozen, bug somewhere! */ |
482 if (r_alloc_freeze_level) | 482 if (r_alloc_freeze_level) |
483 abort(); | 483 ABORT(); |
484 | 484 |
485 while (b) | 485 while (b) |
486 { | 486 { |
487 /* If bloc B won't fit within HEAP, | 487 /* If bloc B won't fit within HEAP, |
488 move to the next heap and try again. */ | 488 move to the next heap and try again. */ |
633 POINTER address; | 633 POINTER address; |
634 size_t old_size; | 634 size_t old_size; |
635 | 635 |
636 /* No need to ever call this if arena is frozen, bug somewhere! */ | 636 /* No need to ever call this if arena is frozen, bug somewhere! */ |
637 if (r_alloc_freeze_level) | 637 if (r_alloc_freeze_level) |
638 abort(); | 638 ABORT(); |
639 | 639 |
640 if (bloc == NIL_BLOC || size == bloc->size) | 640 if (bloc == NIL_BLOC || size == bloc->size) |
641 return 1; | 641 return 1; |
642 | 642 |
643 for (heap = first_heap; heap != NIL_HEAP; heap = heap->next) | 643 for (heap = first_heap; heap != NIL_HEAP; heap = heap->next) |
645 if (heap->bloc_start <= bloc->data && bloc->data <= heap->end) | 645 if (heap->bloc_start <= bloc->data && bloc->data <= heap->end) |
646 break; | 646 break; |
647 } | 647 } |
648 | 648 |
649 if (heap == NIL_HEAP) | 649 if (heap == NIL_HEAP) |
650 abort (); | 650 ABORT (); |
651 | 651 |
652 old_size = bloc->size; | 652 old_size = bloc->size; |
653 bloc->size = size; | 653 bloc->size = size; |
654 | 654 |
655 /* Note that bloc could be moved into the previous heap. */ | 655 /* Note that bloc could be moved into the previous heap. */ |
976 if (! r_alloc_initialized) | 976 if (! r_alloc_initialized) |
977 init_ralloc (); | 977 init_ralloc (); |
978 | 978 |
979 dead_bloc = find_bloc (ptr); | 979 dead_bloc = find_bloc (ptr); |
980 if (dead_bloc == NIL_BLOC) | 980 if (dead_bloc == NIL_BLOC) |
981 abort (); | 981 ABORT (); |
982 | 982 |
983 free_bloc (dead_bloc); | 983 free_bloc (dead_bloc); |
984 *ptr = 0; | 984 *ptr = 0; |
985 | 985 |
986 #ifdef emacs | 986 #ifdef emacs |
1021 return r_alloc (ptr, 0); | 1021 return r_alloc (ptr, 0); |
1022 } | 1022 } |
1023 | 1023 |
1024 bloc = find_bloc (ptr); | 1024 bloc = find_bloc (ptr); |
1025 if (bloc == NIL_BLOC) | 1025 if (bloc == NIL_BLOC) |
1026 abort (); | 1026 ABORT (); |
1027 | 1027 |
1028 if (size < bloc->size) | 1028 if (size < bloc->size) |
1029 { | 1029 { |
1030 /* Wouldn't it be useful to actually resize the bloc here? */ | 1030 /* Wouldn't it be useful to actually resize the bloc here? */ |
1031 /* I think so too, but not if it's too expensive... */ | 1031 /* I think so too, but not if it's too expensive... */ |
1091 | 1091 |
1092 if (! r_alloc_initialized) | 1092 if (! r_alloc_initialized) |
1093 init_ralloc (); | 1093 init_ralloc (); |
1094 | 1094 |
1095 if (--r_alloc_freeze_level < 0) | 1095 if (--r_alloc_freeze_level < 0) |
1096 abort (); | 1096 ABORT (); |
1097 | 1097 |
1098 /* This frees all unused blocs. It is not too inefficient, as the resize | 1098 /* This frees all unused blocs. It is not too inefficient, as the resize |
1099 and memmove is done only once. Afterwards, all unreferenced blocs are | 1099 and memmove is done only once. Afterwards, all unreferenced blocs are |
1100 already shrunk to zero size. */ | 1100 already shrunk to zero size. */ |
1101 if (!r_alloc_freeze_level) | 1101 if (!r_alloc_freeze_level) |
1135 first_heap = last_heap = &heap_base; | 1135 first_heap = last_heap = &heap_base; |
1136 first_heap->next = first_heap->prev = NIL_HEAP; | 1136 first_heap->next = first_heap->prev = NIL_HEAP; |
1137 first_heap->start = first_heap->bloc_start | 1137 first_heap->start = first_heap->bloc_start |
1138 = virtual_break_value = break_value = (*real_morecore) (0); | 1138 = virtual_break_value = break_value = (*real_morecore) (0); |
1139 if (break_value == NIL) | 1139 if (break_value == NIL) |
1140 abort (); | 1140 ABORT (); |
1141 | 1141 |
1142 page_size = PAGE; | 1142 page_size = PAGE; |
1143 extra_bytes = ROUNDUP (50000); | 1143 extra_bytes = ROUNDUP (50000); |
1144 | 1144 |
1145 #ifdef DOUG_LEA_MALLOC | 1145 #ifdef DOUG_LEA_MALLOC |
1811 ADDRESS_CHAIN p = addr_chain; | 1811 ADDRESS_CHAIN p = addr_chain; |
1812 for (; p; p = p->next ) | 1812 for (; p; p = p->next ) |
1813 { | 1813 { |
1814 if (p->addr == addr) | 1814 if (p->addr == addr) |
1815 { | 1815 { |
1816 if (p->sz != sz) abort(); /* ACK! Shouldn't happen at all. */ | 1816 if (p->sz != sz) ABORT(); /* ACK! Shouldn't happen at all. */ |
1817 munmap( (VM_ADDR) p->addr, p->sz ); | 1817 munmap( (VM_ADDR) p->addr, p->sz ); |
1818 p->flag = empty; | 1818 p->flag = empty; |
1819 break; | 1819 break; |
1820 } | 1820 } |
1821 } | 1821 } |
1822 if (!p) abort(); /* Can't happen... we've got a block to free which is not in | 1822 if (!p) ABORT(); /* Can't happen... we've got a block to free which is not in |
1823 the address list. */ | 1823 the address list. */ |
1824 Coalesce_Addr_Blocks(); | 1824 Coalesce_Addr_Blocks(); |
1825 } | 1825 } |
1826 #else /* !MMAP_GENERATE_ADDRESSES */ | 1826 #else /* !MMAP_GENERATE_ADDRESSES */ |
1827 /* This is an alternate (simpler) implementation in cases where the | 1827 /* This is an alternate (simpler) implementation in cases where the |
1865 REGEX_MALLOC_CHECK (); | 1865 REGEX_MALLOC_CHECK (); |
1866 | 1866 |
1867 switch(r_alloc_initialized) | 1867 switch(r_alloc_initialized) |
1868 { | 1868 { |
1869 case 0: | 1869 case 0: |
1870 abort(); | 1870 ABORT(); |
1871 case 1: | 1871 case 1: |
1872 *ptr = (POINTER) UNDERLYING_MALLOC(size); | 1872 *ptr = (POINTER) UNDERLYING_MALLOC(size); |
1873 break; | 1873 break; |
1874 default: | 1874 default: |
1875 mh = new_mmap_handle( size ); | 1875 mh = new_mmap_handle( size ); |
1908 { | 1908 { |
1909 REGEX_MALLOC_CHECK (); | 1909 REGEX_MALLOC_CHECK (); |
1910 | 1910 |
1911 switch( r_alloc_initialized) { | 1911 switch( r_alloc_initialized) { |
1912 case 0: | 1912 case 0: |
1913 abort(); | 1913 ABORT(); |
1914 | 1914 |
1915 case 1: | 1915 case 1: |
1916 UNDERLYING_FREE( *ptr ); /* Certain this is from the heap. */ | 1916 UNDERLYING_FREE( *ptr ); /* Certain this is from the heap. */ |
1917 break; | 1917 break; |
1918 | 1918 |
1952 { | 1952 { |
1953 REGEX_MALLOC_CHECK (); | 1953 REGEX_MALLOC_CHECK (); |
1954 | 1954 |
1955 if (r_alloc_initialized == 0) | 1955 if (r_alloc_initialized == 0) |
1956 { | 1956 { |
1957 abort (); | 1957 ABORT (); |
1958 return 0; /* suppress compiler warning */ | 1958 return 0; /* suppress compiler warning */ |
1959 } | 1959 } |
1960 else if (r_alloc_initialized == 1) | 1960 else if (r_alloc_initialized == 1) |
1961 { | 1961 { |
1962 POINTER tmp = (POINTER) realloc(*ptr, sz); | 1962 POINTER tmp = (POINTER) realloc(*ptr, sz); |