gen-build.py 6.6 KB

#!/usr/bin/env python
#
# USAGE: gen-build.py TYPE
#
# where TYPE is one of: make, dsp, vcproj
#
# It reads build.conf from the current directory, and produces its output
# into the current directory.
#
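#
# For illustration only: a build.conf of the sort this script reads might look
# roughly like the following (the section and option names come from the
# parser.get() calls below; the paths and module name are hypothetical):
#
#   [options]
#   paths = tables/*.c strings/*.c
#   headers = include/*.h
#   platform_dirs = file_io locks
#   modules = mod_example
#   dsp = libapr.dsp
#
#   [mod_example]
#   paths = modules/example/*.c
#   target = mod_example.la
#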
import os
import ConfigParser
import getopt
import string
import glob
import re

#import ezt

#
# legal platforms: aix, beos, netware, os2, os390, unix, win32
# 'make' users: aix, beos, os2, os390, unix, win32 (mingw)
#
PLATFORMS = [ 'aix', 'beos', 'netware', 'os2', 'os390', 'unix', 'win32' ]
MAKE_PLATFORMS = [
  ('unix', None),
  ('aix', 'unix'),
  ('beos', 'unix'),
  ('os2', 'unix'),
  ('os390', 'unix'),
  ('win32', 'unix'),
  ]

# note: MAKE_PLATFORMS is an ordered set. we want to generate unix symbols
# first, so that the later platforms can reference them.
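# each entry is a (platform, parent) pair: when a platform has no directory of
# its own under a given subdir, main() falls back to the parent platform's
# OBJECTS_<subdir>_<parent> symbol instead.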

def main():
  parser = ConfigParser.ConfigParser()
  parser.read('build.conf')

  if parser.has_option('options', 'dsp'):
    dsp_file = parser.get('options', 'dsp')
  else:
    dsp_file = None

  headers = get_files(parser.get('options', 'headers'))

  # compute the relevant headers, along with the implied includes
  legal_deps = { }
  for fname in headers:
    legal_deps[os.path.basename(fname)] = fname

  h_deps = { }
  for fname in headers:
    h_deps[os.path.basename(fname)] = extract_deps(fname, legal_deps)
  resolve_deps(h_deps)

  f = open('build-outputs.mk', 'w')
  f.write('# DO NOT EDIT. AUTOMATICALLY GENERATED.\n\n')

  # write out the platform-independent files
  files = get_files(parser.get('options', 'paths'))
  objects, dirs = write_objects(f, legal_deps, h_deps, files)
  f.write('\nOBJECTS_all = %s\n\n' % string.join(objects))

  # for each platform and each subdirectory holding platform-specific files,
  # write out their compilation rules, and an OBJECTS_<subdir>_<plat> symbol.
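  #
  # As a rough sketch (subdirectory names hypothetical), the generated
  # makefile fragment contains groups shaped like:
  #
  #   OBJECTS_file_io_unix = file_io/unix/open.lo ...
  #   OBJECTS_unix = $(OBJECTS_all) $(OBJECTS_file_io_unix) ...
  #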
  for platform, parent in MAKE_PLATFORMS:

    # record the object symbols to build for each platform
    group = [ '$(OBJECTS_all)' ]

    # If we're doing win32, we're going to look in the libapr.dsp file
    # for those files that we have to manually add to our list.
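    # (illustrative, hypothetical path) a .dsp line such as
    #   SOURCE=.\time\unix\time.c
    # becomes the inherited object 'time/unix/time.lo' below.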
    inherit_parent = { }
    if platform == 'win32' and dsp_file:
      for line in open(dsp_file).readlines():
        if line[:7] != 'SOURCE=':
          continue
        if line[7:].find('unix') != -1:
          # skip the leading .\ and split it out
          inherit_files = line[9:].strip().split('\\')
          # change the .c to .lo
          assert inherit_files[-1][-2:] == '.c'
          inherit_files[-1] = inherit_files[-1][:-2] + '.lo'
          # replace the \\'s with /'s
          inherit_line = '/'.join(inherit_files)
          if not inherit_parent.has_key(inherit_files[0]):
            inherit_parent[inherit_files[0]] = []
          inherit_parent[inherit_files[0]].append(inherit_line)

    for subdir in string.split(parser.get('options', 'platform_dirs')):
      path = '%s/%s' % (subdir, platform)
      if not os.path.exists(path):
        # this subdir doesn't have a subdir for this platform, so we'll
        # use the parent-platform's set of symbols
        if parent:
          group.append('$(OBJECTS_%s_%s)' % (subdir, parent))
        continue

      # remember that this directory has files/objects
      dirs[path] = None

      # write out the compilation lines for this subdir
      files = get_files(path + '/*.c')
      objects, _unused = write_objects(f, legal_deps, h_deps, files)

      if inherit_parent.has_key(subdir):
        objects = objects + inherit_parent[subdir]

      symname = 'OBJECTS_%s_%s' % (subdir, platform)
      objects.sort()

      # and write the symbol for the whole group
      f.write('\n%s = %s\n\n' % (symname, string.join(objects)))

      # and include that symbol in the group
      group.append('$(%s)' % symname)

    group.sort()

    # write out a symbol which contains the necessary files
    f.write('OBJECTS_%s = %s\n\n' % (platform, string.join(group)))

  f.write('HEADERS = $(top_srcdir)/%s\n\n' % string.join(headers, ' $(top_srcdir)/'))
  f.write('SOURCE_DIRS = %s $(EXTRA_SOURCE_DIRS)\n\n' % string.join(dirs.keys()))

  if parser.has_option('options', 'modules'):
    modules = parser.get('options', 'modules')

    for mod in string.split(modules):
      files = get_files(parser.get(mod, 'paths'))
      objects, _unused = write_objects(f, legal_deps, h_deps, files)
      flat_objects = string.join(objects)
      f.write('OBJECTS_%s = %s\n' % (mod, flat_objects))

      if parser.has_option(mod, 'target'):
        target = parser.get(mod, 'target')
        f.write('MODULE_%s = %s\n' % (mod, target))
        f.write('%s: %s\n' % (target, flat_objects))
        f.write('\t$(LINK_MODULE) -o $@ $(OBJECTS_%s) $(LDADD_%s)\n' % (mod, mod))
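        # for a hypothetical module 'mod_example' with target 'mod_example.la',
        # the rule written above takes the form:
        #   mod_example.la: modules/example/mod_example.lo
        #   <tab>$(LINK_MODULE) -o $@ $(OBJECTS_mod_example) $(LDADD_mod_example)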
      f.write('\n')

  # Build a list of all necessary directories in build tree
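  # (each parent is included too: a directory of 'foo/bar/baz' contributes
  # 'foo', 'foo/bar', and 'foo/bar/baz')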
  alldirs = { }
  for dir in dirs.keys():
    d = dir
    while d:
      alldirs[d] = None
      d = os.path.dirname(d)

  # Sort so 'foo' is before 'foo/bar'
  keys = alldirs.keys()
  keys.sort()
  f.write('BUILD_DIRS = %s\n\n' % string.join(keys))
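
  # the .make.dirs stamp rule below creates any missing BUILD_DIRS and then
  # drops a timestamp file; the per-object rules in write_objects() depend on it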
  f.write('.make.dirs: $(srcdir)/build-outputs.mk\n' \
          '\t@for d in $(BUILD_DIRS); do test -d $$d || mkdir $$d; done\n' \
          '\t@echo timestamp > $@\n')

def write_objects(f, legal_deps, h_deps, files):
  dirs = { }
  objects = [ ]

  for file in files:
    if file[-10:] == '/apr_app.c':
      continue
    assert file[-2:] == '.c'
    obj = file[:-2] + '.lo'
    objects.append(obj)

    dirs[os.path.dirname(file)] = None

    # what headers does this file include, along with the implied headers
    deps = extract_deps(file, legal_deps)
    for hdr in deps.keys():
      deps.update(h_deps.get(hdr, {}))

    vals = deps.values()
    vals.sort()
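    # each dependency line written below has the form (names hypothetical):
    #   tables/apr_hash.lo: tables/apr_hash.c .make.dirs include/apr_pools.h ...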
    f.write('%s: %s .make.dirs %s\n' % (obj, file, string.join(vals)))

  objects.sort()
  return objects, dirs

def extract_deps(fname, legal_deps):
  "Extract the headers this file includes."
  deps = { }
  for line in open(fname).readlines():
    if line[:8] != '#include':
      continue
    inc = _re_include.match(line).group(1)
    if inc in legal_deps.keys():
      deps[inc] = legal_deps[inc]
  return deps
_re_include = re.compile('#include *["<](.*)[">]')
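# _re_include matches both  #include "apr_foo.h"  and  #include <apr_foo.h>,
# capturing the header name ('apr_foo.h' here is just a placeholder)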

def resolve_deps(header_deps):
  "Alter the provided dictionary to flatten includes-of-includes."
  altered = 1
  while altered:
    altered = 0
    for hdr, deps in header_deps.items():
      # print hdr, deps
      start = len(deps)
      for dep in deps.keys():
        deps.update(header_deps.get(dep, {}))
      if len(deps) != start:
        altered = 1

def clean_path(path):
  return path.replace("\\", "/")

def get_files(patterns):
  files = [ ]
  for pat in string.split(patterns):
    files.extend(map(clean_path, glob.glob(pat)))
  files.sort()
  return files

if __name__ == '__main__':
  main()