root/trunk/libffado/admin/doxygen.py

Revision 2803, 6.8 kB (checked in by jwoithe, 3 years ago)

Cosmetic: capitalise "L" in "Linux".

"Linux" is a proper noun so it should start with a capital letter. These
changes are almost all within comments.

This patch was originally proposed by pander on the ffado-devel mailing
list. It has been expanded to cover all similar cases to maintain
consistency throughout the source tree.

Line 
1 #!/usr/bin/python
2 #
3 # Copyright (C) 2007-2008 Arnold Krille
4 #
5 # This file is part of FFADO
6 # FFADO = Free FireWire (pro-)audio drivers for Linux
7 #
8 # FFADO is based upon FreeBoB.
9 #
10 # This program is free software: you can redistribute it and/or modify
11 # it under the terms of the GNU General Public License as published by
12 # the Free Software Foundation, either version 2 of the License, or
13 # (at your option) version 3 of the License.
14 #
15 # This program is distributed in the hope that it will be useful,
16 # but WITHOUT ANY WARRANTY; without even the implied warranty of
17 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
18 # GNU General Public License for more details.
19 #
20 # You should have received a copy of the GNU General Public License
21 # along with this program.  If not, see <http://www.gnu.org/licenses/>.
22 #
23
24 #
25 # Astxx, the Asterisk C++ API and Utility Library.
26 # Copyright (C) 2005, 2006  Matthew A. Nicholson
27 # Copyright (C) 2006  Tim Blechmann
28 #
29 # This library is free software; you can redistribute it and/or
30 # modify it under the terms of the GNU Lesser General Public
31 # License version 2.1 as published by the Free Software Foundation.
32 #
33 # This library is distributed in the hope that it will be useful,
34 # but WITHOUT ANY WARRANTY; without even the implied warranty of
35 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
36 # Lesser General Public License for more details.
37 #
38 # You should have received a copy of the GNU Lesser General Public
39 # License along with this library; if not, write to the Free Software
40 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
41
42 import os
43 import os.path
44 import glob
45 from fnmatch import fnmatch
46 from functools import reduce
47
def DoxyfileParse(file_contents):
   """
   Parse a Doxygen configuration file and return a dict of its settings.

   Values are strings when a tag has exactly one value; multi-value tags
   stay lists, and INPUT / FILE_PATTERNS / EXCLUDE_PATTERNS are always
   kept as lists because the scanner consumes them that way.  Tags with
   no value at all are dropped.

   file_contents may be bytes (as returned by node.get_contents()) or str.
   """
   import shlex

   # Accept both bytes and str so callers need not pre-decode.
   if isinstance(file_contents, bytes):
      file_contents = file_contents.decode()

   data = {}

   lex = shlex.shlex(instream=file_contents, posix=True)
   lex.wordchars += "*+./-:"
   # Newlines are significant: they terminate a "TAG = value" line,
   # so remove '\n' from whitespace to receive it as a token.
   lex.whitespace = lex.whitespace.replace("\n", "")
   lex.escape = ""   # backslashes come through as plain '\\' tokens

   token = lex.get_token()
   key = token   # the first token should be a key
   last_token = ""
   key_token = False
   new_data = True

   def append_data(data, key, new_data, token):
      # Create the slot if the key has never been assigned yet (e.g. a
      # file whose very first token is a tag); the unguarded
      # data[key].append() used to raise KeyError in that case.
      values = data.setdefault(key, [])
      if new_data or not values:
         values.append(token)
      else:
         # Continuation of the previous word (backslash line-join).
         values[-1] += token

   while token:
      if token == "\n":
         # End of line: the next token is a new key, unless this line
         # ended with a continuation backslash.
         if last_token != "\\":
            key_token = True
      elif token == "\\":
         pass
      elif key_token:
         key = token
         key_token = False
      else:
         if token == "+=" or (token == "=" and last_token == "+"):
            # '+=' appends to existing values.  Because '+' is a word
            # character and '=' is not, shlex splits '+=' into '+' then
            # '=', so recognize both spellings; the old code only
            # checked for a literal '+=' token, which never occurs, and
            # the '=' branch then wrongly wiped the accumulated values.
            data.setdefault(key, [])
         elif token == "=":
            data[key] = []
         elif token == "+":
            pass   # first half of a split '+='; the following '=' acts
         else:
            append_data(data, key, new_data, token)
            new_data = True

      last_token = token
      token = lex.get_token()

      # A backslash not followed by a newline glues two tokens together.
      if last_token == "\\" and token != "\n":
         new_data = False
         append_data(data, key, new_data, "\\")

   # Compress single-element lists into plain strings; drop empty tags.
   to_pop = []
   for k, v in data.items():
      if not v:
         to_pop.append(k)   # can't pop while iterating the dict
         continue
      # These tags are consumed as lists downstream; never compress them.
      if k in ("INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"):
         continue
      if len(v) == 1:
         data[k] = v[0]

   for k in to_pop:
      data.pop(k)

   return data
119
def DoxySourceScan(node, env, path):
   """
   Doxygen Doxyfile source scanner.

   Scans the Doxyfile and returns the source files Doxygen would read,
   so SCons rebuilds the documentation when any of them change.
   """
   # Doxygen's built-in defaults, used when the Doxyfile does not set
   # FILE_PATTERNS / EXCLUDE_PATTERNS explicitly.
   default_file_patterns = [
      '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx',
      '*.ipp', '*.i++', '*.inl', '*.h', '*.hh', '*.hxx', '*.hpp', '*.h++',
      '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm',
      '*.py',
   ]   # NOTE: the old list had '*.hh ' with a trailing space, which never matched
   default_exclude_patterns = [
      '*~',
   ]

   data = DoxyfileParse(node.get_contents())

   recursive = data.get("RECURSIVE", "NO") == "YES"
   file_patterns = data.get("FILE_PATTERNS", default_file_patterns)
   exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns)

   sources = []
   # 'entry' instead of reusing 'node': the old loop shadowed the parameter.
   for entry in data.get("INPUT", []):
      if os.path.isfile(entry):
         sources.append(entry)
      elif os.path.isdir(entry):
         if recursive:
            for root, _dirs, files in os.walk(entry):
               for f in files:
                  filename = os.path.join(root, f)
                  included = any(fnmatch(filename, p) for p in file_patterns)
                  # A file is excluded when it matches ANY exclude
                  # pattern.  The old reduce() used 'and' over all
                  # patterns, excluding only files that matched every
                  # single one — wrong whenever more than one pattern
                  # is configured.
                  excluded = any(fnmatch(filename, p) for p in exclude_patterns)
                  if included and not excluded:
                     sources.append(filename)
         else:
            for pattern in file_patterns:
               sources.extend(glob.glob("/".join([entry, pattern])))

   # Return a real list (map() is a one-shot iterator under Python 3).
   return [env.File(s) for s in sources]
168
169
def DoxySourceScanCheck(node, env):
   """Decide whether to scan this node: only existing regular files qualify."""
   doxyfile_path = node.path
   return os.path.isfile(doxyfile_path)
173
def DoxyEmitter(source, target, env):
   """Doxygen Doxyfile emitter: derive the doc output directories as targets."""
   # Format name -> (default for GENERATE_<fmt>, default <fmt>_OUTPUT subdir)
   output_formats = {
      "HTML": ("YES", "html"),
      "LATEX": ("YES", "latex"),
      "RTF": ("NO", "rtf"),
      "MAN": ("YES", "man"),
      "XML": ("NO", "xml"),
   }

   data = DoxyfileParse(source[0].get_contents())
   out_dir = data.get("OUTPUT_DIRECTORY", ".")

   # Collect one target directory per enabled output format.
   targets = []
   for fmt, (enabled_default, dir_default) in output_formats.items():
      if data.get("GENERATE_" + fmt, enabled_default) == "YES":
         sub_dir = data.get(fmt + "_OUTPUT", dir_default)
         targets.append(env.Dir(os.path.join(out_dir, sub_dir)))

   for t in targets:
      env.Precious(t)   # don't clobber existing output
      env.Clean(t, t)   # remove the whole directory on 'scons -c'

   return (targets, source)
204
def generate(env):
   """
   Add builders and construction variables for the
   Doxygen tool.  This is currently for Doxygen 1.4.6.
   """
   import SCons.Builder

   # Scanner pulls the documented sources out of the Doxyfile so the
   # docs rebuild when they change; scan_check skips non-file nodes.
   scanner = env.Scanner(
      DoxySourceScan,
      "DoxySourceScan",
      scan_check=DoxySourceScanCheck,
   )

   # Run doxygen from the Doxyfile's directory so its relative paths resolve.
   builder = SCons.Builder.Builder(
      action="cd ${SOURCE.dir}  &&  ${DOXYGEN} ${SOURCE.file}",
      emitter=DoxyEmitter,
      target_factory=env.fs.Entry,
      single_source=True,
      source_scanner=scanner,
   )

   env.Append(BUILDERS={'Doxygen': builder})
   env.AppendUnique(DOXYGEN='doxygen')
232
def exists(env):
   """
   Make sure doxygen exists.

   Returns whatever env.Detect reports (the tool's path, or None).
   """
   tool = "doxygen"
   return env.Detect(tool)
Note: See TracBrowser for help on using the browser.