# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.pngmath'] # , 'sphinx.ext.intersphinx']
+extensions = ['sphinx.ext.pngmath', 'sphinx.ext.doctest'] # , 'sphinx.ext.intersphinx']
+doctest_test_doctest_blocks = 'block'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
tput clear
python latex.py ../online-opencv.tex || exit
+export PYTHONPATH=$HOME/local/lib/python2.5/site-packages:$PYTHONPATH
+for D in py
+do
+ TEXINPUTS=$PWD: sphinx-build -w $D/sphinx.errors -D "lang=$D" -b doctest -d _build/doctrees/$D $D _build/doctest/$D
+done
+exit
+
for D in c cpp py
do
mkdir -p $D
self.state = None
self.envstack = []
self.tags = {}
- self.errors = open('errors', 'wt')
+ self.errors = open('errors.%s' % language, 'wt')
self.unhandled_commands = set()
self.freshline = True
self.function_props = {}
self.description += s
else:
self.write(s)
- if 'lstlisting' in self.ee():
- self.listing += s
def docmd(self, c):
if self.state == 'math':
self.report_error(c, "No cvdefPy for function %s" % self.function_props['name'])
self.indent += 1
elif s == 'lstlisting':
- print >>self, "\n::\n"
- self.indent += 1
- print >>self
- self.listing = ""
+            # Set indent to zero while collecting code so the later write will not double-indent
+ self.saved_f = self.f
+ self.saved_indent = self.indent
+ self.f = StringIO.StringIO()
+ self.indent = 0
elif s in ['itemize', 'enumerate']:
self.indent += 1
elif s == 'tabular':
self.f = self.f_section
self.f.write(self.handle_table(tabletxt))
elif s == 'lstlisting':
+ listing = self.f.getvalue()
+
+ self.f = self.saved_f
+ self.indent = self.saved_indent
+ print >>self
+ if (self.language == 'py') and ('>>>' in listing):
+ print >>self, "\n.. doctest::\n"
+ else:
+ print >>self, "\n::\n"
+ self.indent += 1
print >>self
+ self.write(listing)
self.indent -= 1
print >>self
+ print >>self
print >>self, ".." # otherwise a following :param: gets treated as more listing
- if '\\_' in self.listing:
- self.report_error(c, "backslash _ in listing")
elif s == 'document':
pass
else:
if len(c.params) != 2:
self.report_error(c, "Malformed cvarg")
return
- is_func_arg = self.ee() == ['description'] and not 'done' in self.function_props
+ is_func_arg = (self.ee() == ['description']) and (not 'done' in self.function_props)
+ e = self.ee()
if is_func_arg:
nm = self.render(c.params[0].str)
print >>self, '\n:param %s: ' % nm,
type = arg.ty
else:
self.report_error(c, 'cannot find arg %s in code' % nm)
- elif self.ee() == ['description']:
- print >>self, '\n* **%s** ' % self.render(c.params[0].str),
- elif self.ee() == ['description', 'description']:
+ elif len(e) > 0 and e[-1] == 'description':
print >>self, '\n* **%s** ' % self.render(c.params[0].str),
else:
- print 'strange env', self.envstack
+ self.report_error(c, "unexpected env (%s) for cvarg" % ",".join(e))
self.indent += 1
self.doL(c.params[1].str, False)
self.indent -= 1
sl('}'))
r = (pp.ZeroOrMore(entry) | pp.Suppress('#' + pp.ZeroOrMore(pp.CharsNotIn('\n'))) + pp.StringEnd()).parseFile(filename)
- bibliography = open(os.path.join(language, "bibliography.rst"), 'wt')
+ bibliography = QOpen(os.path.join(language, "bibliography.rst"), 'wt')
print >>bibliography, "Bibliography"
print >>bibliography, "============"
print >>bibliography
(etype, tag, attrs) = str(e[0][1:]), str(e[1]), dict([(str(a), str(b)) for (a,b) in e[2]])
representations = {
- 'article' : '$author, "$title". $journal $volume $number, $pages ($year)',
+ 'article' : '$author, "$title". $journal $volume $number, pp $pages ($year)',
'inproceedings' : '$author "$title", $booktitle, $year',
'misc' : '$author "$title", $year',
+    'techreport' : '$author "$title", $edition ($year)',
}
if etype in representations:
if 0:
print >>bibliography, ".. [%s] %s" % (tag, Template(representations[etype]).safe_substitute(attrs))
print >>bibliography
+ bibliography.close()
if 1:
- sources = ['../' + f for f in os.listdir('..') if f.endswith('.tex')]
- if distutils.dep_util.newer_group(["latexparser.py"] + sources, "pickled"):
- fulldoc = latexparser(sys.argv[1], 0)
- pickle.dump(fulldoc, open("pickled", 'wb'))
- raw = open('raw.full', 'w')
- for x in fulldoc:
- print >>raw, repr(x)
- raw.close()
- else:
- fulldoc = pickle.load(open("pickled", "rb"))
+ fulldoc = latexparser(sys.argv[1])
+
+ raw = open('raw.full', 'w')
+ for x in fulldoc:
+ print >>raw, repr(x)
+ raw.close()
# Filter on target language
def preprocess_conditionals(fd, conditionals):
-from pyparsing import Word, CharsNotIn, Optional, OneOrMore, ZeroOrMore, Group, Forward, ParseException, Literal, Suppress, replaceWith, StringEnd, lineno, QuotedString, White, NotAny, ParserElement, MatchFirst
import sys
+import hashlib
+import cPickle as pickle
+import os
+
+from pyparsing import Word, CharsNotIn, Optional, OneOrMore, ZeroOrMore, Group, Forward, ParseException, Literal, Suppress, replaceWith, StringEnd, lineno, QuotedString, White, NotAny, ParserElement, MatchFirst
class Argument:
def __init__(self, s, loc, toks):
singles = bs("[") | bs("]") | bs("{") | bs("}") | bs("\\") | bs("&") | bs("_") | bs(",") | bs("#") | bs("\n") | bs(";") | bs("|") | bs("%") | bs("*")
texcmd << (singles | Word("\\", "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789", min = 2)) + ZeroOrMoreAsList(arg) + ZeroOrMoreAsList(param)
def texcmdfun(s, loc, toks):
- if str(toks[0])[1:] == 'input':
- filename = "../" + toks[2].asList()[0].str[0] + ".tex"
- print 'Now parsing', filename, loc
- return latexparser(filename, lineno(loc, s))
- else:
- return TexCmd(s, loc, toks)
+ return TexCmd(s, loc, toks)
texcmd.setParseAction(texcmdfun)
#legal = "".join([chr(x) for x in set(range(32, 127)) - set(backslash)])
print '====>', t
sys.exit(-1)
-def latexparser(filename, startline):
+selfstr = open(__file__).read() # Own source as a string. Used as part of hash.
+hashbase = hashlib.md5(selfstr)
+
+def tokenize(filename):
f = open(filename, "rt")
- lines = list(f)
def uncomment(s):
if '%' in s and not '\\%' in s:
return s[:s.index('%')] + '\n'
else:
return s
- lines = [uncomment(l) for l in lines]
- print len(lines), "lines"
-
- docstr = "".join(lines)
- # document.setFailAction(None)
+ docstr = "".join([uncomment(l) for l in f])
+ hash = hashbase.copy()
+ hash.update(docstr)
+ cache_filename = os.path.join("parse-cache", hash.hexdigest())
try:
- r = document.parseString(docstr)
- for x in r:
- if isinstance(x, TexCmd) and not x.filename:
- x.filename = filename
- return r
- except ParseException, pe:
- print 'Fatal problem at %s line %d col %d' % (filename, pe.lineno, pe.col)
- print pe.line
- sys.exit(1)
-
-
+ return pickle.load(open(cache_filename))
+ except IOError:
+ print "parsing"
+ try:
+ r = document.parseString(docstr)
+ for x in r:
+ if isinstance(x, TexCmd) and not x.filename:
+ x.filename = filename
+ pickle.dump(r, open(cache_filename, 'w'))
+ return r
+ except ParseException, pe:
+ print 'Fatal problem at %s line %d col %d' % (filename, pe.lineno, pe.col)
+ print pe.line
+ sys.exit(1)
+
+def latexparser(filename):
+ tokens = tokenize(filename)
+ def expand(t):
+ if isinstance(t, TexCmd) and t.cmd == "input":
+ filename = "../" + str(t.params[0].str[0]) + ".tex"
+ print filename
+ return latexparser(filename)
+ else:
+ return [t]
+ return sum([expand(t) for t in tokens], [])