Commit bc00e503 authored by Per Cederqvist

(lexer.__init__): Initialize __amindex, __defined_asyncs and
	__implemented_asyncs.
(lexer): Ignore @defcodeindex and @syncodeindex.
(lexer.toplevel_node): Check for undocumented arguments to
	asynchronous messages.
(lexer.toplevel_findex): Check for @amindex entries in @findex nodes.
(lexer.toplevel_amindex): New method.
(lexer.__parse_async): New method.
(lexer.toplevel_aarg): Implemented.
(lexer.toplevel_bye): Check for unterminated @amindex nodes and
	undocumented asynchronous messages.
parent 83391371
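For orientation, the @amindex blocks that the new __parse_async method accepts appear to have roughly the following shape in the protocol Texinfo source. This is only a sketch inferred from the parsing code below: the node/async name (async-example), the number 4711, and the argument names are placeholders, while INT32 and BOOL are taken from the lexer's builtin type list. A ``@c obsolete'' comment placed just before @example marks the message as obsolete, in which case a missing entry in asyncs-numbered.tmp is not reported as an error; each argument must later be referenced with @aarg in the node's text, or it is reported as undocumented when the next @node begins.

    @node async-example
    @amindex async-example
    @example
    async-example [4711] (( first-arg : INT32;
                            second-arg : BOOL ));
    @end example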
@@ -211,6 +211,7 @@ class lexer:
    def __init__(self, file):
        self.__reader = reader(file)
        self.__findex = None
        self.__amindex = None
        self.__linkhere = None
        self.__builtin_types = ["BOOL", "INT8", "INT16", "INT32",
                                "HOLLERITH"]
@@ -224,13 +225,22 @@
            defined_types[t] = prot_a_builtin('*builtin*')
        self.__defined_requests = {}
        self.__defined_asyncs = {}
        self.__implemented_reqs = {}
        f = open("numbered-requests.tmp", "r")
        f = open("requests-numbered.tmp", "r")
        for line in f.readlines():
            [nr, name] = line.split(" ")
            self.__implemented_reqs[name.strip()] = nr
        f.close()
        self.__implemented_asyncs = {}
        f = open("asyncs-numbered.tmp", "r")
        for line in f.readlines():
            [nr, name] = line.split(" ")
            self.__implemented_asyncs[name.strip()] = nr
        f.close()

    def run(self):
        while 1:
            c = self.__reader.getc_eofok()
@@ -361,6 +371,8 @@ class lexer:
    toplevel_unmacro = ignore
    toplevel_dots = ignore
    toplevel_tex = ignore
    toplevel_defcodeindex = ignore
    toplevel_syncodeindex = ignore

    def toplevel_node(self, arg, line_no):
        if self.__findex != None:
@@ -369,7 +381,12 @@
                if usage == 0:
                    self.error(lineno,
                               "Undocumented argument ``%s''" % (argname, ))
        self.__findex = None
        if self.__amindex != None:
            self.__amindex = None
            for (argname, [lineno, usage]) in self.__args.items():
                if usage == 0:
                    self.error(lineno,
                               "Undocumented argument ``%s''" % (argname, ))
        self.__node_name = arg
        self.__node_start = line_no
        self.__assert_no_linkhere()
@@ -380,6 +397,9 @@
            self.error(line_no, "@node/@findex mismatch: %s..." % arg)
            self.error(line_no, "...inside node %s" % self.__node_name)
            return
        if self.__amindex != None:
            self.error(line_no, "@findex and @amindex used in the same node")
            return
        if self.__findex != None:
            self.error(line_no, "multiple @findex in single @node")
            return
@@ -403,13 +423,121 @@
            return
        self.__args[arg][1] += 1

    def toplevel_amindex(self, arg, line_no):
        if self.__node_name != arg:
            self.error(line_no, "@node/@amindex mismatch: %s..." % arg)
            self.error(line_no, "...inside node %s" % self.__node_name)
            return
        if self.__findex != None:
            self.error(line_no, "@findex and @amindex used in the same node")
            return
        if self.__amindex != None:
            self.error(line_no, "multiple @amindex in single @node")
            return
        self.__amindex = arg
        self.__args = {}
        if self.__defined_asyncs.has_key(arg):
            self.error(line_no, "async message ``%s'' redefined" % arg)
            self.error(self.__defined_asyncs[arg], "previous definition")
        self.__defined_asyncs[arg] = line_no
        token = self.__get_token()
        obsolete = 0
        if token == '@c':
            token = self.__get_token()
            if token == 'obsolete':
                obsolete = 1
            else:
                self.error(self.__reader.line_no(),
                           "broken comment within @amindex block")
                return
            token = self.__get_token()
        if token != '@example':
            self.error(self.__reader.line_no(), "missing @example")
            return
        self.__parse_async(obsolete)

    def __parse_async(self, obsolete):
        self.__tokens = []
        async = self.__get_token()
        if async != self.__amindex:
            self.error(self.__reader.line_no(),
                       "wrong async name ``%s''" % async)
            return
        if self.__get_token() != '[':
            self.error(self.__reader.line_no(), "missing ``[''")
            return
        nr = self.__get_token()
        if type(nr) != types.IntType:
            self.error(self.__reader.line_no(), "bad async number")
        if self.__implemented_asyncs.has_key(async):
            if self.__implemented_asyncs[async] != str(nr):
                self.error(self.__reader.line_no(),
                           "``%s'' is implemented as asynchronous message %s, "
                           "not %s" % (async, self.__implemented_asyncs[async],
                                       nr))
        elif not obsolete:
            self.error(self.__reader.line_no(),
                       "asynchronous message ``%s'' not implemented" % async)
        if self.__get_token() != ']':
            self.error(self.__reader.line_no(), "missing ``]''")
            return
        paren = self.__get_token()
        if paren == '(':
            next = self.__get_token()
            if next != ')':
                self.__unget_token(next)
                self.__parse_request_arg()
                next = self.__get_token()
                if next != ')':
                    self.error(self.__reader.line_no(),
                               "missing close parenthesis after arguments")
                    return
        elif paren == '((':
            self.__parse_request_arg()
            next = self.__get_token()
            while next == ';':
                self.__parse_request_arg()
                next = self.__get_token()
            if next != '))':
                self.error(self.__reader.line_no(),
                           "missing double close parenthesis after arguments")
                return
        else:
            self.error(self.__reader.line_no(),
                       "missing argument list")
            return
        if self.__get_token() != ';':
            self.error(self.__reader.line_no(), "missing final ``;''")
            return
        if self.__get_token() != '@end':
            self.error(self.__reader.line_no(), "extra garbage found")
            return
        return

    def toplevel_aarg(self, arg, line_no):
        pass
        # FIXME
        if self.__amindex == None:
            self.error(line_no, "@aarg outside @amindex node")
            return
        if not self.__args.has_key(arg):
            self.error(line_no, "undefined argument ``%s''" % (arg, ))
            return
        self.__args[arg][1] += 1

    def toplevel_bye(self, arg, line_no):
        if self.__findex != None:
            self.error(self.__reader.line_no(), "unterminated @findex node")
        if self.__amindex != None:
            self.error(self.__reader.line_no(), "unterminated @amindex node")
        # Check types.
        for (n, o) in defined_types.items():
@@ -422,6 +550,12 @@ class lexer:
                self.error(self.__reader.line_no(),
                           "request ``%s'' not documented" % req)
        # Check async messages.
        for req in self.__implemented_asyncs.keys():
            if not self.__defined_asyncs.has_key(req):
                self.error(self.__reader.line_no(),
                           "asynchronous message ``%s'' not documented" % req)

    def toplevel_reqdlink(self, arg, line_no):
        self.__assert_no_linkhere()