0 Replies - 123 Views - Last Post: 25 October 2013 - 01:09 PM Rate Topic: -----

#1 EverythingisObject  Icon User is offline

  • New D.I.C Head

Reputation: 0
  • View blog
  • Posts: 5
  • Joined: 28-September 12

AssertionError

Posted 25 October 2013 - 01:09 PM

I am converting my interpreter project from java to python and I am running into an error I can not resolve. I am getting an assertion error from my getTokenType method in regards to my columnNum. Here is the traceback and code
Traceback (most recent call last):
  File "C:\\EclipseWorkspace\PyInterpreter\Interpreter.py", line 12, in Interpreter
    par = Parser("test3.for")
  File "C:\\EclipseWorkspace\PyInterpreter\Parser.py", line 34, in __init__
    self.lex = LexicalAnalyzer(fileName)
  File "C:\EclipseWorkspace\PyInterpreter\LexicalAnalyzer.py", line 30, in __init__
    self.processLine(line, lineNum)
  File "C:\EclipseWorkspace\PyInterpreter\LexicalAnalyzer.py", line 155, in processLine
    tokType = self.getTokenType(lexeme, lineNum, index)
  File "C:\EclipseWorkspace\PyInterpreter\LexicalAnalyzer.py", line 74, in getTokenType
    assert columnNum >= 1
Assertionerror
. unknown error occurred - terminating
<class 'Assertionerror'>
()


and the code

from LexException import LexException
from TokenType import TokenType
from Token import Token
#import fileinput

class LexicalAnalyzer():
   
    

    # 
    # 	 * @param fileName - cannot be null
    # 	 * @throws FileNotFoundException if file was not found
    # 	 * @throws LexException 
    # 	 * @throws IllegalArgumentException if fileName is null
    # 	 
    def __init__(self, fileName):
        
        self.fileName = fileName
        if fileName == None:
            raise IOError("null file name argument")
        tokenList = []
        
        lineNum = 1
        file = open(fileName, 'r')
        
        for line in file:
            
            self.processLine(line, lineNum)
            lineNum += 1
            
        tokenList.apend(("EOS", lineNum, 1, TokenType.EOS_TOK))
        file.close()
    
    def skip_white_space(self, line, index):
        self.line = line
        self.index = index
        
        assert line != None
        assert index >= 0
       
        while index < len(line) and line[index].isspace():
            index += 1
         
       
        return index
    
    def getLexeme(self, line, index, lineNum):
        self.line = line
        self.index = index
        self.lineNum = lineNum
        
        assert line != None
        assert index >= 0
        assert lineNum > 0
        i = index
        
        while i < len(line) and not (line[index].isspace()):
            i += 1
        
        return line[index:i]

    def getTokenType(self, lexeme, lineNum, columnNum):
      
        self.lexeme = lexeme
        self.linuNum = lineNum
        self.columnNum = columnNum
        
        
        assert lexeme != None
        assert  len(lexeme) > 0
        assert lineNum >= 1
        assert columnNum >= 1
        
        tokType = TokenType.EOS_TOK
        lexeme = lexeme.lower()
        if (lexeme[0].isalpha()):
            if 1 == len(lexeme):
                tokType = TokenType.ID_TOK
            else:
                if lexeme=="program":
                    tokType = TokenType.PROGRAM_TOK
                elif lexeme=="end":
                    tokType = TokenType.END_TOK
                elif lexeme=="if":
                    tokType = TokenType.IF_TOK
                elif lexeme=="then":
                    tokType = TokenType.THEN_TOK
                elif lexeme=="else":
                    tokType = TokenType.ELSE_TOK
                elif lexeme=="do":
                    tokType = TokenType.DO_TOK
                elif lexeme=="write":
                    tokType = TokenType.WRITE_TOK
                else:
                    raise LexException("invalid lexeme", lineNum, columnNum)
        elif lexeme[0].isdigit():
            i = 0
            while i < len(lexeme) and lexeme[i].isdigit():
                i += 1
            if i == len(lexeme):
                tokType = TokenType.INT_TOK
            else:
                raise LexException("invalid integer constant", lineNum, columnNum)
        else:
            if lexeme=="(":
                tokType = TokenType.LEFT_PAREN_TOK
            elif lexeme==")":
                tokType = TokenType.RIGHT_PAREN_TOK
            elif lexeme=="=":
                tokType = TokenType.ASSIGNMENT_TOK
            elif lexeme=="<=":
                tokType = TokenType.LE_TOK
            elif lexeme=="<":
                tokType = TokenType.LT_TOK
            elif lexeme==">=":
                tokType = TokenType.GE_TOK
            elif lexeme==">":
                tokType = TokenType.GT_TOK
            elif lexeme=="==":
                tokType = TokenType.EQ_TOK
            elif lexeme=="/=":
                tokType = TokenType.NE_TOK
            elif lexeme=="+":
                tokType = TokenType.ADD_TOK
            elif lexeme=="-":
                tokType = TokenType.SUB_TOK
            elif lexeme=="*":
                tokType = TokenType.MUL_TOK
            elif lexeme=="/":
                tokType = TokenType.DIV_TOK
            elif lexeme==",":
                tokType = TokenType.COMMA_TOK
                
            else:
                raise LexException("invalid lexeme", lineNum, columnNum)
        return tokType

    def processLine(self, line, lineNum):
        
        self.line = line
        self.lineNum = lineNum
        
        assert line != None
        index = 0
        
        index = self.skip_white_space(line, index)
        
        
        
        while index < len(line):
            
            lexeme = self.getLexeme(line, index, lineNum)
            tokType = self.getTokenType(lexeme, lineNum, index)
            tok = Token(lexeme, lineNum, index + 1, tokType)
            self.tokenList.append(tok)
            index += len(lexeme)
            index = self.skip_white_space(line, index)
            
   

    # 
    # 	 * precondition: there are more tokens
    # 	 * @return next token
    # 	 * postcondition: returned token has been removed
    # 	 * @throws RuntimeException if there are no more tokens
    # 	 
    def getNextToken(self):
        
        if self.tokenList.isEmpty():
            raise IOError("no more tokens")
        return self.tokenList.remove(0)

    # 
    # 	 * precondition: there are more tokens
    # 	 * @return copy of next token
    # 	 * @throws RuntimeException if there are no more tokens
    # 	 
    def getLookaheadToken(self):
        
        if self.tokenList.isEmpty():
            raise IOError("no more tokens")
        return self.tokenList.get(0)



Is This A Good Question/Topic? 0
  • +

Page 1 of 1