PL λαβ
Lab 2.1a[김예령]: 환경 본문
~ Completely Unfinished ~
Code
from enum import Enum
class Type(Enum):
    """Tags for the kinds of Lisp data cells."""

    NIL = 0
    INT = 1
    PAIR = 2
    SYMBOL = 3


class Data:
    """A tagged Lisp datum: NIL, INT, PAIR (cons cell) or SYMBOL.

    For PAIR the payload is a 2-tuple ``(car, cdr)`` of Data instances;
    for INT/SYMBOL it is the raw int/str payload; for NIL it is unused.
    """

    def __init__(self, type=Type.NIL, value=0):
        self.type = type
        self.value = value

    def car(self):
        """First element of a PAIR (only meaningful when type == Type.PAIR)."""
        return self.value[0]

    def cdr(self):
        """Rest of a PAIR (only meaningful when type == Type.PAIR)."""
        return self.value[1]

    def __str__(self):
        if self.type == Type.NIL:
            return "NIL"
        if self.type == Type.PAIR:
            # Render a cons chain in standard Lisp notation: a proper list
            # as "(a b c)", an improper tail as "(a b . c)".  The original
            # relied on int() raising inside a bare except and printed every
            # list element separated by " . ".
            parts = [str(self.car())]
            tail = self.cdr()
            while tail.type == Type.PAIR:
                parts.append(str(tail.car()))
                tail = tail.cdr()
            if tail.type != Type.NIL:
                parts.append('.')
                parts.append(str(tail))
            return '(' + ' '.join(parts) + ')'
        # INT and SYMBOL print their raw payload.
        return str(self.value)
def cons(d1, d2):
    """Build a PAIR cell whose car is d1 and cdr is d2."""
    return Data(Type.PAIR, (d1, d2))


def mkint(n):
    """Wrap a Python int as an INT datum."""
    return Data(Type.INT, n)


def mksym(s):
    """Wrap a string as a SYMBOL datum."""
    return Data(Type.SYMBOL, s)


def nilp():
    """Return a fresh NIL datum."""
    return Data(Type.NIL)
class T_Type(Enum):
    """Token categories produced by the lexer."""

    NIL = 0
    OP = 1
    CP = 2
    SYM = 3
    ID = 4
    INT = 5


class Token:
    """A single lexical token: a T_Type tag plus an optional payload value."""

    def __init__(self, type=T_Type.NIL, value=None):
        self.type = type
        self.value = value

    def __str__(self):
        # Only SYM / ID / INT tokens carry a meaningful payload.
        if self.type in (T_Type.SYM, T_Type.ID, T_Type.INT):
            return f"Token [{self.type}, Value: {self.value}]"
        return f"Token [{self.type}]"
# Maps a reserved word / operator lexeme to its SYM-token payload.
_KEYWORDS = {
    'quote': 'QUOTE',
    'define': 'DEF',
    'lambda': 'LAM',
    'if': 'IF',
    '+': 'PLUS',
    '-': 'MINUS',
    '>': 'GT',
    '<': 'LT',
}


def Lexer(lists):
    """Turn a list of lexeme lists (one per input line) into a flat Token list.

    Each lexeme becomes OP/CP for a parenthesis, SYM for a reserved word,
    INT for an integer literal, and ID for anything else.
    """
    TokenList = []
    for line in lists:
        for lexeme in line:
            if lexeme == '(':
                TokenList.append(Token(T_Type.OP))
            elif lexeme == ')':
                TokenList.append(Token(T_Type.CP))
            elif lexeme in _KEYWORDS:
                TokenList.append(Token(T_Type.SYM, _KEYWORDS[lexeme]))
            else:
                # int() on a non-numeric string raises ValueError only;
                # the original used a bare except and a no-op isinstance().
                try:
                    TokenList.append(Token(T_Type.INT, int(lexeme)))
                except ValueError:
                    TokenList.append(Token(T_Type.ID, lexeme))
    return TokenList
def iCons(d_list):
    """Fold a Python list of Data into a proper Lisp list (right fold onto NIL).

    The original recursion raised IndexError on an empty input; an empty
    list now yields NIL, and the rewrite avoids recursion-depth limits.
    """
    result = nilp()
    for d in reversed(d_list):
        result = cons(d, result)
    return result
def Parser(tokenlist):
    """Consume tokens from the front of tokenlist and return one Data expression.

    '( ... )' becomes a proper list (NIL for '()'), INT tokens become INT
    data, and every other atom becomes a SYMBOL.  Errors are reported as
    strings, preserving the original calling convention.
    """
    if len(tokenlist) == 0:
        return "[ERROR] Empty List"
    LA = tokenlist.pop(0)
    if LA.type == T_Type.OP:
        elems = []
        while tokenlist and tokenlist[0].type != T_Type.CP:
            elems.append(Parser(tokenlist))
        if not tokenlist:
            # Original indexed tokenlist[0] unconditionally and crashed here.
            return "[ERROR] Unbalanced parentheses"
        tokenlist.pop(0)  # consume the ')' — the original left it behind
                          # for '()', corrupting any enclosing list parse.
        return nilp() if not elems else iCons(elems)
    if LA.type == T_Type.CP:
        return "[ERROR] Unexpected ')'"
    try:
        # Handles every INT token, including 0: the original tested
        # `if int(LA.value):`, so a literal 0 fell through and returned None.
        return Data(Type.INT, int(LA.value))
    except (TypeError, ValueError):
        return Data(Type.SYMBOL, LA.value)
def env_init(parent):
    """Create a fresh environment frame whose first slot links to *parent*."""
    return [parent, "Nil"]
def eval_expr(p_list, env): # undone. it's just a hard coding that doesn't work...
    """Partially implemented evaluator — explicitly marked unfinished by the author.

    Currently only walks a parsed DEF form: for each (name . value) pair in
    the tail it appends a {name: value} dict to env, then returns the mutated
    env.  Any other input falls through and returns None implicitly.
    """
    PA = p_list.car()  # head of the expression; assumes p_list is a PAIR
    if PA.type == Type.SYMBOL:
        if PA.value == 'DEF':
            PD = p_list.cdr()
            # Walk the cons chain of bindings; a non-PAIR element ends the walk.
            while(PD.type == Type.PAIR):
                if PD.car().type == Type.PAIR:
                    # NOTE(review): binds symbol-name -> raw cdr payload —
                    # presumably the unevaluated value; TODO confirm intent.
                    env.append({PD.car().car().value:PD.car().cdr().value})
                    PD = PD.cdr()
                else:
                    break
            return env
if __name__ == "__main__":
    env = env_init('Nil')
    inputList = []
    # Read lines until a blank line or EOF; pad parentheses with spaces so
    # split() isolates them as their own lexemes.
    while True:
        try:
            line = input()
        except EOFError:
            # Piped input ends without a blank terminator line; the original
            # crashed with an unhandled EOFError here.
            break
        if not line:
            break
        inputList.append(line.replace('(', ' ( ').replace(')', ' ) ').split())
    tokenlist = Lexer(inputList)
    for token in tokenlist:
        print(token)
    parsedlist = Parser(tokenlist)
    print(parsedlist)
    print(env)
    rets = eval_expr(parsedlist, env)
    # eval_expr only returns the env for DEF forms; otherwise it returns
    # None, which the original tried to iterate (TypeError).
    if rets is not None:
        for ret in rets:
            print(ret, end=' ')
Result
'kos' 카테고리의 다른 글
Lab 2.1b[김예령]: 환경 (0) | 2021.04.15 |
---|---|
Lab 2.1a[최준혁]: 환경 (0) | 2021.04.13 |
Lab 1.2c[최준혁]: 구문 분석기 (0) | 2021.04.13 |
Lab 2.1: 환경 (0) | 2021.04.13 |
Lab 1.2b[박인철]: 구문 분석기 (0) | 2021.04.13 |
Comments