# Read a delimited sequence (list or vector) from the token stream into
# `init`, consuming both the opening and closing delimiter tokens.
#
# init  - collection that parsed forms are appended to (returned on success)
# open  - expected opening delimiter character, e.g. '('
# close - expected closing delimiter character, e.g. ')'
#
# Raises (via parse_error) if the stream ends before `close` is found, or
# if the first token is not `open`.
def read_sequence(init, open, close)
  token = self.next
  parse_error "expected '#{open}', got EOF" unless token
  parse_error "expected '#{open}', got #{token}" unless token[0] == open

  loop do
    token = peek
    parse_error "expected '#{close}', got EOF" unless token
    break if token[0] == close

    init << read_form
  end

  self.next # consume the closing delimiter
  init
end

# Read a single atom — integer, boolean, nil, string, keyword or symbol —
# from the token stream and wrap it in Mal::Type.
#
# Raises (via parse_error) on EOF or on an unterminated string literal.
def read_atom
  token = self.next
  parse_error "expected Atom but got EOF" unless token

  Mal::Type.new case
    when token =~ /^-?\d+$/ then token.to_i64
    when token == "true"    then true
    when token == "false"   then false
    when token == "nil"     then nil
    when token[0] == '"'
      # The tokenizer emits an unterminated string literal without its
      # closing quote — report it instead of silently truncating.
      parse_error "expected '\"', got EOF" if token[-1] != '"'
      # Unescape in a single pass so a literal backslash-n ("\\n" in the
      # source text) is not re-interpreted as a newline afterwards.
      token[1..-2].gsub(/\\(.)/, {"\\\"" => "\"",
                                  "\\n"  => "\n",
                                  "\\\\" => "\\"})
    when token[0] == ':' then "\u029e#{token[1..-1]}"
    else Mal::Symbol.new token
  end
end
# Split a Mal source string into an array of lexical tokens.
#
# Alternatives in the token regex, in order of precedence:
#   ~@                          splice-unquote
#   [\[\]{}()'`~^@]             single-character specials
#   "(?:\\.|[^\\"])*"?          string literal; the closing '"' is optional
#                               so an unterminated string still produces a
#                               token the reader can report an error for
#   ;.*                         comment to end of line
#   [^\s\[\]{}('"`,;)]*         plain atom (number, symbol, keyword, ...)
#
# Leading whitespace and commas are skipped; empty matches are dropped.
def tokenize(str)
  regex = /[\s,]*(~@|[\[\]{}()'`~^@]|"(?:\\.|[^\\"])*"?|;.*|[^\s\[\]{}('"`,;)]*)/
  str.scan(regex).map { |m| m[1] }.reject(&.empty?)
end
# Entry point of the reader: tokenize `str` and read a single Mal form.
#
# NOTE(review): the original body of this method was lost in a bad merge
# (it was empty, followed by stray `end`s and a leftover diff marker).
# Restored to the conventional mal reader entry point — tokenize the input
# and parse one form from the resulting token stream; confirm against the
# surrounding step files.
def read_str(str)
  Reader.new(tokenize(str)).read_form
end