Removing unused _tokennames from lexer.h
author: Wolfgang (Blub) Bumiller <blub@speed.at>
Thu, 22 Nov 2012 19:42:55 +0000 (20:42 +0100)
committer: Wolfgang (Blub) Bumiller <blub@speed.at>
Thu, 22 Nov 2012 19:42:55 +0000 (20:42 +0100)
lexer.h

diff --git a/lexer.h b/lexer.h
index a6f910062ff962623c63107381ce8350be0a4bd6..e58a98d13e62ba854870b19bcc0e6a46d2c90e71 100644 (file)
--- a/lexer.h
+++ b/lexer.h
@@ -69,30 +69,6 @@ enum {
     TOKEN_FATAL /* internal error, eg out of memory */
 };
 
-static const char *_tokennames[] = {
-    "TOKEN_START",
-    "TOKEN_IDENT",
-    "TOKEN_TYPENAME",
-    "TOKEN_OPERATOR",
-    "TOKEN_KEYWORD",
-    "TOKEN_DOTS",
-    "TOKEN_STRINGCONST",
-    "TOKEN_CHARCONST",
-    "TOKEN_VECTORCONST",
-    "TOKEN_INTCONST",
-    "TOKEN_FLOATCONST",
-    "TOKEN_WHITE",
-    "TOKEN_EOL",
-    "TOKEN_EOF",
-    "TOKEN_ERROR",
-    "TOKEN_FATAL",
-};
-typedef int
-_all_tokennames_added_[
-       ((TOKEN_FATAL - TOKEN_START + 1) ==
-        (sizeof(_tokennames)/sizeof(_tokennames[0])))
-       ? 1 : -1];
-
 typedef struct {
     char *name;
     int   value;