token.go 5.9 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332
  1. package influxql
  2. import (
  3. "strings"
  4. )
// Token is a lexical token of the InfluxQL language.
type Token int

// These are a comprehensive list of InfluxQL language tokens.
//
// The unexported xxxBeg/xxxEnd identifiers are sentinel values used only
// for range checks (e.g. isOperator, keyword table construction); they are
// never produced by the scanner.
const (
	// ILLEGAL Token, EOF, WS are Special InfluxQL tokens.
	ILLEGAL Token = iota
	EOF
	WS
	COMMENT

	literalBeg
	// IDENT and the following are InfluxQL literal tokens.
	IDENT       // main
	BOUNDPARAM  // $param
	NUMBER      // 12345.67
	INTEGER     // 12345
	DURATIONVAL // 13h
	STRING      // "abc"
	BADSTRING   // "abc
	BADESCAPE   // \q
	TRUE        // true
	FALSE       // false
	REGEX       // Regular expressions
	BADREGEX    // `.*
	literalEnd

	operatorBeg
	// ADD and the following are InfluxQL Operators
	ADD         // +
	SUB         // -
	MUL         // *
	DIV         // /
	MOD         // %
	BITWISE_AND // &
	BITWISE_OR  // |
	BITWISE_XOR // ^

	AND // AND
	OR  // OR

	EQ       // =
	NEQ      // !=
	EQREGEX  // =~
	NEQREGEX // !~
	LT       // <
	LTE      // <=
	GT       // >
	GTE      // >=
	operatorEnd

	// Punctuation tokens; note these sit outside the operator range on
	// purpose, so isOperator and Precedence do not apply to them.
	LPAREN      // (
	RPAREN      // )
	COMMA       // ,
	COLON       // :
	DOUBLECOLON // ::
	SEMICOLON   // ;
	DOT         // .

	keywordBeg
	// ALL and the following are InfluxQL Keywords
	ALL
	ALTER
	ANALYZE
	ANY
	AS
	ASC
	BEGIN
	BY
	CARDINALITY
	CREATE
	CONTINUOUS
	DATABASE
	DATABASES
	DEFAULT
	DELETE
	DESC
	DESTINATIONS
	DIAGNOSTICS
	DISTINCT
	DROP
	DURATION
	END
	EVERY
	EXACT
	EXPLAIN
	FIELD
	FOR
	FROM
	GRANT
	GRANTS
	GROUP
	GROUPS
	IN
	INF
	INSERT
	INTO
	KEY
	KEYS
	KILL
	LIMIT
	MEASUREMENT
	MEASUREMENTS
	NAME
	OFFSET
	ON
	ORDER
	PASSWORD
	POLICY
	POLICIES
	PRIVILEGES
	QUERIES
	QUERY
	READ
	REPLICATION
	RESAMPLE
	RETENTION
	REVOKE
	SELECT
	SERIES
	SET
	SHOW
	SHARD
	SHARDS
	SLIMIT
	SOFFSET
	STATS
	SUBSCRIPTION
	SUBSCRIPTIONS
	TAG
	TO
	USER
	USERS
	VALUES
	WHERE
	WITH
	WRITE
	keywordEnd
)
  137. var tokens = [...]string{
  138. ILLEGAL: "ILLEGAL",
  139. EOF: "EOF",
  140. WS: "WS",
  141. IDENT: "IDENT",
  142. NUMBER: "NUMBER",
  143. DURATIONVAL: "DURATIONVAL",
  144. STRING: "STRING",
  145. BADSTRING: "BADSTRING",
  146. BADESCAPE: "BADESCAPE",
  147. TRUE: "TRUE",
  148. FALSE: "FALSE",
  149. REGEX: "REGEX",
  150. ADD: "+",
  151. SUB: "-",
  152. MUL: "*",
  153. DIV: "/",
  154. MOD: "%",
  155. BITWISE_AND: "&",
  156. BITWISE_OR: "|",
  157. BITWISE_XOR: "^",
  158. AND: "AND",
  159. OR: "OR",
  160. EQ: "=",
  161. NEQ: "!=",
  162. EQREGEX: "=~",
  163. NEQREGEX: "!~",
  164. LT: "<",
  165. LTE: "<=",
  166. GT: ">",
  167. GTE: ">=",
  168. LPAREN: "(",
  169. RPAREN: ")",
  170. COMMA: ",",
  171. COLON: ":",
  172. DOUBLECOLON: "::",
  173. SEMICOLON: ";",
  174. DOT: ".",
  175. ALL: "ALL",
  176. ALTER: "ALTER",
  177. ANALYZE: "ANALYZE",
  178. ANY: "ANY",
  179. AS: "AS",
  180. ASC: "ASC",
  181. BEGIN: "BEGIN",
  182. BY: "BY",
  183. CARDINALITY: "CARDINALITY",
  184. CREATE: "CREATE",
  185. CONTINUOUS: "CONTINUOUS",
  186. DATABASE: "DATABASE",
  187. DATABASES: "DATABASES",
  188. DEFAULT: "DEFAULT",
  189. DELETE: "DELETE",
  190. DESC: "DESC",
  191. DESTINATIONS: "DESTINATIONS",
  192. DIAGNOSTICS: "DIAGNOSTICS",
  193. DISTINCT: "DISTINCT",
  194. DROP: "DROP",
  195. DURATION: "DURATION",
  196. END: "END",
  197. EVERY: "EVERY",
  198. EXACT: "EXACT",
  199. EXPLAIN: "EXPLAIN",
  200. FIELD: "FIELD",
  201. FOR: "FOR",
  202. FROM: "FROM",
  203. GRANT: "GRANT",
  204. GRANTS: "GRANTS",
  205. GROUP: "GROUP",
  206. GROUPS: "GROUPS",
  207. IN: "IN",
  208. INF: "INF",
  209. INSERT: "INSERT",
  210. INTO: "INTO",
  211. KEY: "KEY",
  212. KEYS: "KEYS",
  213. KILL: "KILL",
  214. LIMIT: "LIMIT",
  215. MEASUREMENT: "MEASUREMENT",
  216. MEASUREMENTS: "MEASUREMENTS",
  217. NAME: "NAME",
  218. OFFSET: "OFFSET",
  219. ON: "ON",
  220. ORDER: "ORDER",
  221. PASSWORD: "PASSWORD",
  222. POLICY: "POLICY",
  223. POLICIES: "POLICIES",
  224. PRIVILEGES: "PRIVILEGES",
  225. QUERIES: "QUERIES",
  226. QUERY: "QUERY",
  227. READ: "READ",
  228. REPLICATION: "REPLICATION",
  229. RESAMPLE: "RESAMPLE",
  230. RETENTION: "RETENTION",
  231. REVOKE: "REVOKE",
  232. SELECT: "SELECT",
  233. SERIES: "SERIES",
  234. SET: "SET",
  235. SHOW: "SHOW",
  236. SHARD: "SHARD",
  237. SHARDS: "SHARDS",
  238. SLIMIT: "SLIMIT",
  239. SOFFSET: "SOFFSET",
  240. STATS: "STATS",
  241. SUBSCRIPTION: "SUBSCRIPTION",
  242. SUBSCRIPTIONS: "SUBSCRIPTIONS",
  243. TAG: "TAG",
  244. TO: "TO",
  245. USER: "USER",
  246. USERS: "USERS",
  247. VALUES: "VALUES",
  248. WHERE: "WHERE",
  249. WITH: "WITH",
  250. WRITE: "WRITE",
  251. }
  252. var keywords map[string]Token
  253. func init() {
  254. keywords = make(map[string]Token)
  255. for tok := keywordBeg + 1; tok < keywordEnd; tok++ {
  256. keywords[strings.ToLower(tokens[tok])] = tok
  257. }
  258. for _, tok := range []Token{AND, OR} {
  259. keywords[strings.ToLower(tokens[tok])] = tok
  260. }
  261. keywords["true"] = TRUE
  262. keywords["false"] = FALSE
  263. }
  264. // String returns the string representation of the token.
  265. func (tok Token) String() string {
  266. if tok >= 0 && tok < Token(len(tokens)) {
  267. return tokens[tok]
  268. }
  269. return ""
  270. }
  271. // Precedence returns the operator precedence of the binary operator token.
  272. func (tok Token) Precedence() int {
  273. switch tok {
  274. case OR:
  275. return 1
  276. case AND:
  277. return 2
  278. case EQ, NEQ, EQREGEX, NEQREGEX, LT, LTE, GT, GTE:
  279. return 3
  280. case ADD, SUB, BITWISE_OR, BITWISE_XOR:
  281. return 4
  282. case MUL, DIV, MOD, BITWISE_AND:
  283. return 5
  284. }
  285. return 0
  286. }
  287. // isOperator returns true for operator tokens.
  288. func (tok Token) isOperator() bool { return tok > operatorBeg && tok < operatorEnd }
  289. // tokstr returns a literal if provided, otherwise returns the token string.
  290. func tokstr(tok Token, lit string) string {
  291. if lit != "" {
  292. return lit
  293. }
  294. return tok.String()
  295. }
  296. // Lookup returns the token associated with a given string.
  297. func Lookup(ident string) Token {
  298. if tok, ok := keywords[strings.ToLower(ident)]; ok {
  299. return tok
  300. }
  301. return IDENT
  302. }
// Pos specifies the line and character position of a token.
// The Char and Line are both zero-based indexes.
type Pos struct {
	Line int // zero-based line number within the input
	Char int // zero-based character offset within the line
}