<lexer>
  <config>
    <name>Odin</name>
    <alias>odin</alias>
    <filename>*.odin</filename>
    <mime_type>text/odin</mime_type>
  </config>
  <rules>
    <!-- Odin block comments nest: each /* pushes this state again and each */ pops one level. -->
    <state name="NestedComment">
      <rule pattern="/[*]">
        <token type="CommentMultiline"/>
        <push/>
      </rule>
      <rule pattern="[*]/">
        <token type="CommentMultiline"/>
        <pop depth="1"/>
      </rule>
      <rule pattern="[\s\S]">
        <token type="CommentMultiline"/>
      </rule>
    </state>
    <state name="root">
      <rule pattern="\n">
        <token type="TextWhitespace"/>
      </rule>
      <rule pattern="\s+">
        <token type="TextWhitespace"/>
      </rule>
      <rule pattern="//.*?\n">
        <token type="CommentSingle"/>
      </rule>
      <rule pattern="/[*]">
        <token type="CommentMultiline"/>
        <push state="NestedComment"/>
      </rule>
      <rule pattern="(import|package)\b">
        <token type="KeywordNamespace"/>
      </rule>
      <rule pattern="(proc|struct|map|enum|union)\b">
        <token type="KeywordDeclaration"/>
      </rule>
      <rule pattern="(asm|auto_cast|bit_set|break|case|cast|context|continue|defer|distinct|do|dynamic|else|enum|fallthrough|for|foreign|if|import|in|map|not_in|or_else|or_return|package|proc|return|struct|switch|transmute|typeid|union|using|when|where|panic|real|imag|len|cap|append|copy|delete|new|make|clear)\b">
        <token type="Keyword"/>
      </rule>
      <rule pattern="(true|false|nil)\b">
        <token type="KeywordConstant"/>
      </rule>
      <rule pattern="(uint|u8|u16|u32|u64|u128|int|i8|i16|i32|i64|i128|i16le|i32le|i64le|i128le|u16le|u32le|u64le|u128le|i16be|i32be|i64be|i128be|u16be|u32be|u64be|u128be|f16|f32|f64|complex32|complex64|complex128|quaternion64|quaternion128|quaternion256|byte|rune|string|cstring|typeid|any|bool|b8|b16|b32|b64|uintptr|rawptr)\b">
        <token type="KeywordType"/>
      </rule>
      <rule pattern="\#[a-zA-Z_]+\b">
        <token type="NameDecorator"/>
      </rule>
      <rule pattern="\@(\([a-zA-Z_]+\b\s*.*\)|\(?[a-zA-Z_]+\)?)">
        <token type="NameAttribute"/>
      </rule>
      <!-- An identifier followed by "(" is a call site; this rule must precede the generic Name rule, which would otherwise shadow it. -->
      <rule pattern="([a-zA-Z_]\w*)(\s*)(\()">
        <bygroups>
          <token type="NameFunction"/>
          <token type="TextWhitespace"/>
          <token type="Punctuation"/>
        </bygroups>
      </rule>
      <rule pattern="[a-zA-Z_]\w*">
        <token type="Name"/>
      </rule>
      <rule pattern="[^\W\d]\w*">
        <token type="NameOther"/>
      </rule>
      <rule pattern="\d+i">
        <token type="LiteralNumber"/>
      </rule>
      <rule pattern="\d+\.\d*([Ee][-+]\d+)?i">
        <token type="LiteralNumber"/>
      </rule>
      <rule pattern="\.\d+([Ee][-+]\d+)?i">
        <token type="LiteralNumber"/>
      </rule>
      <rule pattern="\d+[Ee][-+]\d+i">
        <token type="LiteralNumber"/>
      </rule>
      <rule pattern="\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
        <token type="LiteralNumberFloat"/>
      </rule>
      <rule pattern="\.\d+([eE][+\-]?\d+)?">
        <token type="LiteralNumberFloat"/>
      </rule>
      <rule pattern="0o[0-7]+">
        <token type="LiteralNumberOct"/>
      </rule>
      <rule pattern="0x[0-9a-fA-F_]+">
        <token type="LiteralNumberHex"/>
      </rule>
      <rule pattern="0b[01_]+">
        <token type="LiteralNumberBin"/>
      </rule>
      <rule pattern="(0|[1-9][0-9_]*)">
        <token type="LiteralNumberInteger"/>
      </rule>
      <rule pattern="'(\\['&#34;\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'">
        <token type="LiteralStringChar"/>
      </rule>
      <rule pattern="(`)([^`]*)(`)">
        <token type="LiteralString"/>
      </rule>
      <rule pattern="&#34;(\\\\|\\&#34;|[^&#34;])*&#34;">
        <token type="LiteralString"/>
      </rule>
      <!-- Compound operators are listed before their single-character prefixes so they are matched whole. -->
      <rule pattern="(&lt;&lt;=|&gt;&gt;=|&lt;&lt;|&gt;&gt;|&lt;=|&gt;=|&amp;&amp;|&amp;=|\|\||\+=|-=|\*=|/=|%=|==|!=|:=|::|\.\.&lt;|\.\.=|--|-&gt;|&amp;|\||\^|=|:|[&lt;&gt;+\-*/%&amp;])">
        <token type="Operator"/>
      </rule>
      <rule pattern="[{}()\[\],.;]">
        <token type="Punctuation"/>
      </rule>
    </state>
  </rules>
</lexer>