// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: http://codemirror.net/LICENSE

(function (mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function (CodeMirror) {
  "use strict";

  CodeMirror.defineMode("ruby", function (config) {
    function wordObj(words) {
      var o = {};
      for (var i = 0, e = words.length; i < e; ++i) o[words[i]] = true;
      return o;
    }
    var keywords = wordObj([
      "alias", "and", "BEGIN", "begin", "break", "case", "class", "def", "defined?", "do", "else",
      "elsif", "END", "end", "ensure", "false", "for", "if", "in", "module", "next", "not", "or",
      "redo", "rescue", "retry", "return", "self", "super", "then", "true", "undef", "unless",
      "until", "when", "while", "yield", "nil", "raise", "throw", "catch", "fail", "loop", "callcc",
      "caller", "lambda", "proc", "public", "protected", "private", "require", "load",
      "require_relative", "extend", "autoload", "__END__", "__FILE__", "__LINE__", "__dir__"
    ]);
    var indentWords = wordObj(["def", "class", "case", "for", "while", "module", "then",
                             "catch", "loop", "proc", "begin"]);
    var dedentWords = wordObj(["end", "until"]);
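    // Opening-to-closing delimiter pairs, used for %-literal delimiters and for
    // matching bracket contexts during indentation.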
    var matching = {"[": "]", "{": "}", "(": ")"};
    var curPunc;

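    // Push a nested tokenizer onto the state's stack and run it immediately.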
    function chain(newtok, stream, state) {
      state.tokenize.push(newtok);
      return newtok(stream, state);
    }

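    // Base tokenizer: dispatches on the first character of a token and hands
    // off to nested tokenizers for strings, regexps, heredocs and comments.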
    function tokenBase(stream, state) {
      if (stream.sol() && stream.match("=begin") && stream.eol()) {
        state.tokenize.push(readBlockComment);
        return "comment";
      }
      if (stream.eatSpace()) return null;
      var ch = stream.next(), m;
      if (ch == "`" || ch == "'" || ch == '"') {
        return chain(readQuoted(ch, "string", ch == '"' || ch == "`"), stream, state);
      } else if (ch == "/") {
        var currentIndex = stream.current().length;
        if (stream.skipTo("/")) {
          var search_till = stream.current().length;
          stream.backUp(stream.current().length - currentIndex);
          var balance = 0;  // balance brackets
          while (stream.current().length < search_till) {
            var chchr = stream.next();
            if (chchr == "(") balance += 1;
            else if (chchr == ")") balance -= 1;
            if (balance < 0) break;
          }
          stream.backUp(stream.current().length - currentIndex);
          if (balance == 0)
            return chain(readQuoted(ch, "string-2", true), stream, state);
        }
        return "operator";
      } else if (ch == "%") {
        var style = "string", embed = true;
        if (stream.eat("s")) style = "atom";
        else if (stream.eat(/[WQ]/)) style = "string";
        else if (stream.eat(/[r]/)) style = "string-2";
        else if (stream.eat(/[wxq]/)) { style = "string"; embed = false; }
        var delim = stream.eat(/[^\w\s=]/);
        if (!delim) return "operator";
        if (matching.propertyIsEnumerable(delim)) delim = matching[delim];
        return chain(readQuoted(delim, style, embed, true), stream, state);
      } else if (ch == "#") {
        stream.skipToEnd();
        return "comment";
      } else if (ch == "<" && (m = stream.match(/^<-?[\`\"\']?([a-zA-Z_?]\w*)[\`\"\']?(?:;|$)/))) {
        return chain(readHereDoc(m[1]), stream, state);
      } else if (ch == "0") {
        if (stream.eat("x")) stream.eatWhile(/[\da-fA-F]/);
        else if (stream.eat("b")) stream.eatWhile(/[01]/);
        else stream.eatWhile(/[0-7]/);
        return "number";
      } else if (/\d/.test(ch)) {
        stream.match(/^[\d_]*(?:\.[\d_]+)?(?:[eE][+\-]?[\d_]+)?/);
        return "number";
      } else if (ch == "?") {
        while (stream.match(/^\\[CM]-/)) {}
        if (stream.eat("\\")) stream.eatWhile(/\w/);
        else stream.next();
        return "string";
      } else if (ch == ":") {
        if (stream.eat("'")) return chain(readQuoted("'", "atom", false), stream, state);
        if (stream.eat('"')) return chain(readQuoted('"', "atom", true), stream, state);

        // :> :>> :< :<< are valid symbols
        if (stream.eat(/[\<\>]/)) {
          stream.eat(/[\<\>]/);
          return "atom";
        }

        // :+ :- :/ :* :| :& :! are valid symbols
        if (stream.eat(/[\+\-\*\/\&\|\:\!]/)) {
          return "atom";
        }

        // Symbols can't start with a digit
        if (stream.eat(/[a-zA-Z$@_\xa1-\uffff]/)) {
          stream.eatWhile(/[\w$\xa1-\uffff]/);
          // Only one ? ! = is allowed, and only as the last character
          stream.eat(/[\?\!\=]/);
          return "atom";
        }
        return "operator";
      } else if (ch == "@" && stream.match(/^@?[a-zA-Z_\xa1-\uffff]/)) {
        stream.eat("@");
        stream.eatWhile(/[\w\xa1-\uffff]/);
        return "variable-2";
      } else if (ch == "$") {
        if (stream.eat(/[a-zA-Z_]/)) {
          stream.eatWhile(/[\w]/);
        } else if (stream.eat(/\d/)) {
          stream.eat(/\d/);
        } else {
          stream.next(); // Must be a special global like $: or $!
        }
        return "variable-3";
      } else if (/[a-zA-Z_\xa1-\uffff]/.test(ch)) {
        stream.eatWhile(/[\w\xa1-\uffff]/);
        stream.eat(/[\?\!]/);
        if (stream.eat(":")) return "atom";
        return "ident";
      } else if (ch == "|" && (state.varList || state.lastTok == "{" || state.lastTok == "do")) {
        curPunc = "|";
        return null;
      } else if (/[\(\)\[\]{}\\;]/.test(ch)) {
        curPunc = ch;
        return null;
      } else if (ch == "-" && stream.eat(">")) {
        return "arrow";
      } else if (/[=+\-\/*:\.^%<>~|]/.test(ch)) {
        var more = stream.eatWhile(/[=+\-\/*:\.^%<>~|]/);
        if (ch == "." && !more) curPunc = ".";
        return "operator";
      } else {
        return null;
      }
    }

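    // Tokenizes the inside of a #{...} interpolation, delegating to tokenBase
    // while tracking nested braces so only the matching "}" pops it.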
    function tokenBaseUntilBrace(depth) {
      if (!depth) depth = 1;
      return function (stream, state) {
        if (stream.peek() == "}") {
          if (depth == 1) {
            state.tokenize.pop();
            return state.tokenize[state.tokenize.length - 1](stream, state);
          } else {
            state.tokenize[state.tokenize.length - 1] = tokenBaseUntilBrace(depth - 1);
          }
        } else if (stream.peek() == "{") {
          state.tokenize[state.tokenize.length - 1] = tokenBaseUntilBrace(depth + 1);
        }
        return tokenBase(stream, state);
      };
    }
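    // Runs tokenBase for a single token (used for #@ivar and #$gvar
    // interpolation inside strings), then pops itself.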
    function tokenBaseOnce() {
      var alreadyCalled = false;
      return function (stream, state) {
        if (alreadyCalled) {
          state.tokenize.pop();
          return state.tokenize[state.tokenize.length - 1](stream, state);
        }
        alreadyCalled = true;
        return tokenBase(stream, state);
      };
    }
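    // Tokenizer for quoted literals: `quote` is the closing delimiter, `style`
    // the returned token style, `embed` enables #{} / #@ / #$ interpolation,
    // and `unescaped` (set for %-literals) keeps a backslash from escaping the
    // closing delimiter.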
    function readQuoted(quote, style, embed, unescaped) {
      return function (stream, state) {
        var escaped = false, ch;

        if (state.context.type === "read-quoted-paused") {
          state.context = state.context.prev;
          stream.eat("}");
        }

        while ((ch = stream.next()) != null) {
          if (ch == quote && (unescaped || !escaped)) {
            state.tokenize.pop();
            break;
          }
          if (embed && ch == "#" && !escaped) {
            if (stream.eat("{")) {
              if (quote == "}") {
                state.context = {prev: state.context, type: "read-quoted-paused"};
              }
              state.tokenize.push(tokenBaseUntilBrace());
              break;
            } else if (/[@\$]/.test(stream.peek())) {
              state.tokenize.push(tokenBaseOnce());
              break;
            }
          }
          escaped = !escaped && ch == "\\";
        }
        return style;
      };
    }
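    // Heredoc body: style whole lines as string until the terminator phrase is
    // matched.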
    function readHereDoc(phrase) {
      return function (stream, state) {
        if (stream.match(phrase)) state.tokenize.pop();
        else stream.skipToEnd();
        return "string";
      };
    }
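    // =begin/=end block comment: stays active until a line consisting of just
    // "=end" is reached.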
    function readBlockComment(stream, state) {
      if (stream.sol() && stream.match("=end") && stream.eol())
        state.tokenize.pop();
      stream.skipToEnd();
      return "comment";
    }

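    // The mode object. `tokenize` is a stack of tokenizers; `context` tracks
    // nesting for indentation; `lastTok` and `varList` help classify identifiers.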
    return {
      startState: function () {
        return {tokenize: [tokenBase],
                indented: 0,
                context: {type: "top", indented: -config.indentUnit},
                continuedLine: false,
                lastTok: null,
                varList: false};
      },

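      // Run the innermost tokenizer, refine bare identifiers into property /
      // keyword / tag (constants) / def / variable, and maintain the
      // indentation context for brackets and indenting/dedenting keywords.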
      token: function (stream, state) {
        curPunc = null;
        if (stream.sol()) state.indented = stream.indentation();
        var style = state.tokenize[state.tokenize.length - 1](stream, state), kwtype;
        var thisTok = curPunc;
        if (style == "ident") {
          var word = stream.current();
          style = state.lastTok == "." ? "property"
          : keywords.propertyIsEnumerable(stream.current()) ? "keyword"
          : /^[A-Z]/.test(word) ? "tag"
          : (state.lastTok == "def" || state.lastTok == "class" || state.varList) ? "def"
          : "variable";
          if (style == "keyword") {
            thisTok = word;
            if (indentWords.propertyIsEnumerable(word)) kwtype = "indent";
            else if (dedentWords.propertyIsEnumerable(word)) kwtype = "dedent";
          else if ((word == "if" || word == "unless") && stream.column() == stream.indentation())
            kwtype = "indent";
          else if (word == "do" && state.context.indented < state.indented)
            kwtype = "indent";
          }
        }
        if (curPunc || (style && style != "comment")) state.lastTok = thisTok;
        if (curPunc == "|") state.varList = !state.varList;

        if (kwtype == "indent" || /[\(\[\{]/.test(curPunc))
          state.context = {prev: state.context, type: curPunc || style, indented: state.indented};
        else if ((kwtype == "dedent" || /[\)\]\}]/.test(curPunc)) && state.context.prev)
          state.context = state.context.prev;

        if (stream.eol())
          state.continuedLine = (curPunc == "\\" || style == "operator");
        return style;
      },

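      // Indent one unit past the innermost context, except for lines that close
      // it (a closing bracket or a dedenting keyword); continued lines get one
      // extra unit.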
      indent: function (state, textAfter) {
        if (state.tokenize[state.tokenize.length - 1] != tokenBase) return 0;
        var firstChar = textAfter && textAfter.charAt(0);
        var ct = state.context;
        var closing = ct.type == matching[firstChar] ||
          ct.type == "keyword" && /^(?:end|until|else|elsif|when|rescue)\b/.test(textAfter);
        return ct.indented + (closing ? 0 : config.indentUnit) +
          (state.continuedLine ? config.indentUnit : 0);
      },

      electricChars: "}de", // enD and rescuE
      lineComment: "#"
    };
  });

  CodeMirror.defineMIME("text/x-ruby", "ruby");

});