Why does matching on the result of Regex::find complain about expecting a struct regex::Match but found a tuple?

I copied this code from Code Review into IntelliJ IDEA to try and play around with it. I have a homework assignment that is similar to this one (I need to write a version of Linux's bc in Rust), so I am using this code only for reference purposes.



use std::io;
extern crate regex;
#[macro_use]
extern crate lazy_static;

use regex::Regex;

fn main() {
    let tokenizer = Tokenizer::new();

    loop {
        println!("Enter input:");
        let mut input = String::new();
        io::stdin()
            .read_line(&mut input)
            .expect("Failed to read line");
        let tokens = tokenizer.tokenize(&input);
        let stack = shunt(tokens);
        let res = calculate(stack);
        println!("{}", res);
    }
}

#[derive(Debug, PartialEq)]
enum Token {
    Number(i64),
    Plus,
    Sub,
    Mul,
    Div,
    LeftParen,
    RightParen,
}

impl Token {
    /// Returns the precedence of op
    fn precedence(&self) -> usize {
        match *self {
            Token::Plus | Token::Sub => 1,
            Token::Mul | Token::Div => 2,
            _ => 0,
        }
    }
}

struct Tokenizer {
    number: Regex,
}

impl Tokenizer {
    fn new() -> Tokenizer {
        Tokenizer {
            number: Regex::new(r"^[0-9]+").expect("Unable to create the regex"),
        }
    }

    /// Tokenizes the input string into a Vec of Tokens.
    fn tokenize(&self, mut input: &str) -> Vec<Token> {
        let mut res = vec![];

        loop {
            input = input.trim_left();
            if input.is_empty() { break }

            let (token, rest) = match self.number.find(input) {
                Some((_, end)) => {
                    let (num, rest) = input.split_at(end);
                    (Token::Number(num.parse().unwrap()), rest)
                },
                _ => {
                    match input.chars().next() {
                        Some(chr) => {
                            (match chr {
                                '+' => Token::Plus,
                                '-' => Token::Sub,
                                '*' => Token::Mul,
                                '/' => Token::Div,
                                '(' => Token::LeftParen,
                                ')' => Token::RightParen,
                                _ => panic!("Unknown character!"),
                            }, &input[chr.len_utf8()..])
                        }
                        None => panic!("Ran out of input"),
                    }
                }
            };

            res.push(token);
            input = rest;
        }

        res
    }
}

/// Transforms the tokens created by `tokenize` into RPN using the
/// [Shunting-yard algorithm](https://en.wikipedia.org/wiki/Shunting-yard_algorithm)
fn shunt(tokens: Vec<Token>) -> Vec<Token> {
    let mut queue = vec![];
    let mut stack: Vec<Token> = vec![];
    for token in tokens {
        match token {
            Token::Number(_) => queue.push(token),
            Token::Plus | Token::Sub | Token::Mul | Token::Div => {
                while let Some(o) = stack.pop() {
                    if token.precedence() <= o.precedence() {
                        queue.push(o);
                    } else {
                        stack.push(o);
                        break;
                    }
                }
                stack.push(token)
            },
            Token::LeftParen => stack.push(token),
            Token::RightParen => {
                let mut found_paren = false;
                while let Some(op) = stack.pop() {
                    match op {
                        Token::LeftParen => {
                            found_paren = true;
                            break;
                        },
                        _ => queue.push(op),
                    }
                }
                assert!(found_paren)
            },
        }
    }
    while let Some(op) = stack.pop() {
        queue.push(op);
    }
    queue
}

/// Takes a Vec of Tokens converted to RPN by `shunt` and calculates the result
fn calculate(tokens: Vec<Token>) -> i64 {
    let mut stack = vec![];
    for token in tokens {
        match token {
            Token::Number(n) => stack.push(n),
            Token::Plus => {
                let (b, a) = (stack.pop().unwrap(), stack.pop().unwrap());
                stack.push(a + b);
            },
            Token::Sub => {
                let (b, a) = (stack.pop().unwrap(), stack.pop().unwrap());
                stack.push(a - b);
            },
            Token::Mul => {
                let (b, a) = (stack.pop().unwrap(), stack.pop().unwrap());
                stack.push(a * b);
            },
            Token::Div => {
                let (b, a) = (stack.pop().unwrap(), stack.pop().unwrap());
                stack.push(a / b);
            },
            _ => {
                // By the time the token stream gets here, all the LeftParen
                // and RightParen tokens will have been removed by shunt()
                unreachable!();
            },
        }
    }
    stack[0]
}


When I run it, however, it gives me this error:



error[E0308]: mismatched types
  --> src\main.rs:66:22
   |
66 |             Some((_, end)) => {
   |                  ^^^^^^^^ expected struct `regex::Match`, found tuple
   |
   = note: expected type `regex::Match<'_>`
              found type `(_, _)`


It's complaining that I am using a tuple in the Some() pattern when it expects a regex::Match. I am not sure what to put in the pattern instead, because the code relies on destructuring that tuple to get the bounds of the match. How do I rewrite this so the Some() pattern matches what find returns? I have been working on this for a day but have not found a good solution.










1 Answer
The code you are referencing is over two years old; notably, it predates regex 1.0. Version 0.1.80 defines Regex::find as:

fn find(&self, text: &str) -> Option<(usize, usize)>

while version 1.0.6 defines it as:

pub fn find<'t>(&self, text: &'t str) -> Option<Match<'t>>

However, Match provides methods that return the starting and ending indices the old code assumed it was getting. In this case, since you only care about the end index, you can call Match::end:

let (token, rest) = match self.number.find(input).map(|x| x.end()) {
    Some(end) => {
        // ...
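
For reference, here is a minimal, self-contained sketch of the 1.x behaviour (the regex = "1" dependency and the "42+7" input are just illustrative assumptions, not part of the original code). It shows that Match::end() yields the same byte offset the old (start, end) tuple carried in its second field:

use regex::Regex;

fn main() {
    // Same pattern the tokenizer uses: digits anchored at the start of the input.
    let number = Regex::new(r"^[0-9]+").expect("Unable to create the regex");
    let input = "42+7";

    // In regex 1.x, `find` returns Option<Match<'_>> instead of Option<(usize, usize)>.
    match number.find(input) {
        Some(m) => {
            // m.start() and m.end() are the byte offsets the old tuple used to hold.
            let (num, rest) = input.split_at(m.end());
            println!("matched {:?}, number {:?}, rest {:?}", m.as_str(), num, rest);
            // Prints: matched "42", number "42", rest "+7"
        }
        None => println!("no leading number"),
    }
}

Whether you destructure the Match in the arm or map it to its end offset up front, as in the snippet above, is a style choice; either way the rest of tokenize stays unchanged.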




