Changeset 13614e7 for t


Ignore:
Timestamp:
Jul 27, 2009, 11:23:57 PM (12 years ago)
Author:
Nelson Elhage <nelhage@mit.edu>
Branches:
master, release-1.4, release-1.5, release-1.6, release-1.7, release-1.8, release-1.9
Children:
d7bcff8
Parents:
8eac1a5
git-author:
Nelson Elhage <nelhage@mit.edu> (07/25/09 00:37:50)
git-committer:
Nelson Elhage <nelhage@mit.edu> (07/27/09 23:23:57)
Message:
Context.pm: Return the boundaries of the current word.

We need this in order to be able to replace the current word when completing.
File:
1 edited

Legend:

Unmodified
Added
Removed
  • t/completion.t

    r8eac1a5 r13614e7  
    2424    my $word_point = shift;
    2525
     26    my $word_start = shift;
     27    my $word_end   = shift;
     28
    2629    my $ctx = BarnOwl::Completion::Context->new($before_point,
    2730                                                $after_point);
     
    3336        is($ctx->word, $word, "Correct current word.");
    3437        is($ctx->word_point, $word_point, "Correct point within word.");
     38        is($ctx->word_start, $word_start, "Correct start of word");
     39        is($ctx->word_end,   $word_end, "Correct end of word");
    3540    }
    3641}
     
    4146no warnings 'qw';
    4247test_tokenize('Hello, W', 'orld',
    43               [qw(Hello, World)], 1, 1);
     48              [qw(Hello, World)], 1, 1, 7, 12);
    4449
    4550test_tokenize('Hello, World', '',
    46               [qw(Hello, World)], 1, 5);
     51              [qw(Hello, World)], 1, 5, 7, 12);
    4752
    4853test_tokenize('', '',
    49               [qw()], 0, 0);
     54              [qw()], 0, 0, 0, 0);
    5055
    5156test_tokenize('Hello', 'World',
    52               [qw(HelloWorld)], 0, 5);
     57              [qw(HelloWorld)], 0, 5, 0, 10);
    5358
    5459test_tokenize('lorem ipsum dolor ', 'sit amet',
    55               [qw(lorem ipsum dolor sit amet)], 3, 0);
     60              [qw(lorem ipsum dolor sit amet)],
     61              3, 0, 18, 21);
    5662
    5763test_tokenize(q{error "ls -l failed"}, q{},
    58               ['error', 'ls -l failed'], 1, 12);
     64              ['error', 'ls -l failed'],
     65              1, 12, 6, 20);
    5966
    6067test_tokenize(q{"a long"' word'}, q{},
    6168              ['a long word']);
    6269
    63 test_tokenize(q{"'"}, q{}, [q{'}]);
     70test_tokenize(q{"'"}, q{}, [q{'}], 0, 1, 0, 3);
    6471
    6572test_tokenize(q{"Hello, }, q{World"},
    66               [q{Hello, World}], 0, 7);
     73              [q{Hello, World}],
     74              0, 7, 0, 14);
    6775
    6876test_tokenize(q{But 'Hello, }, q{World'},
    69               ['But', q{Hello, World}], 1, 7);
     77              ['But', q{Hello, World}],
     78              1, 7, 4, 18);
    7079
    7180test_tokenize(q{But "Hello, }, q{World"''''''""},
    72               ['But', q{Hello, World}], 1, 7);
     81              ['But', q{Hello, World}],
     82              1, 7, 4, 26);
    7383
    7484test_tokenize(q{}, q{''Hello},
    75               ['Hello'], 0, 0);
     85              ['Hello'],
     86              0, 0, 0, 7);
    7687
    7788test_tokenize(q{"Hello, }, q{World},
    78               [q{Hello, World}], 0, 7);
     89              [q{Hello, World}],
     90              0, 7, 0, 13);
    7991
    8092test_tokenize(q{Hello    }, q{World},
    81               [qw{Hello World}], 1, 0);
     93              [qw{Hello World}],
     94              1, 0, 9, 14);
    8295
    8396test_tokenize(q{Hello '' ""}, q{ World},
    84               ["Hello", '', '', 'World'], 2, 0);
     97              ["Hello", '', '', 'World'],
     98              2, 0, 9, 11);
    8599
    86100# It's not entirely clear what we should do here. Make a test for the
    87101# current behavior, so we'll notice if it changes.
    88102test_tokenize(q{Hello }, q{ World},
    89               [qw(Hello World)], 1, -1);
     103              [qw(Hello World)],
     104              1, -1, 7, 12);
    90105
    911061;
Note: See TracChangeset for help on using the changeset viewer.