
Merge branch 'master' of git@github.com:mongodb/mongo

dwight 2010-02-25 08:08:45 -05:00
commit b845f5e28e
9 changed files with 81 additions and 11 deletions

View File

@@ -341,7 +341,8 @@ namespace mongo {
}
void forgetEndKey() { endKey = BSONObj(); }
virtual bool useMatcher();
private:
/* Our btrees may (rarely) have "unused" keys when items are deleted.
Skip past them.
@@ -377,6 +378,7 @@ namespace mongo {
DiskLoc locAtKeyOfs;
BoundList bounds_;
unsigned boundIndex_;
const IndexSpec& _spec;
};
#pragma pack()

View File

@@ -36,7 +36,8 @@ namespace mongo {
indexDetails( _id ),
order( _id.keyPattern() ),
direction( _direction ),
boundIndex_()
boundIndex_(),
_spec( _id.getSpec() )
{
audit();
init();
@@ -51,7 +52,8 @@ namespace mongo {
order( _id.keyPattern() ),
direction( _direction ),
bounds_( _bounds ),
boundIndex_()
boundIndex_(),
_spec( _id.getSpec() )
{
assert( !bounds_.empty() );
audit();
@@ -74,6 +76,10 @@ namespace mongo {
}
void BtreeCursor::init() {
if ( _spec.getType() ){
startKey = _spec.getType()->fixKey( startKey );
endKey = _spec.getType()->fixKey( endKey );
}
bool found;
bucket = indexDetails.head.btree()->
locate(indexDetails, indexDetails.head, startKey, order, keyOfs, found, direction > 0 ? minDiskLoc : maxDiskLoc, direction);
@@ -88,6 +94,10 @@ namespace mongo {
init();
} while ( !ok() && ++boundIndex_ < bounds_.size() );
}
bool BtreeCursor::useMatcher(){
return _spec.getType() == 0;
}
/* skip unused keys. */
void BtreeCursor::skipUnusedKeys() {

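Taken together with the header change above, these hunks thread the index's IndexSpec through BtreeCursor: the constructors capture a reference to it, init() lets a typed index rewrite startKey and endKey into the form the index actually stores, and useMatcher() tells the query layer whether the generic document matcher is still needed. Below is a minimal self-contained sketch of that flow; the class and member names are stand-ins rather than mongo's real types, and "hash(...)" only marks where the real code would compute the geo hash.

#include <iostream>
#include <string>
#include <utility>

// Toy stand-ins for IndexType / IndexSpec / BtreeCursor; names are illustrative only.
struct IndexType {                       // a "special" index knows how to rewrite keys
    virtual ~IndexType() {}
    virtual std::string fixKey(const std::string& in) const { return in; }
};

struct GeoType : IndexType {             // e.g. a geo index stores a hash, not the raw point
    std::string fixKey(const std::string& in) const override { return "hash(" + in + ")"; }
};

struct IndexSpec {
    const IndexType* type;               // null for a plain btree index
    const IndexType* getType() const { return type; }
};

struct Cursor {
    const IndexSpec& spec;               // reference captured at construction, like _spec
    std::string startKey, endKey;

    Cursor(const IndexSpec& s, std::string lo, std::string hi)
        : spec(s), startKey(std::move(lo)), endKey(std::move(hi)) { init(); }

    void init() {                        // mirrors BtreeCursor::init(): fix the bounds if the index is typed
        if (spec.getType()) {
            startKey = spec.getType()->fixKey(startKey);
            endKey = spec.getType()->fixKey(endKey);
        }
    }
    bool useMatcher() const { return spec.getType() == 0; }   // only a plain index wants the generic matcher
};

int main() {
    GeoType geo;
    IndexSpec plain{0}, spatial{&geo};
    Cursor a(plain, "1", "9"), b(spatial, "[50,50]", "[50,50]");
    std::cout << a.startKey << " useMatcher=" << a.useMatcher() << "\n";   // 1 useMatcher=1
    std::cout << b.startKey << " useMatcher=" << b.useMatcher() << "\n";   // hash([50,50]) useMatcher=0
}

The consumer of useMatcher() is the query path further down, which decides what to feed its CoveredIndexMatcher based on the answer.
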
View File

@@ -95,6 +95,9 @@ namespace mongo {
virtual BSONObj prettyEndKey() const { return BSONObj(); }
virtual bool capped() const { return false; }
/* TODO: get rid of this, kind of a temp hack for geo */
virtual bool useMatcher() { return true; }
};
// strategy object implementing direction of traversal.

View File

@@ -45,6 +45,8 @@ namespace mongo {
const IndexPlugin * getPlugin() const { return _plugin; }
virtual BSONObj fixKey( const BSONObj& in ) { return in; }
protected:
const IndexPlugin * _plugin;
};

View File

@@ -230,6 +230,20 @@ namespace mongo {
}
virtual BSONObj fixKey( const BSONObj& in ) {
if ( ! in.firstElement().isABSONObj() )
return in;
BSONObjBuilder b;
b.append( "" , _hash( in.firstElement().embeddedObject() ) );
BSONObjIterator i(in);
i.next();
while ( i.more() )
b.append( i.next() );
return b.obj();
}
virtual void getKeys( const BSONObj &obj, BSONObjSetDefaultOrder &keys ) const {
BSONObjBuilder b(64);

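The geo plugin's fixKey() above rewrites a key whose first element is a point into the hashed form the index actually stores, while copying any trailing elements of a compound key through unchanged. The toy function below models only that shape of transformation: plain strings stand in for BSON elements, and "hash(...)" stands in for whatever _hash() would really produce.

#include <iostream>
#include <string>
#include <utility>
#include <vector>

// A key is modeled as a list of (name, value) pairs; index keys use empty names, as in the diff.
using Key = std::vector<std::pair<std::string, std::string>>;

Key fixKey(const Key& in) {
    if (in.empty() || in.front().second.substr(0, 1) != "[")
        return in;                                            // first element is not a point: leave the key alone
    Key out;
    out.push_back({"", "hash(" + in.front().second + ")"});   // replace the point with its hash
    for (size_t i = 1; i < in.size(); ++i)                    // copy any trailing elements unchanged
        out.push_back(in[i]);
    return out;
}

int main() {
    Key k = {{"", "[50,50]"}, {"", "\"foo\""}};               // roughly { "" : [ 50, 50 ], "" : "foo" }
    for (const auto& f : fixKey(k))
        std::cout << f.second << " ";                         // prints: hash([50,50]) "foo"
    std::cout << "\n";
}
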
View File

@@ -486,7 +486,10 @@ namespace mongo {
_c = qp().newCursor();
}
_matcher.reset(new CoveredIndexMatcher(qp().query(), qp().indexKey()));
if ( ! _c.get() || _c->useMatcher() )
_matcher.reset(new CoveredIndexMatcher( qp().query() , qp().indexKey()));
else
_matcher.reset(new CoveredIndexMatcher( BSONObj() , qp().indexKey()));
if ( qp().scanAndOrderRequired() ) {
_inMemSort = true;

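This is the consumer side of useMatcher(): when the cursor belongs to a typed index such as geo, the hunk builds the CoveredIndexMatcher from an empty query, so everything the cursor yields passes, on the assumption that the cursor has already applied the relevant predicate itself; an ordinary cursor still gets a matcher built from the full query. A compressed stand-alone sketch of that decision, with toy types in place of CoveredIndexMatcher and the cursor:

#include <functional>
#include <iostream>
#include <string>

using Doc = std::string;
using Matcher = std::function<bool(const Doc&)>;

Matcher matcherFor(const std::string& query) {
    // an empty query accepts every document, like a CoveredIndexMatcher built from BSONObj()
    return [query](const Doc& d) { return query.empty() || d.find(query) != std::string::npos; };
}

struct Cursor {
    bool typed;                                    // true for a geo (typed) index cursor
    bool useMatcher() const { return !typed; }
};

Matcher pickMatcher(const Cursor* c, const std::string& query) {
    if (!c || c->useMatcher())
        return matcherFor(query);                  // ordinary cursor: match documents against the query
    return matcherFor("");                         // typed cursor filters for itself: accept everything
}

int main() {
    Cursor geo{true}, plain{false};
    std::cout << pickMatcher(&plain, "x")("a doc containing x") << "\n";            // 1
    std::cout << pickMatcher(&geo, "x")("row already filtered by cursor") << "\n";  // 1
}
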
View File

@@ -39,6 +39,21 @@ namespace mongo {
uassert( 10111 , (string)"table scans not allowed:" + ns , ! cmdLine.notablescan );
}
bool anyElementNamesMatch( const BSONObj& a , const BSONObj& b ){
BSONObjIterator x(a);
while ( x.more() ){
BSONElement e = x.next();
BSONObjIterator y(b);
while ( y.more() ){
BSONElement f = y.next();
FieldCompareResult res = compareDottedFieldNames( e.fieldName() , f.fieldName() );
if ( res == SAME || res == LEFT_SUBFIELD || res == RIGHT_SUBFIELD )
return true;
}
}
return false;
}
double elementDirection( const BSONElement &e ) {
if ( e.isNumber() )
@@ -353,8 +368,18 @@ namespace mongo {
return;
}
bool normalQuery = hint_.isEmpty() && min_.isEmpty() && max_.isEmpty();
PlanSet plans;
for( int i = 0; i < d->nIndexes; ++i ) {
IndexDetails& id = d->idx(i);
const IndexSpec& spec = id.getSpec();
if ( normalQuery ){
if ( anyElementNamesMatch( spec.keyPattern , query_ ) == 0 &&
anyElementNamesMatch( spec.keyPattern , order_ ) == 0 )
continue;
}
PlanPtr p( new QueryPlan( d, i, fbs_, order_ ) );
if ( p->optimal() ) {
addPlan( p, checkFirst );
@@ -587,9 +612,11 @@ namespace mongo {
while( i.more() ) {
IndexDetails& ii = i.next();
if ( indexWorks( ii.keyPattern(), min.isEmpty() ? max : min, ret.first, ret.second ) ) {
id = &ii;
keyPattern = ii.keyPattern();
break;
if ( ii.getSpec().getType() == 0 ){
id = &ii;
keyPattern = ii.keyPattern();
break;
}
}
}

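anyElementNamesMatch(), added above, asks whether any field name of one object is related to any field name of the other, where "related" covers equality and dotted-prefix relationships (so a key on a overlaps a query on a.x). For ordinary queries, those with no hint, min, or max, the planner then skips any index that overlaps neither the query nor the sort. The sketch below approximates both pieces with plain field-name lists in place of BSON objects; related() is a simplified stand-in for compareDottedFieldNames().

#include <iostream>
#include <string>
#include <vector>

// Two names are related if one equals the other or is a dotted prefix of it,
// approximating SAME / LEFT_SUBFIELD / RIGHT_SUBFIELD.
bool related(const std::string& a, const std::string& b) {
    const std::string& s = a.size() <= b.size() ? a : b;
    const std::string& l = a.size() <= b.size() ? b : a;
    return l.compare(0, s.size(), s) == 0 && (l.size() == s.size() || l[s.size()] == '.');
}

bool anyElementNamesMatch(const std::vector<std::string>& keyPattern,
                          const std::vector<std::string>& other) {
    for (const auto& e : keyPattern)
        for (const auto& f : other)
            if (related(e, f))
                return true;
    return false;
}

int main() {
    std::vector<std::string> key = {"a"}, query = {"a.x"}, order = {"b"};
    std::cout << anyElementNamesMatch(key, query) << "\n";   // 1: "a" is a dotted prefix of "a.x"
    std::cout << anyElementNamesMatch(key, order) << "\n";   // 0: no overlap with the sort

    // Mirrors the pruning in the second hunk of this file: for an ordinary query,
    // an index sharing no fields with the query or the sort is not worth planning.
    bool normalQuery = true;
    if (normalQuery && !anyElementNamesMatch(key, query) && !anyElementNamesMatch(key, order))
        std::cout << "skip this index\n";                    // not reached for this key pattern
}
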
View File

@@ -33,8 +33,8 @@ t.find( { "a.x" : 1 } ).count();
t.find( { "a.x" : { $gt : 1 } } ).count();
res = t.find( { "a" : { $elemMatch : { x : { $gt : 2 } } } } ).explain()
assert( res.cursor.indexOf( "BtreeC" ) == 0 , "C1" );
assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "D2" );
assert( res.cursor.indexOf( "BtreeC" ) == 0 , "D2" );
assert.eq( 2 , t.find( { a : { $elemMatch : { x : { $gt : 2 } } } } ).count() , "D3" );
assert.eq( 2 , t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).count() , "E1" );
assert( t.find( { a : { $ne:2, $elemMatch : { x : { $gt : 2 } } } } ).explain().cursor.indexOf( "BtreeC" ) == 0 , "E2" );

View File

@@ -38,11 +38,20 @@ t.insert( { loc : [ 200 , 200 ] } )
assert( db.getLastError() , "B2" )
assert.eq( 3 , t.count() , "B3" );
// test normal access
wb = t.findOne( { zip : "06525" } )
assert( wb , "C1" );
assert.eq( "06525" , t.find( { loc : wb.loc } ).hint( { "$natural" : 1 } )[0].zip , "C2" )
assert.eq( "06525" , t.find( { loc : wb.loc } )[0].zip , "C3" )
assert.eq( 1 , t.find( { loc : wb.loc } ).explain().nscanned , "C4" )
// test config options
t.drop();
t.ensureIndex( { loc : "2d" } , { min : -500 , max : 500 , bits : 4 } );
t.insert( { loc : [ 200 , 200 ] } )
assert.isnull( db.getLastError() , "C1" )
assert.eq( 8 , t.find( {} ).hint( { loc : "2d" } )._addSpecial( "$returnKey" , true ).next().loc.length , "C2" )
assert.isnull( db.getLastError() , "D1" )
assert.eq( 8 , t.find( {} ).hint( { loc : "2d" } )._addSpecial( "$returnKey" , true ).next().loc.length , "D2" )