diff --git a/Gopkg.lock b/Gopkg.lock index f33a1ae15..9c49b71e6 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -397,7 +397,7 @@ "plumbing/transport/ssh", "storage", "storage/filesystem", - "storage/filesystem/internal/dotgit", + "storage/filesystem/dotgit", "storage/memory", "utils/binary", "utils/diff", @@ -408,9 +408,8 @@ "utils/merkletrie/internal/frame", "utils/merkletrie/noder" ] - revision = "1d28459504251497e0ce6132a0fadd5eb44ffd22" + revision = "0710c6cb710a0cdab04ab7f61cc62e23cfcacbee" source = "github.com/src-d/go-git" - version = "v4.2.0" [[projects]] name = "gopkg.in/src-d/go-mysql-server.v0" @@ -473,6 +472,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "6abe7ca012dfe4335e71168bb653994ce974beed86f6dabd253586fcfa52ba96" + inputs-digest = "f64276b9d0a2816ec92e200752139d87f1b31807c1bf61d3c6020478377bf7d4" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 8bcabb8d9..4f3905c80 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -17,6 +17,7 @@ [[constraint]] name = "gopkg.in/src-d/go-git.v4" source = "github.com/src-d/go-git" + revision = "0710c6cb710a0cdab04ab7f61cc62e23cfcacbee" [[constraint]] name = "gopkg.in/src-d/go-git-fixtures.v3" diff --git a/blobs.go b/blobs.go index 7b68e8c4a..617a0a561 100644 --- a/blobs.go +++ b/blobs.go @@ -7,6 +7,7 @@ import ( "gopkg.in/src-d/go-mysql-server.v0/sql" "gopkg.in/src-d/go-mysql-server.v0/sql/expression" + "gopkg.in/src-d/go-mysql-server.v0/sql/plan" "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/plumbing/object" @@ -39,14 +40,13 @@ var BlobsSchema = sql.Schema{ var _ sql.PushdownProjectionAndFiltersTable = (*blobsTable)(nil) func newBlobsTable() Indexable { - return &indexableTable{ - PushdownTable: new(blobsTable), - buildIterWithSelectors: blobsIterBuilder, - } + return new(blobsTable) } var _ Table = (*blobsTable)(nil) +var _ Squashable = (*blobsTable)(nil) +func (blobsTable) isSquashable() {} func 
(blobsTable) isGitbaseTable() {} func (blobsTable) String() string { @@ -118,6 +118,50 @@ func (r *blobsTable) WithProjectAndFilters( return sql.NewSpanIter(span, iter), nil } +// IndexKeyValueIter implements the sql.Indexable interface. +func (*blobsTable) IndexKeyValueIter( + ctx *sql.Context, + colNames []string, +) (sql.IndexKeyValueIter, error) { + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + return newBlobsKeyValueIter(s.Pool, colNames), nil +} + +// WithProjectFiltersAndIndex implements sql.Indexable interface. +func (*blobsTable) WithProjectFiltersAndIndex( + ctx *sql.Context, + columns, filters []sql.Expression, + index sql.IndexValueIter, +) (sql.RowIter, error) { + span, ctx := ctx.Span("gitbase.BlobsTable.WithProjectFiltersAndIndex") + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + span.Finish() + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + session, err := getSession(ctx) + if err != nil { + return nil, err + } + + var iter sql.RowIter = &blobsIndexIter{ + index: index, + pool: session.Pool, + readContent: shouldReadContent(columns), + } + + if len(filters) > 0 { + iter = plan.NewFilterIter(ctx, expression.JoinAnd(filters...), iter) + } + + return sql.NewSpanIter(span, iter), nil +} + func blobsIterBuilder(_ *sql.Context, selectors selectors, columns []sql.Expression) (RowRepoIter, error) { if len(selectors["blob_hash"]) == 0 { return &blobIter{readContent: shouldReadContent(columns)}, nil @@ -138,7 +182,6 @@ type blobIter struct { repoID string iter *object.BlobIter readContent bool - lastHash string } func (i *blobIter) NewIterator(repo *Repository) (RowRepoIter, error) { @@ -150,17 +193,11 @@ func (i *blobIter) NewIterator(repo *Repository) (RowRepoIter, error) { return &blobIter{repoID: repo.ID, iter: iter, readContent: i.readContent}, nil } -func (i *blobIter) Repository() string { return i.repoID } - -func (i *blobIter) LastObject() string { 
return i.lastHash } - func (i *blobIter) Next() (sql.Row, error) { o, err := i.iter.Next() if err != nil { return nil, err } - - i.lastHash = o.Hash.String() return blobToRow(i.repoID, o, i.readContent) } @@ -177,17 +214,12 @@ type blobsByHashIter struct { pos int hashes []string readContent bool - lastHash string } func (i *blobsByHashIter) NewIterator(repo *Repository) (RowRepoIter, error) { - return &blobsByHashIter{repo, 0, i.hashes, i.readContent, ""}, nil + return &blobsByHashIter{repo, 0, i.hashes, i.readContent}, nil } -func (i *blobsByHashIter) Repository() string { return i.repo.ID } - -func (i *blobsByHashIter) LastObject() string { return i.lastHash } - func (i *blobsByHashIter) Next() (sql.Row, error) { for { if i.pos >= len(i.hashes) { @@ -205,7 +237,6 @@ func (i *blobsByHashIter) Next() (sql.Row, error) { return nil, err } - i.lastHash = hash.String() return blobToRow(i.repo.ID, blob, i.readContent) } } @@ -304,3 +335,106 @@ func shouldReadContent(columns []sql.Expression) bool { } return false } + +type blobsKeyValueIter struct { + iter *objectIter + columns []string +} + +func newBlobsKeyValueIter(pool *RepositoryPool, columns []string) *blobsKeyValueIter { + return &blobsKeyValueIter{ + iter: newObjectIter(pool, plumbing.BlobObject), + columns: columns, + } +} + +func (i *blobsKeyValueIter) Next() ([]interface{}, []byte, error) { + obj, err := i.iter.Next() + if err != nil { + return nil, nil, err + } + + key, err := encodeIndexKey(packOffsetIndexKey{ + Repository: obj.RepositoryID, + Packfile: obj.Packfile.String(), + Offset: int64(obj.Offset), + }) + if err != nil { + return nil, nil, err + } + + blob, ok := obj.Object.(*object.Blob) + if !ok { + return nil, nil, ErrInvalidObjectType.New(obj.Object, "*object.Blob") + } + + row, err := blobToRow(obj.RepositoryID, blob, stringContains(i.columns, "blob_content")) + if err != nil { + return nil, nil, err + } + + values, err := rowIndexValues(row, i.columns, BlobsSchema) + if err != nil { + return nil, nil, err + } + 
+ return values, key, nil +} + +func (i *blobsKeyValueIter) Close() error { return i.iter.Close() } + +type blobsIndexIter struct { + index sql.IndexValueIter + pool *RepositoryPool + decoder *objectDecoder + readContent bool +} + +func (i *blobsIndexIter) Next() (sql.Row, error) { + data, err := i.index.Next() + if err != nil { + return nil, err + } + + var key packOffsetIndexKey + if err := decodeIndexKey(data, &key); err != nil { + return nil, err + } + + packfile := plumbing.NewHash(key.Packfile) + if i.decoder == nil || !i.decoder.equals(key.Repository, packfile) { + if i.decoder != nil { + if err := i.decoder.Close(); err != nil { + return nil, err + } + } + + i.decoder, err = newObjectDecoder(i.pool.repositories[key.Repository], packfile) + if err != nil { + return nil, err + } + } + + obj, err := i.decoder.get(key.Offset) + if err != nil { + return nil, err + } + + blob, ok := obj.(*object.Blob) + if !ok { + return nil, ErrInvalidObjectType.New(obj, "*object.Blob") + } + + return blobToRow(key.Repository, blob, i.readContent) +} + +func (i *blobsIndexIter) Close() error { + if i.decoder != nil { + if err := i.decoder.Close(); err != nil { + _ = i.index.Close() + return err + } + } + + return i.index.Close() +} diff --git a/blobs_test.go b/blobs_test.go index 1ef921859..942aadb9b 100644 --- a/blobs_test.go +++ b/blobs_test.go @@ -135,3 +135,139 @@ func TestBlobsPushdown(t *testing.T) { require.NoError(err) require.Len(rows, 0) } + +func TestBlobsIndexKeyValueIter(t *testing.T) { + require := require.New(t) + ctx, path, cleanup := setup(t) + defer cleanup() + + table := new(blobsTable) + iter, err := table.IndexKeyValueIter(ctx, []string{"blob_hash", "blob_size"}) + require.NoError(err) + + var expected = []keyValue{ + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1591, + }), + []interface{}{ + "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", + int64(189), + }, + }, + { + 
assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 79864, + }), + []interface{}{ + "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9", + int64(217848), + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 2418, + }), + []interface{}{ + "7e59600739c96546163833214c36459e324bad0a", + int64(9), + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78932, + }), + []interface{}{ + "880cd14280f4b9b6ed3986d6671f907d7cc2a198", + int64(2780), + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 82000, + }), + []interface{}{ + "9a48f23120e880dfbe41f7c9b7b708e9ee62a492", + int64(11488), + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 85438, + }), + []interface{}{ + "9dea2395f5403188298c1dabe8bdafe562c491e3", + int64(78), + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1780, + }), + []interface{}{ + "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", + int64(1072), + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 81707, + }), + []interface{}{ + "c8f1d8c61f9da76f4cb49fd86322b6e685dba956", + int64(706), + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1752, + }), + []interface{}{ + "d3ff53e0564a9f87d8e84b6e28e5060e517008aa", + int64(18), + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 2436, + }), + 
[]interface{}{ + "d5c0f4ab811897cadf03aec358ae60d21f91c50d", + int64(76110), + }, + }, + } + + assertIndexKeyValueIter(t, iter, expected) +} + +func TestBlobsIndex(t *testing.T) { + testTableIndex( + t, + new(blobsTable), + []sql.Expression{expression.NewEquals( + expression.NewGetField(1, sql.Text, "blob_hash", false), + expression.NewLiteral("32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", sql.Text), + )}, + ) +} diff --git a/commit_blobs.go b/commit_blobs.go index 690c3299a..542ffd365 100644 --- a/commit_blobs.go +++ b/commit_blobs.go @@ -5,6 +5,8 @@ import ( "gopkg.in/src-d/go-git.v4/plumbing/object" "gopkg.in/src-d/go-mysql-server.v0/sql" + "gopkg.in/src-d/go-mysql-server.v0/sql/expression" + "gopkg.in/src-d/go-mysql-server.v0/sql/plan" ) type commitBlobsTable struct{} @@ -19,10 +21,7 @@ var CommitBlobsSchema = sql.Schema{ var _ sql.PushdownProjectionAndFiltersTable = (*commitBlobsTable)(nil) func newCommitBlobsTable() Indexable { - return &indexableTable{ - PushdownTable: new(commitBlobsTable), - buildIterWithSelectors: commitBlobsIterBuilder, - } + return new(commitBlobsTable) } func (commitBlobsTable) isGitbaseTable() {} @@ -84,6 +83,46 @@ func (t *commitBlobsTable) WithProjectAndFilters( return sql.NewSpanIter(span, iter), nil } +// IndexKeyValueIter implements the sql.Indexable interface. +func (*commitBlobsTable) IndexKeyValueIter( + ctx *sql.Context, + colNames []string, +) (sql.IndexKeyValueIter, error) { + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + iter, err := NewRowRepoIter(ctx, new(commitBlobsIter)) + if err != nil { + return nil, err + } + + return &rowKeyValueIter{iter, colNames, CommitBlobsSchema}, nil +} + +// WithProjectFiltersAndIndex implements sql.Indexable interface. 
+func (*commitBlobsTable) WithProjectFiltersAndIndex( + ctx *sql.Context, + columns, filters []sql.Expression, + index sql.IndexValueIter, +) (sql.RowIter, error) { + span, ctx := ctx.Span("gitbase.CommitBlobsTable.WithProjectFiltersAndIndex") + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + span.Finish() + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + var iter sql.RowIter = &rowIndexIter{index} + + if len(filters) > 0 { + iter = plan.NewFilterIter(ctx, expression.JoinAnd(filters...), iter) + } + + return sql.NewSpanIter(span, iter), nil +} + func commitBlobsIterBuilder(ctx *sql.Context, selectors selectors, columns []sql.Expression) (RowRepoIter, error) { repos, err := selectors.textValues("repository_id") if err != nil { diff --git a/commit_blobs_test.go b/commit_blobs_test.go index c8e7b2d86..b18300c5c 100644 --- a/commit_blobs_test.go +++ b/commit_blobs_test.go @@ -163,3 +163,39 @@ func TestCommitBlobsTablePushdown(t *testing.T) { }) } } + +func TestCommitBlobsIndexKeyValueIter(t *testing.T) { + require := require.New(t) + ctx, _, cleanup := setup(t) + defer cleanup() + + table := new(commitBlobsTable) + iter, err := table.IndexKeyValueIter(ctx, []string{"blob_hash", "commit_hash"}) + require.NoError(err) + + i, err := table.RowIter(ctx) + require.NoError(err) + rows, err := sql.RowIterToRows(i) + require.NoError(err) + + var expected []keyValue + for _, row := range rows { + var kv keyValue + kv.key = assertEncodeKey(t, row) + kv.values = append(kv.values, row[2], row[1]) + expected = append(expected, kv) + } + + assertIndexKeyValueIter(t, iter, expected) +} + +func TestCommitBlobsIndex(t *testing.T) { + testTableIndex( + t, + new(commitBlobsTable), + []sql.Expression{expression.NewEquals( + expression.NewGetField(1, sql.Text, "commit_hash", false), + expression.NewLiteral("af2d6a6954d532f8ffb47615169c8fdf9d383a1a", sql.Text), + )}, + ) +} diff --git a/commit_trees.go b/commit_trees.go index d085a2c63..d6625cde5 100644 --- 
a/commit_trees.go +++ b/commit_trees.go @@ -8,6 +8,8 @@ import ( "gopkg.in/src-d/go-git.v4/plumbing/filemode" "gopkg.in/src-d/go-git.v4/plumbing/object" "gopkg.in/src-d/go-mysql-server.v0/sql" + "gopkg.in/src-d/go-mysql-server.v0/sql/expression" + "gopkg.in/src-d/go-mysql-server.v0/sql/plan" ) type commitTreesTable struct{} @@ -22,12 +24,12 @@ var CommitTreesSchema = sql.Schema{ var _ sql.PushdownProjectionAndFiltersTable = (*commitTreesTable)(nil) func newCommitTreesTable() Indexable { - return &indexableTable{ - PushdownTable: new(commitTreesTable), - buildIterWithSelectors: commitTreesIterBuilder, - } + return new(commitTreesTable) } +var _ Squashable = (*commitTreesTable)(nil) + +func (commitTreesTable) isSquashable() {} func (commitTreesTable) isGitbaseTable() {} func (commitTreesTable) String() string { @@ -87,6 +89,46 @@ func (t *commitTreesTable) WithProjectAndFilters( return sql.NewSpanIter(span, iter), nil } +// IndexKeyValueIter implements the sql.Indexable interface. +func (*commitTreesTable) IndexKeyValueIter( + ctx *sql.Context, + colNames []string, +) (sql.IndexKeyValueIter, error) { + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + iter, err := NewRowRepoIter(ctx, &commitTreesIter{ctx: ctx}) + if err != nil { + return nil, err + } + + return &rowKeyValueIter{iter, colNames, CommitTreesSchema}, nil +} + +// WithProjectFiltersAndIndex implements sql.Indexable interface. 
+func (*commitTreesTable) WithProjectFiltersAndIndex( + ctx *sql.Context, + columns, filters []sql.Expression, + index sql.IndexValueIter, +) (sql.RowIter, error) { + span, ctx := ctx.Span("gitbase.CommitTreesTable.WithProjectFiltersAndIndex") + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + span.Finish() + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + var iter sql.RowIter = &rowIndexIter{index} + + if len(filters) > 0 { + iter = plan.NewFilterIter(ctx, expression.JoinAnd(filters...), iter) + } + + return sql.NewSpanIter(span, iter), nil +} + func commitTreesIterBuilder(ctx *sql.Context, selectors selectors, columns []sql.Expression) (RowRepoIter, error) { repos, err := selectors.textValues("repository_id") if err != nil { diff --git a/commit_trees_test.go b/commit_trees_test.go index 803dcfc47..d4ad112e3 100644 --- a/commit_trees_test.go +++ b/commit_trees_test.go @@ -117,3 +117,39 @@ func TestCommitTreesPushdown(t *testing.T) { }) } } + +func TestCommitTreesIndexKeyValueIter(t *testing.T) { + require := require.New(t) + ctx, _, cleanup := setup(t) + defer cleanup() + + table := new(commitTreesTable) + iter, err := table.IndexKeyValueIter(ctx, []string{"tree_hash", "commit_hash"}) + require.NoError(err) + + i, err := table.RowIter(ctx) + require.NoError(err) + rows, err := sql.RowIterToRows(i) + require.NoError(err) + + var expected []keyValue + for _, row := range rows { + var kv keyValue + kv.key = assertEncodeKey(t, row) + kv.values = append(kv.values, row[2], row[1]) + expected = append(expected, kv) + } + + assertIndexKeyValueIter(t, iter, expected) +} + +func TestCommitTreesIndex(t *testing.T) { + testTableIndex( + t, + new(commitTreesTable), + []sql.Expression{expression.NewEquals( + expression.NewGetField(1, sql.Text, "commit_hash", false), + expression.NewLiteral("af2d6a6954d532f8ffb47615169c8fdf9d383a1a", sql.Text), + )}, + ) +} diff --git a/commits.go b/commits.go index 2f161fc25..1fd6aae1d 100644 --- a/commits.go +++ 
b/commits.go @@ -4,6 +4,8 @@ import ( "io" "gopkg.in/src-d/go-mysql-server.v0/sql" + "gopkg.in/src-d/go-mysql-server.v0/sql/expression" + "gopkg.in/src-d/go-mysql-server.v0/sql/plan" "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/plumbing/object" @@ -29,14 +31,13 @@ var CommitsSchema = sql.Schema{ var _ sql.PushdownProjectionAndFiltersTable = (*commitsTable)(nil) func newCommitsTable() Indexable { - return &indexableTable{ - PushdownTable: new(commitsTable), - buildIterWithSelectors: commitsIterBuilder, - } + return new(commitsTable) } var _ Table = (*commitsTable)(nil) +var _ Squashable = (*commitsTable)(nil) +func (commitsTable) isSquashable() {} func (commitsTable) isGitbaseTable() {} func (commitsTable) String() string { @@ -108,6 +109,46 @@ func (r *commitsTable) WithProjectAndFilters( return sql.NewSpanIter(span, iter), nil } +// IndexKeyValueIter implements the sql.Indexable interface. +func (*commitsTable) IndexKeyValueIter( + ctx *sql.Context, + colNames []string, +) (sql.IndexKeyValueIter, error) { + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + return newCommitsKeyValueIter(s.Pool, colNames), nil +} + +// WithProjectFiltersAndIndex implements sql.Indexable interface. 
+func (*commitsTable) WithProjectFiltersAndIndex( + ctx *sql.Context, + columns, filters []sql.Expression, + index sql.IndexValueIter, +) (sql.RowIter, error) { + span, ctx := ctx.Span("gitbase.CommitsTable.WithProjectFiltersAndIndex") + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + span.Finish() + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + session, err := getSession(ctx) + if err != nil { + return nil, err + } + + var iter sql.RowIter = &commitsIndexIter{index: index, pool: session.Pool} + + if len(filters) > 0 { + iter = plan.NewFilterIter(ctx, expression.JoinAnd(filters...), iter) + } + + return sql.NewSpanIter(span, iter), nil +} + func commitsIterBuilder(_ *sql.Context, selectors selectors, _ []sql.Expression) (RowRepoIter, error) { hashes, err := selectors.textValues("commit_hash") if err != nil { @@ -118,10 +159,9 @@ func commitsIterBuilder(_ *sql.Context, selectors selectors, _ []sql.Expression) } type commitIter struct { - repoID string - iter object.CommitIter - hashes []string - lastHash string + repoID string + iter object.CommitIter + hashes []string } func (i *commitIter) NewIterator(repo *Repository) (RowRepoIter, error) { @@ -133,17 +173,12 @@ func (i *commitIter) NewIterator(repo *Repository) (RowRepoIter, error) { return &commitIter{repoID: repo.ID, iter: iter}, nil } -func (i *commitIter) Repository() string { return i.repoID } - -func (i *commitIter) LastObject() string { return i.lastHash } - func (i *commitIter) Next() (sql.Row, error) { o, err := i.iter.Next() if err != nil { return nil, err } - i.lastHash = o.Hash.String() return commitToRow(i.repoID, o), nil } @@ -261,3 +296,101 @@ func (i *commitsByHashIter) nextList() (*object.Commit, error) { return commit, nil } } + +type commitsKeyValueIter struct { + iter *objectIter + columns []string +} + +func newCommitsKeyValueIter(pool *RepositoryPool, columns []string) *commitsKeyValueIter { + return &commitsKeyValueIter{ + iter: newObjectIter(pool, 
plumbing.CommitObject), + columns: columns, + } +} + +func (i *commitsKeyValueIter) Next() ([]interface{}, []byte, error) { + obj, err := i.iter.Next() + if err != nil { + return nil, nil, err + } + + key, err := encodeIndexKey(packOffsetIndexKey{ + Repository: obj.RepositoryID, + Packfile: obj.Packfile.String(), + Offset: int64(obj.Offset), + }) + if err != nil { + return nil, nil, err + } + + commit, ok := obj.Object.(*object.Commit) + if !ok { + return nil, nil, ErrInvalidObjectType.New(obj.Object, "*object.Commit") + } + + row := commitToRow(obj.RepositoryID, commit) + values, err := rowIndexValues(row, i.columns, CommitsSchema) + if err != nil { + return nil, nil, err + } + + return values, key, nil +} + +func (i *commitsKeyValueIter) Close() error { return i.iter.Close() } + +type commitsIndexIter struct { + index sql.IndexValueIter + pool *RepositoryPool + decoder *objectDecoder +} + +func (i *commitsIndexIter) Next() (sql.Row, error) { + data, err := i.index.Next() + if err != nil { + return nil, err + } + + var key packOffsetIndexKey + if err := decodeIndexKey(data, &key); err != nil { + return nil, err + } + + packfile := plumbing.NewHash(key.Packfile) + if i.decoder == nil || !i.decoder.equals(key.Repository, packfile) { + if i.decoder != nil { + if err := i.decoder.Close(); err != nil { + return nil, err + } + } + + i.decoder, err = newObjectDecoder(i.pool.repositories[key.Repository], packfile) + if err != nil { + return nil, err + } + } + + obj, err := i.decoder.get(key.Offset) + if err != nil { + return nil, err + } + + commit, ok := obj.(*object.Commit) + if !ok { + return nil, ErrInvalidObjectType.New(obj, "*object.Commit") + } + + return commitToRow(key.Repository, commit), nil +} + +func (i *commitsIndexIter) Close() error { + if i.decoder != nil { + if err := i.decoder.Close(); err != nil { + _ = i.index.Close() + return err + } + } + + return i.index.Close() +} diff --git a/commits_test.go b/commits_test.go index c5853d9b6..75f821b8c 100644 --- 
a/commits_test.go +++ b/commits_test.go @@ -202,5 +202,129 @@ func TestCommitsParents(t *testing.T) { require.ElementsMatch(t, test.parents, parents) }) } +} + +func TestCommitsIndexKeyValueIter(t *testing.T) { + require := require.New(t) + ctx, path, cleanup := setup(t) + defer cleanup() + + table := new(commitsTable) + iter, err := table.IndexKeyValueIter(ctx, []string{"commit_hash", "commit_author_email"}) + require.NoError(err) + + var expected = []keyValue{ + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 682, + }), + []interface{}{ + "1669dce138d9b841a518c64b10914d88f5e488ea", + "mcuadros@gmail.com", + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1292, + }), + []interface{}{ + "35e85108805c84807bc66a02d91535e1e24b38b9", + "mcuadros@gmail.com", + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 186, + }), + []interface{}{ + "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", + "mcuadros@gmail.com", + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 353, + }), + []interface{}{ + "918c48b83bd081e863dbe1b80f8998f058cd8294", + "mcuadros@gmail.com", + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 905, + }), + []interface{}{ + "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69", + "mcuadros@gmail.com", + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 516, + }), + []interface{}{ + "af2d6a6954d532f8ffb47615169c8fdf9d383a1a", + "mcuadros@gmail.com", + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: 
"323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1459, + }), + []interface{}{ + "b029517f6300c2da0f4b651b8642506cd6aaf45d", + "mcuadros@gmail.com", + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1130, + }), + []interface{}{ + "b8e471f58bcbca63b07bda20e428190409c2db47", + "daniel@lordran.local", + }, + }, + { + assertEncodeKey(t, packOffsetIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 12, + }), + []interface{}{ + "e8d3ffab552895c19b9fcf7aa264d277cde33881", + "mcuadros@gmail.com", + }, + }, + } + + assertIndexKeyValueIter(t, iter, expected) +} +func TestCommitsIndex(t *testing.T) { + testTableIndex( + t, + new(commitsTable), + []sql.Expression{expression.NewEquals( + expression.NewGetField(1, sql.Text, "commit_hash", false), + expression.NewLiteral("af2d6a6954d532f8ffb47615169c8fdf9d383a1a", sql.Text), + )}, + ) } diff --git a/files.go b/files.go index d961cd723..90e9a213a 100644 --- a/files.go +++ b/files.go @@ -4,8 +4,11 @@ import ( "io" "gopkg.in/src-d/go-git.v4/plumbing" + "gopkg.in/src-d/go-git.v4/plumbing/filemode" "gopkg.in/src-d/go-git.v4/plumbing/object" "gopkg.in/src-d/go-mysql-server.v0/sql" + "gopkg.in/src-d/go-mysql-server.v0/sql/expression" + "gopkg.in/src-d/go-mysql-server.v0/sql/plan" ) type filesTable struct{} @@ -21,12 +24,13 @@ var FilesSchema = sql.Schema{ {Name: "blob_size", Type: sql.Int64, Source: "files"}, } -func newFilesTable() sql.Table { +func newFilesTable() Indexable { return new(filesTable) } var _ sql.PushdownProjectionAndFiltersTable = (*filesTable)(nil) +func (filesTable) isGitbaseTable() {} func (filesTable) Resolved() bool { return true } func (filesTable) Name() string { return FilesTableName } func (filesTable) Schema() sql.Schema { return FilesSchema } @@ -104,6 +108,50 @@ func (filesTable) WithProjectAndFilters( return sql.NewSpanIter(span, iter), nil } +// IndexKeyValueIter 
implements the sql.Indexable interface. +func (*filesTable) IndexKeyValueIter( + ctx *sql.Context, + colNames []string, +) (sql.IndexKeyValueIter, error) { + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + return newFilesKeyValueIter(s.Pool, colNames) +} + +// WithProjectFiltersAndIndex implements sql.Indexable interface. +func (*filesTable) WithProjectFiltersAndIndex( + ctx *sql.Context, + columns, filters []sql.Expression, + index sql.IndexValueIter, +) (sql.RowIter, error) { + span, ctx := ctx.Span("gitbase.FilesTable.WithProjectFiltersAndIndex") + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + span.Finish() + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + session, err := getSession(ctx) + if err != nil { + return nil, err + } + + var iter sql.RowIter = &filesIndexIter{ + index: index, + pool: session.Pool, + readContent: shouldReadContent(columns), + } + + if len(filters) > 0 { + iter = plan.NewFilterIter(ctx, expression.JoinAnd(filters...), iter) + } + + return sql.NewSpanIter(span, iter), nil +} + func (filesTable) String() string { return printTable(FilesTableName, FilesSchema) } @@ -251,3 +299,195 @@ func fileToRow( file.Size, ), nil } + +type fileIndexKey struct { + Repository string + Packfile string + Offset int64 + Name string + Mode int64 + Tree string +} + +type filesKeyValueIter struct { + pool *RepositoryPool + repo *Repository + repos *RepositoryIter + commits object.CommitIter + files *object.FileIter + commit *object.Commit + idx *repositoryIndex + columns []string + seen map[plumbing.Hash]struct{} +} + +func newFilesKeyValueIter(pool *RepositoryPool, columns []string) (*filesKeyValueIter, error) { + repos, err := pool.RepoIter() + if err != nil { + return nil, err + } + + return &filesKeyValueIter{ + pool: pool, + repos: repos, + columns: columns, + }, nil +} + +func (i *filesKeyValueIter) Next() ([]interface{}, []byte, error) { + for { + if i.commits 
== nil { + var err error + i.repo, err = i.repos.Next() + if err != nil { + return nil, nil, err + } + + i.seen = make(map[plumbing.Hash]struct{}) + + i.commits, err = i.repo.Repo.CommitObjects() + if err != nil { + return nil, nil, err + } + + repo := i.pool.repositories[i.repo.ID] + i.idx, err = newRepositoryIndex(repo.path, repo.kind) + if err != nil { + return nil, nil, err + } + } + + if i.files == nil { + var err error + i.commit, err = i.commits.Next() + if err != nil { + if err == io.EOF { + i.commits = nil + continue + } + return nil, nil, err + } + + if _, ok := i.seen[i.commit.TreeHash]; ok { + continue + } + i.seen[i.commit.TreeHash] = struct{}{} + + i.files, err = i.commit.Files() + if err != nil { + return nil, nil, err + } + } + + f, err := i.files.Next() + if err != nil { + if err == io.EOF { + i.files = nil + continue + } + + return nil, nil, err + } + + offset, packfile, err := i.idx.find(f.Blob.Hash) + if err != nil { + return nil, nil, err + } + + key, err := encodeIndexKey(fileIndexKey{ + Repository: i.repo.ID, + Packfile: packfile.String(), + Offset: offset, + Name: f.Name, + Tree: i.commit.TreeHash.String(), + Mode: int64(f.Mode), + }) + if err != nil { + return nil, nil, err + } + + row, err := fileToRow(i.repo.ID, i.commit.TreeHash, f, stringContains(i.columns, "blob_content")) + if err != nil { + return nil, nil, err + } + + values, err := rowIndexValues(row, i.columns, FilesSchema) + if err != nil { + return nil, nil, err + } + + return values, key, nil + } +} + +func (i *filesKeyValueIter) Close() error { + if i.commits != nil { + i.commits.Close() + } + + if i.files != nil { + i.files.Close() + } + + return i.repos.Close() +} + +type filesIndexIter struct { + index sql.IndexValueIter + pool *RepositoryPool + decoder *objectDecoder + readContent bool +} + +func (i *filesIndexIter) Next() (sql.Row, error) { + data, err := i.index.Next() + if err != nil { + return nil, err + } + + var key fileIndexKey + if err := decodeIndexKey(data, &key); err != nil { + return 
nil, err + } + + packfile := plumbing.NewHash(key.Packfile) + if i.decoder == nil || !i.decoder.equals(key.Repository, packfile) { + if i.decoder != nil { + if err := i.decoder.Close(); err != nil { + return nil, err + } + } + + i.decoder, err = newObjectDecoder(i.pool.repositories[key.Repository], packfile) + if err != nil { + return nil, err + } + } + + obj, err := i.decoder.get(key.Offset) + if err != nil { + return nil, err + } + + blob, ok := obj.(*object.Blob) + if !ok { + return nil, ErrInvalidObjectType.New(obj, "*object.Blob") + } + + file := &object.File{ + Blob: *blob, + Name: key.Name, + Mode: filemode.FileMode(key.Mode), + } + + return fileToRow(key.Repository, plumbing.NewHash(key.Tree), file, i.readContent) +} + +func (i *filesIndexIter) Close() error { + if i.decoder != nil { + if err := i.decoder.Close(); err != nil { + _ = i.index.Close() + return err + } + } + + return i.index.Close() +} diff --git a/files_test.go b/files_test.go index c1f539257..44dd66724 100644 --- a/files_test.go +++ b/files_test.go @@ -155,3 +155,602 @@ func TestFilesTablePushdown(t *testing.T) { }) } } + +func TestFilesIndexKeyValueIter(t *testing.T) { + require := require.New(t) + ctx, path, cleanup := setup(t) + defer cleanup() + + table := new(filesTable) + iter, err := table.IndexKeyValueIter(ctx, []string{"file_path", "blob_hash"}) + require.NoError(err) + + var expected = []keyValue{ + { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1591, + Name: ".gitignore", + Mode: 33188, + Tree: "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }), + []interface{}{ + ".gitignore", + "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1752, + Name: "CHANGELOG", + Mode: 33188, + Tree: "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }), + []interface{}{ + "CHANGELOG", + 
"d3ff53e0564a9f87d8e84b6e28e5060e517008aa", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1780, + Name: "LICENSE", + Mode: 33188, + Tree: "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }), + []interface{}{ + "LICENSE", + "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 2418, + Name: "README", + Mode: 33188, + Tree: "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }), + []interface{}{ + "README", + "7e59600739c96546163833214c36459e324bad0a", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 2436, + Name: "binary.jpg", + Mode: 33188, + Tree: "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }), + []interface{}{ + "binary.jpg", + "d5c0f4ab811897cadf03aec358ae60d21f91c50d", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78932, + Name: "go/example.go", + Mode: 33188, + Tree: "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }), + []interface{}{ + "go/example.go", + "880cd14280f4b9b6ed3986d6671f907d7cc2a198", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 79864, + Name: "json/long.json", + Mode: 33188, + Tree: "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }), + []interface{}{ + "json/long.json", + "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 81707, + Name: "json/short.json", + Mode: 33188, + Tree: "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }), + []interface{}{ + "json/short.json", + "c8f1d8c61f9da76f4cb49fd86322b6e685dba956", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: 
path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 82000, + Name: "php/crappy.php", + Mode: 33188, + Tree: "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }), + []interface{}{ + "php/crappy.php", + "9a48f23120e880dfbe41f7c9b7b708e9ee62a492", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1591, + Name: ".gitignore", + Mode: 33188, + Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }), + []interface{}{ + ".gitignore", + "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1752, + Name: "CHANGELOG", + Mode: 33188, + Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }), + []interface{}{ + "CHANGELOG", + "d3ff53e0564a9f87d8e84b6e28e5060e517008aa", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1780, + Name: "LICENSE", + Mode: 33188, + Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }), + []interface{}{ + "LICENSE", + "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 2436, + Name: "binary.jpg", + Mode: 33188, + Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }), + []interface{}{ + "binary.jpg", + "d5c0f4ab811897cadf03aec358ae60d21f91c50d", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78932, + Name: "go/example.go", + Mode: 33188, + Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }), + []interface{}{ + "go/example.go", + "880cd14280f4b9b6ed3986d6671f907d7cc2a198", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 79864, + Name: "json/long.json", + Mode: 
33188, + Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }), + []interface{}{ + "json/long.json", + "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 81707, + Name: "json/short.json", + Mode: 33188, + Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }), + []interface{}{ + "json/short.json", + "c8f1d8c61f9da76f4cb49fd86322b6e685dba956", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 82000, + Name: "php/crappy.php", + Mode: 33188, + Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }), + []interface{}{ + "php/crappy.php", + "9a48f23120e880dfbe41f7c9b7b708e9ee62a492", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 85438, + Name: "vendor/foo.go", + Mode: 33188, + Tree: "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }), + []interface{}{ + "vendor/foo.go", + "9dea2395f5403188298c1dabe8bdafe562c491e3", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1591, + Name: ".gitignore", + Mode: 33188, + Tree: "fb72698cab7617ac416264415f13224dfd7a165e", + }), + []interface{}{ + ".gitignore", + "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1752, + Name: "CHANGELOG", + Mode: 33188, + Tree: "fb72698cab7617ac416264415f13224dfd7a165e", + }), + []interface{}{ + "CHANGELOG", + "d3ff53e0564a9f87d8e84b6e28e5060e517008aa", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1780, + Name: "LICENSE", + Mode: 33188, + Tree: "fb72698cab7617ac416264415f13224dfd7a165e", + }), + []interface{}{ + "LICENSE", 
+ "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 2436, + Name: "binary.jpg", + Mode: 33188, + Tree: "fb72698cab7617ac416264415f13224dfd7a165e", + }), + []interface{}{ + "binary.jpg", + "d5c0f4ab811897cadf03aec358ae60d21f91c50d", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78932, + Name: "go/example.go", + Mode: 33188, + Tree: "fb72698cab7617ac416264415f13224dfd7a165e", + }), + []interface{}{ + "go/example.go", + "880cd14280f4b9b6ed3986d6671f907d7cc2a198", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 79864, + Name: "json/long.json", + Mode: 33188, + Tree: "fb72698cab7617ac416264415f13224dfd7a165e", + }), + []interface{}{ + "json/long.json", + "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 81707, + Name: "json/short.json", + Mode: 33188, + Tree: "fb72698cab7617ac416264415f13224dfd7a165e", + }), + []interface{}{ + "json/short.json", + "c8f1d8c61f9da76f4cb49fd86322b6e685dba956", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 82000, + Name: "php/crappy.php", + Mode: 33188, + Tree: "fb72698cab7617ac416264415f13224dfd7a165e", + }), + []interface{}{ + "php/crappy.php", + "9a48f23120e880dfbe41f7c9b7b708e9ee62a492", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1591, + Name: ".gitignore", + Mode: 33188, + Tree: "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }), + []interface{}{ + ".gitignore", + "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", + }, + }, { + assertEncodeKey(t, 
fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1752, + Name: "CHANGELOG", + Mode: 33188, + Tree: "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }), + []interface{}{ + "CHANGELOG", + "d3ff53e0564a9f87d8e84b6e28e5060e517008aa", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1780, + Name: "LICENSE", + Mode: 33188, + Tree: "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }), + []interface{}{ + "LICENSE", + "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 2436, + Name: "binary.jpg", + Mode: 33188, + Tree: "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }), + []interface{}{ + "binary.jpg", + "d5c0f4ab811897cadf03aec358ae60d21f91c50d", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 79864, + Name: "json/long.json", + Mode: 33188, + Tree: "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }), + []interface{}{ + "json/long.json", + "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 81707, + Name: "json/short.json", + Mode: 33188, + Tree: "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }), + []interface{}{ + "json/short.json", + "c8f1d8c61f9da76f4cb49fd86322b6e685dba956", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1591, + Name: ".gitignore", + Mode: 33188, + Tree: "eba74343e2f15d62adedfd8c883ee0262b5c8021", + }), + []interface{}{ + ".gitignore", + "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1752, + 
Name: "CHANGELOG", + Mode: 33188, + Tree: "eba74343e2f15d62adedfd8c883ee0262b5c8021", + }), + []interface{}{ + "CHANGELOG", + "d3ff53e0564a9f87d8e84b6e28e5060e517008aa", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1780, + Name: "LICENSE", + Mode: 33188, + Tree: "eba74343e2f15d62adedfd8c883ee0262b5c8021", + }), + []interface{}{ + "LICENSE", + "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 2436, + Name: "binary.jpg", + Mode: 33188, + Tree: "eba74343e2f15d62adedfd8c883ee0262b5c8021", + }), + []interface{}{ + "binary.jpg", + "d5c0f4ab811897cadf03aec358ae60d21f91c50d", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1591, + Name: ".gitignore", + Mode: 33188, + Tree: "c2d30fa8ef288618f65f6eed6e168e0d514886f4", + }), + []interface{}{ + ".gitignore", + "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1752, + Name: "CHANGELOG", + Mode: 33188, + Tree: "c2d30fa8ef288618f65f6eed6e168e0d514886f4", + }), + []interface{}{ + "CHANGELOG", + "d3ff53e0564a9f87d8e84b6e28e5060e517008aa", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1780, + Name: "LICENSE", + Mode: 33188, + Tree: "c2d30fa8ef288618f65f6eed6e168e0d514886f4", + }), + []interface{}{ + "LICENSE", + "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1591, + Name: ".gitignore", + Mode: 33188, + Tree: "8dcef98b1d52143e1e2dbc458ffe38f925786bf2", + }), + []interface{}{ + ".gitignore", + 
"32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1780, + Name: "LICENSE", + Mode: 33188, + Tree: "8dcef98b1d52143e1e2dbc458ffe38f925786bf2", + }), + []interface{}{ + "LICENSE", + "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 2436, + Name: "binary.jpg", + Mode: 33188, + Tree: "8dcef98b1d52143e1e2dbc458ffe38f925786bf2", + }), + []interface{}{ + "binary.jpg", + "d5c0f4ab811897cadf03aec358ae60d21f91c50d", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1591, + Name: ".gitignore", + Mode: 33188, + Tree: "aa9b383c260e1d05fbbf6b30a02914555e20c725", + }), + []interface{}{ + ".gitignore", + "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", + }, + }, { + assertEncodeKey(t, fileIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 1780, + Name: "LICENSE", + Mode: 33188, + Tree: "aa9b383c260e1d05fbbf6b30a02914555e20c725", + }), + []interface{}{ + "LICENSE", + "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", + }, + }, + } + + assertIndexKeyValueIter(t, iter, expected) +} + +func TestFilesIndex(t *testing.T) { + testTableIndex( + t, + new(filesTable), + []sql.Expression{expression.NewEquals( + expression.NewGetField(1, sql.Text, "file_path", false), + expression.NewLiteral("LICENSE", sql.Text), + )}, + ) +} diff --git a/index.go b/index.go index 887b0e20c..df70e0b00 100644 --- a/index.go +++ b/index.go @@ -3,239 +3,109 @@ package gitbase import ( "bytes" "encoding/gob" + "time" errors "gopkg.in/src-d/go-errors.v1" "gopkg.in/src-d/go-mysql-server.v0/sql" - "gopkg.in/src-d/go-mysql-server.v0/sql/expression" - "gopkg.in/src-d/go-mysql-server.v0/sql/plan" ) var ( // ErrColumnNotFound is returned when a given column 
is not found in the table's schema. ErrColumnNotFound = errors.NewKind("column %s not found for table %s") - // ErrCreateIndexValue is returned if an index value can't be generated. - ErrCreateIndexValue = errors.NewKind("couldn't create index value, missing %s") - // ErrIndexValue is returned when a index value is malformed. - ErrIndexValue = errors.NewKind("wrong index value found") + // ErrInvalidObjectType is returned when the received object is not of the correct type. + ErrInvalidObjectType = errors.NewKind("got object of type %T, expecting %s") ) // Indexable represents an indexable gitbase table. type Indexable interface { sql.Indexable - PushdownTable + Table } -// PushdownTable represents a gitbase table that is able to pushdown projections and filters. -type PushdownTable interface { - sql.PushdownProjectionAndFiltersTable - gitBase - handledColumns() []string +func encodeIndexKey(k interface{}) ([]byte, error) { + var buf bytes.Buffer + err := gob.NewEncoder(&buf).Encode(k) + return buf.Bytes(), err } -type indexableTable struct { - PushdownTable - buildIterWithSelectors iteratorBuilder +func decodeIndexKey(data []byte, k interface{}) error { + return gob.NewDecoder(bytes.NewBuffer(data)).Decode(k) } -var _ sql.Indexable = (*indexableTable)(nil) - -// IndexKeyValueIter implements sql.Indexable interface. 
-func (i *indexableTable) IndexKeyValueIter(ctx *sql.Context, colNames []string) (sql.IndexKeyValueIter, error) { - s, ok := ctx.Session.(*Session) - if !ok || s == nil { - return nil, ErrInvalidGitbaseSession.New(ctx.Session) - } - - colIndexes := []int{} - columns := []sql.Expression{} - for _, colName := range colNames { - idx := i.Schema().IndexOf(colName, i.Name()) - if idx < 0 { - return nil, ErrColumnNotFound.New(colName, i.Name()) +func rowIndexValues(row sql.Row, columns []string, schema sql.Schema) ([]interface{}, error) { + var values = make([]interface{}, len(columns)) + for i, col := range columns { + var found bool + for j, c := range schema { + if c.Name == col { + values[i] = row[j] + found = true + break + } } - colIndexes = append(colIndexes, idx) - - col := expression.NewGetFieldWithTable( - idx, - i.Schema()[idx].Type, - i.Schema()[idx].Source, - i.Schema()[idx].Name, - i.Schema()[idx].Nullable, - ) - - columns = append(columns, col) - } - - rIter, err := s.Pool.RepoIter() - if err != nil { - return nil, err - } - - tableIter, err := i.buildIterWithSelectors(ctx, nil, columns) - if err != nil { - return nil, err - } - - repoIter := &rowRepoIter{ - currRepoIter: nil, - repositoryIter: rIter, - iter: tableIter, - session: s, - ctx: ctx, + if !found { + return nil, ErrColumnNotFound.New(col, schema[0].Source) + } } - - return &indexKVIter{ - repoIter: repoIter, - colIndexes: colIndexes, - }, nil + return values, nil } -// WithProjectFiltersAndIndex implements sql.Indexable interface. -func (i *indexableTable) WithProjectFiltersAndIndex(ctx *sql.Context, columns, filters []sql.Expression, index sql.IndexValueIter) (sql.RowIter, error) { - s, ok := ctx.Session.(*Session) - if !ok || s == nil { - return nil, ErrInvalidGitbaseSession.New(ctx.Session) - } +type rowKeyValueIter struct { + iter sql.RowIter + columns []string + schema sql.Schema +} - selectors, filters, err := classifyFilters(i.Schema(), i.Name(), - filters, i.handledColumns()...) 
+func (i *rowKeyValueIter) Next() ([]interface{}, []byte, error) { + row, err := i.iter.Next() if err != nil { - return nil, err + return nil, nil, err } - rowRepoIter, err := i.buildIterWithSelectors(ctx, selectors, columns) + key, err := encodeIndexKey(row) if err != nil { - return nil, err - } - - indexIter := &indexIter{ - iter: rowRepoIter, - idxValueIter: index, - pool: s.Pool, + return nil, nil, err } - if len(filters) == 0 { - return indexIter, nil + values, err := rowIndexValues(row, i.columns, i.schema) + if err != nil { + return nil, nil, err } - return plan.NewFilterIter(ctx, expression.JoinAnd(filters...), indexIter), nil -} - -type indexIter struct { - repoID string - iter RowRepoIter - currIter RowRepoIter - - pool *RepositoryPool - idxValueIter sql.IndexValueIter -} - -var _ sql.RowIter = (*indexIter)(nil) - -func (i *indexIter) Next() (sql.Row, error) { - for { - v, err := i.idxValueIter.Next() - if err != nil { - return nil, err - } - - idxVal, err := unmarshalIndexValue(v) - if err != nil || idxVal.ID == "" || idxVal.Object == "" { - return nil, ErrIndexValue.New() - } - - if i.repoID != idxVal.ID { - repo, err := i.pool.GetRepo(idxVal.ID) - if err != nil { - return nil, err - } - - iter, err := i.iter.NewIterator(repo) - if err != nil { - return nil, err - } - - i.repoID = repo.ID - i.currIter = iter - } - - return i.currIter.Next() - } + return values, key, nil } -func (i *indexIter) Close() error { - if i.currIter != nil { - i.currIter.Close() - } - - return nil -} +func (i *rowKeyValueIter) Close() error { return i.iter.Close() } -type indexValue struct { - ID string - Object string +type rowIndexIter struct { + index sql.IndexValueIter } -func marshalIndexValue(value *indexValue) ([]byte, error) { - var raw bytes.Buffer - enc := gob.NewEncoder(&raw) - if err := enc.Encode(value); err != nil { +func (i *rowIndexIter) Next() (sql.Row, error) { + data, err := i.index.Next() + if err != nil { return nil, err } - return raw.Bytes(), nil -} - 
-func unmarshalIndexValue(raw []byte) (*indexValue, error) { - value := bytes.NewReader(raw) - dec := gob.NewDecoder(value) - idxValue := &indexValue{} - if err := dec.Decode(idxValue); err != nil { + var row sql.Row + if err := decodeIndexKey(data, &row); err != nil { return nil, err } - return idxValue, nil + return row, nil } -type indexKVIter struct { - repoIter *rowRepoIter - colIndexes []int -} - -var _ sql.IndexKeyValueIter = (*indexKVIter)(nil) - -func (i *indexKVIter) Next() ([]interface{}, []byte, error) { - row, err := i.repoIter.Next() - if err != nil { - return nil, nil, err - } - - repoID := i.repoIter.currRepoIter.Repository() - if repoID == "" { - return nil, nil, ErrCreateIndexValue.New("repository id") - } - - object := i.repoIter.currRepoIter.LastObject() - if object == "" { - return nil, nil, ErrCreateIndexValue.New("object") - } - - idxValue := &indexValue{repoID, object} - - colValues := []interface{}{} - for _, idx := range i.colIndexes { - colValues = append(colValues, row[idx]) - } - - value, err := marshalIndexValue(idxValue) - if err != nil { - return nil, nil, err - } +func (i *rowIndexIter) Close() error { return i.index.Close() } - return colValues, value, nil +type packOffsetIndexKey struct { + Repository string + Packfile string + Offset int64 } -func (i *indexKVIter) Close() error { - return i.repoIter.Close() +func init() { + gob.Register(sql.Row{}) + gob.Register(time.Time{}) + gob.Register([]interface{}{}) } diff --git a/index_test.go b/index_test.go index 2c1e0cfed..e7c8055e3 100644 --- a/index_test.go +++ b/index_test.go @@ -3,1219 +3,116 @@ package gitbase import ( "io" "testing" - "time" - - "gopkg.in/src-d/go-mysql-server.v0/sql" - "gopkg.in/src-d/go-mysql-server.v0/sql/expression" "github.com/stretchr/testify/require" + "gopkg.in/src-d/go-mysql-server.v0/sql" ) -type indexTest struct { - name string - node sql.Indexable - colNames []string - expectedKVs []expectedKV - filters []sql.Expression - columns 
[]sql.Expression - expectedRows []sql.Row +func assertEncodeKey(t *testing.T, key interface{}) []byte { + data, err := encodeIndexKey(key) + require.NoError(t, err) + return data } -type expectedKV struct { - key []interface{} - idxValue *indexValue +type indexValueIter struct { + values [][]byte + pos int } -func TestIndexableTable(t *testing.T) { - ctx, paths, cleanup := setupRepos(t) - defer cleanup() +func newIndexValueIter(values ...[]byte) sql.IndexValueIter { + return &indexValueIter{values, 0} +} - expectedRepos := []expectedKV{} - for _, path := range paths { - expectedRepos = append(expectedRepos, expectedKV{ - key: []interface{}{path}, - idxValue: &indexValue{path, path}, - }) +func (i *indexValueIter) Next() ([]byte, error) { + if i.pos >= len(i.values) { + return nil, io.EOF } - var tests = []*indexTest{ - { - name: "blobs indexable table", - node: newBlobsTable(), - colNames: []string{"blob_hash", "blob_size"}, - expectedKVs: []expectedKV{ - { - key: []interface{}{"32858aad3c383ed1ff0a0f9bdf231d54a00c9e88", int64(189)}, - idxValue: &indexValue{paths[0], "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - }, - { - key: []interface{}{"d3ff53e0564a9f87d8e84b6e28e5060e517008aa", int64(18)}, - idxValue: &indexValue{paths[0], "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - }, - { - key: []interface{}{"c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", int64(1072)}, - idxValue: &indexValue{paths[0], "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - }, - { - key: []interface{}{"7e59600739c96546163833214c36459e324bad0a", int64(9)}, - idxValue: &indexValue{paths[0], "7e59600739c96546163833214c36459e324bad0a"}, - }, - { - key: []interface{}{"d5c0f4ab811897cadf03aec358ae60d21f91c50d", int64(76110)}, - idxValue: &indexValue{paths[0], "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - }, - { - key: []interface{}{"880cd14280f4b9b6ed3986d6671f907d7cc2a198", int64(2780)}, - idxValue: &indexValue{paths[0], "880cd14280f4b9b6ed3986d6671f907d7cc2a198"}, - }, - { - key: 
[]interface{}{"49c6bb89b17060d7b4deacb7b338fcc6ea2352a9", int64(217848)}, - idxValue: &indexValue{paths[0], "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9"}, - }, - { - key: []interface{}{"c8f1d8c61f9da76f4cb49fd86322b6e685dba956", int64(706)}, - idxValue: &indexValue{paths[0], "c8f1d8c61f9da76f4cb49fd86322b6e685dba956"}, - }, - { - key: []interface{}{"9a48f23120e880dfbe41f7c9b7b708e9ee62a492", int64(11488)}, - idxValue: &indexValue{paths[0], "9a48f23120e880dfbe41f7c9b7b708e9ee62a492"}, - }, - { - key: []interface{}{"9dea2395f5403188298c1dabe8bdafe562c491e3", int64(78)}, - idxValue: &indexValue{paths[0], "9dea2395f5403188298c1dabe8bdafe562c491e3"}, - }, - { - key: []interface{}{"278871477afb195f908155a65b5c651f1cfd02d3", int64(172)}, - idxValue: &indexValue{paths[1], "278871477afb195f908155a65b5c651f1cfd02d3"}, - }, - { - key: []interface{}{"97b013ecd2cc7f572960509f659d8068798d59ca", int64(83)}, - idxValue: &indexValue{paths[1], "97b013ecd2cc7f572960509f659d8068798d59ca"}, - }, - { - key: []interface{}{"b4f017e8c030d24aef161569b9ade3e55931ba01", int64(20)}, - idxValue: &indexValue{paths[1], "b4f017e8c030d24aef161569b9ade3e55931ba01"}, - }, - }, - columns: []sql.Expression{ - expression.NewGetFieldWithTable(1, sql.Text, BlobsTableName, "blob_hash", false), - }, - filters: []sql.Expression{ - expression.NewGreaterThanOrEqual( - expression.NewGetFieldWithTable(2, sql.Int64, BlobsTableName, "blob_size", false), - expression.NewLiteral(int64(75000), sql.Int64), - ), - }, - expectedRows: []sql.Row{ - sql.NewRow(paths[0], "d5c0f4ab811897cadf03aec358ae60d21f91c50d", int64(76110), []uint8(nil)), - sql.NewRow(paths[0], "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9", int64(217848), []uint8(nil)), - }, - }, - { - name: "tree_entries indexable table", - node: newTreeEntriesTable(), - colNames: []string{"tree_entry_name", "blob_hash"}, - expectedKVs: []expectedKV{ - { - key: []interface{}{"example.go", "880cd14280f4b9b6ed3986d6671f907d7cc2a198"}, - idxValue: &indexValue{paths[0], 
"a39771a7651f97faf5c72e08224d857fc35133db"}, - }, - { - key: []interface{}{"long.json", "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9"}, - idxValue: &indexValue{paths[0], "5a877e6a906a2743ad6e45d99c1793642aaf8eda"}, - }, - { - key: []interface{}{"short.json", "c8f1d8c61f9da76f4cb49fd86322b6e685dba956"}, - idxValue: &indexValue{paths[0], "5a877e6a906a2743ad6e45d99c1793642aaf8eda"}, - }, - { - key: []interface{}{"crappy.php", "9a48f23120e880dfbe41f7c9b7b708e9ee62a492"}, - idxValue: &indexValue{paths[0], "586af567d0bb5e771e49bdd9434f5e0fb76d25fa"}, - }, - { - key: []interface{}{"foo.go", "9dea2395f5403188298c1dabe8bdafe562c491e3"}, - idxValue: &indexValue{paths[0], "cf4aa3b38974fb7d81f367c0830f7d78d65ab86b"}, - }, - { - key: []interface{}{".gitignore", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "a8d315b2b1c615d43042c3a62402b8a54288cf5c"}, - }, - { - key: []interface{}{"CHANGELOG", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "a8d315b2b1c615d43042c3a62402b8a54288cf5c"}, - }, - { - key: []interface{}{"LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "a8d315b2b1c615d43042c3a62402b8a54288cf5c"}, - }, - { - key: []interface{}{"binary.jpg", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], "a8d315b2b1c615d43042c3a62402b8a54288cf5c"}, - }, - { - key: []interface{}{"go", "a39771a7651f97faf5c72e08224d857fc35133db"}, - idxValue: &indexValue{paths[0], "a8d315b2b1c615d43042c3a62402b8a54288cf5c"}, - }, - { - key: []interface{}{"json", "5a877e6a906a2743ad6e45d99c1793642aaf8eda"}, - idxValue: &indexValue{paths[0], "a8d315b2b1c615d43042c3a62402b8a54288cf5c"}, - }, - { - key: []interface{}{"php", "586af567d0bb5e771e49bdd9434f5e0fb76d25fa"}, - idxValue: &indexValue{paths[0], "a8d315b2b1c615d43042c3a62402b8a54288cf5c"}, - }, - { - key: []interface{}{"vendor", "cf4aa3b38974fb7d81f367c0830f7d78d65ab86b"}, - idxValue: &indexValue{paths[0], 
"a8d315b2b1c615d43042c3a62402b8a54288cf5c"}, - }, - { - key: []interface{}{".gitignore", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "fb72698cab7617ac416264415f13224dfd7a165e"}, - }, - { - key: []interface{}{"CHANGELOG", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "fb72698cab7617ac416264415f13224dfd7a165e"}, - }, - { - key: []interface{}{"LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "fb72698cab7617ac416264415f13224dfd7a165e"}, - }, - { - key: []interface{}{"binary.jpg", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], "fb72698cab7617ac416264415f13224dfd7a165e"}, - }, - { - key: []interface{}{"go", "a39771a7651f97faf5c72e08224d857fc35133db"}, - idxValue: &indexValue{paths[0], "fb72698cab7617ac416264415f13224dfd7a165e"}, - }, - { - key: []interface{}{"json", "5a877e6a906a2743ad6e45d99c1793642aaf8eda"}, - idxValue: &indexValue{paths[0], "fb72698cab7617ac416264415f13224dfd7a165e"}, - }, - { - key: []interface{}{"php", "586af567d0bb5e771e49bdd9434f5e0fb76d25fa"}, - idxValue: &indexValue{paths[0], "fb72698cab7617ac416264415f13224dfd7a165e"}, - }, - { - key: []interface{}{".gitignore", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "dbd3641b371024f44d0e469a9c8f5457b0660de1"}, - }, - { - key: []interface{}{"CHANGELOG", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "dbd3641b371024f44d0e469a9c8f5457b0660de1"}, - }, - { - key: []interface{}{"LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "dbd3641b371024f44d0e469a9c8f5457b0660de1"}, - }, - { - key: []interface{}{"README", "7e59600739c96546163833214c36459e324bad0a"}, - idxValue: &indexValue{paths[0], "dbd3641b371024f44d0e469a9c8f5457b0660de1"}, - }, - { - key: []interface{}{"binary.jpg", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], 
"dbd3641b371024f44d0e469a9c8f5457b0660de1"}, - }, - { - key: []interface{}{"go", "a39771a7651f97faf5c72e08224d857fc35133db"}, - idxValue: &indexValue{paths[0], "dbd3641b371024f44d0e469a9c8f5457b0660de1"}, - }, - { - key: []interface{}{"json", "5a877e6a906a2743ad6e45d99c1793642aaf8eda"}, - idxValue: &indexValue{paths[0], "dbd3641b371024f44d0e469a9c8f5457b0660de1"}, - }, - { - key: []interface{}{"php", "586af567d0bb5e771e49bdd9434f5e0fb76d25fa"}, - idxValue: &indexValue{paths[0], "dbd3641b371024f44d0e469a9c8f5457b0660de1"}, - }, - { - key: []interface{}{".gitignore", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd"}, - }, - { - key: []interface{}{"CHANGELOG", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd"}, - }, - { - key: []interface{}{"LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd"}, - }, - { - key: []interface{}{"binary.jpg", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd"}, - }, - { - key: []interface{}{"json", "5a877e6a906a2743ad6e45d99c1793642aaf8eda"}, - idxValue: &indexValue{paths[0], "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd"}, - }, - { - key: []interface{}{".gitignore", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "eba74343e2f15d62adedfd8c883ee0262b5c8021"}, - }, - { - key: []interface{}{"CHANGELOG", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "eba74343e2f15d62adedfd8c883ee0262b5c8021"}, - }, - { - key: []interface{}{"LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "eba74343e2f15d62adedfd8c883ee0262b5c8021"}, - }, - { - key: []interface{}{"binary.jpg", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], 
"eba74343e2f15d62adedfd8c883ee0262b5c8021"}, - }, - { - key: []interface{}{".gitignore", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "c2d30fa8ef288618f65f6eed6e168e0d514886f4"}, - }, - { - key: []interface{}{"CHANGELOG", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "c2d30fa8ef288618f65f6eed6e168e0d514886f4"}, - }, - { - key: []interface{}{"LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "c2d30fa8ef288618f65f6eed6e168e0d514886f4"}, - }, - { - key: []interface{}{".gitignore", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "8dcef98b1d52143e1e2dbc458ffe38f925786bf2"}, - }, - { - key: []interface{}{"LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "8dcef98b1d52143e1e2dbc458ffe38f925786bf2"}, - }, - { - key: []interface{}{"binary.jpg", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], "8dcef98b1d52143e1e2dbc458ffe38f925786bf2"}, - }, - { - key: []interface{}{".gitignore", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "aa9b383c260e1d05fbbf6b30a02914555e20c725"}, - }, - { - key: []interface{}{"LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "aa9b383c260e1d05fbbf6b30a02914555e20c725"}, - }, - { - key: []interface{}{".gitmodules", "278871477afb195f908155a65b5c651f1cfd02d3"}, - idxValue: &indexValue{paths[1], "3bf5d30ad4f23cf517676fee232e3bcb8537c1d0"}, - }, - { - key: []interface{}{"README.md", "b4f017e8c030d24aef161569b9ade3e55931ba01"}, - idxValue: &indexValue{paths[1], "3bf5d30ad4f23cf517676fee232e3bcb8537c1d0"}, - }, - { - key: []interface{}{"basic", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - idxValue: &indexValue{paths[1], "3bf5d30ad4f23cf517676fee232e3bcb8537c1d0"}, - }, - { - key: []interface{}{"itself", "47770b26e71b0f69c0ecd494b1066f8d1da4fc03"}, - idxValue: &indexValue{paths[1], 
"3bf5d30ad4f23cf517676fee232e3bcb8537c1d0"}, - }, - { - key: []interface{}{".gitmodules", "278871477afb195f908155a65b5c651f1cfd02d3"}, - idxValue: &indexValue{paths[1], "8ac3015df16d47179e903d0379b52267359c1499"}, - }, - { - key: []interface{}{"basic", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - idxValue: &indexValue{paths[1], "8ac3015df16d47179e903d0379b52267359c1499"}, - }, - { - key: []interface{}{"itself", "c7431b5bc9d45fb64a87d4a895ce3d1073c898d2"}, - idxValue: &indexValue{paths[1], "8ac3015df16d47179e903d0379b52267359c1499"}, - }, - { - key: []interface{}{".gitmodules", "278871477afb195f908155a65b5c651f1cfd02d3"}, - idxValue: &indexValue{paths[1], "c4db5d7fc75aa3bef9004122d0cf2a2679935ef8"}, - }, - { - key: []interface{}{"basic", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - idxValue: &indexValue{paths[1], "c4db5d7fc75aa3bef9004122d0cf2a2679935ef8"}, - }, - { - key: []interface{}{"itself", "47770b26e71b0f69c0ecd494b1066f8d1da4fc03"}, - idxValue: &indexValue{paths[1], "c4db5d7fc75aa3bef9004122d0cf2a2679935ef8"}, - }, - { - key: []interface{}{".gitmodules", "97b013ecd2cc7f572960509f659d8068798d59ca"}, - idxValue: &indexValue{paths[1], "efe525d0f1372593df812e3f6faa4e05bb91f498"}, - }, - { - key: []interface{}{"basic", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - idxValue: &indexValue{paths[1], "efe525d0f1372593df812e3f6faa4e05bb91f498"}, - }, - }, - columns: []sql.Expression{ - expression.NewGetFieldWithTable(1, sql.Text, TreeEntriesTableName, "tree_hash", false), - expression.NewGetFieldWithTable(2, sql.Text, TreeEntriesTableName, "blob_hash", false), - }, - filters: []sql.Expression{ - expression.NewEquals( - expression.NewGetFieldWithTable(1, sql.Text, TreeEntriesTableName, "tree_entry_name", false), - expression.NewLiteral("LICENSE", sql.Text), - ), - }, - expectedRows: []sql.Row{ - sql.NewRow(paths[0], "LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", "a8d315b2b1c615d43042c3a62402b8a54288cf5c", "100644"), - sql.NewRow(paths[0], "LICENSE", 
"c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", "fb72698cab7617ac416264415f13224dfd7a165e", "100644"), - sql.NewRow(paths[0], "LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", "dbd3641b371024f44d0e469a9c8f5457b0660de1", "100644"), - sql.NewRow(paths[0], "LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", "100644"), - sql.NewRow(paths[0], "LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", "eba74343e2f15d62adedfd8c883ee0262b5c8021", "100644"), - sql.NewRow(paths[0], "LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", "c2d30fa8ef288618f65f6eed6e168e0d514886f4", "100644"), - sql.NewRow(paths[0], "LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", "8dcef98b1d52143e1e2dbc458ffe38f925786bf2", "100644"), - sql.NewRow(paths[0], "LICENSE", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f", "aa9b383c260e1d05fbbf6b30a02914555e20c725", "100644"), - }, - }, - { - name: "commits indexable table", - node: newCommitsTable(), - colNames: []string{"commit_hash", "committer_name"}, - expectedKVs: []expectedKV{ - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "Máximo Cuadros Ortiz"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "Máximo Cuadros Ortiz"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "Máximo Cuadros Ortiz"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a", "Máximo Cuadros Ortiz"}, - idxValue: &indexValue{paths[0], "af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - }, - { - key: []interface{}{"1669dce138d9b841a518c64b10914d88f5e488ea", "Máximo Cuadros Ortiz"}, - idxValue: &indexValue{paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea"}, - }, - { - key: 
[]interface{}{"a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69", "Máximo Cuadros"}, - idxValue: &indexValue{paths[0], "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69"}, - }, - { - key: []interface{}{"b8e471f58bcbca63b07bda20e428190409c2db47", "Daniel Ripolles"}, - idxValue: &indexValue{paths[0], "b8e471f58bcbca63b07bda20e428190409c2db47"}, - }, - { - key: []interface{}{"35e85108805c84807bc66a02d91535e1e24b38b9", "Máximo Cuadros Ortiz"}, - idxValue: &indexValue{paths[0], "35e85108805c84807bc66a02d91535e1e24b38b9"}, - }, - { - key: []interface{}{"b029517f6300c2da0f4b651b8642506cd6aaf45d", "Máximo Cuadros"}, - idxValue: &indexValue{paths[0], "b029517f6300c2da0f4b651b8642506cd6aaf45d"}, - }, - { - key: []interface{}{"47770b26e71b0f69c0ecd494b1066f8d1da4fc03", "Máximo Cuadros"}, - idxValue: &indexValue{paths[1], "47770b26e71b0f69c0ecd494b1066f8d1da4fc03"}, - }, - { - key: []interface{}{"b685400c1f9316f350965a5993d350bc746b0bf4", "Máximo Cuadros"}, - idxValue: &indexValue{paths[1], "b685400c1f9316f350965a5993d350bc746b0bf4"}, - }, - { - key: []interface{}{"c7431b5bc9d45fb64a87d4a895ce3d1073c898d2", "Máximo Cuadros"}, - idxValue: &indexValue{paths[1], "c7431b5bc9d45fb64a87d4a895ce3d1073c898d2"}, - }, - { - key: []interface{}{"f52d9c374365fec7f9962f11ebf517588b9e236e", "Máximo Cuadros"}, - idxValue: &indexValue{paths[1], "f52d9c374365fec7f9962f11ebf517588b9e236e"}, - }, - }, - columns: []sql.Expression{ - expression.NewGetFieldWithTable(1, sql.Text, CommitsTableName, "commit_hash", false), - expression.NewGetFieldWithTable(10, sql.JSON, CommitsTableName, "commit_parents", false), - }, - filters: []sql.Expression{ - expression.NewEquals( - expression.NewGetFieldWithTable(5, sql.Text, CommitsTableName, "committer_name", false), - expression.NewLiteral("Máximo Cuadros", sql.Text), - ), - }, - expectedRows: []sql.Row{ - sql.NewRow( - paths[0], - "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69", - "Máximo Cuadros", "mcuadros@gmail.com", - time.Date(2015, 3, 31, 13, 47, 14, 0, time.FixedZone("", 
int((2*time.Hour).Seconds()))), - "Máximo Cuadros", "mcuadros@gmail.com", - time.Date(2015, 3, 31, 13, 47, 14, 0, time.FixedZone("", int((2*time.Hour).Seconds()))), - "Merge pull request #1 from dripolles/feature\n\nCreating changelog", - "c2d30fa8ef288618f65f6eed6e168e0d514886f4", - []interface{}{"b029517f6300c2da0f4b651b8642506cd6aaf45d", "b8e471f58bcbca63b07bda20e428190409c2db47"}, - ), - sql.NewRow( - paths[0], - "b029517f6300c2da0f4b651b8642506cd6aaf45d", - "Máximo Cuadros", "mcuadros@gmail.com", - time.Date(2015, 3, 31, 13, 42, 21, 0, time.FixedZone("", int((2*time.Hour).Seconds()))), - "Máximo Cuadros", "mcuadros@gmail.com", - time.Date(2015, 3, 31, 13, 42, 21, 0, time.FixedZone("", int((2*time.Hour).Seconds()))), - "Initial commit\n", - "aa9b383c260e1d05fbbf6b30a02914555e20c725", - []interface{}{}, - ), - }, - }, - { - name: "references indexable table", - node: newReferencesTable(), - colNames: []string{"commit_hash"}, - expectedKVs: []expectedKV{ - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - idxValue: &indexValue{paths[0], "HEAD"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - idxValue: &indexValue{paths[0], "refs/heads/master"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/branch"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/master"}, - }, - { - key: []interface{}{"b685400c1f9316f350965a5993d350bc746b0bf4"}, - idxValue: &indexValue{paths[1], "HEAD"}, - }, - { - key: []interface{}{"b685400c1f9316f350965a5993d350bc746b0bf4"}, - idxValue: &indexValue{paths[1], "refs/heads/master"}, - }, - { - key: []interface{}{"b685400c1f9316f350965a5993d350bc746b0bf4"}, - idxValue: &indexValue{paths[1], "refs/remotes/origin/master"}, - }, - }, - columns: []sql.Expression{ - expression.NewGetFieldWithTable(1, sql.Text, ReferencesTableName, 
"ref_name", false), - expression.NewGetFieldWithTable(2, sql.Text, ReferencesTableName, "commit_hash", false), - }, - filters: []sql.Expression{ - expression.NewEquals( - expression.NewGetFieldWithTable(1, sql.Text, ReferencesTableName, "ref_name", false), - expression.NewLiteral("refs/heads/master", sql.Text), - ), - }, - expectedRows: []sql.Row{ - sql.NewRow(paths[0], "refs/heads/master", "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"), - }, - }, - { - name: "remotes indexable table", - node: newRemotesTable(), - colNames: []string{"remote_push_refspec", "remote_fetch_refspec"}, - expectedKVs: []expectedKV{ - { - key: []interface{}{"+refs/heads/*:refs/remotes/origin/*", "+refs/heads/*:refs/remotes/origin/*"}, - idxValue: &indexValue{paths[0], "origin"}, - }, - { - key: []interface{}{"+refs/heads/*:refs/remotes/origin/*", "+refs/heads/*:refs/remotes/origin/*"}, - idxValue: &indexValue{paths[1], "origin"}, - }, - }, - columns: []sql.Expression{ - expression.NewGetFieldWithTable(1, sql.Text, RemotesTableName, "remote_name", false), - }, - filters: []sql.Expression{ - expression.NewEquals( - expression.NewGetFieldWithTable(4, sql.Text, RemotesTableName, "remote_push_refspec", false), - expression.NewLiteral("+refs/heads/*:refs/remotes/origin/*", sql.Text), - ), - }, - expectedRows: []sql.Row{ - sql.NewRow( - paths[0], - "origin", - "git@github.com:git-fixtures/basic.git", - "git@github.com:git-fixtures/basic.git", - "+refs/heads/*:refs/remotes/origin/*", - "+refs/heads/*:refs/remotes/origin/*", - ), - }, - }, - { - name: "repositories indexable table", - node: newRepositoriesTable(), - colNames: []string{"repository_id"}, - expectedKVs: expectedRepos, - columns: []sql.Expression{ - expression.NewGetFieldWithTable(0, sql.Text, ReferencesTableName, "repository_id", false), - }, - filters: []sql.Expression{ - expression.NewEquals( - expression.NewGetFieldWithTable(0, sql.Text, ReferencesTableName, "repository_id", false), - 
expression.NewLiteral(paths[0], sql.Text), - ), - }, - expectedRows: []sql.Row{ - sql.NewRow(paths[0]), - }, - }, - { - name: "ref_commits indexable table", - node: newRefCommitsTable(), - colNames: []string{"commit_hash"}, - expectedKVs: []expectedKV{ - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - idxValue: &indexValue{paths[0], "HEAD"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294"}, - idxValue: &indexValue{paths[0], "HEAD"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - idxValue: &indexValue{paths[0], "HEAD"}, - }, - { - key: []interface{}{"1669dce138d9b841a518c64b10914d88f5e488ea"}, - idxValue: &indexValue{paths[0], "HEAD"}, - }, - { - key: []interface{}{"35e85108805c84807bc66a02d91535e1e24b38b9"}, - idxValue: &indexValue{paths[0], "HEAD"}, - }, - { - key: []interface{}{"b029517f6300c2da0f4b651b8642506cd6aaf45d"}, - idxValue: &indexValue{paths[0], "HEAD"}, - }, - { - key: []interface{}{"a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69"}, - idxValue: &indexValue{paths[0], "HEAD"}, - }, - { - key: []interface{}{"b8e471f58bcbca63b07bda20e428190409c2db47"}, - idxValue: &indexValue{paths[0], "HEAD"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - idxValue: &indexValue{paths[0], "refs/heads/master"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294"}, - idxValue: &indexValue{paths[0], "refs/heads/master"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - idxValue: &indexValue{paths[0], "refs/heads/master"}, - }, - { - key: []interface{}{"1669dce138d9b841a518c64b10914d88f5e488ea"}, - idxValue: &indexValue{paths[0], "refs/heads/master"}, - }, - { - key: []interface{}{"35e85108805c84807bc66a02d91535e1e24b38b9"}, - idxValue: &indexValue{paths[0], "refs/heads/master"}, - }, - { - key: []interface{}{"b029517f6300c2da0f4b651b8642506cd6aaf45d"}, - idxValue: &indexValue{paths[0], "refs/heads/master"}, - }, - { - key: 
[]interface{}{"a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69"}, - idxValue: &indexValue{paths[0], "refs/heads/master"}, - }, - { - key: []interface{}{"b8e471f58bcbca63b07bda20e428190409c2db47"}, - idxValue: &indexValue{paths[0], "refs/heads/master"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/branch"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/branch"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/branch"}, - }, - { - key: []interface{}{"1669dce138d9b841a518c64b10914d88f5e488ea"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/branch"}, - }, - { - key: []interface{}{"35e85108805c84807bc66a02d91535e1e24b38b9"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/branch"}, - }, - { - key: []interface{}{"b029517f6300c2da0f4b651b8642506cd6aaf45d"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/branch"}, - }, - { - key: []interface{}{"a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/branch"}, - }, - { - key: []interface{}{"b8e471f58bcbca63b07bda20e428190409c2db47"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/branch"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/master"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/master"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/master"}, - }, - { - key: []interface{}{"1669dce138d9b841a518c64b10914d88f5e488ea"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/master"}, - }, - { - key: 
[]interface{}{"35e85108805c84807bc66a02d91535e1e24b38b9"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/master"}, - }, - { - key: []interface{}{"b029517f6300c2da0f4b651b8642506cd6aaf45d"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/master"}, - }, - { - key: []interface{}{"a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/master"}, - }, - { - key: []interface{}{"b8e471f58bcbca63b07bda20e428190409c2db47"}, - idxValue: &indexValue{paths[0], "refs/remotes/origin/master"}, - }, - { - key: []interface{}{"b685400c1f9316f350965a5993d350bc746b0bf4"}, - idxValue: &indexValue{paths[1], "HEAD"}, - }, - { - key: []interface{}{"f52d9c374365fec7f9962f11ebf517588b9e236e"}, - idxValue: &indexValue{paths[1], "HEAD"}, - }, - { - key: []interface{}{"47770b26e71b0f69c0ecd494b1066f8d1da4fc03"}, - idxValue: &indexValue{paths[1], "HEAD"}, - }, - { - key: []interface{}{"c7431b5bc9d45fb64a87d4a895ce3d1073c898d2"}, - idxValue: &indexValue{paths[1], "HEAD"}, - }, - { - key: []interface{}{"b685400c1f9316f350965a5993d350bc746b0bf4"}, - idxValue: &indexValue{paths[1], "refs/heads/master"}, - }, - { - key: []interface{}{"f52d9c374365fec7f9962f11ebf517588b9e236e"}, - idxValue: &indexValue{paths[1], "refs/heads/master"}, - }, - { - key: []interface{}{"47770b26e71b0f69c0ecd494b1066f8d1da4fc03"}, - idxValue: &indexValue{paths[1], "refs/heads/master"}, - }, - { - key: []interface{}{"c7431b5bc9d45fb64a87d4a895ce3d1073c898d2"}, - idxValue: &indexValue{paths[1], "refs/heads/master"}, - }, - { - key: []interface{}{"b685400c1f9316f350965a5993d350bc746b0bf4"}, - idxValue: &indexValue{paths[1], "refs/remotes/origin/master"}, - }, - { - key: []interface{}{"f52d9c374365fec7f9962f11ebf517588b9e236e"}, - idxValue: &indexValue{paths[1], "refs/remotes/origin/master"}, - }, - { - key: []interface{}{"47770b26e71b0f69c0ecd494b1066f8d1da4fc03"}, - idxValue: &indexValue{paths[1], "refs/remotes/origin/master"}, - }, - { - key: 
[]interface{}{"c7431b5bc9d45fb64a87d4a895ce3d1073c898d2"}, - idxValue: &indexValue{paths[1], "refs/remotes/origin/master"}, - }, - }, - columns: []sql.Expression{ - expression.NewGetFieldWithTable(1, sql.Text, RefCommitsTableName, "commit_hash", false), - expression.NewGetFieldWithTable(2, sql.Text, RefCommitsTableName, "ref_name", false), - expression.NewGetFieldWithTable(3, sql.Int64, RefCommitsTableName, "index", false), - }, - filters: []sql.Expression{ - expression.NewEquals( - expression.NewGetFieldWithTable(2, sql.Text, RefCommitsTableName, "ref_name", false), - expression.NewLiteral("refs/heads/master", sql.Text), - ), - }, - expectedRows: []sql.Row{ - sql.NewRow(paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "refs/heads/master", int64(0)), - sql.NewRow(paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294", "refs/heads/master", int64(1)), - sql.NewRow(paths[0], "af2d6a6954d532f8ffb47615169c8fdf9d383a1a", "refs/heads/master", int64(2)), - sql.NewRow(paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea", "refs/heads/master", int64(3)), - sql.NewRow(paths[0], "35e85108805c84807bc66a02d91535e1e24b38b9", "refs/heads/master", int64(4)), - sql.NewRow(paths[0], "b029517f6300c2da0f4b651b8642506cd6aaf45d", "refs/heads/master", int64(5)), - sql.NewRow(paths[0], "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69", "refs/heads/master", int64(4)), - sql.NewRow(paths[0], "b8e471f58bcbca63b07bda20e428190409c2db47", "refs/heads/master", int64(5)), - }, - }, - { - name: "commit_trees indexable table", - node: newCommitTreesTable(), - colNames: []string{"commit_hash", "tree_hash"}, - expectedKVs: []expectedKV{ - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "dbd3641b371024f44d0e469a9c8f5457b0660de1"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "a39771a7651f97faf5c72e08224d857fc35133db"}, - idxValue: &indexValue{paths[0], 
"e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "5a877e6a906a2743ad6e45d99c1793642aaf8eda"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "586af567d0bb5e771e49bdd9434f5e0fb76d25fa"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "a8d315b2b1c615d43042c3a62402b8a54288cf5c"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "a39771a7651f97faf5c72e08224d857fc35133db"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "5a877e6a906a2743ad6e45d99c1793642aaf8eda"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "586af567d0bb5e771e49bdd9434f5e0fb76d25fa"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "cf4aa3b38974fb7d81f367c0830f7d78d65ab86b"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "fb72698cab7617ac416264415f13224dfd7a165e"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "a39771a7651f97faf5c72e08224d857fc35133db"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "5a877e6a906a2743ad6e45d99c1793642aaf8eda"}, - idxValue: &indexValue{paths[0], 
"918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "586af567d0bb5e771e49bdd9434f5e0fb76d25fa"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a", "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd"}, - idxValue: &indexValue{paths[0], "af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a", "5a877e6a906a2743ad6e45d99c1793642aaf8eda"}, - idxValue: &indexValue{paths[0], "af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - }, - { - key: []interface{}{"1669dce138d9b841a518c64b10914d88f5e488ea", "eba74343e2f15d62adedfd8c883ee0262b5c8021"}, - idxValue: &indexValue{paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea"}, - }, - { - key: []interface{}{"a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69", "c2d30fa8ef288618f65f6eed6e168e0d514886f4"}, - idxValue: &indexValue{paths[0], "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69"}, - }, - { - key: []interface{}{"b8e471f58bcbca63b07bda20e428190409c2db47", "c2d30fa8ef288618f65f6eed6e168e0d514886f4"}, - idxValue: &indexValue{paths[0], "b8e471f58bcbca63b07bda20e428190409c2db47"}, - }, - { - key: []interface{}{"35e85108805c84807bc66a02d91535e1e24b38b9", "8dcef98b1d52143e1e2dbc458ffe38f925786bf2"}, - idxValue: &indexValue{paths[0], "35e85108805c84807bc66a02d91535e1e24b38b9"}, - }, - { - key: []interface{}{"b029517f6300c2da0f4b651b8642506cd6aaf45d", "aa9b383c260e1d05fbbf6b30a02914555e20c725"}, - idxValue: &indexValue{paths[0], "b029517f6300c2da0f4b651b8642506cd6aaf45d"}, - }, - { - key: []interface{}{"47770b26e71b0f69c0ecd494b1066f8d1da4fc03", "8ac3015df16d47179e903d0379b52267359c1499"}, - idxValue: &indexValue{paths[1], "47770b26e71b0f69c0ecd494b1066f8d1da4fc03"}, - }, - { - key: []interface{}{"b685400c1f9316f350965a5993d350bc746b0bf4", "3bf5d30ad4f23cf517676fee232e3bcb8537c1d0"}, - idxValue: &indexValue{paths[1], 
"b685400c1f9316f350965a5993d350bc746b0bf4"}, - }, - { - key: []interface{}{"c7431b5bc9d45fb64a87d4a895ce3d1073c898d2", "efe525d0f1372593df812e3f6faa4e05bb91f498"}, - idxValue: &indexValue{paths[1], "c7431b5bc9d45fb64a87d4a895ce3d1073c898d2"}, - }, - { - key: []interface{}{"f52d9c374365fec7f9962f11ebf517588b9e236e", "c4db5d7fc75aa3bef9004122d0cf2a2679935ef8"}, - idxValue: &indexValue{paths[1], "f52d9c374365fec7f9962f11ebf517588b9e236e"}, - }, - }, - columns: []sql.Expression{ - expression.NewGetFieldWithTable(1, sql.Text, CommitTreesTableName, "commit_hash", false), - expression.NewGetFieldWithTable(2, sql.Text, CommitTreesTableName, "tree_hash", false), - }, - filters: []sql.Expression{ - expression.NewEquals( - expression.NewGetFieldWithTable(1, sql.Text, CommitTreesTableName, "commit_hash", false), - expression.NewLiteral("918c48b83bd081e863dbe1b80f8998f058cd8294", sql.Text), - ), - }, - expectedRows: []sql.Row{ - sql.NewRow(paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294", "fb72698cab7617ac416264415f13224dfd7a165e"), - sql.NewRow(paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294", "a39771a7651f97faf5c72e08224d857fc35133db"), - sql.NewRow(paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294", "5a877e6a906a2743ad6e45d99c1793642aaf8eda"), - sql.NewRow(paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294", "586af567d0bb5e771e49bdd9434f5e0fb76d25fa"), - }, - }, - { - name: "commit_blobs indexable table", - node: newCommitBlobsTable(), - colNames: []string{"commit_hash", "blob_hash"}, - expectedKVs: []expectedKV{ - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: 
[]interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "7e59600739c96546163833214c36459e324bad0a"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "880cd14280f4b9b6ed3986d6671f907d7cc2a198"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "c8f1d8c61f9da76f4cb49fd86322b6e685dba956"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"e8d3ffab552895c19b9fcf7aa264d277cde33881", "9a48f23120e880dfbe41f7c9b7b708e9ee62a492"}, - idxValue: &indexValue{paths[0], "e8d3ffab552895c19b9fcf7aa264d277cde33881"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: 
[]interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "880cd14280f4b9b6ed3986d6671f907d7cc2a198"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "c8f1d8c61f9da76f4cb49fd86322b6e685dba956"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "9a48f23120e880dfbe41f7c9b7b708e9ee62a492"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"6ecf0ef2c2dffb796033e5a02219af86ec6584e5", "9dea2395f5403188298c1dabe8bdafe562c491e3"}, - idxValue: &indexValue{paths[0], "6ecf0ef2c2dffb796033e5a02219af86ec6584e5"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: 
[]interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "880cd14280f4b9b6ed3986d6671f907d7cc2a198"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "c8f1d8c61f9da76f4cb49fd86322b6e685dba956"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"918c48b83bd081e863dbe1b80f8998f058cd8294", "9a48f23120e880dfbe41f7c9b7b708e9ee62a492"}, - idxValue: &indexValue{paths[0], "918c48b83bd081e863dbe1b80f8998f058cd8294"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], "af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a", "49c6bb89b17060d7b4deacb7b338fcc6ea2352a9"}, - idxValue: &indexValue{paths[0], "af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - }, - { - key: []interface{}{"af2d6a6954d532f8ffb47615169c8fdf9d383a1a", "c8f1d8c61f9da76f4cb49fd86322b6e685dba956"}, - idxValue: &indexValue{paths[0], "af2d6a6954d532f8ffb47615169c8fdf9d383a1a"}, - }, - { - key: 
[]interface{}{"1669dce138d9b841a518c64b10914d88f5e488ea", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea"}, - }, - { - key: []interface{}{"1669dce138d9b841a518c64b10914d88f5e488ea", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea"}, - }, - { - key: []interface{}{"1669dce138d9b841a518c64b10914d88f5e488ea", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea"}, - }, - { - key: []interface{}{"1669dce138d9b841a518c64b10914d88f5e488ea", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea"}, - }, - { - key: []interface{}{"a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69"}, - }, - { - key: []interface{}{"a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69"}, - }, - { - key: []interface{}{"a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "a5b8b09e2f8fcb0bb99d3ccb0958157b40890d69"}, - }, - { - key: []interface{}{"b8e471f58bcbca63b07bda20e428190409c2db47", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "b8e471f58bcbca63b07bda20e428190409c2db47"}, - }, - { - key: []interface{}{"b8e471f58bcbca63b07bda20e428190409c2db47", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"}, - idxValue: &indexValue{paths[0], "b8e471f58bcbca63b07bda20e428190409c2db47"}, - }, - { - key: []interface{}{"b8e471f58bcbca63b07bda20e428190409c2db47", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "b8e471f58bcbca63b07bda20e428190409c2db47"}, - }, - { - key: 
[]interface{}{"35e85108805c84807bc66a02d91535e1e24b38b9", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "35e85108805c84807bc66a02d91535e1e24b38b9"}, - }, - { - key: []interface{}{"35e85108805c84807bc66a02d91535e1e24b38b9", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "35e85108805c84807bc66a02d91535e1e24b38b9"}, - }, - { - key: []interface{}{"35e85108805c84807bc66a02d91535e1e24b38b9", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"}, - idxValue: &indexValue{paths[0], "35e85108805c84807bc66a02d91535e1e24b38b9"}, - }, - { - key: []interface{}{"b029517f6300c2da0f4b651b8642506cd6aaf45d", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"}, - idxValue: &indexValue{paths[0], "b029517f6300c2da0f4b651b8642506cd6aaf45d"}, - }, - { - key: []interface{}{"b029517f6300c2da0f4b651b8642506cd6aaf45d", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"}, - idxValue: &indexValue{paths[0], "b029517f6300c2da0f4b651b8642506cd6aaf45d"}, - }, - { - key: []interface{}{"47770b26e71b0f69c0ecd494b1066f8d1da4fc03", "278871477afb195f908155a65b5c651f1cfd02d3"}, - idxValue: &indexValue{paths[1], "47770b26e71b0f69c0ecd494b1066f8d1da4fc03"}, - }, - { - key: []interface{}{"b685400c1f9316f350965a5993d350bc746b0bf4", "278871477afb195f908155a65b5c651f1cfd02d3"}, - idxValue: &indexValue{paths[1], "b685400c1f9316f350965a5993d350bc746b0bf4"}, - }, - { - key: []interface{}{"b685400c1f9316f350965a5993d350bc746b0bf4", "b4f017e8c030d24aef161569b9ade3e55931ba01"}, - idxValue: &indexValue{paths[1], "b685400c1f9316f350965a5993d350bc746b0bf4"}, - }, - { - key: []interface{}{"c7431b5bc9d45fb64a87d4a895ce3d1073c898d2", "97b013ecd2cc7f572960509f659d8068798d59ca"}, - idxValue: &indexValue{paths[1], "c7431b5bc9d45fb64a87d4a895ce3d1073c898d2"}, - }, - { - key: []interface{}{"f52d9c374365fec7f9962f11ebf517588b9e236e", "278871477afb195f908155a65b5c651f1cfd02d3"}, - idxValue: &indexValue{paths[1], "f52d9c374365fec7f9962f11ebf517588b9e236e"}, - }, - }, - columns: 
[]sql.Expression{ - expression.NewGetFieldWithTable(1, sql.Text, CommitBlobsTableName, "commit_hash", false), - expression.NewGetFieldWithTable(2, sql.Text, CommitBlobsTableName, "blob_hash", false), - }, - filters: []sql.Expression{ - expression.NewEquals( - expression.NewGetFieldWithTable(1, sql.Text, CommitBlobsTableName, "commit_hash", false), - expression.NewLiteral("1669dce138d9b841a518c64b10914d88f5e488ea", sql.Text), - ), - }, - expectedRows: []sql.Row{ - sql.NewRow(paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea", "32858aad3c383ed1ff0a0f9bdf231d54a00c9e88"), - sql.NewRow(paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea", "d3ff53e0564a9f87d8e84b6e28e5060e517008aa"), - sql.NewRow(paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea", "c192bd6a24ea1ab01d78686e417c8bdc7c3d197f"), - sql.NewRow(paths[0], "1669dce138d9b841a518c64b10914d88f5e488ea", "d5c0f4ab811897cadf03aec358ae60d21f91c50d"), - }, - }, - } + v := i.values[i.pos] + i.pos++ + return v, nil +} - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - require := require.New(t) +func (i *indexValueIter) Close() error { return nil } - idxValues := testIndexKeyValue(t, ctx, test) - require.True(len(idxValues) > 0) +type keyValue struct { + key []byte + values []interface{} +} - // iter only on values from one path to check that - // just that repository is used to produce rows. 
- filteredValues := []*indexValue{} - for _, value := range idxValues { - if value.ID == paths[0] { - filteredValues = append(filteredValues, value) - } - } +func assertIndexKeyValueIter(t *testing.T, iter sql.IndexKeyValueIter, expected []keyValue) { + t.Helper() + var result []keyValue + for { + values, key, err := iter.Next() + if err == io.EOF { + break + } + require.NoError(t, err) + + result = append(result, keyValue{key, values}) + } - idxValIter, err := newTestIndexValueIter(filteredValues) - require.NoError(err) - require.NotNil(idxValIter) + require.Equal(t, len(expected), len(result), "size does not match") - testWithProjectFiltersAndIndex(t, ctx, test, idxValIter) - }) + for i, r := range result { + require.Equal(t, expected[i], r, "at position %d", i) } } -func testWithProjectFiltersAndIndex(t *testing.T, ctx *sql.Context, test *indexTest, idxValIter sql.IndexValueIter) { - require := require.New(t) - rowIter, err := test.node.WithProjectFiltersAndIndex( - ctx, - test.columns, - test.filters, - idxValIter, - ) - require.NoError(err) +func tableIndexValues(t *testing.T, table Indexable, ctx *sql.Context) sql.IndexValueIter { + kvIter, err := table.IndexKeyValueIter(ctx, nil) + require.NoError(t, err) - for _, expected := range test.expectedRows { - row, err := rowIter.Next() - require.NoError(err) - require.Exactly(expected, row) + var values [][]byte + for { + _, val, err := kvIter.Next() + if err == io.EOF { + break + } + require.NoError(t, err) + values = append(values, val) } - _, err = rowIter.Next() - require.EqualError(err, io.EOF.Error()) + return newIndexValueIter(values...) 
} -func testIndexKeyValue(t *testing.T, ctx *sql.Context, test *indexTest) []*indexValue { +func testTableIndex( + t *testing.T, + table Indexable, + filters []sql.Expression, +) { + t.Helper() require := require.New(t) - kvIter, err := test.node.IndexKeyValueIter(ctx, test.colNames) - require.NoError(err) - - idxValues := []*indexValue{} - for _, expected := range test.expectedKVs { - k, v, err := kvIter.Next() - require.NoError(err) - - require.Len(k, len(test.colNames)) - - idxValue, err := unmarshalIndexValue(v) - require.NoError(err) - - idxValues = append(idxValues, idxValue) - - require.Exactly(expected.key, k) - require.Equal(expected.idxValue, idxValue) - } + ctx, _, cleanup := setup(t) + defer cleanup() - _, _, err = kvIter.Next() - require.EqualError(err, io.EOF.Error()) + i, err := table.WithProjectAndFilters(ctx, nil, nil) + require.NoError(err) + expected, err := sql.RowIterToRows(i) + require.NoError(err) - return idxValues -} + index := tableIndexValues(t, table, ctx) + iter, err := table.WithProjectFiltersAndIndex(ctx, nil, nil, index) + require.NoError(err) -type testIndexValueIter struct { - values [][]byte - pos int -} + rows, err := sql.RowIterToRows(iter) + require.NoError(err) -var _ sql.IndexValueIter = (*testIndexValueIter)(nil) + require.ElementsMatch(expected, rows) -func newTestIndexValueIter(idxValues []*indexValue) (*testIndexValueIter, error) { - values := [][]byte{} - for _, v := range idxValues { - raw, err := marshalIndexValue(v) - if err != nil { - return nil, err - } - - values = append(values, raw) - } + iter, err = table.WithProjectAndFilters(ctx, nil, filters) + require.NoError(err) - return &testIndexValueIter{values: values}, nil -} + expected, err = sql.RowIterToRows(iter) + require.NoError(err) -func (i *testIndexValueIter) Next() ([]byte, error) { - if i.pos >= len(i.values) { - return nil, io.EOF - } + index = tableIndexValues(t, table, ctx) + iter, err = table.WithProjectFiltersAndIndex(ctx, nil, filters, index) + 
require.NoError(err) - defer func() { i.pos++ }() + rows, err = sql.RowIterToRows(iter) + require.NoError(err) - return i.values[i.pos], nil + require.ElementsMatch(expected, rows) } - -func (i *testIndexValueIter) Close() error { return nil } diff --git a/internal/rule/squashjoins.go b/internal/rule/squashjoins.go index cc7384038..7fedfb2bb 100644 --- a/internal/rule/squashjoins.go +++ b/internal/rule/squashjoins.go @@ -652,7 +652,7 @@ func isJoinLeafSquashable(node sql.Node) bool { plan.Inspect(node, func(node sql.Node) bool { switch node := node.(type) { case *plan.PushdownProjectionAndFiltersTable: - _, ok := node.PushdownProjectionAndFiltersTable.(gitbase.Table) + _, ok := node.PushdownProjectionAndFiltersTable.(gitbase.Squashable) if !ok { hasUnsquashableNodes = true return false diff --git a/packfiles.go b/packfiles.go new file mode 100644 index 000000000..34dcd38d4 --- /dev/null +++ b/packfiles.go @@ -0,0 +1,390 @@ +package gitbase + +import ( + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + + errors "gopkg.in/src-d/go-errors.v1" + "gopkg.in/src-d/go-git.v4/plumbing/object" + "gopkg.in/src-d/go-git.v4/plumbing/storer" + "gopkg.in/src-d/go-git.v4/storage/filesystem" + "gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit" + + "gopkg.in/src-d/go-billy-siva.v4" + billy "gopkg.in/src-d/go-billy.v4" + "gopkg.in/src-d/go-billy.v4/osfs" + "gopkg.in/src-d/go-git.v4/plumbing" + "gopkg.in/src-d/go-git.v4/plumbing/format/idxfile" + "gopkg.in/src-d/go-git.v4/plumbing/format/packfile" +) + +type packRepository struct { + packs map[plumbing.Hash]packfile.Index +} + +func repositoryPackfiles(path string, kind repoKind) (*dotgit.DotGit, []plumbing.Hash, error) { + fs, err := repoFilesystem(path, kind) + if err != nil { + return nil, nil, err + } + + fs, err = findDotGit(fs) + if err != nil { + return nil, nil, err + } + + dot := dotgit.New(fs) + packfiles, err := dot.ObjectPacks() + if err != nil { + return nil, nil, err + } + + return dot, packfiles, nil +} + +type 
packfileIndex struct { + packfile plumbing.Hash + idx *packfile.Index +} + +type repositoryIndex []*packfileIndex + +func newRepositoryIndex(path string, kind repoKind) (*repositoryIndex, error) { + dot, packfiles, err := repositoryPackfiles(path, kind) + if err != nil { + return nil, err + } + + var result repositoryIndex + for _, p := range packfiles { + idx, err := openPackfileIndex(dot, p) + if err != nil { + return nil, err + } + + result = append(result, &packfileIndex{p, idx}) + } + + return &result, nil +} + +func openPackfileIndex( + dotGit *dotgit.DotGit, + hash plumbing.Hash, +) (*packfile.Index, error) { + f, err := dotGit.ObjectPackIdx(hash) + if err != nil { + return nil, err + } + defer f.Close() + + idx := idxfile.NewIdxfile() + if err := idxfile.NewDecoder(f).Decode(idx); err != nil { + return nil, err + } + + return packfile.NewIndexFromIdxFile(idx), nil +} + +var errHashNotInIndex = errors.NewKind("object hash %s is not in repository") + +func (i repositoryIndex) find(hash plumbing.Hash) (int64, plumbing.Hash, error) { + for _, idx := range i { + if entry, ok := idx.idx.LookupHash(hash); ok { + return int64(entry.Offset), idx.packfile, nil + } + } + return 0, plumbing.NewHash(""), errHashNotInIndex.New(hash) +} + +func repoFilesystem(path string, kind repoKind) (billy.Filesystem, error) { + if kind == sivaRepo { + localfs := osfs.New(filepath.Dir(path)) + + tmpDir, err := ioutil.TempDir(os.TempDir(), "gitbase-siva") + if err != nil { + return nil, err + } + + tmpfs := osfs.New(tmpDir) + + return sivafs.NewFilesystem(localfs, filepath.Base(path), tmpfs) + } + + return osfs.New(path), nil +} + +func findDotGit(fs billy.Filesystem) (billy.Filesystem, error) { + fi, err := fs.Stat(".git") + if err != nil && !os.IsNotExist(err) { + return nil, err + } + + if fi != nil && fi.IsDir() { + return fs.Chroot(".git") + } + + return fs, nil +} + +type objectIter struct { + packs *packIter + packObjects *packObjectIter +} + +func newObjectIter( + pool 
*RepositoryPool, + typ plumbing.ObjectType, +) *objectIter { + return &objectIter{packs: newPackIter(pool, typ)} +} + +type encodedObject struct { + object.Object + RepositoryID string + Packfile plumbing.Hash + Offset uint64 +} + +func (i *objectIter) Next() (*encodedObject, error) { + for { + if i.packObjects == nil { + var err error + i.packObjects, err = i.packs.Next() + if err != nil { + return nil, err + } + } + + obj, offset, err := i.packObjects.Next() + if err != nil { + if err == io.EOF { + if err := i.packObjects.Close(); err != nil { + return nil, err + } + + i.packObjects = nil + continue + } + return nil, err + } + + return &encodedObject{ + Object: obj, + Offset: offset, + RepositoryID: i.packs.repo.path, + Packfile: i.packs.packfiles[i.packs.packpos-1], + }, nil + } +} + +func (i *objectIter) Close() error { + if i.packObjects != nil { + return i.packObjects.Close() + } + return nil +} + +type packIter struct { + typ plumbing.ObjectType + pool *RepositoryPool + pos int + + repo *repository + + storage storer.EncodedObjectStorer + dotGit *dotgit.DotGit + packfiles []plumbing.Hash + packpos int +} + +func newPackIter(pool *RepositoryPool, typ plumbing.ObjectType) *packIter { + return &packIter{pool: pool, typ: typ} +} + +func (i *packIter) Next() (*packObjectIter, error) { + for { + if i.repo == nil { + if i.pos >= len(i.pool.repositories) { + return nil, io.EOF + } + + repo := i.pool.repositories[i.pool.idOrder[i.pos]] + i.repo = &repo + i.pos++ + } + + if len(i.packfiles) == 0 { + var err error + i.dotGit, i.packfiles, err = repositoryPackfiles(i.repo.path, i.repo.kind) + if err != nil { + return nil, err + } + i.packpos = 0 + + storage, err := filesystem.NewObjectStorage(i.dotGit) + if err != nil { + return nil, err + } + i.storage = &storage + } + + if i.packpos >= len(i.packfiles) { + i.packfiles = nil + i.repo = nil + continue + } + + pf := i.packfiles[i.packpos] + i.packpos++ + + return newPackObjectIter(i.repo.path, i.dotGit, pf, i.storage, 
i.typ) + } +} + +type packObjectIter struct { + hash plumbing.Hash + close func() error + idx *idxfile.Idxfile + dec *packfile.Decoder + pos int + typ plumbing.ObjectType + storage storer.EncodedObjectStorer +} + +func newPackObjectIter( + path string, + dotGit *dotgit.DotGit, + hash plumbing.Hash, + storage storer.EncodedObjectStorer, + typ plumbing.ObjectType, +) (*packObjectIter, error) { + packf, err := dotGit.ObjectPack(hash) + if err != nil { + return nil, err + } + + idxf, err := dotGit.ObjectPackIdx(hash) + if err != nil { + return nil, err + } + defer idxf.Close() + + i := idxfile.NewIdxfile() + if err := idxfile.NewDecoder(idxf).Decode(i); err != nil { + return nil, err + } + + decoder, err := packfile.NewDecoder(packfile.NewScanner(packf), storage) + if err != nil { + return nil, err + } + + return &packObjectIter{ + hash: hash, + idx: i, + dec: decoder, + typ: typ, + storage: storage, + close: func() error { return decoder.Close() }, + }, nil +} + +func (i *packObjectIter) Next() (object.Object, uint64, error) { + for { + if i.close != nil { + if err := i.close(); err != nil { + return nil, 0, err + } + } + + if i.pos >= len(i.idx.Entries) { + return nil, 0, io.EOF + } + + offset := i.idx.Entries[i.pos].Offset + i.pos++ + obj, err := i.dec.DecodeObjectAt(int64(offset)) + if err != nil { + return nil, 0, err + } + + if obj.Type() != i.typ { + continue + } + + decodedObj, err := object.DecodeObject(i.storage, obj) + if err != nil { + return nil, 0, err + } + + return decodedObj, offset, nil + } +} + +func (i *packObjectIter) Close() error { return i.close() } + +type objectDecoder struct { + repo string + packfile plumbing.Hash + decoder *packfile.Decoder + storage storer.EncodedObjectStorer +} + +func newObjectDecoder( + repo repository, + hash plumbing.Hash, +) (*objectDecoder, error) { + fs, err := repoFilesystem(repo.path, repo.kind) + if err != nil { + return nil, err + } + + fs, err = findDotGit(fs) + if err != nil { + return nil, err + } + + 
packfilePath := fs.Join("objects", "pack", fmt.Sprintf("pack-%s.pack", hash)) + packf, err := fs.Open(packfilePath) + if err != nil { + return nil, err + } + + storage, err := filesystem.NewStorage(fs) + if err != nil { + _ = packf.Close() + return nil, err + } + + decoder, err := packfile.NewDecoder(packfile.NewScanner(packf), storage) + if err != nil { + _ = packf.Close() + return nil, err + } + + return &objectDecoder{ + repo: repo.path, + packfile: hash, + decoder: decoder, + storage: storage, + }, nil +} + +func (d *objectDecoder) equals(repo string, packfile plumbing.Hash) bool { + return d.repo == repo && d.packfile == packfile +} + +func (d *objectDecoder) get(offset int64) (object.Object, error) { + encodedObj, err := d.decoder.DecodeObjectAt(offset) + if err != nil { + return nil, err + } + + return object.DecodeObject(d.storage, encodedObj) +} + +func (d *objectDecoder) Close() error { return d.decoder.Close() } diff --git a/packfiles_test.go b/packfiles_test.go new file mode 100644 index 000000000..0e8aeafa9 --- /dev/null +++ b/packfiles_test.go @@ -0,0 +1,57 @@ +package gitbase + +import ( + "fmt" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + "gopkg.in/src-d/go-git.v4/plumbing" +) + +var testSivaFilePath = filepath.Join("_testdata", "fff7062de8474d10a67d417ccea87ba6f58ca81d.siva") + +func TestRepositoryPackfiles(t *testing.T) { + require := require.New(t) + + fs, packfiles, err := repositoryPackfiles(testSivaFilePath, sivaRepo) + + fmt.Println(packfiles[0].String()) + require.NoError(err) + require.Equal([]plumbing.Hash{ + plumbing.NewHash("5d2ce6a45cb07803f9b0c8040e730f5715fc7144"), + plumbing.NewHash("433e5205f6e26099e7d34ba5e5306f69e4cef12b"), + }, packfiles) + require.NotNil(fs) +} + +func TestRepositoryIndex(t *testing.T) { + idx, err := newRepositoryIndex(testSivaFilePath, sivaRepo) + require.NoError(t, err) + + testCases := []struct { + hash string + offset int64 + packfile string + }{ + { + 
"52c853392c25d3a670446641f4b44b22770b3bbe", + 3046713, + "5d2ce6a45cb07803f9b0c8040e730f5715fc7144", + }, + { + "aa7ef7dafd292737ed493b7d74c0abfa761344f4", + 3046902, + "5d2ce6a45cb07803f9b0c8040e730f5715fc7144", + }, + } + + for _, tt := range testCases { + t.Run(tt.hash, func(t *testing.T) { + offset, packfile, err := idx.find(plumbing.NewHash(tt.hash)) + require.NoError(t, err) + require.Equal(t, tt.offset, offset) + require.Equal(t, tt.packfile, packfile.String()) + }) + } +} diff --git a/ref_commits.go b/ref_commits.go index 6593e4d91..58f0b19e2 100644 --- a/ref_commits.go +++ b/ref_commits.go @@ -1,6 +1,7 @@ package gitbase import ( + "fmt" "io" "strings" @@ -10,6 +11,8 @@ import ( "gopkg.in/src-d/go-git.v4/plumbing/object" "gopkg.in/src-d/go-git.v4/plumbing/storer" "gopkg.in/src-d/go-mysql-server.v0/sql" + "gopkg.in/src-d/go-mysql-server.v0/sql/expression" + "gopkg.in/src-d/go-mysql-server.v0/sql/plan" ) type refCommitsTable struct{} @@ -25,12 +28,12 @@ var RefCommitsSchema = sql.Schema{ var _ sql.PushdownProjectionAndFiltersTable = (*refCommitsTable)(nil) func newRefCommitsTable() Indexable { - return &indexableTable{ - PushdownTable: new(refCommitsTable), - buildIterWithSelectors: refCommitsIterBuilder, - } + return new(refCommitsTable) } +var _ Squashable = (*refCommitsTable)(nil) + +func (refCommitsTable) isSquashable() {} func (refCommitsTable) isGitbaseTable() {} func (refCommitsTable) String() string { @@ -90,6 +93,46 @@ func (t *refCommitsTable) WithProjectAndFilters( return sql.NewSpanIter(span, iter), nil } +// IndexKeyValueIter implements the sql.Indexable interface. 
+func (*refCommitsTable) IndexKeyValueIter( + ctx *sql.Context, + colNames []string, +) (sql.IndexKeyValueIter, error) { + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + iter, err := NewRowRepoIter(ctx, &refCommitsIter{ctx: ctx}) + if err != nil { + return nil, err + } + + return &rowKeyValueIter{iter, colNames, RefCommitsSchema}, nil +} + +// WithProjectFiltersAndIndex implements sql.Indexable interface. +func (*refCommitsTable) WithProjectFiltersAndIndex( + ctx *sql.Context, + columns, filters []sql.Expression, + index sql.IndexValueIter, +) (sql.RowIter, error) { + span, ctx := ctx.Span("gitbase.RefCommitsTable.WithProjectFiltersAndIndex") + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + span.Finish() + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + var iter sql.RowIter = &rowIndexIter{index} + + if len(filters) > 0 { + iter = plan.NewFilterIter(ctx, expression.JoinAnd(filters...), iter) + } + + return sql.NewSpanIter(span, iter), nil +} + func refCommitsIterBuilder(ctx *sql.Context, selectors selectors, columns []sql.Expression) (RowRepoIter, error) { repos, err := selectors.textValues("repository_id") if err != nil { @@ -105,6 +148,8 @@ func refCommitsIterBuilder(ctx *sql.Context, selectors selectors, columns []sql. 
names[i] = strings.ToLower(names[i]) } + fmt.Println("CTX from builder", ctx) + return &refCommitsIter{ ctx: ctx, refNames: names, diff --git a/ref_commits_test.go b/ref_commits_test.go index 3b1c41803..e1cad700d 100644 --- a/ref_commits_test.go +++ b/ref_commits_test.go @@ -130,3 +130,39 @@ func TestRefCommitsPushdown(t *testing.T) { }) } } + +func TestRefCommitsIndexKeyValueIter(t *testing.T) { + require := require.New(t) + ctx, _, cleanup := setup(t) + defer cleanup() + + table := new(refCommitsTable) + iter, err := table.IndexKeyValueIter(ctx, []string{"ref_name", "commit_hash"}) + require.NoError(err) + + i, err := table.RowIter(ctx) + require.NoError(err) + rows, err := sql.RowIterToRows(i) + require.NoError(err) + + var expected []keyValue + for _, row := range rows { + var kv keyValue + kv.key = assertEncodeKey(t, row) + kv.values = append(kv.values, row[2], row[1]) + expected = append(expected, kv) + } + + assertIndexKeyValueIter(t, iter, expected) +} + +func TestRefCommitsIndex(t *testing.T) { + testTableIndex( + t, + new(refCommitsTable), + []sql.Expression{expression.NewEquals( + expression.NewGetField(2, sql.Text, "ref_name", false), + expression.NewLiteral("HEAD", sql.Text), + )}, + ) +} diff --git a/references.go b/references.go index 518a3b955..c80295b48 100644 --- a/references.go +++ b/references.go @@ -5,6 +5,8 @@ import ( "github.com/sirupsen/logrus" "gopkg.in/src-d/go-mysql-server.v0/sql" + "gopkg.in/src-d/go-mysql-server.v0/sql/expression" + "gopkg.in/src-d/go-mysql-server.v0/sql/plan" "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/plumbing/storer" @@ -22,14 +24,13 @@ var RefsSchema = sql.Schema{ var _ sql.PushdownProjectionAndFiltersTable = (*referencesTable)(nil) func newReferencesTable() Indexable { - return &indexableTable{ - PushdownTable: new(referencesTable), - buildIterWithSelectors: referencesIterBuilder, - } + return new(referencesTable) } var _ Table = (*referencesTable)(nil) +var _ Squashable = 
(*referencesTable)(nil) +func (referencesTable) isSquashable() {} func (referencesTable) isGitbaseTable() {} func (r referencesTable) String() string { @@ -99,6 +100,46 @@ func (r *referencesTable) WithProjectAndFilters( return sql.NewSpanIter(span, iter), nil } +// IndexKeyValueIter implements the sql.Indexable interface. +func (*referencesTable) IndexKeyValueIter( + ctx *sql.Context, + colNames []string, +) (sql.IndexKeyValueIter, error) { + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + iter, err := NewRowRepoIter(ctx, new(referenceIter)) + if err != nil { + return nil, err + } + + return &rowKeyValueIter{iter, colNames, RefsSchema}, nil +} + +// WithProjectFiltersAndIndex implements sql.Indexable interface. +func (*referencesTable) WithProjectFiltersAndIndex( + ctx *sql.Context, + columns, filters []sql.Expression, + index sql.IndexValueIter, +) (sql.RowIter, error) { + span, ctx := ctx.Span("gitbase.ReferencesTable.WithProjectFiltersAndIndex") + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + span.Finish() + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + var iter sql.RowIter = &rowIndexIter{index} + + if len(filters) > 0 { + iter = plan.NewFilterIter(ctx, expression.JoinAnd(filters...), iter) + } + + return sql.NewSpanIter(span, iter), nil +} + func referencesIterBuilder(_ *sql.Context, selectors selectors, _ []sql.Expression) (RowRepoIter, error) { if len(selectors["commit_hash"]) == 0 && len(selectors["ref_name"]) == 0 { return new(referenceIter), nil @@ -125,7 +166,6 @@ type referenceIter struct { head *plumbing.Reference repositoryID string iter storer.ReferenceIter - lastRef string } func (i *referenceIter) NewIterator(repo *Repository) (RowRepoIter, error) { @@ -150,16 +190,11 @@ func (i *referenceIter) NewIterator(repo *Repository) (RowRepoIter, error) { }, nil } -func (i *referenceIter) Repository() string { return i.repositoryID } - -func (i *referenceIter) 
LastObject() string { return i.lastRef } - func (i *referenceIter) Next() (sql.Row, error) { for { if i.head != nil { o := i.head i.head = nil - i.lastRef = "HEAD" return sql.NewRow( i.repositoryID, "HEAD", @@ -180,7 +215,6 @@ func (i *referenceIter) Next() (sql.Row, error) { continue } - i.lastRef = o.Name().String() return referenceToRow(i.repositoryID, o), nil } } @@ -194,12 +228,11 @@ func (i *referenceIter) Close() error { } type filteredReferencesIter struct { - head *plumbing.Reference - hashes []plumbing.Hash - names []string - repoID string - iter storer.ReferenceIter - lastRef string + head *plumbing.Reference + hashes []plumbing.Hash + names []string + repoID string + iter storer.ReferenceIter } func (i *filteredReferencesIter) NewIterator(repo *Repository) (RowRepoIter, error) { @@ -226,10 +259,6 @@ func (i *filteredReferencesIter) NewIterator(repo *Repository) (RowRepoIter, err }, nil } -func (i *filteredReferencesIter) Repository() string { return i.repoID } - -func (i *filteredReferencesIter) LastObject() string { return i.lastRef } - func (i *filteredReferencesIter) Next() (sql.Row, error) { for { if i.head != nil { @@ -272,7 +301,6 @@ func (i *filteredReferencesIter) Next() (sql.Row, error) { continue } - i.lastRef = o.Name().String() return referenceToRow(i.repoID, o), nil } } diff --git a/references_test.go b/references_test.go index 86c5db8bd..21427d445 100644 --- a/references_test.go +++ b/references_test.go @@ -110,3 +110,38 @@ func TestReferencesPushdown(t *testing.T) { require.NoError(err) require.Len(rows, 0) } + +func TestReferencesIndexKeyValueIter(t *testing.T) { + require := require.New(t) + ctx, _, cleanup := setup(t) + defer cleanup() + + iter, err := new(referencesTable).IndexKeyValueIter(ctx, []string{"ref_name"}) + require.NoError(err) + + i, err := new(referencesTable).RowIter(ctx) + require.NoError(err) + rows, err := sql.RowIterToRows(i) + require.NoError(err) + + var expected []keyValue + for _, row := range rows { + var kv 
keyValue + kv.key = assertEncodeKey(t, row) + kv.values = append(kv.values, row[1]) + expected = append(expected, kv) + } + + assertIndexKeyValueIter(t, iter, expected) +} + +func TestReferencesIndex(t *testing.T) { + testTableIndex( + t, + new(referencesTable), + []sql.Expression{expression.NewEquals( + expression.NewGetField(1, sql.Text, "ref_name", false), + expression.NewLiteral("HEAD", sql.Text), + )}, + ) +} diff --git a/remotes.go b/remotes.go index 78cec58f4..ef679ee22 100644 --- a/remotes.go +++ b/remotes.go @@ -1,10 +1,14 @@ package gitbase import ( + "fmt" "io" git "gopkg.in/src-d/go-git.v4" + "gopkg.in/src-d/go-git.v4/config" "gopkg.in/src-d/go-mysql-server.v0/sql" + "gopkg.in/src-d/go-mysql-server.v0/sql/expression" + "gopkg.in/src-d/go-mysql-server.v0/sql/plan" ) type remotesTable struct{} @@ -22,14 +26,13 @@ var RemotesSchema = sql.Schema{ var _ sql.PushdownProjectionAndFiltersTable = (*remotesTable)(nil) func newRemotesTable() Indexable { - return &indexableTable{ - PushdownTable: new(remotesTable), - buildIterWithSelectors: remotesIterBuilder, - } + return new(remotesTable) } var _ Table = (*remotesTable)(nil) +var _ Squashable = (*remotesTable)(nil) +func (remotesTable) isSquashable() {} func (remotesTable) isGitbaseTable() {} func (remotesTable) Resolved() bool { @@ -99,6 +102,46 @@ func (r *remotesTable) WithProjectAndFilters( return sql.NewSpanIter(span, iter), nil } +// IndexKeyValueIter implements the sql.Indexable interface. +func (*remotesTable) IndexKeyValueIter( + ctx *sql.Context, + colNames []string, +) (sql.IndexKeyValueIter, error) { + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + iter, err := s.Pool.RepoIter() + if err != nil { + return nil, err + } + + return &remotesKeyValueIter{repos: iter, columns: colNames}, nil +} + +// WithProjectFiltersAndIndex implements sql.Indexable interface. 
+func (*remotesTable) WithProjectFiltersAndIndex( + ctx *sql.Context, + columns, filters []sql.Expression, + index sql.IndexValueIter, +) (sql.RowIter, error) { + span, ctx := ctx.Span("gitbase.ReferencesTable.WithProjectFiltersAndIndex") + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + span.Finish() + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + var iter sql.RowIter = &remotesIndexIter{index: index, pool: s.Pool} + + if len(filters) > 0 { + iter = plan.NewFilterIter(ctx, expression.JoinAnd(filters...), iter) + } + + return sql.NewSpanIter(span, iter), nil +} + func remotesIterBuilder(_ *sql.Context, _ selectors, _ []sql.Expression) (RowRepoIter, error) { // it's not worth to manually filter with the selectors return new(remotesIter), nil @@ -109,7 +152,6 @@ type remotesIter struct { remotes []*git.Remote remotePos int urlPos int - lastRemote string } func (i *remotesIter) NewIterator(repo *Repository) (RowRepoIter, error) { @@ -125,10 +167,6 @@ func (i *remotesIter) NewIterator(repo *Repository) (RowRepoIter, error) { urlPos: 0}, nil } -func (i *remotesIter) Repository() string { return i.repositoryID } - -func (i *remotesIter) LastObject() string { return i.lastRemote } - func (i *remotesIter) Next() (sql.Row, error) { if i.remotePos >= len(i.remotes) { return nil, io.EOF @@ -148,21 +186,126 @@ func (i *remotesIter) Next() (sql.Row, error) { i.urlPos = 0 } - row := sql.NewRow( - i.repositoryID, - config.Name, - config.URLs[i.urlPos], - config.URLs[i.urlPos], - config.Fetch[i.urlPos].String(), - config.Fetch[i.urlPos].String(), - ) - + row := remoteToRow(i.repositoryID, config, i.urlPos) i.urlPos++ - i.lastRemote = config.Name return row, nil } func (i *remotesIter) Close() error { return nil } + +func remoteToRow(repoID string, config *config.RemoteConfig, pos int) sql.Row { + return sql.NewRow( + repoID, + config.Name, + config.URLs[pos], + config.URLs[pos], + config.Fetch[pos].String(), + config.Fetch[pos].String(), + ) +} + +type 
remoteIndexKey struct { + Repository string + Pos int + URLPos int +} + +type remotesKeyValueIter struct { + repos *RepositoryIter + repo *Repository + columns []string + remotes []*git.Remote + pos int + urlPos int +} + +func (i *remotesKeyValueIter) Next() ([]interface{}, []byte, error) { + for { + if len(i.remotes) == 0 { + var err error + i.repo, err = i.repos.Next() + if err != nil { + return nil, nil, err + } + + i.remotes, err = i.repo.Repo.Remotes() + if err != nil { + return nil, nil, err + } + + i.pos = 0 + i.urlPos = 0 + } + + if i.pos >= len(i.remotes) { + i.remotes = nil + continue + } + + cfg := i.remotes[i.pos].Config() + if i.urlPos >= len(cfg.URLs) { + i.pos++ + continue + } + + i.urlPos++ + + fmt.Println(remoteIndexKey{i.repo.ID, i.pos, i.urlPos - 1}) + + key, err := encodeIndexKey(remoteIndexKey{i.repo.ID, i.pos, i.urlPos - 1}) + if err != nil { + return nil, nil, err + } + + row := remoteToRow(i.repo.ID, cfg, i.urlPos-1) + values, err := rowIndexValues(row, i.columns, RemotesSchema) + if err != nil { + return nil, nil, err + } + + return values, key, nil + } +} + +func (i *remotesKeyValueIter) Close() error { + return i.repos.Close() +} + +type remotesIndexIter struct { + index sql.IndexValueIter + pool *RepositoryPool + repo *Repository + remotes []*git.Remote +} + +func (i *remotesIndexIter) Next() (sql.Row, error) { + data, err := i.index.Next() + if err != nil { + return nil, err + } + + var key remoteIndexKey + if err := decodeIndexKey(data, &key); err != nil { + return nil, err + } + + if i.repo == nil || i.repo.ID != key.Repository { + i.repo, err = i.pool.GetRepo(key.Repository) + if err != nil { + return nil, err + } + + i.remotes, err = i.repo.Repo.Remotes() + if err != nil { + return nil, err + } + } + + config := i.remotes[key.Pos].Config() + return remoteToRow(key.Repository, config, key.URLPos), nil +} + +func (i *remotesIndexIter) Close() error { return i.index.Close() } diff --git a/remotes_test.go b/remotes_test.go index 
9b9882977..9c59c1540 100644 --- a/remotes_test.go +++ b/remotes_test.go @@ -124,3 +124,33 @@ func TestRemotesPushdown(t *testing.T) { require.NoError(err) require.Len(rows, 1) } + +func TestRemotesIndexKeyValueIter(t *testing.T) { + require := require.New(t) + ctx, path, cleanup := setup(t) + defer cleanup() + + table := new(remotesTable) + iter, err := table.IndexKeyValueIter(ctx, []string{"remote_name", "remote_push_url"}) + require.NoError(err) + + var expected = []keyValue{ + { + key: assertEncodeKey(t, remoteIndexKey{path, 0, 0}), + values: []interface{}{"origin", "git@github.com:git-fixtures/basic.git"}, + }, + } + + assertIndexKeyValueIter(t, iter, expected) +} + +func TestRemotesIndex(t *testing.T) { + testTableIndex( + t, + new(remotesTable), + []sql.Expression{expression.NewEquals( + expression.NewGetField(1, sql.Text, "remote_name", false), + expression.NewLiteral("foo", sql.Text), + )}, + ) +} diff --git a/repositories.go b/repositories.go index 2082649cf..b88c01db1 100644 --- a/repositories.go +++ b/repositories.go @@ -4,6 +4,8 @@ import ( "io" "gopkg.in/src-d/go-mysql-server.v0/sql" + "gopkg.in/src-d/go-mysql-server.v0/sql/expression" + "gopkg.in/src-d/go-mysql-server.v0/sql/plan" ) type repositoriesTable struct{} @@ -16,14 +18,13 @@ var RepositoriesSchema = sql.Schema{ var _ sql.PushdownProjectionAndFiltersTable = (*repositoriesTable)(nil) func newRepositoriesTable() Indexable { - return &indexableTable{ - PushdownTable: new(repositoriesTable), - buildIterWithSelectors: repositoriesIterBuilder, - } + return new(repositoriesTable) } var _ Table = (*repositoriesTable)(nil) +var _ Squashable = (*repositoriesTable)(nil) +func (repositoriesTable) isSquashable() {} func (repositoriesTable) isGitbaseTable() {} func (repositoriesTable) Resolved() bool { @@ -93,6 +94,46 @@ func (r *repositoriesTable) WithProjectAndFilters( return sql.NewSpanIter(span, iter), nil } +// IndexKeyValueIter implements the sql.Indexable interface. 
+func (*repositoriesTable) IndexKeyValueIter( + ctx *sql.Context, + colNames []string, +) (sql.IndexKeyValueIter, error) { + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + iter, err := NewRowRepoIter(ctx, new(repositoriesIter)) + if err != nil { + return nil, err + } + + return &rowKeyValueIter{iter, colNames, RepositoriesSchema}, nil +} + +// WithProjectFiltersAndIndex implements sql.Indexable interface. +func (r *repositoriesTable) WithProjectFiltersAndIndex( + ctx *sql.Context, + columns, filters []sql.Expression, + index sql.IndexValueIter, +) (sql.RowIter, error) { + span, ctx := ctx.Span("gitbase.RepositoriesTable.WithProjectFiltersAndIndex") + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + span.Finish() + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + var iter sql.RowIter = &rowIndexIter{index} + + if len(filters) > 0 { + iter = plan.NewFilterIter(ctx, expression.JoinAnd(filters...), iter) + } + + return sql.NewSpanIter(span, iter), nil +} + func repositoriesIterBuilder(_ *sql.Context, _ selectors, _ []sql.Expression) (RowRepoIter, error) { // it's not worth to manually filter with the selectors return new(repositoriesIter), nil @@ -110,10 +151,6 @@ func (i *repositoriesIter) NewIterator(repo *Repository) (RowRepoIter, error) { }, nil } -func (i *repositoriesIter) Repository() string { return i.id } - -func (i *repositoriesIter) LastObject() string { return i.id } - func (i *repositoriesIter) Next() (sql.Row, error) { if i.visited { return nil, io.EOF diff --git a/repositories_test.go b/repositories_test.go index 1a799dc70..44c484d49 100644 --- a/repositories_test.go +++ b/repositories_test.go @@ -109,3 +109,29 @@ func TestRepositoriesPushdown(t *testing.T) { require.NoError(err) require.Len(rows, 1) } + +func TestRepositoriesIndexKeyValueIter(t *testing.T) { + require := require.New(t) + ctx, path, cleanup := setup(t) + defer cleanup() + + iter, err := 
new(repositoriesTable).IndexKeyValueIter(ctx, []string{"repository_id"}) + require.NoError(err) + + assertIndexKeyValueIter(t, iter, + []keyValue{ + { + assertEncodeKey(t, sql.NewRow(path)), + []interface{}{path}, + }, + }, + ) +} + +func TestRepositoriesIndex(t *testing.T) { + testTableIndex( + t, + new(repositoriesTable), + nil, + ) +} diff --git a/repository_pool.go b/repository_pool.go index 409455628..af6731b02 100644 --- a/repository_pool.go +++ b/repository_pool.go @@ -269,8 +269,6 @@ func (i *RepositoryIter) Close() error { // implementation type RowRepoIter interface { NewIterator(*Repository) (RowRepoIter, error) - Repository() string - LastObject() string Next() (sql.Row, error) Close() error } diff --git a/repository_pool_test.go b/repository_pool_test.go index b2d6e3de4..e79c6af05 100644 --- a/repository_pool_test.go +++ b/repository_pool_test.go @@ -137,9 +137,8 @@ func TestRepositoryPoolIterator(t *testing.T) { } type testCommitIter struct { - iter object.CommitIter - repoID string - lastHash string + iter object.CommitIter + repoID string } func (d *testCommitIter) NewIterator( @@ -153,17 +152,13 @@ func (d *testCommitIter) NewIterator( return &testCommitIter{iter: iter, repoID: repo.ID}, nil } -func (d *testCommitIter) Repository() string { return d.repoID } - -func (d *testCommitIter) LastObject() string { return d.lastHash } - func (d *testCommitIter) Next() (sql.Row, error) { - c, err := d.iter.Next() - if err == nil { - d.lastHash = c.Hash.String() + commit, err := d.iter.Next() + if err != nil { + return nil, err } - return nil, err + return commitToRow(d.repoID, commit), nil } func (d *testCommitIter) Close() error { @@ -188,7 +183,7 @@ func testRepoIter(num int, require *require.Assertions, ctx *sql.Context) { break } - require.Nil(row) + require.NotNil(row) count++ } diff --git a/table.go b/table.go index 842c8a668..c915e4dd3 100644 --- a/table.go +++ b/table.go @@ -8,10 +8,15 @@ import ( // Table represents a gitbase table. 
type Table interface { - sql.Table + sql.PushdownProjectionAndFiltersTable gitBase } +// Squashable represents a table that can be squashed. +type Squashable interface { + isSquashable() +} + type gitBase interface { isGitbaseTable() } diff --git a/tree_entries.go b/tree_entries.go index e4a7c1642..81854498d 100644 --- a/tree_entries.go +++ b/tree_entries.go @@ -5,6 +5,8 @@ import ( "strconv" "gopkg.in/src-d/go-mysql-server.v0/sql" + "gopkg.in/src-d/go-mysql-server.v0/sql/expression" + "gopkg.in/src-d/go-mysql-server.v0/sql/plan" "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/plumbing/object" @@ -24,10 +26,7 @@ var TreeEntriesSchema = sql.Schema{ var _ sql.PushdownProjectionAndFiltersTable = (*treeEntriesTable)(nil) func newTreeEntriesTable() Indexable { - return &indexableTable{ - PushdownTable: new(treeEntriesTable), - buildIterWithSelectors: treeEntriesIterBuilder, - } + return new(treeEntriesTable) } var _ Table = (*treeEntriesTable)(nil) @@ -102,6 +101,46 @@ func (r *treeEntriesTable) WithProjectAndFilters( return sql.NewSpanIter(span, iter), nil } +// IndexKeyValueIter implements the sql.Indexable interface. +func (*treeEntriesTable) IndexKeyValueIter( + ctx *sql.Context, + colNames []string, +) (sql.IndexKeyValueIter, error) { + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + return newTreeEntriesKeyValueIter(s.Pool, colNames), nil +} + +// WithProjectFiltersAndIndex implements sql.Indexable interface. 
+func (*treeEntriesTable) WithProjectFiltersAndIndex( + ctx *sql.Context, + columns, filters []sql.Expression, + index sql.IndexValueIter, +) (sql.RowIter, error) { + span, ctx := ctx.Span("gitbase.TreeEntriesTable.WithProjectFiltersAndIndex") + s, ok := ctx.Session.(*Session) + if !ok || s == nil { + span.Finish() + return nil, ErrInvalidGitbaseSession.New(ctx.Session) + } + + session, err := getSession(ctx) + if err != nil { + return nil, err + } + + var iter sql.RowIter = &treeEntriesIndexIter{index: index, pool: session.Pool} + + if len(filters) > 0 { + iter = plan.NewFilterIter(ctx, expression.JoinAnd(filters...), iter) + } + + return sql.NewSpanIter(span, iter), nil +} + func treeEntriesIterBuilder(_ *sql.Context, selectors selectors, _ []sql.Expression) (RowRepoIter, error) { if len(selectors["tree_hash"]) == 0 { return new(treeEntryIter), nil @@ -120,11 +159,10 @@ func (r treeEntriesTable) String() string { } type treeEntryIter struct { - i *object.TreeIter - tree *object.Tree - cursor int - repoID string - lastHash string + i *object.TreeIter + tree *object.Tree + cursor int + repoID string } func (i *treeEntryIter) NewIterator(repo *Repository) (RowRepoIter, error) { @@ -136,10 +174,6 @@ func (i *treeEntryIter) NewIterator(repo *Repository) (RowRepoIter, error) { return &treeEntryIter{repoID: repo.ID, i: iter}, nil } -func (i *treeEntryIter) Repository() string { return i.repoID } - -func (i *treeEntryIter) LastObject() string { return i.lastHash } - func (i *treeEntryIter) Next() (sql.Row, error) { for { if i.tree == nil { @@ -159,7 +193,6 @@ func (i *treeEntryIter) Next() (sql.Row, error) { entry := &TreeEntry{i.tree.Hash, i.tree.Entries[i.cursor]} i.cursor++ - i.lastHash = i.tree.Hash.String() return treeEntryToRow(i.repoID, entry), nil } @@ -174,22 +207,17 @@ func (i *treeEntryIter) Close() error { } type treeEntriesByHashIter struct { - hashes []string - pos int - tree *object.Tree - cursor int - repo *Repository - lastHash string + hashes []string + 
pos int + tree *object.Tree + cursor int + repo *Repository } func (i *treeEntriesByHashIter) NewIterator(repo *Repository) (RowRepoIter, error) { return &treeEntriesByHashIter{hashes: i.hashes, repo: repo}, nil } -func (i *treeEntriesByHashIter) Repository() string { return i.repo.ID } - -func (i *treeEntriesByHashIter) LastObject() string { return i.lastHash } - func (i *treeEntriesByHashIter) Next() (sql.Row, error) { for { if i.pos >= len(i.hashes) && i.tree == nil { @@ -217,7 +245,6 @@ func (i *treeEntriesByHashIter) Next() (sql.Row, error) { } entry := &TreeEntry{i.tree.Hash, i.tree.Entries[i.cursor]} - i.lastHash = i.tree.Hash.String() i.cursor++ return treeEntryToRow(i.repo.ID, entry), nil @@ -243,3 +270,140 @@ func treeEntryToRow(repoID string, entry *TreeEntry) sql.Row { strconv.FormatInt(int64(entry.Mode), 8), ) } + +type treeEntriesIndexKey struct { + Repository string + Packfile string + Offset int64 + Pos int +} + +type treeEntriesKeyValueIter struct { + iter *objectIter + obj *encodedObject + tree *object.Tree + pos int + columns []string +} + +func newTreeEntriesKeyValueIter(pool *RepositoryPool, columns []string) *treeEntriesKeyValueIter { + return &treeEntriesKeyValueIter{ + iter: newObjectIter(pool, plumbing.TreeObject), + columns: columns, + } +} + +func (i *treeEntriesKeyValueIter) Next() ([]interface{}, []byte, error) { + for { + if i.tree == nil { + var err error + i.obj, err = i.iter.Next() + if err != nil { + return nil, nil, err + } + + var ok bool + i.tree, ok = i.obj.Object.(*object.Tree) + if !ok { + ErrInvalidObjectType.New(i.obj.Object, "*object.Tree") + } + + i.pos = 0 + } + + if i.pos >= len(i.tree.Entries) { + i.tree = nil + continue + } + + entry := i.tree.Entries[i.pos] + i.pos++ + + key, err := encodeIndexKey(treeEntriesIndexKey{ + Repository: i.obj.RepositoryID, + Packfile: i.obj.Packfile.String(), + Offset: int64(i.obj.Offset), + Pos: i.pos - 1, + }) + if err != nil { + return nil, nil, err + } + + row := 
treeEntryToRow(i.obj.RepositoryID, &TreeEntry{i.tree.Hash, entry}) + values, err := rowIndexValues(row, i.columns, TreeEntriesSchema) + if err != nil { + return nil, nil, err + } + + return values, key, nil + } +} + +func (i *treeEntriesKeyValueIter) Close() error { return i.iter.Close() } + +type treeEntriesIndexIter struct { + index sql.IndexValueIter + pool *RepositoryPool + decoder *objectDecoder + prevTreeOffset int64 + tree *object.Tree +} + +func (i *treeEntriesIndexIter) Next() (sql.Row, error) { + data, err := i.index.Next() + if err != nil { + return nil, err + } + + var key treeEntriesIndexKey + if err := decodeIndexKey(data, &key); err != nil { + return nil, err + } + + packfile := plumbing.NewHash(key.Packfile) + if i.decoder == nil || !i.decoder.equals(key.Repository, packfile) { + if i.decoder != nil { + if err := i.decoder.Close(); err != nil { + return nil, err + } + } + + i.decoder, err = newObjectDecoder(i.pool.repositories[key.Repository], packfile) + if err != nil { + return nil, err + } + } + + var tree *object.Tree + if i.prevTreeOffset == key.Offset { + tree = i.tree + } else { + obj, err := i.decoder.get(key.Offset) + if err != nil { + return nil, err + } + + var ok bool + i.tree, ok = obj.(*object.Tree) + if !ok { + return nil, ErrInvalidObjectType.New(obj, "*object.Tree") + } + + tree = i.tree + } + + i.prevTreeOffset = key.Offset + entry := &TreeEntry{tree.Hash, tree.Entries[key.Pos]} + return treeEntryToRow(key.Repository, entry), nil +} + +func (i *treeEntriesIndexIter) Close() error { + if i.decoder != nil { + if err := i.decoder.Close(); err != nil { + _ = i.index.Close() + return err + } + } + + return i.index.Close() +} diff --git a/tree_entries_test.go b/tree_entries_test.go index cc27168cf..884e04ae5 100644 --- a/tree_entries_test.go +++ b/tree_entries_test.go @@ -107,3 +107,569 @@ func TestTreeEntriesPushdown(t *testing.T) { require.NoError(err) require.Len(rows, 5) } + +func TestTreeEntriesKeyValueIter(t *testing.T) { + require 
:= require.New(t) + ctx, path, cleanup := setup(t) + defer cleanup() + + table := new(treeEntriesTable) + iter, err := table.IndexKeyValueIter(ctx, []string{"tree_entry_name", "tree_hash"}) + require.NoError(err) + + var expected = []keyValue{ + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78685, + Pos: 0, + }), + []interface{}{ + ".gitignore", + "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78685, + Pos: 1, + }), + []interface{}{ + "CHANGELOG", + "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78685, + Pos: 2, + }), + []interface{}{ + "LICENSE", + "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78685, + Pos: 3, + }), + []interface{}{ + "binary.jpg", + "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78685, + Pos: 4, + }), + []interface{}{ + "json", + "4d081c50e250fa32ea8b1313cf8bb7c2ad7627fd", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78264, + Pos: 0, + }), + []interface{}{ + "crappy.php", + "586af567d0bb5e771e49bdd9434f5e0fb76d25fa", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78184, + Pos: 0, + }), + []interface{}{ + "long.json", + "5a877e6a906a2743ad6e45d99c1793642aaf8eda", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + 
Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78184, + Pos: 1, + }), + []interface{}{ + "short.json", + "5a877e6a906a2743ad6e45d99c1793642aaf8eda", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78833, + Pos: 0, + }), + []interface{}{ + ".gitignore", + "8dcef98b1d52143e1e2dbc458ffe38f925786bf2", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78833, + Pos: 1, + }), + []interface{}{ + "LICENSE", + "8dcef98b1d52143e1e2dbc458ffe38f925786bf2", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78833, + Pos: 2, + }), + []interface{}{ + "binary.jpg", + "8dcef98b1d52143e1e2dbc458ffe38f925786bf2", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78135, + Pos: 0, + }), + []interface{}{ + "example.go", + "a39771a7651f97faf5c72e08224d857fc35133db", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78358, + Pos: 0, + }), + []interface{}{ + ".gitignore", + "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78358, + Pos: 1, + }), + []interface{}{ + "CHANGELOG", + "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78358, + Pos: 2, + }), + []interface{}{ + "LICENSE", + "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: 
"323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78358, + Pos: 3, + }), + []interface{}{ + "binary.jpg", + "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78358, + Pos: 4, + }), + []interface{}{ + "go", + "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78358, + Pos: 5, + }), + []interface{}{ + "json", + "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78358, + Pos: 6, + }), + []interface{}{ + "php", + "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78358, + Pos: 7, + }), + []interface{}{ + "vendor", + "a8d315b2b1c615d43042c3a62402b8a54288cf5c", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78852, + Pos: 0, + }), + []interface{}{ + ".gitignore", + "aa9b383c260e1d05fbbf6b30a02914555e20c725", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78852, + Pos: 1, + }), + []interface{}{ + "LICENSE", + "aa9b383c260e1d05fbbf6b30a02914555e20c725", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78720, + Pos: 0, + }), + []interface{}{ + ".gitignore", + "c2d30fa8ef288618f65f6eed6e168e0d514886f4", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78720, + Pos: 1, + 
}), + []interface{}{ + "CHANGELOG", + "c2d30fa8ef288618f65f6eed6e168e0d514886f4", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78720, + Pos: 2, + }), + []interface{}{ + "LICENSE", + "c2d30fa8ef288618f65f6eed6e168e0d514886f4", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78313, + Pos: 0, + }), + []interface{}{ + "foo.go", + "cf4aa3b38974fb7d81f367c0830f7d78d65ab86b", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78636, + Pos: 0, + }), + []interface{}{ + ".gitignore", + "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78636, + Pos: 1, + }), + []interface{}{ + "CHANGELOG", + "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78636, + Pos: 2, + }), + []interface{}{ + "LICENSE", + "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78636, + Pos: 3, + }), + []interface{}{ + "README", + "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78636, + Pos: 4, + }), + []interface{}{ + "binary.jpg", + "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78636, + Pos: 5, + }), + []interface{}{ + "go", + 
"dbd3641b371024f44d0e469a9c8f5457b0660de1", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78636, + Pos: 6, + }), + []interface{}{ + "json", + "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78636, + Pos: 7, + }), + []interface{}{ + "php", + "dbd3641b371024f44d0e469a9c8f5457b0660de1", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78704, + Pos: 0, + }), + []interface{}{ + ".gitignore", + "eba74343e2f15d62adedfd8c883ee0262b5c8021", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78704, + Pos: 1, + }), + []interface{}{ + "CHANGELOG", + "eba74343e2f15d62adedfd8c883ee0262b5c8021", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78704, + Pos: 2, + }), + []interface{}{ + "LICENSE", + "eba74343e2f15d62adedfd8c883ee0262b5c8021", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78704, + Pos: 3, + }), + []interface{}{ + "binary.jpg", + "eba74343e2f15d62adedfd8c883ee0262b5c8021", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78619, + Pos: 0, + }), + []interface{}{ + ".gitignore", + "fb72698cab7617ac416264415f13224dfd7a165e", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78619, + Pos: 1, + }), + []interface{}{ + "CHANGELOG", + "fb72698cab7617ac416264415f13224dfd7a165e", + }, + }, + { + 
assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78619, + Pos: 2, + }), + []interface{}{ + "LICENSE", + "fb72698cab7617ac416264415f13224dfd7a165e", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78619, + Pos: 3, + }), + []interface{}{ + "binary.jpg", + "fb72698cab7617ac416264415f13224dfd7a165e", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78619, + Pos: 4, + }), + []interface{}{ + "go", + "fb72698cab7617ac416264415f13224dfd7a165e", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78619, + Pos: 5, + }), + []interface{}{ + "json", + "fb72698cab7617ac416264415f13224dfd7a165e", + }, + }, + { + assertEncodeKey(t, treeEntriesIndexKey{ + Repository: path, + Packfile: "323a4b6b5de684f9966953a043bc800154e5dbfa", + Offset: 78619, + Pos: 6, + }), + []interface{}{ + "php", + "fb72698cab7617ac416264415f13224dfd7a165e", + }, + }, + } + + assertIndexKeyValueIter(t, iter, expected) +} + +func TestTreeEntriesIndex(t *testing.T) { + testTableIndex( + t, + new(treeEntriesTable), + []sql.Expression{expression.NewEquals( + expression.NewGetField(1, sql.Text, "tree_entry_name", false), + expression.NewLiteral("LICENSE", sql.Text), + )}, + ) +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/.gitignore b/vendor/gopkg.in/src-d/go-git.v4/.gitignore index 2d830686d..038dd9f1e 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/.gitignore +++ b/vendor/gopkg.in/src-d/go-git.v4/.gitignore @@ -1 +1,4 @@ coverage.out +*~ +coverage.txt +profile.out diff --git a/vendor/gopkg.in/src-d/go-git.v4/.travis.yml b/vendor/gopkg.in/src-d/go-git.v4/.travis.yml index 49d860839..6484425e3 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/.travis.yml +++ 
b/vendor/gopkg.in/src-d/go-git.v4/.travis.yml @@ -1,7 +1,6 @@ language: go go: - - 1.8.x - 1.9.x - "1.10" diff --git a/vendor/gopkg.in/src-d/go-git.v4/_examples/branch/main.go b/vendor/gopkg.in/src-d/go-git.v4/_examples/branch/main.go index fa1ad0182..ff33ead54 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/_examples/branch/main.go +++ b/vendor/gopkg.in/src-d/go-git.v4/_examples/branch/main.go @@ -28,7 +28,7 @@ func main() { // Create a new plumbing.HashReference object with the name of the branch // and the hash from the HEAD. The reference name should be a full reference - // name and now a abbreviated one, as is used on the git cli. + // name and not an abbreviated one, as is used on the git cli. // // For tags we should use `refs/tags/%s` instead of `refs/heads/%s` used // for branches. diff --git a/vendor/gopkg.in/src-d/go-git.v4/blame.go b/vendor/gopkg.in/src-d/go-git.v4/blame.go index 3c5840f24..349cdd9b6 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/blame.go +++ b/vendor/gopkg.in/src-d/go-git.v4/blame.go @@ -109,12 +109,15 @@ type Line struct { Text string // Date is when the original text of the line was introduced Date time.Time + // Hash is the commit hash that introduced the original line + Hash plumbing.Hash } -func newLine(author, text string, date time.Time) *Line { +func newLine(author, text string, date time.Time, hash plumbing.Hash) *Line { return &Line{ Author: author, Text: text, + Hash: hash, Date: date, } } @@ -125,7 +128,7 @@ func newLines(contents []string, commits []*object.Commit) ([]*Line, error) { } result := make([]*Line, 0, len(contents)) for i := range contents { - l := newLine(commits[i].Author.Email, contents[i], commits[i].Author.When) + l := newLine(commits[i].Author.Email, contents[i], commits[i].Author.When, commits[i].Hash) result = append(result, l) } return result, nil diff --git a/vendor/gopkg.in/src-d/go-git.v4/blame_test.go b/vendor/gopkg.in/src-d/go-git.v4/blame_test.go index 51c546a96..92911b1e0 100644 --- 
a/vendor/gopkg.in/src-d/go-git.v4/blame_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/blame_test.go @@ -32,6 +32,10 @@ func (s *BlameSuite) TestBlame(c *C) { obt, err := Blame(commit, t.path) c.Assert(err, IsNil) c.Assert(obt, DeepEquals, exp) + + for i, l := range obt.Lines { + c.Assert(l.Hash.String(), Equals, t.blames[i]) + } } } @@ -54,6 +58,7 @@ func (s *BlameSuite) mockBlame(c *C, t blameTest, r *Repository) (blame *BlameRe Author: commit.Author.Email, Text: lines[i], Date: commit.Author.When, + Hash: commit.Hash, } blamedLines = append(blamedLines, l) } diff --git a/vendor/gopkg.in/src-d/go-git.v4/config/branch.go b/vendor/gopkg.in/src-d/go-git.v4/config/branch.go new file mode 100644 index 000000000..e18073c96 --- /dev/null +++ b/vendor/gopkg.in/src-d/go-git.v4/config/branch.go @@ -0,0 +1,71 @@ +package config + +import ( + "errors" + + "gopkg.in/src-d/go-git.v4/plumbing" + format "gopkg.in/src-d/go-git.v4/plumbing/format/config" +) + +var ( + errBranchEmptyName = errors.New("branch config: empty name") + errBranchInvalidMerge = errors.New("branch config: invalid merge") +) + +// Branch contains information on the +// local branches and which remote to track +type Branch struct { + // Name of branch + Name string + // Remote name of remote to track + Remote string + // Merge is the local refspec for the branch + Merge plumbing.ReferenceName + + raw *format.Subsection +} + +// Validate validates fields of branch +func (b *Branch) Validate() error { + if b.Name == "" { + return errBranchEmptyName + } + + if b.Merge != "" && !b.Merge.IsBranch() { + return errBranchInvalidMerge + } + + return nil +} + +func (b *Branch) marshal() *format.Subsection { + if b.raw == nil { + b.raw = &format.Subsection{} + } + + b.raw.Name = b.Name + + if b.Remote == "" { + b.raw.RemoveOption(remoteSection) + } else { + b.raw.SetOption(remoteSection, b.Remote) + } + + if b.Merge == "" { + b.raw.RemoveOption(mergeKey) + } else { + b.raw.SetOption(mergeKey, string(b.Merge)) + } + + 
return b.raw +} + +func (b *Branch) unmarshal(s *format.Subsection) error { + b.raw = s + + b.Name = b.raw.Name + b.Remote = b.raw.Options.Get(remoteSection) + b.Merge = plumbing.ReferenceName(b.raw.Options.Get(mergeKey)) + + return b.Validate() +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/config/branch_test.go b/vendor/gopkg.in/src-d/go-git.v4/config/branch_test.go new file mode 100644 index 000000000..d74122e2d --- /dev/null +++ b/vendor/gopkg.in/src-d/go-git.v4/config/branch_test.go @@ -0,0 +1,76 @@ +package config + +import ( + . "gopkg.in/check.v1" + "gopkg.in/src-d/go-git.v4/plumbing" +) + +type BranchSuite struct{} + +var _ = Suite(&BranchSuite{}) + +func (b *BranchSuite) TestValidateName(c *C) { + goodBranch := Branch{ + Name: "master", + Remote: "some_remote", + Merge: "refs/heads/master", + } + badBranch := Branch{ + Remote: "some_remote", + Merge: "refs/heads/master", + } + c.Assert(goodBranch.Validate(), IsNil) + c.Assert(badBranch.Validate(), NotNil) +} + +func (b *BranchSuite) TestValidateMerge(c *C) { + goodBranch := Branch{ + Name: "master", + Remote: "some_remote", + Merge: "refs/heads/master", + } + badBranch := Branch{ + Name: "master", + Remote: "some_remote", + Merge: "blah", + } + c.Assert(goodBranch.Validate(), IsNil) + c.Assert(badBranch.Validate(), NotNil) +} + +func (b *BranchSuite) TestMarshall(c *C) { + expected := []byte(`[core] + bare = false +[branch "branch-tracking-on-clone"] + remote = fork + merge = refs/heads/branch-tracking-on-clone +`) + + cfg := NewConfig() + cfg.Branches["branch-tracking-on-clone"] = &Branch{ + Name: "branch-tracking-on-clone", + Remote: "fork", + Merge: plumbing.ReferenceName("refs/heads/branch-tracking-on-clone"), + } + + actual, err := cfg.Marshal() + c.Assert(err, IsNil) + c.Assert(string(actual), Equals, string(expected)) +} + +func (b *BranchSuite) TestUnmarshall(c *C) { + input := []byte(`[core] + bare = false +[branch "branch-tracking-on-clone"] + remote = fork + merge = 
refs/heads/branch-tracking-on-clone +`) + + cfg := NewConfig() + err := cfg.Unmarshal(input) + c.Assert(err, IsNil) + branch := cfg.Branches["branch-tracking-on-clone"] + c.Assert(branch.Name, Equals, "branch-tracking-on-clone") + c.Assert(branch.Remote, Equals, "fork") + c.Assert(branch.Merge, Equals, plumbing.ReferenceName("refs/heads/branch-tracking-on-clone")) +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/config/config.go b/vendor/gopkg.in/src-d/go-git.v4/config/config.go index 87a847d92..ce6506dae 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/config/config.go +++ b/vendor/gopkg.in/src-d/go-git.v4/config/config.go @@ -25,7 +25,7 @@ type ConfigStorer interface { } var ( - ErrInvalid = errors.New("config invalid remote") + ErrInvalid = errors.New("config invalid key in remote or branch") ErrRemoteConfigNotFound = errors.New("remote config not found") ErrRemoteConfigEmptyURL = errors.New("remote config: empty URL") ErrRemoteConfigEmptyName = errors.New("remote config: empty name") @@ -55,7 +55,9 @@ type Config struct { // Submodules list of repository submodules, the key of the map is the name // of the submodule, should equal to Submodule.Name. Submodules map[string]*Submodule - + // Branches list of branches, the key is the branch name and should + // equal Branch.Name + Branches map[string]*Branch // Raw contains the raw information of a config file. The main goal is // preserve the parsed information from the original format, to avoid // dropping unsupported fields. 
@@ -67,6 +69,7 @@ func NewConfig() *Config { config := &Config{ Remotes: make(map[string]*RemoteConfig), Submodules: make(map[string]*Submodule), + Branches: make(map[string]*Branch), Raw: format.New(), } @@ -87,12 +90,23 @@ func (c *Config) Validate() error { } } + for name, b := range c.Branches { + if b.Name != name { + return ErrInvalid + } + + if err := b.Validate(); err != nil { + return err + } + } + return nil } const ( remoteSection = "remote" submoduleSection = "submodule" + branchSection = "branch" coreSection = "core" packSection = "pack" fetchKey = "fetch" @@ -100,6 +114,7 @@ const ( bareKey = "bare" worktreeKey = "worktree" windowKey = "window" + mergeKey = "merge" // DefaultPackWindow holds the number of previous objects used to // generate deltas. The value 10 is the same used by git command. @@ -120,7 +135,12 @@ func (c *Config) Unmarshal(b []byte) error { if err := c.unmarshalPack(); err != nil { return err } - c.unmarshalSubmodules() + unmarshalSubmodules(c.Raw, c.Submodules) + + if err := c.unmarshalBranches(); err != nil { + return err + } + return c.unmarshalRemotes() } @@ -162,22 +182,41 @@ func (c *Config) unmarshalRemotes() error { return nil } -func (c *Config) unmarshalSubmodules() { - s := c.Raw.Section(submoduleSection) +func unmarshalSubmodules(fc *format.Config, submodules map[string]*Submodule) { + s := fc.Section(submoduleSection) for _, sub := range s.Subsections { m := &Submodule{} m.unmarshal(sub) - c.Submodules[m.Name] = m + if m.Validate() == ErrModuleBadPath { + continue + } + + submodules[m.Name] = m } } +func (c *Config) unmarshalBranches() error { + bs := c.Raw.Section(branchSection) + for _, sub := range bs.Subsections { + b := &Branch{} + + if err := b.unmarshal(sub); err != nil { + return err + } + + c.Branches[b.Name] = b + } + return nil +} + // Marshal returns Config encoded as a git-config file. 
func (c *Config) Marshal() ([]byte, error) { c.marshalCore() c.marshalPack() c.marshalRemotes() c.marshalSubmodules() + c.marshalBranches() buf := bytes.NewBuffer(nil) if err := format.NewEncoder(buf).Encode(c.Raw); err != nil { @@ -245,6 +284,33 @@ func (c *Config) marshalSubmodules() { } } +func (c *Config) marshalBranches() { + s := c.Raw.Section(branchSection) + newSubsections := make(format.Subsections, 0, len(c.Branches)) + added := make(map[string]bool) + for _, subsection := range s.Subsections { + if branch, ok := c.Branches[subsection.Name]; ok { + newSubsections = append(newSubsections, branch.marshal()) + added[subsection.Name] = true + } + } + + branchNames := make([]string, 0, len(c.Branches)) + for name := range c.Branches { + branchNames = append(branchNames, name) + } + + sort.Strings(branchNames) + + for _, name := range branchNames { + if !added[name] { + newSubsections = append(newSubsections, c.Branches[name].marshal()) + } + } + + s.Subsections = newSubsections +} + // RemoteConfig contains the configuration for a given remote repository. type RemoteConfig struct { // Name of the remote diff --git a/vendor/gopkg.in/src-d/go-git.v4/config/config_test.go b/vendor/gopkg.in/src-d/go-git.v4/config/config_test.go index 1f120c018..5cd713e45 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/config/config_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/config/config_test.go @@ -1,6 +1,9 @@ package config -import . "gopkg.in/check.v1" +import ( + . 
"gopkg.in/check.v1" + "gopkg.in/src-d/go-git.v4/plumbing" +) type ConfigSuite struct{} @@ -47,7 +50,8 @@ func (s *ConfigSuite) TestUnmarshall(c *C) { c.Assert(cfg.Submodules["qux"].Name, Equals, "qux") c.Assert(cfg.Submodules["qux"].URL, Equals, "https://github.com/foo/qux.git") c.Assert(cfg.Submodules["qux"].Branch, Equals, "bar") - + c.Assert(cfg.Branches["master"].Remote, Equals, "origin") + c.Assert(cfg.Branches["master"].Merge, Equals, plumbing.ReferenceName("refs/heads/master")) } func (s *ConfigSuite) TestMarshall(c *C) { @@ -65,6 +69,9 @@ func (s *ConfigSuite) TestMarshall(c *C) { url = git@github.com:mcuadros/go-git.git [submodule "qux"] url = https://github.com/foo/qux.git +[branch "master"] + remote = origin + merge = refs/heads/master `) cfg := NewConfig() @@ -87,6 +94,12 @@ func (s *ConfigSuite) TestMarshall(c *C) { URL: "https://github.com/foo/qux.git", } + cfg.Branches["master"] = &Branch{ + Name: "master", + Remote: "origin", + Merge: "refs/heads/master", + } + b, err := cfg.Marshal() c.Assert(err, IsNil) @@ -118,6 +131,29 @@ func (s *ConfigSuite) TestUnmarshallMarshall(c *C) { c.Assert(string(output), DeepEquals, string(input)) } +func (s *ConfigSuite) TestValidateConfig(c *C) { + config := &Config{ + Remotes: map[string]*RemoteConfig{ + "bar": { + Name: "bar", + URLs: []string{"http://foo/bar"}, + }, + }, + Branches: map[string]*Branch{ + "bar": { + Name: "bar", + }, + "foo": { + Name: "foo", + Remote: "origin", + Merge: plumbing.ReferenceName("refs/heads/foo"), + }, + }, + } + + c.Assert(config.Validate(), IsNil) +} + func (s *ConfigSuite) TestValidateInvalidRemote(c *C) { config := &Config{ Remotes: map[string]*RemoteConfig{ @@ -128,7 +164,7 @@ func (s *ConfigSuite) TestValidateInvalidRemote(c *C) { c.Assert(config.Validate(), Equals, ErrRemoteConfigEmptyURL) } -func (s *ConfigSuite) TestValidateInvalidKey(c *C) { +func (s *ConfigSuite) TestValidateInvalidRemoteKey(c 
*C) { config := &Config{ Remotes: map[string]*RemoteConfig{ "bar": {Name: "foo"}, @@ -157,10 +193,44 @@ func (s *ConfigSuite) TestRemoteConfigValidateDefault(c *C) { c.Assert(fetch[0].String(), Equals, "+refs/heads/*:refs/remotes/foo/*") } +func (s *ConfigSuite) TestValidateInvalidBranchKey(c *C) { + config := &Config{ + Branches: map[string]*Branch{ + "foo": { + Name: "bar", + Remote: "origin", + Merge: plumbing.ReferenceName("refs/heads/bar"), + }, + }, + } + + c.Assert(config.Validate(), Equals, ErrInvalid) +} + +func (s *ConfigSuite) TestValidateInvalidBranch(c *C) { + config := &Config{ + Branches: map[string]*Branch{ + "bar": { + Name: "bar", + Remote: "origin", + Merge: plumbing.ReferenceName("refs/heads/bar"), + }, + "foo": { + Name: "foo", + Remote: "origin", + Merge: plumbing.ReferenceName("baz"), + }, + }, + } + + c.Assert(config.Validate(), Equals, errBranchInvalidMerge) +} + func (s *ConfigSuite) TestRemoteConfigDefaultValues(c *C) { config := NewConfig() c.Assert(config.Remotes, HasLen, 0) + c.Assert(config.Branches, HasLen, 0) c.Assert(config.Submodules, HasLen, 0) c.Assert(config.Raw, NotNil) c.Assert(config.Pack.Window, Equals, DefaultPackWindow) diff --git a/vendor/gopkg.in/src-d/go-git.v4/config/modules.go b/vendor/gopkg.in/src-d/go-git.v4/config/modules.go index b20898405..90758d932 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/config/modules.go +++ b/vendor/gopkg.in/src-d/go-git.v4/config/modules.go @@ -3,6 +3,7 @@ package config import ( "bytes" "errors" + "regexp" format "gopkg.in/src-d/go-git.v4/plumbing/format/config" ) @@ -10,6 +11,12 @@ import ( var ( ErrModuleEmptyURL = errors.New("module config: empty URL") ErrModuleEmptyPath = errors.New("module config: empty path") + ErrModuleBadPath = errors.New("submodule has an invalid path") +) + +var ( + // Matches module paths with dotdot ".." components. 
+ dotdotPath = regexp.MustCompile(`(^|[/\\])\.\.([/\\]|$)`) ) // Modules defines the submodules properties, represents a .gitmodules file @@ -44,14 +51,7 @@ func (m *Modules) Unmarshal(b []byte) error { return err } - s := m.raw.Section(submoduleSection) - for _, sub := range s.Subsections { - mod := &Submodule{} - mod.unmarshal(sub) - - m.Submodules[mod.Path] = mod - } - + unmarshalSubmodules(m.raw, m.Submodules) return nil } @@ -102,6 +102,10 @@ func (m *Submodule) Validate() error { return ErrModuleEmptyURL } + if dotdotPath.MatchString(m.Path) { + return ErrModuleBadPath + } + return nil } diff --git a/vendor/gopkg.in/src-d/go-git.v4/config/modules_test.go b/vendor/gopkg.in/src-d/go-git.v4/config/modules_test.go index 36cd93f73..8e10d70f1 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/config/modules_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/config/modules_test.go @@ -11,6 +11,29 @@ func (s *ModulesSuite) TestValidateMissingURL(c *C) { c.Assert(m.Validate(), Equals, ErrModuleEmptyURL) } +func (s *ModulesSuite) TestValidateBadPath(c *C) { + input := []string{ + `..`, + `../`, + `../bar`, + + `/..`, + `/../bar`, + + `foo/..`, + `foo/../`, + `foo/../bar`, + } + + for _, p := range input { + m := &Submodule{ + Path: p, + URL: "https://example.com/", + } + c.Assert(m.Validate(), Equals, ErrModuleBadPath) + } +} + func (s *ModulesSuite) TestValidateMissingName(c *C) { m := &Submodule{URL: "bar"} c.Assert(m.Validate(), Equals, ErrModuleEmptyPath) @@ -39,6 +62,9 @@ func (s *ModulesSuite) TestUnmarshall(c *C) { path = foo/bar url = https://github.com/foo/bar.git branch = dev +[submodule "suspicious"] + path = ../../foo/bar + url = https://github.com/foo/bar.git `) cfg := NewModules() diff --git a/vendor/gopkg.in/src-d/go-git.v4/config/refspec.go b/vendor/gopkg.in/src-d/go-git.v4/config/refspec.go index af7e73205..c9b9d524f 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/config/refspec.go +++ 
b/vendor/gopkg.in/src-d/go-git.v4/config/refspec.go @@ -62,7 +62,13 @@ func (s RefSpec) IsDelete() bool { // Src return the src side. func (s RefSpec) Src() string { spec := string(s) - start := strings.Index(spec, refSpecForce) + 1 + + var start int + if s.IsForceUpdate() { + start = 1 + } else { + start = 0 + } end := strings.Index(spec, refSpecSeparator) return spec[start:end] diff --git a/vendor/gopkg.in/src-d/go-git.v4/config/refspec_test.go b/vendor/gopkg.in/src-d/go-git.v4/config/refspec_test.go index 5ee610893..675e075cc 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/config/refspec_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/config/refspec_test.go @@ -62,8 +62,17 @@ func (s *RefSpecSuite) TestRefSpecSrc(c *C) { spec := RefSpec("refs/heads/*:refs/remotes/origin/*") c.Assert(spec.Src(), Equals, "refs/heads/*") + spec = RefSpec("+refs/heads/*:refs/remotes/origin/*") + c.Assert(spec.Src(), Equals, "refs/heads/*") + spec = RefSpec(":refs/heads/master") c.Assert(spec.Src(), Equals, "") + + spec = RefSpec("refs/heads/love+hate:refs/heads/love+hate") + c.Assert(spec.Src(), Equals, "refs/heads/love+hate") + + spec = RefSpec("+refs/heads/love+hate:refs/heads/love+hate") + c.Assert(spec.Src(), Equals, "refs/heads/love+hate") } func (s *RefSpecSuite) TestRefSpecMatch(c *C) { @@ -71,9 +80,19 @@ func (s *RefSpecSuite) TestRefSpecMatch(c *C) { c.Assert(spec.Match(plumbing.ReferenceName("refs/heads/foo")), Equals, false) c.Assert(spec.Match(plumbing.ReferenceName("refs/heads/master")), Equals, true) + spec = RefSpec("+refs/heads/master:refs/remotes/origin/master") + c.Assert(spec.Match(plumbing.ReferenceName("refs/heads/foo")), Equals, false) + c.Assert(spec.Match(plumbing.ReferenceName("refs/heads/master")), Equals, true) + spec = RefSpec(":refs/heads/master") c.Assert(spec.Match(plumbing.ReferenceName("")), Equals, true) c.Assert(spec.Match(plumbing.ReferenceName("refs/heads/master")), Equals, false) + + spec = RefSpec("refs/heads/love+hate:heads/love+hate") + 
c.Assert(spec.Match(plumbing.ReferenceName("refs/heads/love+hate")), Equals, true) + + spec = RefSpec("+refs/heads/love+hate:heads/love+hate") + c.Assert(spec.Match(plumbing.ReferenceName("refs/heads/love+hate")), Equals, true) } func (s *RefSpecSuite) TestRefSpecMatchGlob(c *C) { diff --git a/vendor/gopkg.in/src-d/go-git.v4/example_test.go b/vendor/gopkg.in/src-d/go-git.v4/example_test.go index e9d8e8b46..ef7e3d375 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/example_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/example_test.go @@ -24,12 +24,18 @@ func ExampleClone() { // Clones the repository into the worktree (fs) and storer all the .git // content into the storer - _, _ = git.Clone(storer, fs, &git.CloneOptions{ + _, err := git.Clone(storer, fs, &git.CloneOptions{ URL: "https://github.com/git-fixtures/basic.git", }) + if err != nil { + log.Fatal(err) + } // Prints the content of the CHANGELOG file from the cloned repository - changelog, _ := fs.Open("CHANGELOG") + changelog, err := fs.Open("CHANGELOG") + if err != nil { + log.Fatal(err) + } io.Copy(os.Stdout, changelog) // Output: Initial changelog diff --git a/vendor/gopkg.in/src-d/go-git.v4/options.go b/vendor/gopkg.in/src-d/go-git.v4/options.go index f0c2fd8d3..885980ef0 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/options.go +++ b/vendor/gopkg.in/src-d/go-git.v4/options.go @@ -420,3 +420,14 @@ func (o *GrepOptions) Validate(w *Worktree) error { return nil } + +// PlainOpenOptions describes how opening a plain repository should be +// performed. +type PlainOpenOptions struct { + // DetectDotGit defines whether parent directories should be + // walked until a .git directory or file is found. + DetectDotGit bool +} + +// Validate validates the fields and sets the default values. 
+func (o *PlainOpenOptions) Validate() error { return nil } diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/gitignore/dir.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/gitignore/dir.go index 41dd62497..1e88970ef 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/gitignore/dir.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/gitignore/dir.go @@ -1,24 +1,31 @@ package gitignore import ( + "bytes" "io/ioutil" "os" + "os/user" "strings" "gopkg.in/src-d/go-billy.v4" + "gopkg.in/src-d/go-git.v4/plumbing/format/config" + gioutil "gopkg.in/src-d/go-git.v4/utils/ioutil" ) const ( commentPrefix = "#" + coreSection = "core" eol = "\n" + excludesfile = "excludesfile" gitDir = ".git" gitignoreFile = ".gitignore" + gitconfigFile = ".gitconfig" + systemFile = "/etc/gitconfig" ) -// ReadPatterns reads gitignore patterns recursively traversing through the directory -// structure. The result is in the ascending order of priority (last higher). -func ReadPatterns(fs billy.Filesystem, path []string) (ps []Pattern, err error) { - f, err := fs.Open(fs.Join(append(path, gitignoreFile)...)) +// readIgnoreFile reads a specific git ignore file. +func readIgnoreFile(fs billy.Filesystem, path []string, ignoreFile string) (ps []Pattern, err error) { + f, err := fs.Open(fs.Join(append(path, ignoreFile)...)) if err == nil { defer f.Close() @@ -33,6 +40,14 @@ func ReadPatterns(fs billy.Filesystem, path []string) (ps []Pattern, err error) return nil, err } + return +} + +// ReadPatterns reads gitignore patterns recursively traversing through the directory +// structure. The result is in the ascending order of priority (last higher). 
+func ReadPatterns(fs billy.Filesystem, path []string) (ps []Pattern, err error) { + ps, _ = readIgnoreFile(fs, path, gitignoreFile) + var fis []os.FileInfo fis, err = fs.ReadDir(fs.Join(path...)) if err != nil { @@ -55,3 +70,67 @@ func ReadPatterns(fs billy.Filesystem, path []string) (ps []Pattern, err error) return } + +func loadPatterns(fs billy.Filesystem, path string) (ps []Pattern, err error) { + f, err := fs.Open(path) + if err != nil { + if os.IsNotExist(err) { + return nil, nil + } + return nil, err + } + + defer gioutil.CheckClose(f, &err) + + b, err := ioutil.ReadAll(f) + if err != nil { + return + } + + d := config.NewDecoder(bytes.NewBuffer(b)) + + raw := config.New() + if err = d.Decode(raw); err != nil { + return + } + + s := raw.Section(coreSection) + efo := s.Options.Get(excludesfile) + if efo == "" { + return nil, nil + } + + ps, err = readIgnoreFile(fs, nil, efo) + if os.IsNotExist(err) { + return nil, nil + } + + return +} + +// LoadGlobalPatterns loads gitignore patterns from from the gitignore file +// declared in a user's ~/.gitconfig file. If the ~/.gitconfig file does not +// exist the function will return nil. If the core.excludesfile property +// is not declared, the function will return nil. If the file pointed to by +// the core.excludesfile property does not exist, the function will return nil. +// +// The function assumes fs is rooted at the root filesystem. +func LoadGlobalPatterns(fs billy.Filesystem) (ps []Pattern, err error) { + usr, err := user.Current() + if err != nil { + return + } + + return loadPatterns(fs, fs.Join(usr.HomeDir, gitconfigFile)) +} + +// LoadSystemPatterns loads gitignore patterns from from the gitignore file +// declared in a system's /etc/gitconfig file. If the ~/.gitconfig file does +// not exist the function will return nil. If the core.excludesfile property +// is not declared, the function will return nil. 
If the file pointed to by +// the core.excludesfile property does not exist, the function will return nil. +// +// The function assumes fs is rooted at the root filesystem. +func LoadSystemPatterns(fs billy.Filesystem) (ps []Pattern, err error) { + return loadPatterns(fs, systemFile) +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/gitignore/dir_test.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/gitignore/dir_test.go index b8a545317..13e2d82b7 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/gitignore/dir_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/gitignore/dir_test.go @@ -2,6 +2,8 @@ package gitignore import ( "os" + "os/user" + "strconv" . "gopkg.in/check.v1" "gopkg.in/src-d/go-billy.v4" @@ -9,12 +11,19 @@ import ( ) type MatcherSuite struct { - FS billy.Filesystem + GFS billy.Filesystem // git repository root + RFS billy.Filesystem // root that contains user home + MCFS billy.Filesystem // root that contains user home, but missing ~/.gitconfig + MEFS billy.Filesystem // root that contains user home, but missing excludesfile entry + MIFS billy.Filesystem // root that contains user home, but missing .gitnignore + + SFS billy.Filesystem // root that contains /etc/gitconfig } var _ = Suite(&MatcherSuite{}) func (s *MatcherSuite) SetUpTest(c *C) { + // setup generic git repository root fs := memfs.New() f, err := fs.Create(".gitignore") c.Assert(err, IsNil) @@ -36,11 +45,127 @@ func (s *MatcherSuite) SetUpTest(c *C) { fs.MkdirAll("vendor/github.com", os.ModePerm) fs.MkdirAll("vendor/gopkg.in", os.ModePerm) - s.FS = fs + s.GFS = fs + + // setup root that contains user home + usr, err := user.Current() + c.Assert(err, IsNil) + + fs = memfs.New() + err = fs.MkdirAll(usr.HomeDir, os.ModePerm) + c.Assert(err, IsNil) + + f, err = fs.Create(fs.Join(usr.HomeDir, gitconfigFile)) + c.Assert(err, IsNil) + _, err = f.Write([]byte("[core]\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte(" excludesfile = 
" + strconv.Quote(fs.Join(usr.HomeDir, ".gitignore_global")) + "\n")) + c.Assert(err, IsNil) + err = f.Close() + c.Assert(err, IsNil) + + f, err = fs.Create(fs.Join(usr.HomeDir, ".gitignore_global")) + c.Assert(err, IsNil) + _, err = f.Write([]byte("# IntelliJ\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte(".idea/\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte("*.iml\n")) + c.Assert(err, IsNil) + err = f.Close() + c.Assert(err, IsNil) + + s.RFS = fs + + // root that contains user home, but missing ~/.gitconfig + fs = memfs.New() + err = fs.MkdirAll(usr.HomeDir, os.ModePerm) + c.Assert(err, IsNil) + + f, err = fs.Create(fs.Join(usr.HomeDir, ".gitignore_global")) + c.Assert(err, IsNil) + _, err = f.Write([]byte("# IntelliJ\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte(".idea/\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte("*.iml\n")) + c.Assert(err, IsNil) + err = f.Close() + c.Assert(err, IsNil) + + s.MCFS = fs + + // setup root that contains user home, but missing excludesfile entry + fs = memfs.New() + err = fs.MkdirAll(usr.HomeDir, os.ModePerm) + c.Assert(err, IsNil) + + f, err = fs.Create(fs.Join(usr.HomeDir, gitconfigFile)) + c.Assert(err, IsNil) + _, err = f.Write([]byte("[core]\n")) + c.Assert(err, IsNil) + err = f.Close() + c.Assert(err, IsNil) + + f, err = fs.Create(fs.Join(usr.HomeDir, ".gitignore_global")) + c.Assert(err, IsNil) + _, err = f.Write([]byte("# IntelliJ\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte(".idea/\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte("*.iml\n")) + c.Assert(err, IsNil) + err = f.Close() + c.Assert(err, IsNil) + + s.MEFS = fs + + // setup root that contains user home, but missing .gitnignore + fs = memfs.New() + err = fs.MkdirAll(usr.HomeDir, os.ModePerm) + c.Assert(err, IsNil) + + f, err = fs.Create(fs.Join(usr.HomeDir, gitconfigFile)) + c.Assert(err, IsNil) + _, err = f.Write([]byte("[core]\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte(" excludesfile = " + 
strconv.Quote(fs.Join(usr.HomeDir, ".gitignore_global")) + "\n")) + c.Assert(err, IsNil) + err = f.Close() + c.Assert(err, IsNil) + + s.MIFS = fs + + // setup root that contains user home + fs = memfs.New() + err = fs.MkdirAll("etc", os.ModePerm) + c.Assert(err, IsNil) + + f, err = fs.Create(systemFile) + c.Assert(err, IsNil) + _, err = f.Write([]byte("[core]\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte(" excludesfile = /etc/gitignore_global\n")) + c.Assert(err, IsNil) + err = f.Close() + c.Assert(err, IsNil) + + f, err = fs.Create("/etc/gitignore_global") + c.Assert(err, IsNil) + _, err = f.Write([]byte("# IntelliJ\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte(".idea/\n")) + c.Assert(err, IsNil) + _, err = f.Write([]byte("*.iml\n")) + c.Assert(err, IsNil) + err = f.Close() + c.Assert(err, IsNil) + + s.SFS = fs } func (s *MatcherSuite) TestDir_ReadPatterns(c *C) { - ps, err := ReadPatterns(s.FS, nil) + ps, err := ReadPatterns(s.GFS, nil) c.Assert(err, IsNil) c.Assert(ps, HasLen, 2) @@ -48,3 +173,41 @@ func (s *MatcherSuite) TestDir_ReadPatterns(c *C) { c.Assert(m.Match([]string{"vendor", "gopkg.in"}, true), Equals, true) c.Assert(m.Match([]string{"vendor", "github.com"}, true), Equals, false) } + +func (s *MatcherSuite) TestDir_LoadGlobalPatterns(c *C) { + ps, err := LoadGlobalPatterns(s.RFS) + c.Assert(err, IsNil) + c.Assert(ps, HasLen, 2) + + m := NewMatcher(ps) + c.Assert(m.Match([]string{"go-git.v4.iml"}, true), Equals, true) + c.Assert(m.Match([]string{".idea"}, true), Equals, true) +} + +func (s *MatcherSuite) TestDir_LoadGlobalPatternsMissingGitconfig(c *C) { + ps, err := LoadGlobalPatterns(s.MCFS) + c.Assert(err, IsNil) + c.Assert(ps, HasLen, 0) +} + +func (s *MatcherSuite) TestDir_LoadGlobalPatternsMissingExcludesfile(c *C) { + ps, err := LoadGlobalPatterns(s.MEFS) + c.Assert(err, IsNil) + c.Assert(ps, HasLen, 0) +} + +func (s *MatcherSuite) TestDir_LoadGlobalPatternsMissingGitignore(c *C) { + ps, err := 
LoadGlobalPatterns(s.MIFS) + c.Assert(err, IsNil) + c.Assert(ps, HasLen, 0) +} + +func (s *MatcherSuite) TestDir_LoadSystemPatterns(c *C) { + ps, err := LoadSystemPatterns(s.SFS) + c.Assert(err, IsNil) + c.Assert(ps, HasLen, 2) + + m := NewMatcher(ps) + c.Assert(m.Match([]string{"go-git.v4.iml"}, true), Equals, true) + c.Assert(m.Match([]string{".idea"}, true), Equals, true) +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/idxfile/decoder.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/idxfile/decoder.go index f36121322..45afb1ec0 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/idxfile/decoder.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/idxfile/decoder.go @@ -6,7 +6,6 @@ import ( "errors" "io" - "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/utils/binary" ) @@ -98,13 +97,14 @@ func readFanout(idx *Idxfile, r io.Reader) error { func readObjectNames(idx *Idxfile, r io.Reader) error { c := int(idx.ObjectCount) + new := make([]Entry, c) for i := 0; i < c; i++ { - var ref plumbing.Hash - if _, err := io.ReadFull(r, ref[:]); err != nil { + e := &new[i] + if _, err := io.ReadFull(r, e.Hash[:]); err != nil { return err } - idx.Entries = append(idx.Entries, &Entry{Hash: ref}) + idx.Entries = append(idx.Entries, e) } return nil diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/common.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/common.go index 7dad1f6d6..beb015d3e 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/common.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/common.go @@ -40,8 +40,7 @@ func UpdateObjectStorage(s storer.EncodedObjectStorer, packfile io.Reader) error return err } -func writePackfileToObjectStorage(sw storer.PackfileWriter, packfile io.Reader) error { - var err error +func writePackfileToObjectStorage(sw storer.PackfileWriter, packfile io.Reader) (err error) { w, err := sw.PackfileWriter() if err != nil { return err 
diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/delta_test.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/delta_test.go index 42b777ae8..98f53f6d6 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/delta_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/delta_test.go @@ -62,7 +62,7 @@ func (s *DeltaSuite) SetUpSuite(c *C) { target: []piece{{"1", 30}, {"2", 20}, {"7", 40}, {"4", 400}, {"5", 10}}, }, { - description: "A copy operation bigger tan 64kb", + description: "A copy operation bigger than 64kb", base: []piece{{bigRandStr, 1}, {"1", 200}}, target: []piece{{bigRandStr, 1}}, }} @@ -72,12 +72,16 @@ var bigRandStr = randStringBytes(100 * 1024) const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" -func randStringBytes(n int) string { +func randBytes(n int) []byte { b := make([]byte, n) for i := range b { b[i] = letterBytes[rand.Intn(len(letterBytes))] } - return string(b) + return b +} + +func randStringBytes(n int) string { + return string(randBytes(n)) } func (s *DeltaSuite) TestAddDelta(c *C) { @@ -110,3 +114,14 @@ func (s *DeltaSuite) TestIncompleteDelta(c *C) { c.Assert(err, NotNil) c.Assert(result, IsNil) } + +func (s *DeltaSuite) TestMaxCopySizeDelta(c *C) { + baseBuf := randBytes(maxCopySize) + targetBuf := baseBuf[0:] + targetBuf = append(targetBuf, byte(1)) + + delta := DiffDelta(baseBuf, targetBuf) + result, err := PatchDelta(baseBuf, delta) + c.Assert(err, IsNil) + c.Assert(result, DeepEquals, targetBuf) +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/diff_delta.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/diff_delta.go index 4d56dc103..d35e78aea 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/diff_delta.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/diff_delta.go @@ -111,7 +111,7 @@ func diffDelta(index *deltaIndex, src []byte, tgt []byte) []byte { rl := l aOffset := offset 
- for { + for rl > 0 { if rl < maxCopySize { buf.Write(encodeCopyOperation(aOffset, rl)) break diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/encoder_advanced_test.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/encoder_advanced_test.go index 107587526..8cc7180da 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/encoder_advanced_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/encoder_advanced_test.go @@ -3,6 +3,7 @@ package packfile_test import ( "bytes" "math/rand" + "testing" "gopkg.in/src-d/go-git.v4/plumbing" . "gopkg.in/src-d/go-git.v4/plumbing/format/packfile" @@ -21,6 +22,10 @@ type EncoderAdvancedSuite struct { var _ = Suite(&EncoderAdvancedSuite{}) func (s *EncoderAdvancedSuite) TestEncodeDecode(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + fixs := fixtures.Basic().ByTag("packfile").ByTag(".git") fixs = append(fixs, fixtures.ByURL("https://github.com/src-d/go-git.git"). ByTag("packfile").ByTag(".git").One()) @@ -33,6 +38,10 @@ func (s *EncoderAdvancedSuite) TestEncodeDecode(c *C) { } func (s *EncoderAdvancedSuite) TestEncodeDecodeNoDeltaCompression(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + fixs := fixtures.Basic().ByTag("packfile").ByTag(".git") fixs = append(fixs, fixtures.ByURL("https://github.com/src-d/go-git.git"). ByTag("packfile").ByTag(".git").One()) diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/index.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/index.go index 2c5f98f8f..7d8f2ad10 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/index.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/index.go @@ -1,6 +1,8 @@ package packfile import ( + "sort" + "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/plumbing/format/idxfile" ) @@ -10,7 +12,7 @@ import ( // or to store them. 
type Index struct { byHash map[plumbing.Hash]*idxfile.Entry - byOffset map[uint64]*idxfile.Entry + byOffset []*idxfile.Entry // sorted by their offset } // NewIndex creates a new empty index with the given size. Size is a hint and @@ -19,7 +21,7 @@ type Index struct { func NewIndex(size int) *Index { return &Index{ byHash: make(map[plumbing.Hash]*idxfile.Entry, size), - byOffset: make(map[uint64]*idxfile.Entry, size), + byOffset: make([]*idxfile.Entry, 0, size), } } @@ -27,28 +29,54 @@ func NewIndex(size int) *Index { func NewIndexFromIdxFile(idxf *idxfile.Idxfile) *Index { idx := &Index{ byHash: make(map[plumbing.Hash]*idxfile.Entry, idxf.ObjectCount), - byOffset: make(map[uint64]*idxfile.Entry, idxf.ObjectCount), + byOffset: make([]*idxfile.Entry, 0, idxf.ObjectCount), } for _, e := range idxf.Entries { - idx.add(e) + idx.addUnsorted(e) } + sort.Sort(orderByOffset(idx.byOffset)) return idx } +// orderByOffset is a sort.Interface adapter that arranges +// a slice of entries by their offset. +type orderByOffset []*idxfile.Entry + +func (o orderByOffset) Len() int { return len(o) } +func (o orderByOffset) Less(i, j int) bool { return o[i].Offset < o[j].Offset } +func (o orderByOffset) Swap(i, j int) { o[i], o[j] = o[j], o[i] } + // Add adds a new Entry with the given values to the index. func (idx *Index) Add(h plumbing.Hash, offset uint64, crc32 uint32) { - e := idxfile.Entry{ + e := &idxfile.Entry{ Hash: h, Offset: offset, CRC32: crc32, } - idx.add(&e) + idx.byHash[e.Hash] = e + + // Find the right position in byOffset. + // Look for the first position whose offset is *greater* than e.Offset. + i := sort.Search(len(idx.byOffset), func(i int) bool { + return idx.byOffset[i].Offset > offset + }) + if i == len(idx.byOffset) { + // Simple case: add it to the end. + idx.byOffset = append(idx.byOffset, e) + return + } + // Harder case: shift existing entries down by one to make room. 
+ // Append a nil entry first so we can use existing capacity in case + // the index was carefully preallocated. + idx.byOffset = append(idx.byOffset, nil) + copy(idx.byOffset[i+1:], idx.byOffset[i:len(idx.byOffset)-1]) + idx.byOffset[i] = e } -func (idx *Index) add(e *idxfile.Entry) { +func (idx *Index) addUnsorted(e *idxfile.Entry) { idx.byHash[e.Hash] = e - idx.byOffset[e.Offset] = e + idx.byOffset = append(idx.byOffset, e) } // LookupHash looks an entry up by its hash. An idxfile.Entry is returned and @@ -61,8 +89,13 @@ func (idx *Index) LookupHash(h plumbing.Hash) (*idxfile.Entry, bool) { // LookupHash looks an entry up by its offset in the packfile. An idxfile.Entry // is returned and a bool, which is true if it was found or false if it wasn't. func (idx *Index) LookupOffset(offset uint64) (*idxfile.Entry, bool) { - e, ok := idx.byOffset[offset] - return e, ok + i := sort.Search(len(idx.byOffset), func(i int) bool { + return idx.byOffset[i].Offset >= offset + }) + if i >= len(idx.byOffset) || idx.byOffset[i].Offset != offset { + return nil, false // not present + } + return idx.byOffset[i], true } // Size returns the number of entries in the index. 
diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/index_test.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/index_test.go index 67147046f..8de886dac 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/index_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/index_test.go @@ -3,6 +3,7 @@ package packfile import ( "strconv" "strings" + "testing" "gopkg.in/src-d/go-git.v4/plumbing" @@ -26,12 +27,12 @@ func (s *IndexSuite) TestLookupOffset(c *C) { e, ok := idx.LookupOffset(uint64(o2)) c.Assert(ok, Equals, true) c.Assert(e, NotNil) - c.Assert(e.Hash, Equals, s.toHash(o2)) + c.Assert(e.Hash, Equals, toHash(o2)) c.Assert(e.Offset, Equals, uint64(o2)) } } - h1 := s.toHash(o1) + h1 := toHash(o1) idx.Add(h1, uint64(o1), 0) for o2 := 0; o2 < 10000; o2 += 100 { @@ -43,7 +44,7 @@ func (s *IndexSuite) TestLookupOffset(c *C) { e, ok := idx.LookupOffset(uint64(o2)) c.Assert(ok, Equals, true) c.Assert(e, NotNil) - c.Assert(e.Hash, Equals, s.toHash(o2)) + c.Assert(e.Hash, Equals, toHash(o2)) c.Assert(e.Offset, Equals, uint64(o2)) } } @@ -56,31 +57,31 @@ func (s *IndexSuite) TestLookupHash(c *C) { for o1 := 0; o1 < 10000; o1 += 100 { for o2 := 0; o2 < 10000; o2 += 100 { if o2 >= o1 { - e, ok := idx.LookupHash(s.toHash(o2)) + e, ok := idx.LookupHash(toHash(o2)) c.Assert(ok, Equals, false) c.Assert(e, IsNil) } else { - e, ok := idx.LookupHash(s.toHash(o2)) + e, ok := idx.LookupHash(toHash(o2)) c.Assert(ok, Equals, true) c.Assert(e, NotNil) - c.Assert(e.Hash, Equals, s.toHash(o2)) + c.Assert(e.Hash, Equals, toHash(o2)) c.Assert(e.Offset, Equals, uint64(o2)) } } - h1 := s.toHash(o1) + h1 := toHash(o1) idx.Add(h1, uint64(o1), 0) for o2 := 0; o2 < 10000; o2 += 100 { if o2 > o1 { - e, ok := idx.LookupHash(s.toHash(o2)) + e, ok := idx.LookupHash(toHash(o2)) c.Assert(ok, Equals, false) c.Assert(e, IsNil) } else { - e, ok := idx.LookupHash(s.toHash(o2)) + e, ok := idx.LookupHash(toHash(o2)) c.Assert(ok, Equals, true) 
c.Assert(e, NotNil) - c.Assert(e.Hash, Equals, s.toHash(o2)) + c.Assert(e.Hash, Equals, toHash(o2)) c.Assert(e.Offset, Equals, uint64(o2)) } } @@ -92,7 +93,7 @@ func (s *IndexSuite) TestSize(c *C) { for o1 := 0; o1 < 1000; o1++ { c.Assert(idx.Size(), Equals, o1) - h1 := s.toHash(o1) + h1 := toHash(o1) idx.Add(h1, uint64(o1), 0) } } @@ -107,7 +108,7 @@ func (s *IndexSuite) TestIdxFileEmpty(c *C) { func (s *IndexSuite) TestIdxFile(c *C) { idx := NewIndex(0) for o1 := 0; o1 < 1000; o1++ { - h1 := s.toHash(o1) + h1 := toHash(o1) idx.Add(h1, uint64(o1), 0) } @@ -115,8 +116,18 @@ func (s *IndexSuite) TestIdxFile(c *C) { c.Assert(idx, DeepEquals, idx2) } -func (s *IndexSuite) toHash(i int) plumbing.Hash { +func toHash(i int) plumbing.Hash { is := strconv.Itoa(i) padding := strings.Repeat("a", 40-len(is)) return plumbing.NewHash(padding + is) } + +func BenchmarkIndexConstruction(b *testing.B) { + b.ReportAllocs() + + idx := NewIndex(0) + for o := 0; o < 1e6*b.N; o += 100 { + h1 := toHash(o) + idx.Add(h1, uint64(o), 0) + } +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/scanner.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/scanner.go index 8c216f11b..6fc183b94 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/scanner.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/packfile/scanner.go @@ -279,14 +279,15 @@ func (s *Scanner) NextObject(w io.Writer) (written int64, crc32 uint32, err erro // from it zlib stream in an object entry in the packfile. 
func (s *Scanner) copyObject(w io.Writer) (n int64, err error) { if s.zr == nil { - zr, err := zlib.NewReader(s.r) + var zr io.ReadCloser + zr, err = zlib.NewReader(s.r) if err != nil { return 0, fmt.Errorf("zlib initialization error: %s", err) } s.zr = zr.(readerResetter) } else { - if err := s.zr.Reset(s.r, nil); err != nil { + if err = s.zr.Reset(s.r, nil); err != nil { return 0, fmt.Errorf("zlib reset error: %s", err) } } diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/encoder.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/encoder.go index eae85cc4a..6d409795b 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/encoder.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/encoder.go @@ -17,6 +17,9 @@ type Encoder struct { const ( // MaxPayloadSize is the maximum payload size of a pkt-line in bytes. MaxPayloadSize = 65516 + + // For compatibility with canonical Git implementation, accept longer pkt-lines + OversizePayloadMax = 65520 ) var ( diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/scanner.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/scanner.go index 4af254f00..99aab46e8 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/scanner.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/scanner.go @@ -97,7 +97,7 @@ func (s *Scanner) readPayloadLen() (int, error) { return 0, nil case n <= lenSize: return 0, ErrInvalidPktLen - case n > MaxPayloadSize+lenSize: + case n > OversizePayloadMax+lenSize: return 0, ErrInvalidPktLen default: return n - lenSize, nil diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/scanner_test.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/scanner_test.go index 048ea3829..9660c2d48 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/scanner_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/format/pktline/scanner_test.go @@ -20,7 +20,7 @@ func (s 
*SuiteScanner) TestInvalid(c *C) { for _, test := range [...]string{ "0001", "0002", "0003", "0004", "0001asdfsadf", "0004foo", - "fff1", "fff2", + "fff5", "ffff", "gorka", "0", "003", " 5a", "5 a", "5 \n", @@ -34,6 +34,20 @@ func (s *SuiteScanner) TestInvalid(c *C) { } } +func (s *SuiteScanner) TestDecodeOversizePktLines(c *C) { + for _, test := range [...]string{ + "fff1" + strings.Repeat("a", 0xfff1), + "fff2" + strings.Repeat("a", 0xfff2), + "fff3" + strings.Repeat("a", 0xfff3), + "fff4" + strings.Repeat("a", 0xfff4), + } { + r := strings.NewReader(test) + sc := pktline.NewScanner(r) + _ = sc.Scan() + c.Assert(sc.Err(), IsNil) + } +} + func (s *SuiteScanner) TestEmptyReader(c *C) { r := strings.NewReader("") sc := pktline.NewScanner(r) diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/blob.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/blob.go index 2608477a8..f376baa65 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/blob.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/blob.go @@ -67,7 +67,7 @@ func (b *Blob) Decode(o plumbing.EncodedObject) error { } // Encode transforms a Blob into a plumbing.EncodedObject. 
-func (b *Blob) Encode(o plumbing.EncodedObject) error { +func (b *Blob) Encode(o plumbing.EncodedObject) (err error) { o.SetType(plumbing.BlobObject) w, err := o.Writer() diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/commit.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/commit.go index a3177144e..c9a4c0ee8 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/commit.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/commit.go @@ -226,7 +226,7 @@ func (b *Commit) Encode(o plumbing.EncodedObject) error { return b.encode(o, true) } -func (b *Commit) encode(o plumbing.EncodedObject, includeSig bool) error { +func (b *Commit) encode(o plumbing.EncodedObject, includeSig bool) (err error) { o.SetType(plumbing.CommitObject) w, err := o.Writer() if err != nil { diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/file.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/file.go index 40b5206b5..1c5fdbb38 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/file.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/file.go @@ -44,7 +44,7 @@ func (f *File) Contents() (content string, err error) { } // IsBinary returns if the file is binary or not -func (f *File) IsBinary() (bool, error) { +func (f *File) IsBinary() (bin bool, err error) { reader, err := f.Reader() if err != nil { return false, err diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/tag.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/tag.go index 19e55cfe7..905206bda 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/tag.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/tag.go @@ -95,7 +95,8 @@ func (t *Tag) Decode(o plumbing.EncodedObject) (err error) { r := bufio.NewReader(reader) for { - line, err := r.ReadBytes('\n') + var line []byte + line, err = r.ReadBytes('\n') if err != nil && err != io.EOF { return err } @@ -168,7 +169,7 @@ func (t *Tag) Encode(o plumbing.EncodedObject) error { return t.encode(o, true) } -func (t 
*Tag) encode(o plumbing.EncodedObject, includeSig bool) error { +func (t *Tag) encode(o plumbing.EncodedObject, includeSig bool) (err error) { o.SetType(plumbing.TagObject) w, err := o.Writer() if err != nil { diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/tree.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/tree.go index 2fcd979f5..30bbcb038 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/tree.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/object/tree.go @@ -6,6 +6,7 @@ import ( "fmt" "io" "path" + "path/filepath" "strings" "gopkg.in/src-d/go-git.v4/plumbing" @@ -34,6 +35,7 @@ type Tree struct { s storer.EncodedObjectStorer m map[string]*TreeEntry + t map[string]*Tree // tree path cache } // GetTree gets a tree from an object storer and decodes it. @@ -111,14 +113,37 @@ func (t *Tree) TreeEntryFile(e *TreeEntry) (*File, error) { // FindEntry search a TreeEntry in this tree or any subtree. func (t *Tree) FindEntry(path string) (*TreeEntry, error) { + if t.t == nil { + t.t = make(map[string]*Tree) + } + pathParts := strings.Split(path, "/") + startingTree := t + pathCurrent := "" + + // search for the longest path in the tree path cache + for i := len(pathParts); i > 1; i-- { + path := filepath.Join(pathParts[:i]...) + + tree, ok := t.t[path] + if ok { + startingTree = tree + pathParts = pathParts[i:] + pathCurrent = path + + break + } + } var tree *Tree var err error - for tree = t; len(pathParts) > 1; pathParts = pathParts[1:] { + for tree = startingTree; len(pathParts) > 1; pathParts = pathParts[1:] { if tree, err = tree.dir(pathParts[0]); err != nil { return nil, err } + + pathCurrent = filepath.Join(pathCurrent, pathParts[0]) + t.t[pathCurrent] = tree } return tree.entry(pathParts[0]) @@ -233,7 +258,7 @@ func (t *Tree) Decode(o plumbing.EncodedObject) (err error) { } // Encode transforms a Tree into a plumbing.EncodedObject. 
-func (t *Tree) Encode(o plumbing.EncodedObject) error { +func (t *Tree) Encode(o plumbing.EncodedObject) (err error) { o.SetType(plumbing.TreeObject) w, err := o.Writer() if err != nil { @@ -242,7 +267,7 @@ func (t *Tree) Encode(o plumbing.EncodedObject) error { defer ioutil.CheckClose(w, &err) for _, entry := range t.Entries { - if _, err := fmt.Fprintf(w, "%o %s", entry.Mode, entry.Name); err != nil { + if _, err = fmt.Fprintf(w, "%o %s", entry.Mode, entry.Name); err != nil { return err } diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/advrefs.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/advrefs.go index 7d644bcb0..684e76a56 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/advrefs.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/advrefs.go @@ -2,6 +2,7 @@ package packp import ( "fmt" + "sort" "strings" "gopkg.in/src-d/go-git.v4/plumbing" @@ -68,30 +69,119 @@ func (a *AdvRefs) AddReference(r *plumbing.Reference) error { func (a *AdvRefs) AllReferences() (memory.ReferenceStorage, error) { s := memory.ReferenceStorage{} - if err := addRefs(s, a); err != nil { + if err := a.addRefs(s); err != nil { return s, plumbing.NewUnexpectedError(err) } return s, nil } -func addRefs(s storer.ReferenceStorer, ar *AdvRefs) error { - for name, hash := range ar.References { +func (a *AdvRefs) addRefs(s storer.ReferenceStorer) error { + for name, hash := range a.References { ref := plumbing.NewReferenceFromStrings(name, hash.String()) if err := s.SetReference(ref); err != nil { return err } } - return addSymbolicRefs(s, ar) + if a.supportSymrefs() { + return a.addSymbolicRefs(s) + } + + return a.resolveHead(s) } -func addSymbolicRefs(s storer.ReferenceStorer, ar *AdvRefs) error { - if !hasSymrefs(ar) { +// If the server does not support symrefs capability, +// we need to guess the reference where HEAD is pointing to. 
+// +// Git versions prior to 1.8.4.3 has an special procedure to get +// the reference where is pointing to HEAD: +// - Check if a reference called master exists. If exists and it +// has the same hash as HEAD hash, we can say that HEAD is pointing to master +// - If master does not exists or does not have the same hash as HEAD, +// order references and check in that order if that reference has the same +// hash than HEAD. If yes, set HEAD pointing to that branch hash +// - If no reference is found, throw an error +func (a *AdvRefs) resolveHead(s storer.ReferenceStorer) error { + if a.Head == nil { + return nil + } + + ref, err := s.Reference(plumbing.ReferenceName(plumbing.Master)) + + // check first if HEAD is pointing to master + if err == nil { + ok, err := a.createHeadIfCorrectReference(ref, s) + if err != nil { + return err + } + + if ok { + return nil + } + } + + if err != nil && err != plumbing.ErrReferenceNotFound { + return err + } + + // From here we are trying to guess the branch that HEAD is pointing + refIter, err := s.IterReferences() + if err != nil { + return err + } + + var refNames []string + err = refIter.ForEach(func(r *plumbing.Reference) error { + refNames = append(refNames, string(r.Name())) return nil + }) + if err != nil { + return err + } + + sort.Strings(refNames) + + var headSet bool + for _, refName := range refNames { + ref, err := s.Reference(plumbing.ReferenceName(refName)) + if err != nil { + return err + } + ok, err := a.createHeadIfCorrectReference(ref, s) + if err != nil { + return err + } + if ok { + headSet = true + break + } + } + + if !headSet { + return plumbing.ErrReferenceNotFound } - for _, symref := range ar.Capabilities.Get(capability.SymRef) { + return nil +} + +func (a *AdvRefs) createHeadIfCorrectReference( + reference *plumbing.Reference, + s storer.ReferenceStorer) (bool, error) { + if reference.Hash() == *a.Head { + headRef := plumbing.NewSymbolicReference(plumbing.HEAD, reference.Name()) + if err := 
s.SetReference(headRef); err != nil { + return false, err + } + + return true, nil + } + + return false, nil +} + +func (a *AdvRefs) addSymbolicRefs(s storer.ReferenceStorer) error { + for _, symref := range a.Capabilities.Get(capability.SymRef) { chunks := strings.Split(symref, ":") if len(chunks) != 2 { err := fmt.Errorf("bad number of `:` in symref value (%q)", symref) @@ -108,6 +198,6 @@ func addSymbolicRefs(s storer.ReferenceStorer, ar *AdvRefs) error { return nil } -func hasSymrefs(ar *AdvRefs) bool { - return ar.Capabilities.Supports(capability.SymRef) +func (a *AdvRefs) supportSymrefs() bool { + return a.Capabilities.Supports(capability.SymRef) } diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/advrefs_test.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/advrefs_test.go index 0180fd3f1..bb8d0321a 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/advrefs_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/advrefs_test.go @@ -79,6 +79,79 @@ func (s *AdvRefSuite) TestAllReferencesBadSymref(c *C) { c.Assert(err, NotNil) } +func (s *AdvRefSuite) TestNoSymRefCapabilityHeadToMaster(c *C) { + a := NewAdvRefs() + headHash := plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c") + a.Head = &headHash + ref := plumbing.NewHashReference(plumbing.Master, plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c")) + + err := a.AddReference(ref) + c.Assert(err, IsNil) + + storage, err := a.AllReferences() + c.Assert(err, IsNil) + + head, err := storage.Reference(plumbing.HEAD) + c.Assert(err, IsNil) + c.Assert(head.Target(), Equals, ref.Name()) +} + +func (s *AdvRefSuite) TestNoSymRefCapabilityHeadToOtherThanMaster(c *C) { + a := NewAdvRefs() + headHash := plumbing.NewHash("0000000000000000000000000000000000000000") + a.Head = &headHash + ref1 := plumbing.NewHashReference(plumbing.Master, plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c")) + ref2 := 
plumbing.NewHashReference("other/ref", plumbing.NewHash("0000000000000000000000000000000000000000")) + + err := a.AddReference(ref1) + c.Assert(err, IsNil) + err = a.AddReference(ref2) + c.Assert(err, IsNil) + + storage, err := a.AllReferences() + c.Assert(err, IsNil) + + head, err := storage.Reference(plumbing.HEAD) + c.Assert(err, IsNil) + c.Assert(head.Hash(), Equals, ref2.Hash()) +} + +func (s *AdvRefSuite) TestNoSymRefCapabilityHeadToNoRef(c *C) { + a := NewAdvRefs() + headHash := plumbing.NewHash("0000000000000000000000000000000000000000") + a.Head = &headHash + ref := plumbing.NewHashReference(plumbing.Master, plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c")) + + err := a.AddReference(ref) + c.Assert(err, IsNil) + + _, err = a.AllReferences() + c.Assert(err, NotNil) +} + +func (s *AdvRefSuite) TestNoSymRefCapabilityHeadToNoMasterAlphabeticallyOrdered(c *C) { + a := NewAdvRefs() + headHash := plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c") + a.Head = &headHash + ref1 := plumbing.NewHashReference(plumbing.Master, plumbing.NewHash("0000000000000000000000000000000000000000")) + ref2 := plumbing.NewHashReference("aaaaaaaaaaaaaaa", plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c")) + ref3 := plumbing.NewHashReference("bbbbbbbbbbbbbbb", plumbing.NewHash("5dc01c595e6c6ec9ccda4f6f69c131c0dd945f8c")) + + err := a.AddReference(ref1) + c.Assert(err, IsNil) + err = a.AddReference(ref3) + c.Assert(err, IsNil) + err = a.AddReference(ref2) + c.Assert(err, IsNil) + + storage, err := a.AllReferences() + c.Assert(err, IsNil) + + head, err := storage.Reference(plumbing.HEAD) + c.Assert(err, IsNil) + c.Assert(head.Target(), Equals, ref2.Name()) +} + type AdvRefsDecodeEncodeSuite struct{} var _ = Suite(&AdvRefsDecodeEncodeSuite{}) diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/http/common.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/http/common.go index 24e63a4d4..c03484646 100644 --- 
a/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/http/common.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/http/common.go @@ -31,7 +31,7 @@ func applyHeadersToRequest(req *http.Request, content *bytes.Buffer, host string const infoRefsPath = "/info/refs" -func advertisedReferences(s *session, serviceName string) (*packp.AdvRefs, error) { +func advertisedReferences(s *session, serviceName string) (ref *packp.AdvRefs, err error) { url := fmt.Sprintf( "%s%s?service=%s", s.endpoint.String(), infoRefsPath, serviceName, @@ -52,12 +52,12 @@ func advertisedReferences(s *session, serviceName string) (*packp.AdvRefs, error s.ModifyEndpointIfRedirect(res) defer ioutil.CheckClose(res.Body, &err) - if err := NewErr(res); err != nil { + if err = NewErr(res); err != nil { return nil, err } ar := packp.NewAdvRefs() - if err := ar.Decode(res.Body); err != nil { + if err = ar.Decode(res.Body); err != nil { if err == packp.ErrEmptyAdvRefs { err = transport.ErrEmptyRemoteRepository } @@ -201,6 +201,31 @@ func (a *BasicAuth) String() string { return fmt.Sprintf("%s - %s:%s", a.Name(), a.Username, masked) } +// TokenAuth implements the go-git http.AuthMethod and transport.AuthMethod interfaces +type TokenAuth struct { + Token string +} + +func (a *TokenAuth) setAuth(r *http.Request) { + if a == nil { + return + } + r.Header.Add("Authorization", fmt.Sprintf("Bearer %s", a.Token)) +} + +// Name is name of the auth +func (a *TokenAuth) Name() string { + return "http-token-auth" +} + +func (a *TokenAuth) String() string { + masked := "*******" + if a.Token == "" { + masked = "" + } + return fmt.Sprintf("%s - %s", a.Name(), masked) +} + // Err is a dedicated error to return errors based on status code type Err struct { Response *http.Response diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/http/common_test.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/http/common_test.go index 8d57996c5..71eede48f 100644 --- 
a/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/http/common_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/http/common_test.go @@ -54,6 +54,19 @@ func (s *ClientSuite) TestNewBasicAuth(c *C) { c.Assert(a.String(), Equals, "http-basic-auth - foo:*******") } +func (s *ClientSuite) TestNewTokenAuth(c *C) { + a := &TokenAuth{"OAUTH-TOKEN-TEXT"} + + c.Assert(a.Name(), Equals, "http-token-auth") + c.Assert(a.String(), Equals, "http-token-auth - *******") + + // Check header is set correctly + req, err := http.NewRequest("GET", "https://github.com/git-fixtures/basic", nil) + c.Assert(err, Equals, nil) + a.setAuth(req) + c.Assert(req.Header.Get("Authorization"), Equals, "Bearer OAUTH-TOKEN-TEXT") +} + func (s *ClientSuite) TestNewErrOK(c *C) { res := &http.Response{StatusCode: http.StatusOK} err := NewErr(res) diff --git a/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/test/receive_pack.go b/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/test/receive_pack.go index a68329e77..57f602dd8 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/test/receive_pack.go +++ b/vendor/gopkg.in/src-d/go-git.v4/plumbing/transport/test/receive_pack.go @@ -8,6 +8,8 @@ import ( "context" "io" "io/ioutil" + "os" + "path/filepath" "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/plumbing/format/packfile" @@ -225,6 +227,24 @@ func (s *ReceivePackSuite) receivePackNoCheck(c *C, ep *transport.Endpoint, ep.String(), url, callAdvertisedReferences, ) + // Set write permissions to endpoint directory files. By default + // fixtures are generated with read only permissions, this casuses + // errors deleting or modifying files. 
+ rootPath := ep.Path + stat, err := os.Stat(ep.Path) + + if rootPath != "" && err == nil && stat.IsDir() { + objectPath := filepath.Join(rootPath, "objects/pack") + files, err := ioutil.ReadDir(objectPath) + c.Assert(err, IsNil) + + for _, file := range files { + path := filepath.Join(objectPath, file.Name()) + err = os.Chmod(path, 0644) + c.Assert(err, IsNil) + } + } + r, err := s.Client.NewReceivePackSession(ep, s.EmptyAuth) c.Assert(err, IsNil, comment) defer func() { c.Assert(r.Close(), IsNil, comment) }() diff --git a/vendor/gopkg.in/src-d/go-git.v4/remote.go b/vendor/gopkg.in/src-d/go-git.v4/remote.go index 8db645c83..60461d611 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/remote.go +++ b/vendor/gopkg.in/src-d/go-git.v4/remote.go @@ -73,7 +73,7 @@ func (r *Remote) Push(o *PushOptions) error { // The provided Context must be non-nil. If the context expires before the // operation is complete, an error is returned. The context only affects to the // transport operations. -func (r *Remote) PushContext(ctx context.Context, o *PushOptions) error { +func (r *Remote) PushContext(ctx context.Context, o *PushOptions) (err error) { if err := o.Validate(); err != nil { return err } @@ -243,12 +243,12 @@ func (r *Remote) Fetch(o *FetchOptions) error { return r.FetchContext(context.Background(), o) } -func (r *Remote) fetch(ctx context.Context, o *FetchOptions) (storer.ReferenceStorer, error) { +func (r *Remote) fetch(ctx context.Context, o *FetchOptions) (sto storer.ReferenceStorer, err error) { if o.RemoteName == "" { o.RemoteName = r.c.Name } - if err := o.Validate(); err != nil { + if err = o.Validate(); err != nil { return nil, err } @@ -295,7 +295,7 @@ func (r *Remote) fetch(ctx context.Context, o *FetchOptions) (storer.ReferenceSt return nil, err } - if err := r.fetchPack(ctx, o, s, req); err != nil { + if err = r.fetchPack(ctx, o, s, req); err != nil { return nil, err } } @@ -354,7 +354,7 @@ func (r *Remote) fetchPack(ctx context.Context, o *FetchOptions, s 
transport.Upl defer ioutil.CheckClose(reader, &err) - if err := r.updateShallow(o, reader); err != nil { + if err = r.updateShallow(o, reader); err != nil { return err } @@ -371,14 +371,22 @@ func (r *Remote) addReferencesToUpdate( refspecs []config.RefSpec, localRefs []*plumbing.Reference, remoteRefs storer.ReferenceStorer, - req *packp.ReferenceUpdateRequest) error { + req *packp.ReferenceUpdateRequest, +) error { + // This references dictionary will be used to search references by name. + refsDict := make(map[string]*plumbing.Reference) + for _, ref := range localRefs { + refsDict[ref.Name().String()] = ref + } + for _, rs := range refspecs { if rs.IsDelete() { if err := r.deleteReferences(rs, remoteRefs, req); err != nil { return err } } else { - if err := r.addOrUpdateReferences(rs, localRefs, remoteRefs, req); err != nil { + err := r.addOrUpdateReferences(rs, localRefs, refsDict, remoteRefs, req) + if err != nil { return err } } @@ -390,9 +398,21 @@ func (r *Remote) addReferencesToUpdate( func (r *Remote) addOrUpdateReferences( rs config.RefSpec, localRefs []*plumbing.Reference, + refsDict map[string]*plumbing.Reference, remoteRefs storer.ReferenceStorer, req *packp.ReferenceUpdateRequest, ) error { + // If it is not a wilcard refspec we can directly search for the reference + // in the references dictionary. + if !rs.IsWildcard() { + ref, ok := refsDict[rs.Src()] + if !ok { + return nil + } + + return r.addReferenceIfRefSpecMatches(rs, remoteRefs, ref, req) + } + for _, ref := range localRefs { err := r.addReferenceIfRefSpecMatches(rs, remoteRefs, ref, req) if err != nil { @@ -872,7 +892,7 @@ func (r *Remote) buildFetchedTags(refs memory.ReferenceStorage) (updated bool, e } // List the references on the remote repository. 
-func (r *Remote) List(o *ListOptions) ([]*plumbing.Reference, error) { +func (r *Remote) List(o *ListOptions) (rfs []*plumbing.Reference, err error) { s, err := newUploadPackSession(r.c.URLs[0], o.Auth) if err != nil { return nil, err @@ -976,9 +996,24 @@ func pushHashes( } func (r *Remote) updateShallow(o *FetchOptions, resp *packp.UploadPackResponse) error { - if o.Depth == 0 { + if o.Depth == 0 || len(resp.Shallows) == 0 { return nil } - return r.s.SetShallow(resp.Shallows) + shallows, err := r.s.Shallow() + if err != nil { + return err + } + +outer: + for _, s := range resp.Shallows { + for _, oldS := range shallows { + if s == oldS { + continue outer + } + } + shallows = append(shallows, s) + } + + return r.s.SetShallow(shallows) } diff --git a/vendor/gopkg.in/src-d/go-git.v4/remote_test.go b/vendor/gopkg.in/src-d/go-git.v4/remote_test.go index e586e7a7d..82ec1fc29 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/remote_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/remote_test.go @@ -9,6 +9,7 @@ import ( "gopkg.in/src-d/go-git.v4/config" "gopkg.in/src-d/go-git.v4/plumbing" + "gopkg.in/src-d/go-git.v4/plumbing/protocol/packp" "gopkg.in/src-d/go-git.v4/plumbing/storer" "gopkg.in/src-d/go-git.v4/storage" "gopkg.in/src-d/go-git.v4/storage/filesystem" @@ -741,3 +742,54 @@ func (s *RemoteSuite) TestList(c *C) { c.Assert(found, Equals, true) } } + +func (s *RemoteSuite) TestUpdateShallows(c *C) { + hashes := []plumbing.Hash{ + plumbing.NewHash("0000000000000000000000000000000000000001"), + plumbing.NewHash("0000000000000000000000000000000000000002"), + plumbing.NewHash("0000000000000000000000000000000000000003"), + plumbing.NewHash("0000000000000000000000000000000000000004"), + plumbing.NewHash("0000000000000000000000000000000000000005"), + plumbing.NewHash("0000000000000000000000000000000000000006"), + } + + tests := []struct { + hashes []plumbing.Hash + result []plumbing.Hash + }{ + // add to empty shallows + {hashes[0:2], hashes[0:2]}, + // add new hashes + 
{hashes[2:4], hashes[0:4]}, + // add some hashes already in shallow list + {hashes[2:6], hashes[0:6]}, + // add all hashes + {hashes[0:6], hashes[0:6]}, + // add empty list + {nil, hashes[0:6]}, + } + + remote := newRemote(memory.NewStorage(), &config.RemoteConfig{ + Name: DefaultRemoteName, + }) + + shallows, err := remote.s.Shallow() + c.Assert(err, IsNil) + c.Assert(len(shallows), Equals, 0) + + resp := new(packp.UploadPackResponse) + o := &FetchOptions{ + Depth: 1, + } + + for _, t := range tests { + resp.Shallows = t.hashes + err = remote.updateShallow(o, resp) + c.Assert(err, IsNil) + + shallow, err := remote.s.Shallow() + c.Assert(err, IsNil) + c.Assert(len(shallow), Equals, len(t.result)) + c.Assert(shallow, DeepEquals, t.result) + } +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/repository.go b/vendor/gopkg.in/src-d/go-git.v4/repository.go index 98558d925..717381bdb 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/repository.go +++ b/vendor/gopkg.in/src-d/go-git.v4/repository.go @@ -24,12 +24,19 @@ import ( "gopkg.in/src-d/go-billy.v4/osfs" ) +// GitDirName this is a special folder where all the git stuff is. 
+const GitDirName = ".git" + var ( + // ErrBranchExists an error stating the specified branch already exists + ErrBranchExists = errors.New("branch already exists") + // ErrBranchNotFound an error stating the specified branch does not exist + ErrBranchNotFound = errors.New("branch not found") ErrInvalidReference = errors.New("invalid reference, should be a tag or a branch") ErrRepositoryNotExists = errors.New("repository does not exist") ErrRepositoryAlreadyExists = errors.New("repository already exists") ErrRemoteNotFound = errors.New("remote not found") - ErrRemoteExists = errors.New("remote already exists ") + ErrRemoteExists = errors.New("remote already exists") ErrWorktreeNotProvided = errors.New("worktree should be provided") ErrIsBareRepository = errors.New("worktree not available in a bare repository") ErrUnableToResolveCommit = errors.New("unable to resolve commit") @@ -109,12 +116,12 @@ func createDotGitFile(worktree, storage billy.Filesystem) error { path = storage.Root() } - if path == ".git" { + if path == GitDirName { // not needed, since the folder is the default place return nil } - f, err := worktree.Create(".git") + f, err := worktree.Create(GitDirName) if err != nil { return err } @@ -210,7 +217,7 @@ func PlainInit(path string, isBare bool) (*Repository, error) { dot = osfs.New(path) } else { wt = osfs.New(path) - dot, _ = wt.Chroot(".git") + dot, _ = wt.Chroot(GitDirName) } s, err := filesystem.NewStorage(dot) @@ -225,7 +232,14 @@ func PlainInit(path string, isBare bool) (*Repository, error) { // repository is bare or a normal one. If the path doesn't contain a valid // repository ErrRepositoryNotExists is returned func PlainOpen(path string) (*Repository, error) { - dot, wt, err := dotGitToOSFilesystems(path) + return PlainOpenWithOptions(path, &PlainOpenOptions{}) +} + +// PlainOpen opens a git repository from the given path. It detects if the +// repository is bare or a normal one. 
If the path doesn't contain a valid +// repository ErrRepositoryNotExists is returned +func PlainOpenWithOptions(path string, o *PlainOpenOptions) (*Repository, error) { + dot, wt, err := dotGitToOSFilesystems(path, o.DetectDotGit) if err != nil { return nil, err } @@ -246,19 +260,38 @@ func PlainOpen(path string) (*Repository, error) { return Open(s, wt) } -func dotGitToOSFilesystems(path string) (dot, wt billy.Filesystem, err error) { - fs := osfs.New(path) - fi, err := fs.Stat(".git") - if err != nil { +func dotGitToOSFilesystems(path string, detect bool) (dot, wt billy.Filesystem, err error) { + if path, err = filepath.Abs(path); err != nil { + return nil, nil, err + } + var fs billy.Filesystem + var fi os.FileInfo + for { + fs = osfs.New(path) + fi, err = fs.Stat(GitDirName) + if err == nil { + // no error; stop + break + } if !os.IsNotExist(err) { + // unknown error; stop return nil, nil, err } - + if detect { + // try its parent as long as we haven't reached + // the root dir + if dir := filepath.Dir(path); dir != path { + path = dir + continue + } + } + // not detecting via parent dirs and the dir does not exist; + // stop return fs, nil, nil } if fi.IsDir() { - dot, err = fs.Chroot(".git") + dot, err = fs.Chroot(GitDirName) return dot, fs, err } @@ -270,10 +303,8 @@ func dotGitToOSFilesystems(path string) (dot, wt billy.Filesystem, err error) { return dot, fs, nil } -func dotGitFileToOSFilesystem(path string, fs billy.Filesystem) (billy.Filesystem, error) { - var err error - - f, err := fs.Open(".git") +func dotGitFileToOSFilesystem(path string, fs billy.Filesystem) (bfs billy.Filesystem, err error) { + f, err := fs.Open(GitDirName) if err != nil { return nil, err } @@ -404,6 +435,55 @@ func (r *Repository) DeleteRemote(name string) error { return r.Storer.SetConfig(cfg) } +// Branch return a Branch if exists +func (r *Repository) Branch(name string) (*config.Branch, error) { + cfg, err := r.Storer.Config() + if err != nil { + return nil, err + } + + b, ok 
:= cfg.Branches[name] + if !ok { + return nil, ErrBranchNotFound + } + + return b, nil +} + +// CreateBranch creates a new Branch +func (r *Repository) CreateBranch(c *config.Branch) error { + if err := c.Validate(); err != nil { + return err + } + + cfg, err := r.Storer.Config() + if err != nil { + return err + } + + if _, ok := cfg.Branches[c.Name]; ok { + return ErrBranchExists + } + + cfg.Branches[c.Name] = c + return r.Storer.SetConfig(cfg) +} + +// DeleteBranch delete a Branch from the repository and delete the config +func (r *Repository) DeleteBranch(name string) error { + cfg, err := r.Storer.Config() + if err != nil { + return err + } + + if _, ok := cfg.Branches[name]; !ok { + return ErrBranchNotFound + } + + delete(cfg.Branches, name) + return r.Storer.SetConfig(cfg) +} + func (r *Repository) resolveToCommitHash(h plumbing.Hash) (plumbing.Hash, error) { obj, err := r.Storer.EncodedObject(plumbing.AnyObject, h) if err != nil { @@ -477,7 +557,29 @@ func (r *Repository) clone(ctx context.Context, o *CloneOptions) error { } } - return r.updateRemoteConfigIfNeeded(o, c, ref) + if err := r.updateRemoteConfigIfNeeded(o, c, ref); err != nil { + return err + } + + if ref.Name().IsBranch() { + branchRef := ref.Name() + branchName := strings.Split(string(branchRef), "refs/heads/")[1] + + b := &config.Branch{ + Name: branchName, + Merge: branchRef, + } + if o.RemoteName == "" { + b.Remote = "origin" + } else { + b.Remote = o.RemoteName + } + if err := r.CreateBranch(b); err != nil { + return err + } + } + + return nil } const ( @@ -922,6 +1024,8 @@ func (r *Repository) ResolveRevision(rev plumbing.Revision) (*plumbing.Hash, err case revision.Ref: revisionRef := item.(revision.Ref) var ref *plumbing.Reference + var hashCommit, refCommit *object.Commit + var rErr, hErr error for _, rule := range append([]string{"%s"}, plumbing.RefRevParseRules...) 
{ ref, err = storer.ResolveReference(r.Storer, plumbing.ReferenceName(fmt.Sprintf(rule, revisionRef))) @@ -931,14 +1035,27 @@ func (r *Repository) ResolveRevision(rev plumbing.Revision) (*plumbing.Hash, err } } - if ref == nil { - return &plumbing.ZeroHash, plumbing.ErrReferenceNotFound + if ref != nil { + refCommit, rErr = r.CommitObject(ref.Hash()) + } else { + rErr = plumbing.ErrReferenceNotFound } - commit, err = r.CommitObject(ref.Hash()) + isHash := plumbing.NewHash(string(revisionRef)).String() == string(revisionRef) - if err != nil { - return &plumbing.ZeroHash, err + if isHash { + hashCommit, hErr = r.CommitObject(plumbing.NewHash(string(revisionRef))) + } + + switch { + case rErr == nil && !isHash: + commit = refCommit + case rErr != nil && isHash && hErr == nil: + commit = hashCommit + case rErr == nil && isHash && hErr == nil: + return &plumbing.ZeroHash, fmt.Errorf(`refname "%s" is ambiguous`, revisionRef) + default: + return &plumbing.ZeroHash, plumbing.ErrReferenceNotFound } case revision.CaretPath: depth := item.(revision.CaretPath).Depth @@ -1092,7 +1209,7 @@ func (r *Repository) createNewObjectPack(cfg *RepackConfig) (h plumbing.Hash, er if los, ok := r.Storer.(storer.LooseObjectStorer); ok { err = los.ForEachObjectHash(func(hash plumbing.Hash) error { if ow.isSeen(hash) { - err := los.DeleteLooseObject(hash) + err = los.DeleteLooseObject(hash) if err != nil { return err } diff --git a/vendor/gopkg.in/src-d/go-git.v4/repository_test.go b/vendor/gopkg.in/src-d/go-git.v4/repository_test.go index 1ad1607fd..b78fbb70b 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/repository_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/repository_test.go @@ -10,6 +10,7 @@ import ( "os/exec" "path/filepath" "strings" + "testing" "time" "gopkg.in/src-d/go-git.v4/config" @@ -243,6 +244,119 @@ func (s *RepositorySuite) TestDeleteRemote(c *C) { c.Assert(alt, IsNil) } +func (s *RepositorySuite) TestCreateBranchAndBranch(c *C) { + r, _ := Init(memory.NewStorage(), nil) + 
testBranch := &config.Branch{ + Name: "foo", + Remote: "origin", + Merge: "refs/heads/foo", + } + err := r.CreateBranch(testBranch) + + c.Assert(err, IsNil) + cfg, err := r.Config() + c.Assert(err, IsNil) + c.Assert(len(cfg.Branches), Equals, 1) + branch := cfg.Branches["foo"] + c.Assert(branch.Name, Equals, testBranch.Name) + c.Assert(branch.Remote, Equals, testBranch.Remote) + c.Assert(branch.Merge, Equals, testBranch.Merge) + + branch, err = r.Branch("foo") + c.Assert(err, IsNil) + c.Assert(branch.Name, Equals, testBranch.Name) + c.Assert(branch.Remote, Equals, testBranch.Remote) + c.Assert(branch.Merge, Equals, testBranch.Merge) +} + +func (s *RepositorySuite) TestCreateBranchUnmarshal(c *C) { + r, _ := Init(memory.NewStorage(), nil) + + expected := []byte(`[core] + bare = true +[remote "foo"] + url = http://foo/foo.git + fetch = +refs/heads/*:refs/remotes/foo/* +[branch "foo"] + remote = origin + merge = refs/heads/foo +[branch "master"] + remote = origin + merge = refs/heads/master +`) + + _, err := r.CreateRemote(&config.RemoteConfig{ + Name: "foo", + URLs: []string{"http://foo/foo.git"}, + }) + c.Assert(err, IsNil) + testBranch1 := &config.Branch{ + Name: "master", + Remote: "origin", + Merge: "refs/heads/master", + } + testBranch2 := &config.Branch{ + Name: "foo", + Remote: "origin", + Merge: "refs/heads/foo", + } + err = r.CreateBranch(testBranch1) + err = r.CreateBranch(testBranch2) + + c.Assert(err, IsNil) + cfg, err := r.Config() + c.Assert(err, IsNil) + marshaled, err := cfg.Marshal() + c.Assert(string(expected), Equals, string(marshaled)) +} + +func (s *RepositorySuite) TestBranchInvalid(c *C) { + r, _ := Init(memory.NewStorage(), nil) + branch, err := r.Branch("foo") + + c.Assert(err, NotNil) + c.Assert(branch, IsNil) +} + +func (s *RepositorySuite) TestCreateBranchInvalid(c *C) { + r, _ := Init(memory.NewStorage(), nil) + err := r.CreateBranch(&config.Branch{}) + + c.Assert(err, NotNil) + + testBranch := &config.Branch{ + Name: "foo", + Remote: 
"origin", + Merge: "refs/heads/foo", + } + err = r.CreateBranch(testBranch) + c.Assert(err, IsNil) + err = r.CreateBranch(testBranch) + c.Assert(err, NotNil) +} + +func (s *RepositorySuite) TestDeleteBranch(c *C) { + r, _ := Init(memory.NewStorage(), nil) + testBranch := &config.Branch{ + Name: "foo", + Remote: "origin", + Merge: "refs/heads/foo", + } + err := r.CreateBranch(testBranch) + + c.Assert(err, IsNil) + + err = r.DeleteBranch("foo") + c.Assert(err, IsNil) + + b, err := r.Branch("foo") + c.Assert(err, Equals, ErrBranchNotFound) + c.Assert(b, IsNil) + + err = r.DeleteBranch("foo") + c.Assert(err, Equals, ErrBranchNotFound) +} + func (s *RepositorySuite) TestPlainInit(c *C) { dir, err := ioutil.TempDir("", "plain-init") c.Assert(err, IsNil) @@ -406,6 +520,36 @@ func (s *RepositorySuite) TestPlainOpenNotExists(c *C) { c.Assert(r, IsNil) } +func (s *RepositorySuite) TestPlainOpenDetectDotGit(c *C) { + dir, err := ioutil.TempDir("", "plain-open") + c.Assert(err, IsNil) + defer os.RemoveAll(dir) + + subdir := filepath.Join(dir, "a", "b") + err = os.MkdirAll(subdir, 0755) + c.Assert(err, IsNil) + + r, err := PlainInit(dir, false) + c.Assert(err, IsNil) + c.Assert(r, NotNil) + + opt := &PlainOpenOptions{DetectDotGit: true} + r, err = PlainOpenWithOptions(subdir, opt) + c.Assert(err, IsNil) + c.Assert(r, NotNil) +} + +func (s *RepositorySuite) TestPlainOpenNotExistsDetectDotGit(c *C) { + dir, err := ioutil.TempDir("", "plain-open") + c.Assert(err, IsNil) + defer os.RemoveAll(dir) + + opt := &PlainOpenOptions{DetectDotGit: true} + r, err := PlainOpenWithOptions(dir, opt) + c.Assert(err, Equals, ErrRepositoryNotExists) + c.Assert(r, IsNil) +} + func (s *RepositorySuite) TestPlainClone(c *C) { r, err := PlainClone(c.MkDir(), false, &CloneOptions{ URL: s.GetBasicLocalRepositoryURL(), @@ -416,6 +560,10 @@ func (s *RepositorySuite) TestPlainClone(c *C) { remotes, err := r.Remotes() c.Assert(err, IsNil) c.Assert(remotes, HasLen, 1) + cfg, err := r.Config() + c.Assert(err, 
IsNil) + c.Assert(cfg.Branches, HasLen, 1) + c.Assert(cfg.Branches["master"].Name, Equals, "master") } func (s *RepositorySuite) TestPlainCloneContext(c *C) { @@ -430,6 +578,10 @@ func (s *RepositorySuite) TestPlainCloneContext(c *C) { } func (s *RepositorySuite) TestPlainCloneWithRecurseSubmodules(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + dir, err := ioutil.TempDir("", "plain-clone-submodule") c.Assert(err, IsNil) defer os.RemoveAll(dir) @@ -445,6 +597,7 @@ func (s *RepositorySuite) TestPlainCloneWithRecurseSubmodules(c *C) { cfg, err := r.Config() c.Assert(err, IsNil) c.Assert(cfg.Remotes, HasLen, 1) + c.Assert(cfg.Branches, HasLen, 1) c.Assert(cfg.Submodules, HasLen, 2) } @@ -580,6 +733,8 @@ func (s *RepositorySuite) TestCloneConfig(c *C) { c.Assert(cfg.Remotes, HasLen, 1) c.Assert(cfg.Remotes["origin"].Name, Equals, "origin") c.Assert(cfg.Remotes["origin"].URLs, HasLen, 1) + c.Assert(cfg.Branches, HasLen, 1) + c.Assert(cfg.Branches["master"].Name, Equals, "master") } func (s *RepositorySuite) TestCloneSingleBranchAndNonHEAD(c *C) { @@ -601,6 +756,13 @@ func (s *RepositorySuite) TestCloneSingleBranchAndNonHEAD(c *C) { c.Assert(err, IsNil) c.Assert(remotes, HasLen, 1) + cfg, err := r.Config() + c.Assert(err, IsNil) + c.Assert(cfg.Branches, HasLen, 1) + c.Assert(cfg.Branches["branch"].Name, Equals, "branch") + c.Assert(cfg.Branches["branch"].Remote, Equals, "origin") + c.Assert(cfg.Branches["branch"].Merge, Equals, plumbing.ReferenceName("refs/heads/branch")) + head, err = r.Reference(plumbing.HEAD, false) c.Assert(err, IsNil) c.Assert(head, NotNil) @@ -637,6 +799,13 @@ func (s *RepositorySuite) TestCloneSingleBranch(c *C) { c.Assert(err, IsNil) c.Assert(remotes, HasLen, 1) + cfg, err := r.Config() + c.Assert(err, IsNil) + c.Assert(cfg.Branches, HasLen, 1) + c.Assert(cfg.Branches["master"].Name, Equals, "master") + c.Assert(cfg.Branches["master"].Remote, Equals, "origin") + c.Assert(cfg.Branches["master"].Merge, Equals, 
plumbing.ReferenceName("refs/heads/master")) + head, err = r.Reference(plumbing.HEAD, false) c.Assert(err, IsNil) c.Assert(head, NotNil) @@ -663,6 +832,10 @@ func (s *RepositorySuite) TestCloneDetachedHEAD(c *C) { }) c.Assert(err, IsNil) + cfg, err := r.Config() + c.Assert(err, IsNil) + c.Assert(cfg.Branches, HasLen, 0) + head, err := r.Reference(plumbing.HEAD, false) c.Assert(err, IsNil) c.Assert(head, NotNil) @@ -686,6 +859,10 @@ func (s *RepositorySuite) TestCloneDetachedHEADAndShallow(c *C) { c.Assert(err, IsNil) + cfg, err := r.Config() + c.Assert(err, IsNil) + c.Assert(cfg.Branches, HasLen, 0) + head, err := r.Reference(plumbing.HEAD, false) c.Assert(err, IsNil) c.Assert(head, NotNil) @@ -707,6 +884,10 @@ func (s *RepositorySuite) TestCloneDetachedHEADAnnotatedTag(c *C) { }) c.Assert(err, IsNil) + cfg, err := r.Config() + c.Assert(err, IsNil) + c.Assert(cfg.Branches, HasLen, 0) + head, err := r.Reference(plumbing.HEAD, false) c.Assert(err, IsNil) c.Assert(head, NotNil) @@ -1313,6 +1494,7 @@ func (s *RepositorySuite) TestResolveRevision(c *C) { "branch~1": "918c48b83bd081e863dbe1b80f8998f058cd8294", "v1.0.0~1": "918c48b83bd081e863dbe1b80f8998f058cd8294", "master~1": "918c48b83bd081e863dbe1b80f8998f058cd8294", + "918c48b83bd081e863dbe1b80f8998f058cd8294": "918c48b83bd081e863dbe1b80f8998f058cd8294", } for rev, hash := range datas { @@ -1332,10 +1514,19 @@ func (s *RepositorySuite) TestResolveRevisionWithErrors(c *C) { err := r.clone(context.Background(), &CloneOptions{URL: url}) c.Assert(err, IsNil) + headRef, err := r.Head() + c.Assert(err, IsNil) + + ref := plumbing.NewHashReference("refs/heads/918c48b83bd081e863dbe1b80f8998f058cd8294", headRef.Hash()) + err = r.Storer.SetReference(ref) + c.Assert(err, IsNil) + datas := map[string]string{ - "efs/heads/master~": "reference not found", - "HEAD^3": `Revision invalid : "3" found must be 0, 1 or 2 after "^"`, - "HEAD^{/whatever}": `No commit message match regexp : "whatever"`, + "efs/heads/master~": "reference not 
found", + "HEAD^3": `Revision invalid : "3" found must be 0, 1 or 2 after "^"`, + "HEAD^{/whatever}": `No commit message match regexp : "whatever"`, + "4e1243bd22c66e76c2ba9eddc1f91394e57f9f83": "reference not found", + "918c48b83bd081e863dbe1b80f8998f058cd8294": `refname "918c48b83bd081e863dbe1b80f8998f058cd8294" is ambiguous`, } for rev, rerr := range datas { @@ -1396,10 +1587,18 @@ func (s *RepositorySuite) testRepackObjects( } func (s *RepositorySuite) TestRepackObjects(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + s.testRepackObjects(c, time.Time{}, 1) } func (s *RepositorySuite) TestRepackObjectsWithNoDelete(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + s.testRepackObjects(c, time.Unix(0, 1), 3) } @@ -1426,3 +1625,66 @@ func executeOnPath(path, cmd string) error { return c.Run() } + +func (s *RepositorySuite) TestBrokenMultipleShallowFetch(c *C) { + r, _ := Init(memory.NewStorage(), nil) + _, err := r.CreateRemote(&config.RemoteConfig{ + Name: DefaultRemoteName, + URLs: []string{s.GetBasicLocalRepositoryURL()}, + }) + c.Assert(err, IsNil) + + c.Assert(r.Fetch(&FetchOptions{ + Depth: 2, + RefSpecs: []config.RefSpec{config.RefSpec("refs/heads/master:refs/heads/master")}, + }), IsNil) + + shallows, err := r.Storer.Shallow() + c.Assert(err, IsNil) + c.Assert(len(shallows), Equals, 1) + + ref, err := r.Reference("refs/heads/master", true) + c.Assert(err, IsNil) + cobj, err := r.CommitObject(ref.Hash()) + c.Assert(err, IsNil) + c.Assert(cobj, NotNil) + err = object.NewCommitPreorderIter(cobj, nil, nil).ForEach(func(c *object.Commit) error { + for _, ph := range c.ParentHashes { + for _, h := range shallows { + if ph == h { + return storer.ErrStop + } + } + } + + return nil + }) + c.Assert(err, IsNil) + + c.Assert(r.Fetch(&FetchOptions{ + Depth: 5, + RefSpecs: []config.RefSpec{config.RefSpec("refs/heads/*:refs/heads/*")}, + }), IsNil) + + shallows, err = r.Storer.Shallow() + c.Assert(err, IsNil) + 
c.Assert(len(shallows), Equals, 3) + + ref, err = r.Reference("refs/heads/master", true) + c.Assert(err, IsNil) + cobj, err = r.CommitObject(ref.Hash()) + c.Assert(err, IsNil) + c.Assert(cobj, NotNil) + err = object.NewCommitPreorderIter(cobj, nil, nil).ForEach(func(c *object.Commit) error { + for _, ph := range c.ParentHashes { + for _, h := range shallows { + if ph == h { + return storer.ErrStop + } + } + } + + return nil + }) + c.Assert(err, IsNil) +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/config.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/config.go index a2cc17378..be812e424 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/config.go +++ b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/config.go @@ -5,7 +5,7 @@ import ( "os" "gopkg.in/src-d/go-git.v4/config" - "gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit" + "gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit" "gopkg.in/src-d/go-git.v4/utils/ioutil" ) @@ -13,7 +13,7 @@ type ConfigStorage struct { dir *dotgit.DotGit } -func (c *ConfigStorage) Config() (*config.Config, error) { +func (c *ConfigStorage) Config() (conf *config.Config, err error) { cfg := config.NewConfig() f, err := c.dir.Config() @@ -32,15 +32,15 @@ func (c *ConfigStorage) Config() (*config.Config, error) { return nil, err } - if err := cfg.Unmarshal(b); err != nil { + if err = cfg.Unmarshal(b); err != nil { return nil, err } return cfg, err } -func (c *ConfigStorage) SetConfig(cfg *config.Config) error { - if err := cfg.Validate(); err != nil { +func (c *ConfigStorage) SetConfig(cfg *config.Config) (err error) { + if err = cfg.Validate(); err != nil { return err } diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/config_test.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/config_test.go index cc03119d3..71c947da6 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/config_test.go +++ 
b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/config_test.go @@ -5,7 +5,7 @@ import ( "os" "gopkg.in/src-d/go-git.v4/config" - "gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit" + "gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit" . "gopkg.in/check.v1" "gopkg.in/src-d/go-billy.v4/osfs" diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit.go similarity index 97% rename from vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit.go rename to vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit.go index 027ef83b0..52b621c58 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit.go +++ b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit.go @@ -162,8 +162,11 @@ func (d *DotGit) ObjectPacks() ([]plumbing.Hash, error) { n := f.Name() h := plumbing.NewHash(n[5 : len(n)-5]) //pack-(hash).pack + if h.IsZero() { + // Ignore files with badly-formatted names. + continue + } packs = append(packs, h) - } return packs, nil @@ -255,7 +258,12 @@ func (d *DotGit) ForEachObjectHash(fun func(plumbing.Hash) error) error { } for _, o := range d { - err = fun(plumbing.NewHash(base + o.Name())) + h := plumbing.NewHash(base + o.Name()) + if h.IsZero() { + // Ignore files with badly-formatted names. + continue + } + err = fun(h) if err != nil { return err } @@ -375,7 +383,7 @@ func (d *DotGit) findPackedRefsInFile(f billy.File) ([]*plumbing.Reference, erro return refs, s.Err() } -func (d *DotGit) findPackedRefs() ([]*plumbing.Reference, error) { +func (d *DotGit) findPackedRefs() (r []*plumbing.Reference, err error) { f, err := d.fs.Open(packedRefsPath) if err != nil { if os.IsNotExist(err) { @@ -676,7 +684,7 @@ func (d *DotGit) PackRefs() (err error) { // Gather all refs using addRefsFromRefDir and addRefsFromPackedRefs. 
var refs []*plumbing.Reference seen := make(map[plumbing.ReferenceName]bool) - if err := d.addRefsFromRefDir(&refs, seen); err != nil { + if err = d.addRefsFromRefDir(&refs, seen); err != nil { return err } if len(refs) == 0 { @@ -684,7 +692,7 @@ func (d *DotGit) PackRefs() (err error) { return nil } numLooseRefs := len(refs) - if err := d.addRefsFromPackedRefsFile(&refs, f, seen); err != nil { + if err = d.addRefsFromPackedRefsFile(&refs, f, seen); err != nil { return err } @@ -701,7 +709,7 @@ func (d *DotGit) PackRefs() (err error) { w := bufio.NewWriter(tmp) for _, ref := range refs { - _, err := w.WriteString(ref.String() + "\n") + _, err = w.WriteString(ref.String() + "\n") if err != nil { return err } diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_rewrite_packed_refs_nix.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_rewrite_packed_refs_nix.go similarity index 100% rename from vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_rewrite_packed_refs_nix.go rename to vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_rewrite_packed_refs_nix.go diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_rewrite_packed_refs_norwfs.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_rewrite_packed_refs_norwfs.go similarity index 100% rename from vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_rewrite_packed_refs_norwfs.go rename to vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_rewrite_packed_refs_norwfs.go diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_rewrite_packed_refs_windows.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_rewrite_packed_refs_windows.go similarity index 100% rename from vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_rewrite_packed_refs_windows.go rename to 
vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_rewrite_packed_refs_windows.go diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_setref.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_setref.go similarity index 97% rename from vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_setref.go rename to vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_setref.go index c732c9fa9..d27c1a303 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_setref.go +++ b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_setref.go @@ -9,7 +9,7 @@ import ( "gopkg.in/src-d/go-git.v4/utils/ioutil" ) -func (d *DotGit) setRef(fileName, content string, old *plumbing.Reference) error { +func (d *DotGit) setRef(fileName, content string, old *plumbing.Reference) (err error) { // If we are not checking an old ref, just truncate the file. mode := os.O_RDWR | os.O_CREATE if old == nil { diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_setref_norwfs.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_setref_norwfs.go similarity index 100% rename from vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_setref_norwfs.go rename to vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_setref_norwfs.go diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_test.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_test.go similarity index 97% rename from vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_test.go rename to vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_test.go index 2c432951b..7733eef78 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/dotgit_test.go +++ 
b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/dotgit_test.go @@ -151,6 +151,7 @@ func (s *SuiteDotGit) TestRefsFromReferenceFile(c *C) { } func BenchmarkRefMultipleTimes(b *testing.B) { + fixtures.Init() fs := fixtures.Basic().ByTag(".git").One().DotGit() refname := plumbing.ReferenceName("refs/remotes/origin/branch") @@ -418,7 +419,7 @@ func findReference(refs []*plumbing.Reference, name string) *plumbing.Reference return nil } -func (s *SuiteDotGit) TestObjectsPack(c *C) { +func (s *SuiteDotGit) TestObjectPacks(c *C) { f := fixtures.Basic().ByTag(".git").One() fs := f.DotGit() dir := New(fs) @@ -427,6 +428,18 @@ func (s *SuiteDotGit) TestObjectsPack(c *C) { c.Assert(err, IsNil) c.Assert(hashes, HasLen, 1) c.Assert(hashes[0], Equals, f.PackfileHash) + + // Make sure that a random file in the pack directory doesn't + // break everything. + badFile, err := fs.Create("objects/pack/OOPS_THIS_IS_NOT_RIGHT.pack") + c.Assert(err, IsNil) + err = badFile.Close() + c.Assert(err, IsNil) + + hashes2, err := dir.ObjectPacks() + c.Assert(err, IsNil) + c.Assert(hashes2, HasLen, 1) + c.Assert(hashes[0], Equals, hashes2[0]) } func (s *SuiteDotGit) TestObjectPack(c *C) { diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/writers.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/writers.go similarity index 100% rename from vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/writers.go rename to vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/writers.go diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/writers_test.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/writers_test.go similarity index 100% rename from vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit/writers_test.go rename to vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit/writers_test.go diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/index.go 
b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/index.go index 14ab09a96..2ebf57e61 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/index.go +++ b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/index.go @@ -4,7 +4,7 @@ import ( "os" "gopkg.in/src-d/go-git.v4/plumbing/format/index" - "gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit" + "gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit" "gopkg.in/src-d/go-git.v4/utils/ioutil" ) @@ -12,7 +12,7 @@ type IndexStorage struct { dir *dotgit.DotGit } -func (s *IndexStorage) SetIndex(idx *index.Index) error { +func (s *IndexStorage) SetIndex(idx *index.Index) (err error) { f, err := s.dir.IndexWriter() if err != nil { return err @@ -25,7 +25,7 @@ func (s *IndexStorage) SetIndex(idx *index.Index) error { return err } -func (s *IndexStorage) Index() (*index.Index, error) { +func (s *IndexStorage) Index() (i *index.Index, err error) { idx := &index.Index{ Version: 2, } diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/module.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/module.go index 6f3de3f28..7c8c8d866 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/module.go +++ b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/module.go @@ -2,7 +2,7 @@ package filesystem import ( "gopkg.in/src-d/go-git.v4/storage" - "gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit" + "gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit" ) type ModuleStorage struct { diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/object.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/object.go index 9f1c5efa6..9ffe4dcf5 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/object.go +++ b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/object.go @@ -11,7 +11,7 @@ import ( "gopkg.in/src-d/go-git.v4/plumbing/format/objfile" "gopkg.in/src-d/go-git.v4/plumbing/format/packfile" "gopkg.in/src-d/go-git.v4/plumbing/storer" - 
"gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit" + "gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit" "gopkg.in/src-d/go-git.v4/storage/memory" "gopkg.in/src-d/go-git.v4/utils/ioutil" @@ -26,7 +26,8 @@ type ObjectStorage struct { index map[plumbing.Hash]*packfile.Index } -func newObjectStorage(dir *dotgit.DotGit) (ObjectStorage, error) { +// NewObjectStorage creates a new ObjectStorage with the given .git directory. +func NewObjectStorage(dir *dotgit.DotGit) (ObjectStorage, error) { s := ObjectStorage{ deltaBaseCache: cache.NewObjectLRUDefault(), dir: dir, @@ -55,7 +56,7 @@ func (s *ObjectStorage) requireIndex() error { return nil } -func (s *ObjectStorage) loadIdxFile(h plumbing.Hash) error { +func (s *ObjectStorage) loadIdxFile(h plumbing.Hash) (err error) { f, err := s.dir.ObjectPackIdx(h) if err != nil { return err @@ -94,7 +95,7 @@ func (s *ObjectStorage) PackfileWriter() (io.WriteCloser, error) { } // SetEncodedObject adds a new object to the storage. -func (s *ObjectStorage) SetEncodedObject(o plumbing.EncodedObject) (plumbing.Hash, error) { +func (s *ObjectStorage) SetEncodedObject(o plumbing.EncodedObject) (h plumbing.Hash, err error) { if o.Type() == plumbing.OFSDeltaObject || o.Type() == plumbing.REFDeltaObject { return plumbing.ZeroHash, plumbing.ErrInvalidType } @@ -113,11 +114,11 @@ func (s *ObjectStorage) SetEncodedObject(o plumbing.EncodedObject) (plumbing.Has defer ioutil.CheckClose(or, &err) - if err := ow.WriteHeader(o.Type(), o.Size()); err != nil { + if err = ow.WriteHeader(o.Type(), o.Size()); err != nil { return plumbing.ZeroHash, err } - if _, err := io.Copy(ow, or); err != nil { + if _, err = io.Copy(ow, or); err != nil { return plumbing.ZeroHash, err } @@ -166,7 +167,7 @@ func (s *ObjectStorage) EncodedObject(t plumbing.ObjectType, h plumbing.Hash) (p // Create a new object storage with the DotGit(s) and check for the // required hash object. Skip when not found. 
for _, dg := range dotgits { - o, oe := newObjectStorage(dg) + o, oe := NewObjectStorage(dg) if oe != nil { continue } diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/object_test.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/object_test.go index de8f2b2b9..ecd6bebc3 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/object_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/object_test.go @@ -2,7 +2,7 @@ package filesystem import ( "gopkg.in/src-d/go-git.v4/plumbing" - "gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit" + "gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit" . "gopkg.in/check.v1" "gopkg.in/src-d/go-git-fixtures.v3" @@ -24,7 +24,7 @@ var _ = Suite(&FsSuite{ func (s *FsSuite) TestGetFromObjectFile(c *C) { fs := fixtures.ByTag(".git").ByTag("unpacked").One().DotGit() - o, err := newObjectStorage(dotgit.New(fs)) + o, err := NewObjectStorage(dotgit.New(fs)) c.Assert(err, IsNil) expected := plumbing.NewHash("f3dfe29d268303fc6e1bbce268605fc99573406e") @@ -36,7 +36,7 @@ func (s *FsSuite) TestGetFromObjectFile(c *C) { func (s *FsSuite) TestGetFromPackfile(c *C) { fixtures.Basic().ByTag(".git").Test(c, func(f *fixtures.Fixture) { fs := f.DotGit() - o, err := newObjectStorage(dotgit.New(fs)) + o, err := NewObjectStorage(dotgit.New(fs)) c.Assert(err, IsNil) expected := plumbing.NewHash("6ecf0ef2c2dffb796033e5a02219af86ec6584e5") @@ -48,7 +48,7 @@ func (s *FsSuite) TestGetFromPackfile(c *C) { func (s *FsSuite) TestGetFromPackfileMultiplePackfiles(c *C) { fs := fixtures.ByTag(".git").ByTag("multi-packfile").One().DotGit() - o, err := newObjectStorage(dotgit.New(fs)) + o, err := NewObjectStorage(dotgit.New(fs)) c.Assert(err, IsNil) expected := plumbing.NewHash("8d45a34641d73851e01d3754320b33bb5be3c4d3") @@ -65,7 +65,7 @@ func (s *FsSuite) TestGetFromPackfileMultiplePackfiles(c *C) { func (s *FsSuite) TestIter(c *C) { fixtures.ByTag(".git").ByTag("packfile").Test(c, func(f *fixtures.Fixture) { fs := 
f.DotGit() - o, err := newObjectStorage(dotgit.New(fs)) + o, err := NewObjectStorage(dotgit.New(fs)) c.Assert(err, IsNil) iter, err := o.IterEncodedObjects(plumbing.AnyObject) @@ -86,7 +86,7 @@ func (s *FsSuite) TestIterWithType(c *C) { fixtures.ByTag(".git").Test(c, func(f *fixtures.Fixture) { for _, t := range s.Types { fs := f.DotGit() - o, err := newObjectStorage(dotgit.New(fs)) + o, err := NewObjectStorage(dotgit.New(fs)) c.Assert(err, IsNil) iter, err := o.IterEncodedObjects(t) diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/reference.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/reference.go index 7313f05e8..a891b837b 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/reference.go +++ b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/reference.go @@ -3,7 +3,7 @@ package filesystem import ( "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/plumbing/storer" - "gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit" + "gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit" ) type ReferenceStorage struct { diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/shallow.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/shallow.go index 4b2e2dc8f..502d406da 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/shallow.go +++ b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/shallow.go @@ -5,7 +5,7 @@ import ( "fmt" "gopkg.in/src-d/go-git.v4/plumbing" - "gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit" + "gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit" "gopkg.in/src-d/go-git.v4/utils/ioutil" ) @@ -41,6 +41,8 @@ func (s *ShallowStorage) Shallow() ([]plumbing.Hash, error) { return nil, err } + defer ioutil.CheckClose(f, &err) + var hash []plumbing.Hash scn := bufio.NewScanner(f) diff --git a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/storage.go b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/storage.go index 82b137c7c..622bb4a8d 100644 --- 
a/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/storage.go +++ b/vendor/gopkg.in/src-d/go-git.v4/storage/filesystem/storage.go @@ -2,7 +2,7 @@ package filesystem import ( - "gopkg.in/src-d/go-git.v4/storage/filesystem/internal/dotgit" + "gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit" "gopkg.in/src-d/go-billy.v4" ) @@ -25,7 +25,7 @@ type Storage struct { // NewStorage returns a new Storage backed by a given `fs.Filesystem` func NewStorage(fs billy.Filesystem) (*Storage, error) { dir := dotgit.New(fs) - o, err := newObjectStorage(dir) + o, err := NewObjectStorage(dir) if err != nil { return nil, err } diff --git a/vendor/gopkg.in/src-d/go-git.v4/submodule_test.go b/vendor/gopkg.in/src-d/go-git.v4/submodule_test.go index bea5a0fcf..2c0a2edeb 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/submodule_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/submodule_test.go @@ -5,6 +5,7 @@ import ( "io/ioutil" "os" "path/filepath" + "testing" "gopkg.in/src-d/go-git.v4/plumbing" @@ -66,6 +67,10 @@ func (s *SubmoduleSuite) TestInit(c *C) { } func (s *SubmoduleSuite) TestUpdate(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + sm, err := s.Worktree.Submodule("basic") c.Assert(err, IsNil) @@ -118,6 +123,10 @@ func (s *SubmoduleSuite) TestUpdateWithNotFetch(c *C) { } func (s *SubmoduleSuite) TestUpdateWithRecursion(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + sm, err := s.Worktree.Submodule("itself") c.Assert(err, IsNil) @@ -134,6 +143,10 @@ func (s *SubmoduleSuite) TestUpdateWithRecursion(c *C) { } func (s *SubmoduleSuite) TestUpdateWithInitAndUpdate(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + sm, err := s.Worktree.Submodule("basic") c.Assert(err, IsNil) @@ -183,6 +196,21 @@ func (s *SubmoduleSuite) TestSubmodulesInit(c *C) { } } +func (s *SubmoduleSuite) TestGitSubmodulesSymlink(c *C) { + f, err := s.Worktree.Filesystem.Create("badfile") + c.Assert(err, IsNil) + defer f.Close() + + 
err = s.Worktree.Filesystem.Remove(gitmodulesFile) + c.Assert(err, IsNil) + + err = s.Worktree.Filesystem.Symlink("badfile", gitmodulesFile) + c.Assert(err, IsNil) + + _, err = s.Worktree.Submodules() + c.Assert(err, Equals, ErrGitModulesSymlink) +} + func (s *SubmoduleSuite) TestSubmodulesStatus(c *C) { sm, err := s.Worktree.Submodules() c.Assert(err, IsNil) @@ -193,6 +221,10 @@ func (s *SubmoduleSuite) TestSubmodulesStatus(c *C) { } func (s *SubmoduleSuite) TestSubmodulesUpdateContext(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + sm, err := s.Worktree.Submodules() c.Assert(err, IsNil) diff --git a/vendor/gopkg.in/src-d/go-git.v4/worktree.go b/vendor/gopkg.in/src-d/go-git.v4/worktree.go index 394dce453..99b2cd124 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/worktree.go +++ b/vendor/gopkg.in/src-d/go-git.v4/worktree.go @@ -13,6 +13,7 @@ import ( "gopkg.in/src-d/go-git.v4/config" "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/plumbing/filemode" + "gopkg.in/src-d/go-git.v4/plumbing/format/gitignore" "gopkg.in/src-d/go-git.v4/plumbing/format/index" "gopkg.in/src-d/go-git.v4/plumbing/object" "gopkg.in/src-d/go-git.v4/plumbing/storer" @@ -27,12 +28,15 @@ var ( ErrWorktreeNotClean = errors.New("worktree is not clean") ErrSubmoduleNotFound = errors.New("submodule not found") ErrUnstagedChanges = errors.New("worktree contains unstaged changes") + ErrGitModulesSymlink = errors.New(gitmodulesFile + " is a symlink") ) // Worktree represents a git worktree. type Worktree struct { // Filesystem underlying filesystem. Filesystem billy.Filesystem + // External excludes not found in the repository .gitignore + Excludes []gitignore.Pattern r *Repository } @@ -554,6 +558,22 @@ func (w *Worktree) checkoutFileSymlink(f *object.File) (err error) { } err = w.Filesystem.Symlink(string(bytes), f.Name) + + // On windows, this might fail. + // Follow Git on Windows behavior by writing the link as it is. 
+ if err != nil && isSymlinkWindowsNonAdmin(err) { + mode, _ := f.Mode.ToOSFileMode() + + to, err := w.Filesystem.OpenFile(f.Name, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, mode.Perm()) + if err != nil { + return err + } + + defer ioutil.CheckClose(to, &err) + + _, err = to.Write(bytes) + return err + } return } @@ -661,7 +681,18 @@ func (w *Worktree) newSubmodule(fromModules, fromConfig *config.Submodule) *Subm return m } +func (w *Worktree) isSymlink(path string) bool { + if s, err := w.Filesystem.Lstat(path); err == nil { + return s.Mode()&os.ModeSymlink != 0 + } + return false +} + func (w *Worktree) readGitmodulesFile() (*config.Modules, error) { + if w.isSymlink(gitmodulesFile) { + return nil, ErrGitModulesSymlink + } + f, err := w.Filesystem.Open(gitmodulesFile) if err != nil { if os.IsNotExist(err) { diff --git a/vendor/gopkg.in/src-d/go-git.v4/worktree_darwin.go b/vendor/gopkg.in/src-d/go-git.v4/worktree_bsd.go similarity index 86% rename from vendor/gopkg.in/src-d/go-git.v4/worktree_darwin.go rename to vendor/gopkg.in/src-d/go-git.v4/worktree_bsd.go index 8eaffde8a..3b374c77b 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/worktree_darwin.go +++ b/vendor/gopkg.in/src-d/go-git.v4/worktree_bsd.go @@ -20,3 +20,7 @@ func init() { } } } + +func isSymlinkWindowsNonAdmin(err error) bool { + return false +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/worktree_linux.go b/vendor/gopkg.in/src-d/go-git.v4/worktree_linux.go index a33cd2fb9..891cb1cf3 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/worktree_linux.go +++ b/vendor/gopkg.in/src-d/go-git.v4/worktree_linux.go @@ -20,3 +20,7 @@ func init() { } } } + +func isSymlinkWindowsNonAdmin(err error) bool { + return false +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/worktree_status.go b/vendor/gopkg.in/src-d/go-git.v4/worktree_status.go index 2cac78ed2..0e113d093 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/worktree_status.go +++ b/vendor/gopkg.in/src-d/go-git.v4/worktree_status.go @@ -145,6 +145,9 @@ func (w *Worktree) 
excludeIgnoredChanges(changes merkletrie.Changes) merkletrie. if err != nil || len(patterns) == 0 { return changes } + + patterns = append(patterns, w.Excludes...) + m := gitignore.NewMatcher(patterns) var res merkletrie.Changes @@ -300,6 +303,10 @@ func (w *Worktree) doAddDirectory(idx *index.Index, s Status, directory string) var a bool if file.IsDir() { + if file.Name() == GitDirName { + // ignore special git directory + continue + } a, err = w.doAddDirectory(idx, s, name) } else { a, _, err = w.doAddFile(idx, s, name) diff --git a/vendor/gopkg.in/src-d/go-git.v4/worktree_test.go b/vendor/gopkg.in/src-d/go-git.v4/worktree_test.go index cb2e5e2fa..df191b0a0 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/worktree_test.go +++ b/vendor/gopkg.in/src-d/go-git.v4/worktree_test.go @@ -3,15 +3,19 @@ package git import ( "bytes" "context" + "errors" "io/ioutil" "os" "path/filepath" "regexp" "runtime" + "testing" + "time" "gopkg.in/src-d/go-git.v4/config" "gopkg.in/src-d/go-git.v4/plumbing" "gopkg.in/src-d/go-git.v4/plumbing/filemode" + "gopkg.in/src-d/go-git.v4/plumbing/format/gitignore" "gopkg.in/src-d/go-git.v4/plumbing/format/index" "gopkg.in/src-d/go-git.v4/plumbing/object" "gopkg.in/src-d/go-git.v4/storage/memory" @@ -196,6 +200,10 @@ func (s *WorktreeSuite) TestPullProgress(c *C) { } func (s *WorktreeSuite) TestPullProgressWithRecursion(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + path := fixtures.ByTag("submodule").One().Worktree().Root() dir, err := ioutil.TempDir("", "plain-clone-submodule") @@ -613,6 +621,10 @@ func (s *WorktreeSuite) TestCheckoutTag(c *C) { } func (s *WorktreeSuite) TestCheckoutBisect(c *C) { + if testing.Short() { + c.Skip("skipping test in short mode.") + } + s.testCheckoutBisect(c, "https://github.com/src-d/go-git.git") } @@ -1061,6 +1073,35 @@ func (s *WorktreeSuite) TestAddUntracked(c *C) { c.Assert(obj.Size(), Equals, int64(3)) } +func (s *WorktreeSuite) TestIgnored(c *C) { + fs := 
memfs.New() + w := &Worktree{ + r: s.Repository, + Filesystem: fs, + } + + w.Excludes = make([]gitignore.Pattern, 0) + w.Excludes = append(w.Excludes, gitignore.ParsePattern("foo", nil)) + + err := w.Checkout(&CheckoutOptions{Force: true}) + c.Assert(err, IsNil) + + idx, err := w.r.Storer.Index() + c.Assert(err, IsNil) + c.Assert(idx.Entries, HasLen, 9) + + err = util.WriteFile(w.Filesystem, "foo", []byte("FOO"), 0755) + c.Assert(err, IsNil) + + status, err := w.Status() + c.Assert(err, IsNil) + c.Assert(status, HasLen, 0) + + file := status.File("foo") + c.Assert(file.Staging, Equals, Untracked) + c.Assert(file.Worktree, Equals, Untracked) +} + func (s *WorktreeSuite) TestAddModified(c *C) { fs := memfs.New() w := &Worktree{ @@ -1824,3 +1865,39 @@ func (s *WorktreeSuite) TestGrep(c *C) { } } } + +func (s *WorktreeSuite) TestAddAndCommit(c *C) { + dir, err := ioutil.TempDir("", "plain-repo") + c.Assert(err, IsNil) + defer os.RemoveAll(dir) + + repo, err := PlainInit(dir, false) + c.Assert(err, IsNil) + + w, err := repo.Worktree() + c.Assert(err, IsNil) + + _, err = w.Add(".") + c.Assert(err, IsNil) + + w.Commit("Test Add And Commit", &CommitOptions{Author: &object.Signature{ + Name: "foo", + Email: "foo@foo.foo", + When: time.Now(), + }}) + + iter, err := w.r.Log(&LogOptions{}) + c.Assert(err, IsNil) + err = iter.ForEach(func(c *object.Commit) error { + files, err := c.Files() + if err != nil { + return err + } + + err = files.ForEach(func(f *object.File) error { + return errors.New("Expected no files, got at least 1") + }) + return err + }) + c.Assert(err, IsNil) +} diff --git a/vendor/gopkg.in/src-d/go-git.v4/worktree_windows.go b/vendor/gopkg.in/src-d/go-git.v4/worktree_windows.go index d59448ef8..1bef6f759 100644 --- a/vendor/gopkg.in/src-d/go-git.v4/worktree_windows.go +++ b/vendor/gopkg.in/src-d/go-git.v4/worktree_windows.go @@ -3,6 +3,7 @@ package git import ( + "os" "syscall" "time" @@ -18,3 +19,17 @@ func init() { } } } + +func isSymlinkWindowsNonAdmin(err 
error) bool { + const ERROR_PRIVILEGE_NOT_HELD syscall.Errno = 1314 + + if err != nil { + if errLink, ok := err.(*os.LinkError); ok { + if errNo, ok := errLink.Err.(syscall.Errno); ok { + return errNo == ERROR_PRIVILEGE_NOT_HELD + } + } + } + + return false +}