From 5940673903d5b4529a0de84cf8f1ca6b011b9661 Mon Sep 17 00:00:00 2001 From: vince Date: Tue, 24 Nov 2020 20:18:52 +0800 Subject: [PATCH 1/9] add migration 3 this migration updates the bug and identity ids --- migration3/README.md | 3 + migration3/after/bug/bug.go | 661 ++++++++++++++++ migration3/after/bug/bug_actions.go | 143 ++++ migration3/after/bug/bug_actions_test.go | 394 ++++++++++ migration3/after/bug/bug_test.go | 190 +++++ migration3/after/bug/clocks.go | 40 + migration3/after/bug/comment.go | 44 ++ migration3/after/bug/git_tree.go | 84 ++ migration3/after/bug/identity.go | 27 + migration3/after/bug/interface.go | 57 ++ migration3/after/bug/label.go | 100 +++ migration3/after/bug/label_test.go | 35 + migration3/after/bug/op_add_comment.go | 132 ++++ migration3/after/bug/op_add_comment_test.go | 39 + migration3/after/bug/op_create.go | 177 +++++ migration3/after/bug/op_create_test.go | 82 ++ migration3/after/bug/op_edit_comment.go | 170 ++++ migration3/after/bug/op_edit_comment_test.go | 108 +++ migration3/after/bug/op_label_change.go | 285 +++++++ migration3/after/bug/op_label_change_test.go | 40 + migration3/after/bug/op_noop.go | 84 ++ migration3/after/bug/op_noop_test.go | 40 + migration3/after/bug/op_set_metadata.go | 113 +++ migration3/after/bug/op_set_metadata_test.go | 127 +++ migration3/after/bug/op_set_status.go | 127 +++ migration3/after/bug/op_set_status_test.go | 38 + migration3/after/bug/op_set_title.go | 160 ++++ migration3/after/bug/op_set_title_test.go | 38 + migration3/after/bug/operation.go | 213 +++++ migration3/after/bug/operation_iterator.go | 72 ++ .../after/bug/operation_iterator_test.go | 79 ++ migration3/after/bug/operation_pack.go | 186 +++++ migration3/after/bug/operation_pack_test.go | 78 ++ migration3/after/bug/operation_test.go | 133 ++++ migration3/after/bug/snapshot.go | 133 ++++ migration3/after/bug/sorting.go | 57 ++ migration3/after/bug/status.go | 57 ++ migration3/after/bug/timeline.go | 79 ++ 
migration3/after/bug/with_snapshot.go | 58 ++ migration3/after/entity/doc.go | 8 + migration3/after/entity/err.go | 32 + migration3/after/entity/id.go | 99 +++ migration3/after/entity/interface.go | 12 + migration3/after/entity/merge.go | 74 ++ migration3/after/identity/common.go | 37 + migration3/after/identity/identity.go | 542 +++++++++++++ migration3/after/identity/identity_actions.go | 132 ++++ .../after/identity/identity_actions_test.go | 158 ++++ migration3/after/identity/identity_stub.go | 96 +++ .../after/identity/identity_stub_test.go | 26 + migration3/after/identity/identity_test.go | 248 ++++++ migration3/after/identity/identity_user.go | 68 ++ migration3/after/identity/interface.go | 58 ++ migration3/after/identity/key.go | 18 + migration3/after/identity/resolver.go | 36 + migration3/after/identity/version.go | 286 +++++++ migration3/after/identity/version_test.go | 84 ++ migration3/after/repository/config.go | 145 ++++ migration3/after/repository/config_mem.go | 94 +++ .../after/repository/config_mem_test.go | 7 + migration3/after/repository/config_test.go | 54 ++ migration3/after/repository/config_testing.go | 116 +++ migration3/after/repository/git.go | 461 +++++++++++ migration3/after/repository/git_cli.go | 56 ++ migration3/after/repository/git_config.go | 221 ++++++ migration3/after/repository/git_test.go | 10 + migration3/after/repository/git_testing.go | 74 ++ migration3/after/repository/gogit.go | 704 +++++++++++++++++ migration3/after/repository/gogit_config.go | 236 ++++++ migration3/after/repository/gogit_test.go | 68 ++ migration3/after/repository/gogit_testing.go | 58 ++ migration3/after/repository/hash.go | 51 ++ migration3/after/repository/keyring.go | 50 ++ migration3/after/repository/mock_repo.go | 357 +++++++++ migration3/after/repository/mock_repo_test.go | 10 + migration3/after/repository/repo.go | 157 ++++ migration3/after/repository/repo_testing.go | 244 ++++++ migration3/after/repository/tree_entry.go | 102 +++ 
.../after/repository/tree_entry_test.go | 31 + migration3/after/util/lamport/clock.go | 15 + .../after/util/lamport/clock_testing.go | 28 + migration3/after/util/lamport/mem_clock.go | 89 +++ .../after/util/lamport/mem_clock_test.go | 8 + .../after/util/lamport/persisted_clock.go | 100 +++ .../util/lamport/persisted_clock_test.go | 19 + migration3/after/util/text/transform.go | 31 + migration3/after/util/text/validate.go | 44 ++ migration3/after/util/timestamp/timestamp.go | 9 + migration3/before/bug/bug.go | 730 ++++++++++++++++++ migration3/before/bug/bug_actions.go | 143 ++++ migration3/before/bug/bug_actions_test.go | 390 ++++++++++ migration3/before/bug/bug_test.go | 186 +++++ migration3/before/bug/clocks.go | 40 + migration3/before/bug/comment.go | 44 ++ migration3/before/bug/identity.go | 27 + migration3/before/bug/interface.go | 57 ++ migration3/before/bug/label.go | 100 +++ migration3/before/bug/label_test.go | 35 + migration3/before/bug/op_add_comment.go | 132 ++++ migration3/before/bug/op_add_comment_test.go | 39 + migration3/before/bug/op_create.go | 155 ++++ migration3/before/bug/op_create_test.go | 78 ++ migration3/before/bug/op_edit_comment.go | 170 ++++ migration3/before/bug/op_edit_comment_test.go | 105 +++ migration3/before/bug/op_label_change.go | 285 +++++++ migration3/before/bug/op_label_change_test.go | 40 + migration3/before/bug/op_noop.go | 84 ++ migration3/before/bug/op_noop_test.go | 40 + migration3/before/bug/op_set_metadata.go | 113 +++ migration3/before/bug/op_set_metadata_test.go | 128 +++ migration3/before/bug/op_set_status.go | 127 +++ migration3/before/bug/op_set_status_test.go | 40 + migration3/before/bug/op_set_title.go | 160 ++++ migration3/before/bug/op_set_title_test.go | 40 + migration3/before/bug/operation.go | 219 ++++++ migration3/before/bug/operation_iterator.go | 72 ++ .../before/bug/operation_iterator_test.go | 78 ++ migration3/before/bug/operation_pack.go | 188 +++++ migration3/before/bug/operation_pack_test.go | 79 ++ 
migration3/before/bug/operation_test.go | 119 +++ migration3/before/bug/snapshot.go | 133 ++++ migration3/before/bug/sorting.go | 57 ++ migration3/before/bug/status.go | 57 ++ migration3/before/bug/timeline.go | 79 ++ migration3/before/bug/with_snapshot.go | 58 ++ migration3/before/entity/doc.go | 8 + migration3/before/entity/err.go | 32 + migration3/before/entity/id.go | 100 +++ migration3/before/entity/interface.go | 6 + migration3/before/entity/merge.go | 74 ++ migration3/before/identity/common.go | 37 + migration3/before/identity/identity.go | 632 +++++++++++++++ .../before/identity/identity_actions.go | 132 ++++ .../before/identity/identity_actions_test.go | 152 ++++ migration3/before/identity/identity_stub.go | 105 +++ .../before/identity/identity_stub_test.go | 26 + migration3/before/identity/identity_test.go | 316 ++++++++ migration3/before/identity/identity_user.go | 68 ++ migration3/before/identity/interface.go | 58 ++ migration3/before/identity/key.go | 18 + migration3/before/identity/resolver.go | 36 + migration3/before/identity/version.go | 228 ++++++ migration3/before/identity/version_test.go | 41 + migration3/before/repository/config.go | 145 ++++ migration3/before/repository/config_mem.go | 94 +++ .../before/repository/config_mem_test.go | 7 + migration3/before/repository/config_test.go | 54 ++ .../before/repository/config_testing.go | 116 +++ migration3/before/repository/git.go | 410 ++++++++++ migration3/before/repository/git_cli.go | 56 ++ migration3/before/repository/git_config.go | 221 ++++++ migration3/before/repository/git_test.go | 10 + migration3/before/repository/git_testing.go | 74 ++ migration3/before/repository/gogit.go | 655 ++++++++++++++++ migration3/before/repository/gogit_config.go | 236 ++++++ migration3/before/repository/gogit_test.go | 68 ++ migration3/before/repository/gogit_testing.go | 58 ++ migration3/before/repository/hash.go | 51 ++ migration3/before/repository/keyring.go | 50 ++ migration3/before/repository/mock_repo.go | 
335 ++++++++ .../before/repository/mock_repo_test.go | 10 + migration3/before/repository/repo.go | 148 ++++ migration3/before/repository/repo_testing.go | 233 ++++++ migration3/before/repository/tree_entry.go | 102 +++ .../before/repository/tree_entry_test.go | 31 + migration3/before/util/lamport/clock.go | 15 + .../before/util/lamport/clock_testing.go | 28 + migration3/before/util/lamport/mem_clock.go | 89 +++ .../before/util/lamport/mem_clock_test.go | 8 + .../before/util/lamport/persisted_clock.go | 100 +++ .../util/lamport/persisted_clock_test.go | 19 + migration3/before/util/text/transform.go | 31 + migration3/before/util/text/validate.go | 44 ++ migration3/before/util/timestamp/timestamp.go | 9 + migration3/migration3.go | 185 +++++ root.go | 2 + 176 files changed, 20736 insertions(+) create mode 100644 migration3/README.md create mode 100644 migration3/after/bug/bug.go create mode 100644 migration3/after/bug/bug_actions.go create mode 100644 migration3/after/bug/bug_actions_test.go create mode 100644 migration3/after/bug/bug_test.go create mode 100644 migration3/after/bug/clocks.go create mode 100644 migration3/after/bug/comment.go create mode 100644 migration3/after/bug/git_tree.go create mode 100644 migration3/after/bug/identity.go create mode 100644 migration3/after/bug/interface.go create mode 100644 migration3/after/bug/label.go create mode 100644 migration3/after/bug/label_test.go create mode 100644 migration3/after/bug/op_add_comment.go create mode 100644 migration3/after/bug/op_add_comment_test.go create mode 100644 migration3/after/bug/op_create.go create mode 100644 migration3/after/bug/op_create_test.go create mode 100644 migration3/after/bug/op_edit_comment.go create mode 100644 migration3/after/bug/op_edit_comment_test.go create mode 100644 migration3/after/bug/op_label_change.go create mode 100644 migration3/after/bug/op_label_change_test.go create mode 100644 migration3/after/bug/op_noop.go create mode 100644 
migration3/after/bug/op_noop_test.go create mode 100644 migration3/after/bug/op_set_metadata.go create mode 100644 migration3/after/bug/op_set_metadata_test.go create mode 100644 migration3/after/bug/op_set_status.go create mode 100644 migration3/after/bug/op_set_status_test.go create mode 100644 migration3/after/bug/op_set_title.go create mode 100644 migration3/after/bug/op_set_title_test.go create mode 100644 migration3/after/bug/operation.go create mode 100644 migration3/after/bug/operation_iterator.go create mode 100644 migration3/after/bug/operation_iterator_test.go create mode 100644 migration3/after/bug/operation_pack.go create mode 100644 migration3/after/bug/operation_pack_test.go create mode 100644 migration3/after/bug/operation_test.go create mode 100644 migration3/after/bug/snapshot.go create mode 100644 migration3/after/bug/sorting.go create mode 100644 migration3/after/bug/status.go create mode 100644 migration3/after/bug/timeline.go create mode 100644 migration3/after/bug/with_snapshot.go create mode 100644 migration3/after/entity/doc.go create mode 100644 migration3/after/entity/err.go create mode 100644 migration3/after/entity/id.go create mode 100644 migration3/after/entity/interface.go create mode 100644 migration3/after/entity/merge.go create mode 100644 migration3/after/identity/common.go create mode 100644 migration3/after/identity/identity.go create mode 100644 migration3/after/identity/identity_actions.go create mode 100644 migration3/after/identity/identity_actions_test.go create mode 100644 migration3/after/identity/identity_stub.go create mode 100644 migration3/after/identity/identity_stub_test.go create mode 100644 migration3/after/identity/identity_test.go create mode 100644 migration3/after/identity/identity_user.go create mode 100644 migration3/after/identity/interface.go create mode 100644 migration3/after/identity/key.go create mode 100644 migration3/after/identity/resolver.go create mode 100644 migration3/after/identity/version.go 
create mode 100644 migration3/after/identity/version_test.go create mode 100644 migration3/after/repository/config.go create mode 100644 migration3/after/repository/config_mem.go create mode 100644 migration3/after/repository/config_mem_test.go create mode 100644 migration3/after/repository/config_test.go create mode 100644 migration3/after/repository/config_testing.go create mode 100644 migration3/after/repository/git.go create mode 100644 migration3/after/repository/git_cli.go create mode 100644 migration3/after/repository/git_config.go create mode 100644 migration3/after/repository/git_test.go create mode 100644 migration3/after/repository/git_testing.go create mode 100644 migration3/after/repository/gogit.go create mode 100644 migration3/after/repository/gogit_config.go create mode 100644 migration3/after/repository/gogit_test.go create mode 100644 migration3/after/repository/gogit_testing.go create mode 100644 migration3/after/repository/hash.go create mode 100644 migration3/after/repository/keyring.go create mode 100644 migration3/after/repository/mock_repo.go create mode 100644 migration3/after/repository/mock_repo_test.go create mode 100644 migration3/after/repository/repo.go create mode 100644 migration3/after/repository/repo_testing.go create mode 100644 migration3/after/repository/tree_entry.go create mode 100644 migration3/after/repository/tree_entry_test.go create mode 100644 migration3/after/util/lamport/clock.go create mode 100644 migration3/after/util/lamport/clock_testing.go create mode 100644 migration3/after/util/lamport/mem_clock.go create mode 100644 migration3/after/util/lamport/mem_clock_test.go create mode 100644 migration3/after/util/lamport/persisted_clock.go create mode 100644 migration3/after/util/lamport/persisted_clock_test.go create mode 100644 migration3/after/util/text/transform.go create mode 100644 migration3/after/util/text/validate.go create mode 100644 migration3/after/util/timestamp/timestamp.go create mode 100644 
migration3/before/bug/bug.go create mode 100644 migration3/before/bug/bug_actions.go create mode 100644 migration3/before/bug/bug_actions_test.go create mode 100644 migration3/before/bug/bug_test.go create mode 100644 migration3/before/bug/clocks.go create mode 100644 migration3/before/bug/comment.go create mode 100644 migration3/before/bug/identity.go create mode 100644 migration3/before/bug/interface.go create mode 100644 migration3/before/bug/label.go create mode 100644 migration3/before/bug/label_test.go create mode 100644 migration3/before/bug/op_add_comment.go create mode 100644 migration3/before/bug/op_add_comment_test.go create mode 100644 migration3/before/bug/op_create.go create mode 100644 migration3/before/bug/op_create_test.go create mode 100644 migration3/before/bug/op_edit_comment.go create mode 100644 migration3/before/bug/op_edit_comment_test.go create mode 100644 migration3/before/bug/op_label_change.go create mode 100644 migration3/before/bug/op_label_change_test.go create mode 100644 migration3/before/bug/op_noop.go create mode 100644 migration3/before/bug/op_noop_test.go create mode 100644 migration3/before/bug/op_set_metadata.go create mode 100644 migration3/before/bug/op_set_metadata_test.go create mode 100644 migration3/before/bug/op_set_status.go create mode 100644 migration3/before/bug/op_set_status_test.go create mode 100644 migration3/before/bug/op_set_title.go create mode 100644 migration3/before/bug/op_set_title_test.go create mode 100644 migration3/before/bug/operation.go create mode 100644 migration3/before/bug/operation_iterator.go create mode 100644 migration3/before/bug/operation_iterator_test.go create mode 100644 migration3/before/bug/operation_pack.go create mode 100644 migration3/before/bug/operation_pack_test.go create mode 100644 migration3/before/bug/operation_test.go create mode 100644 migration3/before/bug/snapshot.go create mode 100644 migration3/before/bug/sorting.go create mode 100644 migration3/before/bug/status.go 
create mode 100644 migration3/before/bug/timeline.go create mode 100644 migration3/before/bug/with_snapshot.go create mode 100644 migration3/before/entity/doc.go create mode 100644 migration3/before/entity/err.go create mode 100644 migration3/before/entity/id.go create mode 100644 migration3/before/entity/interface.go create mode 100644 migration3/before/entity/merge.go create mode 100644 migration3/before/identity/common.go create mode 100644 migration3/before/identity/identity.go create mode 100644 migration3/before/identity/identity_actions.go create mode 100644 migration3/before/identity/identity_actions_test.go create mode 100644 migration3/before/identity/identity_stub.go create mode 100644 migration3/before/identity/identity_stub_test.go create mode 100644 migration3/before/identity/identity_test.go create mode 100644 migration3/before/identity/identity_user.go create mode 100644 migration3/before/identity/interface.go create mode 100644 migration3/before/identity/key.go create mode 100644 migration3/before/identity/resolver.go create mode 100644 migration3/before/identity/version.go create mode 100644 migration3/before/identity/version_test.go create mode 100644 migration3/before/repository/config.go create mode 100644 migration3/before/repository/config_mem.go create mode 100644 migration3/before/repository/config_mem_test.go create mode 100644 migration3/before/repository/config_test.go create mode 100644 migration3/before/repository/config_testing.go create mode 100644 migration3/before/repository/git.go create mode 100644 migration3/before/repository/git_cli.go create mode 100644 migration3/before/repository/git_config.go create mode 100644 migration3/before/repository/git_test.go create mode 100644 migration3/before/repository/git_testing.go create mode 100644 migration3/before/repository/gogit.go create mode 100644 migration3/before/repository/gogit_config.go create mode 100644 migration3/before/repository/gogit_test.go create mode 100644 
migration3/before/repository/gogit_testing.go create mode 100644 migration3/before/repository/hash.go create mode 100644 migration3/before/repository/keyring.go create mode 100644 migration3/before/repository/mock_repo.go create mode 100644 migration3/before/repository/mock_repo_test.go create mode 100644 migration3/before/repository/repo.go create mode 100644 migration3/before/repository/repo_testing.go create mode 100644 migration3/before/repository/tree_entry.go create mode 100644 migration3/before/repository/tree_entry_test.go create mode 100644 migration3/before/util/lamport/clock.go create mode 100644 migration3/before/util/lamport/clock_testing.go create mode 100644 migration3/before/util/lamport/mem_clock.go create mode 100644 migration3/before/util/lamport/mem_clock_test.go create mode 100644 migration3/before/util/lamport/persisted_clock.go create mode 100644 migration3/before/util/lamport/persisted_clock_test.go create mode 100644 migration3/before/util/text/transform.go create mode 100644 migration3/before/util/text/validate.go create mode 100644 migration3/before/util/timestamp/timestamp.go create mode 100644 migration3/migration3.go diff --git a/migration3/README.md b/migration3/README.md new file mode 100644 index 0000000..1bc6c03 --- /dev/null +++ b/migration3/README.md @@ -0,0 +1,3 @@ +Usage: +- Recreate all the bugs +- Recreate all the identities \ No newline at end of file diff --git a/migration3/after/bug/bug.go b/migration3/after/bug/bug.go new file mode 100644 index 0000000..7b41d8c --- /dev/null +++ b/migration3/after/bug/bug.go @@ -0,0 +1,661 @@ +// Package bug contains the bug data model and low-level related functions +package bug + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + 
"github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +const bugsRefPattern = "refs/bugs/" +const bugsRemoteRefPattern = "refs/remotes/%s/bugs/" + +const opsEntryName = "ops" +const mediaEntryName = "media" + +const createClockEntryPrefix = "create-clock-" +const createClockEntryPattern = "create-clock-%d" +const editClockEntryPrefix = "edit-clock-" +const editClockEntryPattern = "edit-clock-%d" + +const creationClockName = "bug-create" +const editClockName = "bug-edit" + +var ErrBugNotExist = errors.New("bug doesn't exist") + +func NewErrMultipleMatchBug(matching []entity.Id) *entity.ErrMultipleMatch { + return entity.NewErrMultipleMatch("bug", matching) +} + +func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch { + return entity.NewErrMultipleMatch("operation", matching) +} + +var _ Interface = &Bug{} +var _ entity.Interface = &Bug{} + +// Bug hold the data of a bug thread, organized in a way close to +// how it will be persisted inside Git. This is the data structure +// used to merge two different version of the same Bug. +type Bug struct { + + // A Lamport clock is a logical clock that allow to order event + // inside a distributed system. 
+ // It must be the first field in this struct due to https://github.com/golang/go/issues/599 + createTime lamport.Time + editTime lamport.Time + + // Id used as unique identifier + id entity.Id + + lastCommit repository.Hash + + // all the committed operations + packs []OperationPack + + // a temporary pack of operations used for convenience to pile up new operations + // before a commit + staging OperationPack +} + +// NewBug create a new Bug +func NewBug() *Bug { + // No id yet + // No logical clock yet + return &Bug{id: entity.UnsetId} +} + +// ReadLocal will read a local bug from its hash +func ReadLocal(repo repository.ClockedRepo, id entity.Id) (*Bug, error) { + ref := bugsRefPattern + id.String() + return read(repo, identity.NewSimpleResolver(repo), ref) +} + +// ReadLocalWithResolver will read a local bug from its hash +func ReadLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, id entity.Id) (*Bug, error) { + ref := bugsRefPattern + id.String() + return read(repo, identityResolver, ref) +} + +// ReadRemote will read a remote bug from its hash +func ReadRemote(repo repository.ClockedRepo, remote string, id entity.Id) (*Bug, error) { + ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String() + return read(repo, identity.NewSimpleResolver(repo), ref) +} + +// ReadRemoteWithResolver will read a remote bug from its hash +func ReadRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string, id entity.Id) (*Bug, error) { + ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String() + return read(repo, identityResolver, ref) +} + +// read will read and parse a Bug from git +func read(repo repository.ClockedRepo, identityResolver identity.Resolver, ref string) (*Bug, error) { + id := refToId(ref) + + if err := id.Validate(); err != nil { + return nil, errors.Wrap(err, "invalid ref ") + } + + hashes, err := repo.ListCommits(ref) + if err != nil { + return nil, ErrBugNotExist + } + if 
len(hashes) == 0 { + return nil, fmt.Errorf("empty bug") + } + + bug := Bug{ + id: id, + } + + // Load each OperationPack + for _, hash := range hashes { + tree, err := readTree(repo, hash) + if err != nil { + return nil, err + } + + // Due to rebase, edit Lamport time are not necessarily ordered + if tree.editTime > bug.editTime { + bug.editTime = tree.editTime + } + + // Update the clocks + err = repo.Witness(creationClockName, bug.createTime) + if err != nil { + return nil, errors.Wrap(err, "failed to update create lamport clock") + } + err = repo.Witness(editClockName, bug.editTime) + if err != nil { + return nil, errors.Wrap(err, "failed to update edit lamport clock") + } + + data, err := repo.ReadData(tree.opsEntry.Hash) + if err != nil { + return nil, errors.Wrap(err, "failed to read git blob data") + } + + opp := &OperationPack{} + err = json.Unmarshal(data, &opp) + if err != nil { + return nil, errors.Wrap(err, "failed to decode OperationPack json") + } + + // tag the pack with the commit hash + opp.commitHash = hash + bug.lastCommit = hash + + // if it's the first OperationPack read + if len(bug.packs) == 0 { + bug.createTime = tree.createTime + } + + bug.packs = append(bug.packs, *opp) + } + + // Bug Id is the Id of the first operation + if len(bug.packs[0].Operations) == 0 { + return nil, fmt.Errorf("first OperationPack is empty") + } + if bug.id != bug.packs[0].Operations[0].Id() { + return nil, fmt.Errorf("bug ID doesn't match the first operation ID") + } + + // Make sure that the identities are properly loaded + err = bug.EnsureIdentities(identityResolver) + if err != nil { + return nil, err + } + + return &bug, nil +} + +// RemoveBug will remove a local bug from its entity.Id +func RemoveBug(repo repository.ClockedRepo, id entity.Id) error { + var fullMatches []string + + refs, err := repo.ListRefs(bugsRefPattern + id.String()) + if err != nil { + return err + } + if len(refs) > 1 { + return NewErrMultipleMatchBug(refsToIds(refs)) + } + if len(refs) 
== 1 { + // we have the bug locally + fullMatches = append(fullMatches, refs[0]) + } + + remotes, err := repo.GetRemotes() + if err != nil { + return err + } + + for remote := range remotes { + remotePrefix := fmt.Sprintf(bugsRemoteRefPattern+id.String(), remote) + remoteRefs, err := repo.ListRefs(remotePrefix) + if err != nil { + return err + } + if len(remoteRefs) > 1 { + return NewErrMultipleMatchBug(refsToIds(refs)) + } + if len(remoteRefs) == 1 { + // found the bug in a remote + fullMatches = append(fullMatches, remoteRefs[0]) + } + } + + if len(fullMatches) == 0 { + return ErrBugNotExist + } + + for _, ref := range fullMatches { + err = repo.RemoveRef(ref) + if err != nil { + return err + } + } + + return nil +} + +type StreamedBug struct { + Bug *Bug + Err error +} + +// ReadAllLocal read and parse all local bugs +func ReadAllLocal(repo repository.ClockedRepo) <-chan StreamedBug { + return readAll(repo, identity.NewSimpleResolver(repo), bugsRefPattern) +} + +// ReadAllLocalWithResolver read and parse all local bugs +func ReadAllLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug { + return readAll(repo, identityResolver, bugsRefPattern) +} + +// ReadAllRemote read and parse all remote bugs for a given remote +func ReadAllRemote(repo repository.ClockedRepo, remote string) <-chan StreamedBug { + refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote) + return readAll(repo, identity.NewSimpleResolver(repo), refPrefix) +} + +// ReadAllRemoteWithResolver read and parse all remote bugs for a given remote +func ReadAllRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string) <-chan StreamedBug { + refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote) + return readAll(repo, identityResolver, refPrefix) +} + +// Read and parse all available bug with a given ref prefix +func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver, refPrefix string) <-chan 
StreamedBug { + out := make(chan StreamedBug) + + go func() { + defer close(out) + + refs, err := repo.ListRefs(refPrefix) + if err != nil { + out <- StreamedBug{Err: err} + return + } + + for _, ref := range refs { + b, err := read(repo, identityResolver, ref) + + if err != nil { + out <- StreamedBug{Err: err} + return + } + + out <- StreamedBug{Bug: b} + } + }() + + return out +} + +// ListLocalIds list all the available local bug ids +func ListLocalIds(repo repository.Repo) ([]entity.Id, error) { + refs, err := repo.ListRefs(bugsRefPattern) + if err != nil { + return nil, err + } + + return refsToIds(refs), nil +} + +func refsToIds(refs []string) []entity.Id { + ids := make([]entity.Id, len(refs)) + + for i, ref := range refs { + ids[i] = refToId(ref) + } + + return ids +} + +func refToId(ref string) entity.Id { + split := strings.Split(ref, "/") + return entity.Id(split[len(split)-1]) +} + +// Validate check if the Bug data is valid +func (bug *Bug) Validate() error { + // non-empty + if len(bug.packs) == 0 && bug.staging.IsEmpty() { + return fmt.Errorf("bug has no operations") + } + + // check if each pack and operations are valid + for _, pack := range bug.packs { + if err := pack.Validate(); err != nil { + return err + } + } + + // check if staging is valid if needed + if !bug.staging.IsEmpty() { + if err := bug.staging.Validate(); err != nil { + return errors.Wrap(err, "staging") + } + } + + // The very first Op should be a CreateOp + firstOp := bug.FirstOp() + if firstOp == nil || firstOp.base().OperationType != CreateOp { + return fmt.Errorf("first operation should be a Create op") + } + + // The bug Id should be the id of the first operation + if bug.FirstOp().Id() != bug.id { + fmt.Println("bug", bug.id.String()) + fmt.Println("op", bug.FirstOp().Id().String()) + return fmt.Errorf("bug id should be the first commit hash") + } + + // Check that there is no more CreateOp op + // Check that there is no colliding operation's ID + it := 
NewOperationIterator(bug) + createCount := 0 + ids := make(map[entity.Id]struct{}) + for it.Next() { + if it.Value().base().OperationType == CreateOp { + createCount++ + } + if _, ok := ids[it.Value().Id()]; ok { + return fmt.Errorf("id collision: %s", it.Value().Id()) + } + ids[it.Value().Id()] = struct{}{} + } + + if createCount != 1 { + return fmt.Errorf("only one Create op allowed") + } + + return nil +} + +// Append an operation into the staging area, to be committed later +func (bug *Bug) Append(op Operation) { + if len(bug.packs) == 0 && len(bug.staging.Operations) == 0 { + if op.base().OperationType != CreateOp { + panic("first operation should be a Create") + } + bug.id = op.Id() + } + bug.staging.Append(op) +} + +// Commit write the staging area in Git and move the operations to the packs +func (bug *Bug) Commit(repo repository.ClockedRepo) error { + if !bug.NeedCommit() { + return fmt.Errorf("can't commit a bug with no pending operation") + } + + if err := bug.Validate(); err != nil { + return errors.Wrap(err, "can't commit a bug with invalid data") + } + + // update clocks + var err error + bug.editTime, err = repo.Increment(editClockName) + if err != nil { + return err + } + if bug.lastCommit == "" { + bug.createTime, err = repo.Increment(creationClockName) + if err != nil { + return err + } + } + + // Write the Ops as a Git blob containing the serialized array + hash, err := bug.staging.Write(repo) + if err != nil { + return err + } + + // Make a Git tree referencing this blob + tree := []repository.TreeEntry{ + // the last pack of ops + {ObjectType: repository.Blob, Hash: hash, Name: opsEntryName}, + } + + // Store the logical clocks as well + // --> edit clock for each OperationPack/commits + // --> create clock only for the first OperationPack/commits + // + // To avoid having one blob for each clock value, clocks are serialized + // directly into the entry name + emptyBlobHash, err := repo.StoreData([]byte{}) + if err != nil { + return err + } + 
tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Blob, + Hash: emptyBlobHash, + Name: fmt.Sprintf(editClockEntryPattern, bug.editTime), + }) + if bug.lastCommit == "" { + tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Blob, + Hash: emptyBlobHash, + Name: fmt.Sprintf(createClockEntryPattern, bug.createTime), + }) + } + + // Reference, if any, all the files required by the ops + // Git will check that they actually exist in the storage and will make sure + // to push/pull them as needed. + mediaTree := makeMediaTree(bug.staging) + if len(mediaTree) > 0 { + mediaTreeHash, err := repo.StoreTree(mediaTree) + if err != nil { + return err + } + tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Tree, + Hash: mediaTreeHash, + Name: mediaEntryName, + }) + } + + // Store the tree + hash, err = repo.StoreTree(tree) + if err != nil { + return err + } + + // Write a Git commit referencing the tree, with the previous commit as parent + if bug.lastCommit != "" { + hash, err = repo.StoreCommitWithParent(hash, bug.lastCommit) + } else { + hash, err = repo.StoreCommit(hash) + } + if err != nil { + return err + } + + bug.lastCommit = hash + bug.staging.commitHash = hash + bug.packs = append(bug.packs, bug.staging) + bug.staging = OperationPack{} + + // if it was the first commit, use the Id of the first op (create) + if bug.id == "" || bug.id == entity.UnsetId { + bug.id = bug.packs[0].Operations[0].Id() + } + + // Create or update the Git reference for this bug + // When pushing later, the remote will ensure that this ref update + // is fast-forward, that is no data has been overwritten + ref := fmt.Sprintf("%s%s", bugsRefPattern, bug.id) + return repo.UpdateRef(ref, hash) +} + +func (bug *Bug) CommitAsNeeded(repo repository.ClockedRepo) error { + if !bug.NeedCommit() { + return nil + } + return bug.Commit(repo) +} + +func (bug *Bug) NeedCommit() bool { + return !bug.staging.IsEmpty() +} + +// Merge a different version of the 
same bug by rebasing operations of this bug +// that are not present in the other on top of the chain of operations of the +// other version. +func (bug *Bug) Merge(repo repository.Repo, other Interface) (bool, error) { + var otherBug = bugFromInterface(other) + + // Note: a faster merge should be possible without actually reading and parsing + // all operations pack of our side. + // Reading the other side is still necessary to validate remote data, at least + // for new operations + + if bug.id != otherBug.id { + return false, errors.New("merging unrelated bugs is not supported") + } + + if len(otherBug.staging.Operations) > 0 { + return false, errors.New("merging a bug with a non-empty staging is not supported") + } + + if bug.lastCommit == "" || otherBug.lastCommit == "" { + return false, errors.New("can't merge a bug that has never been stored") + } + + ancestor, err := repo.FindCommonAncestor(bug.lastCommit, otherBug.lastCommit) + if err != nil { + return false, errors.Wrap(err, "can't find common ancestor") + } + + ancestorIndex := 0 + newPacks := make([]OperationPack, 0, len(bug.packs)) + + // Find the root of the rebase + for i, pack := range bug.packs { + newPacks = append(newPacks, pack) + + if pack.commitHash == ancestor { + ancestorIndex = i + break + } + } + + if len(otherBug.packs) == ancestorIndex+1 { + // Nothing to rebase, return early + return false, nil + } + + // get other bug's extra packs + for i := ancestorIndex + 1; i < len(otherBug.packs); i++ { + // clone is probably not necessary + newPack := otherBug.packs[i].Clone() + + newPacks = append(newPacks, newPack) + bug.lastCommit = newPack.commitHash + } + + // rebase our extra packs + for i := ancestorIndex + 1; i < len(bug.packs); i++ { + pack := bug.packs[i] + + // get the referenced git tree + treeHash, err := repo.GetTreeHash(pack.commitHash) + + if err != nil { + return false, err + } + + // create a new commit with the correct ancestor + hash, err := 
repo.StoreCommitWithParent(treeHash, bug.lastCommit) + + if err != nil { + return false, err + } + + // replace the pack + newPack := pack.Clone() + newPack.commitHash = hash + newPacks = append(newPacks, newPack) + + // update the bug + bug.lastCommit = hash + } + + bug.packs = newPacks + + // Update the git ref + err = repo.UpdateRef(bugsRefPattern+bug.id.String(), bug.lastCommit) + if err != nil { + return false, err + } + + return true, nil +} + +// Id return the Bug identifier +func (bug *Bug) Id() entity.Id { + if bug.id == "" || bug.id == entity.UnsetId { + // simply panic as it would be a coding error + // (using an id of a bug without operation yet) + panic("no id yet") + } + return bug.id +} + +// CreateLamportTime return the Lamport time of creation +func (bug *Bug) CreateLamportTime() lamport.Time { + return bug.createTime +} + +// EditLamportTime return the Lamport time of the last edit +func (bug *Bug) EditLamportTime() lamport.Time { + return bug.editTime +} + +// Lookup for the very first operation of the bug. +// For a valid Bug, this operation should be a CreateOp +func (bug *Bug) FirstOp() Operation { + for _, pack := range bug.packs { + for _, op := range pack.Operations { + return op + } + } + + if !bug.staging.IsEmpty() { + return bug.staging.Operations[0] + } + + return nil +} + +// Lookup for the very last operation of the bug. 
+// For a valid Bug, should never be nil +func (bug *Bug) LastOp() Operation { + if !bug.staging.IsEmpty() { + return bug.staging.Operations[len(bug.staging.Operations)-1] + } + + if len(bug.packs) == 0 { + return nil + } + + lastPack := bug.packs[len(bug.packs)-1] + + if len(lastPack.Operations) == 0 { + return nil + } + + return lastPack.Operations[len(lastPack.Operations)-1] +} + +// Compile a bug in a easily usable snapshot +func (bug *Bug) Compile() Snapshot { + snap := Snapshot{ + id: bug.id, + Status: OpenStatus, + } + + it := NewOperationIterator(bug) + + for it.Next() { + op := it.Value() + op.Apply(&snap) + snap.Operations = append(snap.Operations, op) + } + + return snap +} diff --git a/migration3/after/bug/bug_actions.go b/migration3/after/bug/bug_actions.go new file mode 100644 index 0000000..dff0995 --- /dev/null +++ b/migration3/after/bug/bug_actions.go @@ -0,0 +1,143 @@ +package bug + +import ( + "fmt" + "strings" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/pkg/errors" +) + +// Fetch retrieve updates from a remote +// This does not change the local bugs state +func Fetch(repo repository.Repo, remote string) (string, error) { + // "refs/bugs/*:refs/remotes/>/bugs/*" + remoteRefSpec := fmt.Sprintf(bugsRemoteRefPattern, remote) + fetchRefSpec := fmt.Sprintf("%s*:%s*", bugsRefPattern, remoteRefSpec) + + return repo.FetchRefs(remote, fetchRefSpec) +} + +// Push update a remote with the local changes +func Push(repo repository.Repo, remote string) (string, error) { + // "refs/bugs/*:refs/bugs/*" + refspec := fmt.Sprintf("%s*:%s*", bugsRefPattern, bugsRefPattern) + + return repo.PushRefs(remote, refspec) +} + +// Pull will do a Fetch + MergeAll +// This function will return an error if a merge fail +func Pull(repo repository.ClockedRepo, remote string) error { + _, err := 
Fetch(repo, remote) + if err != nil { + return err + } + + for merge := range MergeAll(repo, remote) { + if merge.Err != nil { + return merge.Err + } + if merge.Status == entity.MergeStatusInvalid { + return errors.Errorf("merge failure: %s", merge.Reason) + } + } + + return nil +} + +// MergeAll will merge all the available remote bug: +// +// - If the remote has new commit, the local bug is updated to match the same history +// (fast-forward update) +// - if the local bug has new commits but the remote don't, nothing is changed +// - if both local and remote bug have new commits (that is, we have a concurrent edition), +// new local commits are rewritten at the head of the remote history (that is, a rebase) +func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeResult { + out := make(chan entity.MergeResult) + + // no caching for the merge, we load everything from git even if that means multiple + // copy of the same entity in memory. The cache layer will intercept the results to + // invalidate entities if necessary. 
+ identityResolver := identity.NewSimpleResolver(repo) + + go func() { + defer close(out) + + remoteRefSpec := fmt.Sprintf(bugsRemoteRefPattern, remote) + remoteRefs, err := repo.ListRefs(remoteRefSpec) + + if err != nil { + out <- entity.MergeResult{Err: err} + return + } + + for _, remoteRef := range remoteRefs { + refSplit := strings.Split(remoteRef, "/") + id := entity.Id(refSplit[len(refSplit)-1]) + + if err := id.Validate(); err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "invalid ref").Error()) + continue + } + + remoteBug, err := read(repo, identityResolver, remoteRef) + + if err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote bug is not readable").Error()) + continue + } + + // Check for error in remote data + if err := remoteBug.Validate(); err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote bug is invalid").Error()) + continue + } + + localRef := bugsRefPattern + remoteBug.Id().String() + localExist, err := repo.RefExist(localRef) + + if err != nil { + out <- entity.NewMergeError(err, id) + continue + } + + // the bug is not local yet, simply create the reference + if !localExist { + err := repo.CopyRef(remoteRef, localRef) + + if err != nil { + out <- entity.NewMergeError(err, id) + return + } + + out <- entity.NewMergeStatus(entity.MergeStatusNew, id, remoteBug) + continue + } + + localBug, err := read(repo, identityResolver, localRef) + + if err != nil { + out <- entity.NewMergeError(errors.Wrap(err, "local bug is not readable"), id) + return + } + + updated, err := localBug.Merge(repo, remoteBug) + + if err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "merge failed").Error()) + return + } + + if updated { + out <- entity.NewMergeStatus(entity.MergeStatusUpdated, id, localBug) + } else { + out <- entity.NewMergeStatus(entity.MergeStatusNothing, id, localBug) + } + } + }() + + return out +} diff --git a/migration3/after/bug/bug_actions_test.go 
b/migration3/after/bug/bug_actions_test.go new file mode 100644 index 0000000..8f7a2b8 --- /dev/null +++ b/migration3/after/bug/bug_actions_test.go @@ -0,0 +1,394 @@ +package bug + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +func TestPushPull(t *testing.T) { + repoA, repoB, remote := repository.SetupReposAndRemote() + defer repository.CleanupTestRepos(repoA, repoB, remote) + + reneA, err := identity.NewIdentity(repoA, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = reneA.Commit(repoA) + require.NoError(t, err) + + bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") + require.NoError(t, err) + assert.True(t, bug1.NeedCommit()) + err = bug1.Commit(repoA) + require.NoError(t, err) + assert.False(t, bug1.NeedCommit()) + + // distribute the identity + _, err = identity.Push(repoA, "origin") + require.NoError(t, err) + err = identity.Pull(repoB, "origin") + require.NoError(t, err) + + // A --> remote --> B + _, err = Push(repoA, "origin") + require.NoError(t, err) + + err = Pull(repoB, "origin") + require.NoError(t, err) + + bugs := allBugs(t, ReadAllLocal(repoB)) + + if len(bugs) != 1 { + t.Fatal("Unexpected number of bugs") + } + + // B --> remote --> A + reneB, err := identity.ReadLocal(repoA, reneA.Id()) + require.NoError(t, err) + + bug2, _, err := Create(reneB, time.Now().Unix(), "bug2", "message") + require.NoError(t, err) + err = bug2.Commit(repoB) + require.NoError(t, err) + + _, err = Push(repoB, "origin") + require.NoError(t, err) + + err = Pull(repoA, "origin") + require.NoError(t, err) + + bugs = allBugs(t, ReadAllLocal(repoA)) + + if len(bugs) != 2 { + t.Fatal("Unexpected number of bugs") + } +} + +func allBugs(t testing.TB, bugs <-chan StreamedBug) []*Bug { + var result []*Bug + for streamed := range 
bugs { + if streamed.Err != nil { + t.Fatal(streamed.Err) + } + result = append(result, streamed.Bug) + } + return result +} + +func TestRebaseTheirs(t *testing.T) { + _RebaseTheirs(t) +} + +func BenchmarkRebaseTheirs(b *testing.B) { + for n := 0; n < b.N; n++ { + _RebaseTheirs(b) + } +} + +func _RebaseTheirs(t testing.TB) { + repoA, repoB, remote := repository.SetupReposAndRemote() + defer repository.CleanupTestRepos(repoA, repoB, remote) + + reneA, err := identity.NewIdentity(repoA, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = reneA.Commit(repoA) + require.NoError(t, err) + + bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") + require.NoError(t, err) + assert.True(t, bug1.NeedCommit()) + err = bug1.Commit(repoA) + require.NoError(t, err) + assert.False(t, bug1.NeedCommit()) + + // distribute the identity + _, err = identity.Push(repoA, "origin") + require.NoError(t, err) + err = identity.Pull(repoB, "origin") + require.NoError(t, err) + + // A --> remote + + _, err = Push(repoA, "origin") + require.NoError(t, err) + + // remote --> B + err = Pull(repoB, "origin") + require.NoError(t, err) + + bug2, err := ReadLocal(repoB, bug1.Id()) + require.NoError(t, err) + assert.False(t, bug2.NeedCommit()) + + reneB, err := identity.ReadLocal(repoA, reneA.Id()) + require.NoError(t, err) + + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message2") + require.NoError(t, err) + assert.True(t, bug2.NeedCommit()) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message3") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message4") + require.NoError(t, err) + err = bug2.Commit(repoB) + require.NoError(t, err) + assert.False(t, bug2.NeedCommit()) + + // B --> remote + _, err = Push(repoB, "origin") + require.NoError(t, err) + + // remote --> A + err = Pull(repoA, "origin") + require.NoError(t, err) + + bugs := allBugs(t, ReadAllLocal(repoB)) + + if len(bugs) != 1 { + t.Fatal("Unexpected number 
of bugs") + } + + bug3, err := ReadLocal(repoA, bug1.Id()) + require.NoError(t, err) + + if nbOps(bug3) != 4 { + t.Fatal("Unexpected number of operations") + } +} + +func TestRebaseOurs(t *testing.T) { + _RebaseOurs(t) +} + +func BenchmarkRebaseOurs(b *testing.B) { + for n := 0; n < b.N; n++ { + _RebaseOurs(b) + } +} + +func _RebaseOurs(t testing.TB) { + repoA, repoB, remote := repository.SetupReposAndRemote() + defer repository.CleanupTestRepos(repoA, repoB, remote) + + reneA, err := identity.NewIdentity(repoA, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = reneA.Commit(repoA) + require.NoError(t, err) + + bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + // distribute the identity + _, err = identity.Push(repoA, "origin") + require.NoError(t, err) + err = identity.Pull(repoB, "origin") + require.NoError(t, err) + + // A --> remote + _, err = Push(repoA, "origin") + require.NoError(t, err) + + // remote --> B + err = Pull(repoB, "origin") + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message2") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message3") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message4") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message5") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message6") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message7") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message8") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message9") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, 
time.Now().Unix(), "message10") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + // remote --> A + err = Pull(repoA, "origin") + require.NoError(t, err) + + bugs := allBugs(t, ReadAllLocal(repoA)) + + if len(bugs) != 1 { + t.Fatal("Unexpected number of bugs") + } + + bug2, err := ReadLocal(repoA, bug1.Id()) + require.NoError(t, err) + + if nbOps(bug2) != 10 { + t.Fatal("Unexpected number of operations") + } +} + +func nbOps(b *Bug) int { + it := NewOperationIterator(b) + counter := 0 + for it.Next() { + counter++ + } + return counter +} + +func TestRebaseConflict(t *testing.T) { + _RebaseConflict(t) +} + +func BenchmarkRebaseConflict(b *testing.B) { + for n := 0; n < b.N; n++ { + _RebaseConflict(b) + } +} + +func _RebaseConflict(t testing.TB) { + repoA, repoB, remote := repository.SetupReposAndRemote() + defer repository.CleanupTestRepos(repoA, repoB, remote) + + reneA, err := identity.NewIdentity(repoA, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = reneA.Commit(repoA) + require.NoError(t, err) + + bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + // distribute the identity + _, err = identity.Push(repoA, "origin") + require.NoError(t, err) + err = identity.Pull(repoB, "origin") + require.NoError(t, err) + + // A --> remote + _, err = Push(repoA, "origin") + require.NoError(t, err) + + // remote --> B + err = Pull(repoB, "origin") + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message2") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message3") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message4") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message5") + require.NoError(t, err) + _, err = AddComment(bug1, 
reneA, time.Now().Unix(), "message6") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message7") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message8") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message9") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message10") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + bug2, err := ReadLocal(repoB, bug1.Id()) + require.NoError(t, err) + + reneB, err := identity.ReadLocal(repoA, reneA.Id()) + require.NoError(t, err) + + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message11") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message12") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message13") + require.NoError(t, err) + err = bug2.Commit(repoB) + require.NoError(t, err) + + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message14") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message15") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message16") + require.NoError(t, err) + err = bug2.Commit(repoB) + require.NoError(t, err) + + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message17") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message18") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message19") + require.NoError(t, err) + err = bug2.Commit(repoB) + require.NoError(t, err) + + // A --> remote + _, err = Push(repoA, "origin") + require.NoError(t, err) + + // remote --> B + err = Pull(repoB, "origin") + require.NoError(t, err) + + bugs := allBugs(t, ReadAllLocal(repoB)) + + if len(bugs) != 1 { + t.Fatal("Unexpected number of bugs") + } + + bug3, err := ReadLocal(repoB, 
bug1.Id()) + require.NoError(t, err) + + if nbOps(bug3) != 19 { + t.Fatal("Unexpected number of operations") + } + + // B --> remote + _, err = Push(repoB, "origin") + require.NoError(t, err) + + // remote --> A + err = Pull(repoA, "origin") + require.NoError(t, err) + + bugs = allBugs(t, ReadAllLocal(repoA)) + + if len(bugs) != 1 { + t.Fatal("Unexpected number of bugs") + } + + bug4, err := ReadLocal(repoA, bug1.Id()) + require.NoError(t, err) + + if nbOps(bug4) != 19 { + t.Fatal("Unexpected number of operations") + } +} diff --git a/migration3/after/bug/bug_test.go b/migration3/after/bug/bug_test.go new file mode 100644 index 0000000..60b76be --- /dev/null +++ b/migration3/after/bug/bug_test.go @@ -0,0 +1,190 @@ +package bug + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +func TestBugId(t *testing.T) { + repo := repository.NewMockRepoForTest() + + bug1 := NewBug() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = rene.Commit(repo) + require.NoError(t, err) + + createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + + bug1.Append(createOp) + + err = bug1.Commit(repo) + + if err != nil { + t.Fatal(err) + } + + bug1.Id() +} + +func TestBugValidity(t *testing.T) { + repo := repository.NewMockRepoForTest() + + bug1 := NewBug() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = rene.Commit(repo) + require.NoError(t, err) + + createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + + if bug1.Validate() == nil { + t.Fatal("Empty bug should be invalid") + } + + bug1.Append(createOp) + + if bug1.Validate() != nil { + t.Fatal("Bug with just a CreateOp should be valid") + } + + err = bug1.Commit(repo) + if err != nil { + 
t.Fatal(err) + } + + bug1.Append(createOp) + + if bug1.Validate() == nil { + t.Fatal("Bug with multiple CreateOp should be invalid") + } + + err = bug1.Commit(repo) + if err == nil { + t.Fatal("Invalid bug should not commit") + } +} + +func TestBugCommitLoad(t *testing.T) { + repo := repository.NewMockRepoForTest() + + bug1 := NewBug() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = rene.Commit(repo) + require.NoError(t, err) + + createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1") + addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil) + + bug1.Append(createOp) + bug1.Append(setTitleOp) + + require.True(t, bug1.NeedCommit()) + + err = bug1.Commit(repo) + require.Nil(t, err) + require.False(t, bug1.NeedCommit()) + + bug2, err := ReadLocal(repo, bug1.Id()) + require.NoError(t, err) + equivalentBug(t, bug1, bug2) + + // add more op + + bug1.Append(addCommentOp) + + require.True(t, bug1.NeedCommit()) + + err = bug1.Commit(repo) + require.Nil(t, err) + require.False(t, bug1.NeedCommit()) + + bug3, err := ReadLocal(repo, bug1.Id()) + require.NoError(t, err) + equivalentBug(t, bug1, bug3) +} + +func equivalentBug(t *testing.T, expected, actual *Bug) { + require.Equal(t, len(expected.packs), len(actual.packs)) + + for i := range expected.packs { + for j := range expected.packs[i].Operations { + actual.packs[i].Operations[j].base().id = expected.packs[i].Operations[j].base().id + } + } + + require.Equal(t, expected, actual) +} + +func TestBugRemove(t *testing.T) { + repo := repository.CreateGoGitTestRepo(false) + remoteA := repository.CreateGoGitTestRepo(true) + remoteB := repository.CreateGoGitTestRepo(true) + defer repository.CleanupTestRepos(repo, remoteA, remoteB) + + err := repo.AddRemote("remoteA", "file://"+remoteA.GetPath()) + require.NoError(t, err) + + err = 
repo.AddRemote("remoteB", "file://"+remoteB.GetPath()) + require.NoError(t, err) + + // generate a bunch of bugs + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = rene.Commit(repo) + require.NoError(t, err) + + for i := 0; i < 100; i++ { + b := NewBug() + createOp := NewCreateOp(rene, time.Now().Unix(), "title", fmt.Sprintf("message%v", i), nil) + b.Append(createOp) + err = b.Commit(repo) + require.NoError(t, err) + } + + // and one more for testing + b := NewBug() + createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + b.Append(createOp) + err = b.Commit(repo) + require.NoError(t, err) + + _, err = Push(repo, "remoteA") + require.NoError(t, err) + + _, err = Push(repo, "remoteB") + require.NoError(t, err) + + _, err = Fetch(repo, "remoteA") + require.NoError(t, err) + + _, err = Fetch(repo, "remoteB") + require.NoError(t, err) + + err = RemoveBug(repo, b.Id()) + require.NoError(t, err) + + _, err = ReadLocal(repo, b.Id()) + require.Error(t, ErrBugNotExist, err) + + _, err = ReadRemote(repo, "remoteA", b.Id()) + require.Error(t, ErrBugNotExist, err) + + _, err = ReadRemote(repo, "remoteB", b.Id()) + require.Error(t, ErrBugNotExist, err) + + ids, err := ListLocalIds(repo) + require.NoError(t, err) + require.Len(t, ids, 100) +} diff --git a/migration3/after/bug/clocks.go b/migration3/after/bug/clocks.go new file mode 100644 index 0000000..d63fe4b --- /dev/null +++ b/migration3/after/bug/clocks.go @@ -0,0 +1,40 @@ +package bug + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +// ClockLoader is the repository.ClockLoader for the Bug entity +var ClockLoader = repository.ClockLoader{ + Clocks: []string{creationClockName, editClockName}, + Witnesser: func(repo repository.ClockedRepo) error { + // We don't care about the actual identity so an IdentityStub will do + resolver := 
identity.NewStubResolver() + for b := range ReadAllLocalWithResolver(repo, resolver) { + if b.Err != nil { + return b.Err + } + + createClock, err := repo.GetOrCreateClock(creationClockName) + if err != nil { + return err + } + err = createClock.Witness(b.Bug.createTime) + if err != nil { + return err + } + + editClock, err := repo.GetOrCreateClock(editClockName) + if err != nil { + return err + } + err = editClock.Witness(b.Bug.editTime) + if err != nil { + return err + } + } + + return nil + }, +} diff --git a/migration3/after/bug/comment.go b/migration3/after/bug/comment.go new file mode 100644 index 0000000..1966b11 --- /dev/null +++ b/migration3/after/bug/comment.go @@ -0,0 +1,44 @@ +package bug + +import ( + "github.com/dustin/go-humanize" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" +) + +// Comment represent a comment in a Bug +type Comment struct { + id entity.Id + Author identity.Interface + Message string + Files []repository.Hash + + // Creation time of the comment. + // Should be used only for human display, never for ordering as we can't rely on it in a distributed system. 
+ UnixTime timestamp.Timestamp +} + +// Id return the Comment identifier +func (c Comment) Id() entity.Id { + if c.id == "" { + // simply panic as it would be a coding error + // (using an id of an identity not stored yet) + panic("no id yet") + } + return c.id +} + +// FormatTimeRel format the UnixTime of the comment for human consumption +func (c Comment) FormatTimeRel() string { + return humanize.Time(c.UnixTime.Time()) +} + +func (c Comment) FormatTime() string { + return c.UnixTime.Time().Format("Mon Jan 2 15:04:05 2006 +0200") +} + +// Sign post method for gqlgen +func (c Comment) IsAuthored() {} diff --git a/migration3/after/bug/git_tree.go b/migration3/after/bug/git_tree.go new file mode 100644 index 0000000..a9abeec --- /dev/null +++ b/migration3/after/bug/git_tree.go @@ -0,0 +1,84 @@ +package bug + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +type gitTree struct { + opsEntry repository.TreeEntry + createTime lamport.Time + editTime lamport.Time +} + +func readTree(repo repository.RepoData, hash repository.Hash) (*gitTree, error) { + tree := &gitTree{} + + entries, err := repo.ReadTree(hash) + if err != nil { + return nil, errors.Wrap(err, "can't list git tree entries") + } + + opsFound := false + + for _, entry := range entries { + if entry.Name == opsEntryName { + tree.opsEntry = entry + opsFound = true + continue + } + if strings.HasPrefix(entry.Name, createClockEntryPrefix) { + n, err := fmt.Sscanf(entry.Name, createClockEntryPattern, &tree.createTime) + if err != nil { + return nil, errors.Wrap(err, "can't read create lamport time") + } + if n != 1 { + return nil, fmt.Errorf("could not parse create time lamport value") + } + } + if strings.HasPrefix(entry.Name, editClockEntryPrefix) { + n, err := fmt.Sscanf(entry.Name, editClockEntryPattern, &tree.editTime) + if err != nil { + return 
nil, errors.Wrap(err, "can't read edit lamport time") + } + if n != 1 { + return nil, fmt.Errorf("could not parse edit time lamport value") + } + } + } + + if !opsFound { + return nil, errors.New("invalid tree, missing the ops entry") + } + + return tree, nil +} + +func makeMediaTree(pack OperationPack) []repository.TreeEntry { + var tree []repository.TreeEntry + counter := 0 + added := make(map[repository.Hash]interface{}) + + for _, ops := range pack.Operations { + for _, file := range ops.GetFiles() { + if _, has := added[file]; !has { + tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Blob, + Hash: file, + // The name is not important here, we only need to + // reference the blob. + Name: fmt.Sprintf("file%d", counter), + }) + counter++ + added[file] = struct{}{} + } + } + } + + return tree +} diff --git a/migration3/after/bug/identity.go b/migration3/after/bug/identity.go new file mode 100644 index 0000000..c7a7927 --- /dev/null +++ b/migration3/after/bug/identity.go @@ -0,0 +1,27 @@ +package bug + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" +) + +// EnsureIdentities walk the graph of operations and make sure that all Identity +// are properly loaded. That is, it replace all the IdentityStub with the full +// Identity, loaded through a Resolver. 
+func (bug *Bug) EnsureIdentities(resolver identity.Resolver) error { + it := NewOperationIterator(bug) + + for it.Next() { + op := it.Value() + base := op.base() + + if stub, ok := base.Author.(*identity.IdentityStub); ok { + i, err := resolver.ResolveIdentity(stub.Id()) + if err != nil { + return err + } + + base.Author = i + } + } + return nil +} diff --git a/migration3/after/bug/interface.go b/migration3/after/bug/interface.go new file mode 100644 index 0000000..98a0560 --- /dev/null +++ b/migration3/after/bug/interface.go @@ -0,0 +1,57 @@ +package bug + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +type Interface interface { + // Id return the Bug identifier + Id() entity.Id + + // Validate check if the Bug data is valid + Validate() error + + // Append an operation into the staging area, to be committed later + Append(op Operation) + + // Indicate that the in-memory state changed and need to be commit in the repository + NeedCommit() bool + + // Commit write the staging area in Git and move the operations to the packs + Commit(repo repository.ClockedRepo) error + + // Merge a different version of the same bug by rebasing operations of this bug + // that are not present in the other on top of the chain of operations of the + // other version. + Merge(repo repository.Repo, other Interface) (bool, error) + + // Lookup for the very first operation of the bug. + // For a valid Bug, this operation should be a CreateOp + FirstOp() Operation + + // Lookup for the very last operation of the bug. 
+ // For a valid Bug, should never be nil + LastOp() Operation + + // Compile a bug in a easily usable snapshot + Compile() Snapshot + + // CreateLamportTime return the Lamport time of creation + CreateLamportTime() lamport.Time + + // EditLamportTime return the Lamport time of the last edit + EditLamportTime() lamport.Time +} + +func bugFromInterface(bug Interface) *Bug { + switch bug := bug.(type) { + case *Bug: + return bug + case *WithSnapshot: + return bug.Bug + default: + panic("missing type case") + } +} diff --git a/migration3/after/bug/label.go b/migration3/after/bug/label.go new file mode 100644 index 0000000..0342828 --- /dev/null +++ b/migration3/after/bug/label.go @@ -0,0 +1,100 @@ +package bug + +import ( + "crypto/sha256" + "fmt" + "image/color" + "strings" + + fcolor "github.com/fatih/color" + + "github.com/MichaelMure/git-bug-migration/migration3/after/util/text" +) + +type Label string + +func (l Label) String() string { + return string(l) +} + +// RGBA from a Label computed in a deterministic way +func (l Label) Color() LabelColor { + // colors from: https://material-ui.com/style/color/ + colors := []LabelColor{ + {R: 244, G: 67, B: 54, A: 255}, // red + {R: 233, G: 30, B: 99, A: 255}, // pink + {R: 156, G: 39, B: 176, A: 255}, // purple + {R: 103, G: 58, B: 183, A: 255}, // deepPurple + {R: 63, G: 81, B: 181, A: 255}, // indigo + {R: 33, G: 150, B: 243, A: 255}, // blue + {R: 3, G: 169, B: 244, A: 255}, // lightBlue + {R: 0, G: 188, B: 212, A: 255}, // cyan + {R: 0, G: 150, B: 136, A: 255}, // teal + {R: 76, G: 175, B: 80, A: 255}, // green + {R: 139, G: 195, B: 74, A: 255}, // lightGreen + {R: 205, G: 220, B: 57, A: 255}, // lime + {R: 255, G: 235, B: 59, A: 255}, // yellow + {R: 255, G: 193, B: 7, A: 255}, // amber + {R: 255, G: 152, B: 0, A: 255}, // orange + {R: 255, G: 87, B: 34, A: 255}, // deepOrange + {R: 121, G: 85, B: 72, A: 255}, // brown + {R: 158, G: 158, B: 158, A: 255}, // grey + {R: 96, G: 125, B: 139, A: 255}, // blueGrey + } + 
+ id := 0 + hash := sha256.Sum256([]byte(l)) + for _, char := range hash { + id = (id + int(char)) % len(colors) + } + + return colors[id] +} + +func (l Label) Validate() error { + str := string(l) + + if text.Empty(str) { + return fmt.Errorf("empty") + } + + if strings.Contains(str, "\n") { + return fmt.Errorf("should be a single line") + } + + if !text.Safe(str) { + return fmt.Errorf("not fully printable") + } + + return nil +} + +type LabelColor color.RGBA + +func (lc LabelColor) RGBA() color.RGBA { + return color.RGBA(lc) +} + +func (lc LabelColor) Term256() Term256 { + red := Term256(lc.R) * 6 / 256 + green := Term256(lc.G) * 6 / 256 + blue := Term256(lc.B) * 6 / 256 + + return red*36 + green*6 + blue + 16 +} + +type Term256 int + +func (t Term256) Escape() string { + if fcolor.NoColor { + return "" + } + return fmt.Sprintf("\x1b[38;5;%dm", t) +} + +func (t Term256) Unescape() string { + if fcolor.NoColor { + return "" + } + return "\x1b[0m" +} diff --git a/migration3/after/bug/label_test.go b/migration3/after/bug/label_test.go new file mode 100644 index 0000000..49401c4 --- /dev/null +++ b/migration3/after/bug/label_test.go @@ -0,0 +1,35 @@ +package bug + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestLabelRGBA(t *testing.T) { + rgba := Label("test1").Color() + expected := LabelColor{R: 0, G: 150, B: 136, A: 255} + + require.Equal(t, expected, rgba) +} + +func TestLabelRGBASimilar(t *testing.T) { + rgba := Label("test2").Color() + expected := LabelColor{R: 3, G: 169, B: 244, A: 255} + + require.Equal(t, expected, rgba) +} + +func TestLabelRGBAReverse(t *testing.T) { + rgba := Label("tset").Color() + expected := LabelColor{R: 63, G: 81, B: 181, A: 255} + + require.Equal(t, expected, rgba) +} + +func TestLabelRGBAEqual(t *testing.T) { + color1 := Label("test").Color() + color2 := Label("test").Color() + + require.Equal(t, color1, color2) +} diff --git a/migration3/after/bug/op_add_comment.go 
b/migration3/after/bug/op_add_comment.go new file mode 100644 index 0000000..f0e19f8 --- /dev/null +++ b/migration3/after/bug/op_add_comment.go @@ -0,0 +1,132 @@ +package bug + +import ( + "encoding/json" + "fmt" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/text" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" +) + +var _ Operation = &AddCommentOperation{} + +// AddCommentOperation will add a new comment in the bug +type AddCommentOperation struct { + OpBase + Message string `json:"message"` + // TODO: change for a map[string]util.hash to store the filename ? + Files []repository.Hash `json:"files"` +} + +// Sign-post method for gqlgen +func (op *AddCommentOperation) IsOperation() {} + +func (op *AddCommentOperation) base() *OpBase { + return &op.OpBase +} + +func (op *AddCommentOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *AddCommentOperation) Apply(snapshot *Snapshot) { + snapshot.addActor(op.Author) + snapshot.addParticipant(op.Author) + + comment := Comment{ + id: op.Id(), + Message: op.Message, + Author: op.Author, + Files: op.Files, + UnixTime: timestamp.Timestamp(op.UnixTime), + } + + snapshot.Comments = append(snapshot.Comments, comment) + + item := &AddCommentTimelineItem{ + CommentTimelineItem: NewCommentTimelineItem(op.Id(), comment), + } + + snapshot.Timeline = append(snapshot.Timeline, item) +} + +func (op *AddCommentOperation) GetFiles() []repository.Hash { + return op.Files +} + +func (op *AddCommentOperation) Validate() error { + if err := opBaseValidate(op, AddCommentOp); err != nil { + return err + } + + if !text.Safe(op.Message) { + return fmt.Errorf("message is not fully printable") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshalling +// 
This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *AddCommentOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Message string `json:"message"` + Files []repository.Hash `json:"files"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Message = aux.Message + op.Files = aux.Files + + return nil +} + +// Sign post method for gqlgen +func (op *AddCommentOperation) IsAuthored() {} + +func NewAddCommentOp(author identity.Interface, unixTime int64, message string, files []repository.Hash) *AddCommentOperation { + return &AddCommentOperation{ + OpBase: newOpBase(AddCommentOp, author, unixTime), + Message: message, + Files: files, + } +} + +// CreateTimelineItem replace a AddComment operation in the Timeline and hold its edition history +type AddCommentTimelineItem struct { + CommentTimelineItem +} + +// Sign post method for gqlgen +func (a *AddCommentTimelineItem) IsAuthored() {} + +// Convenience function to apply the operation +func AddComment(b Interface, author identity.Interface, unixTime int64, message string) (*AddCommentOperation, error) { + return AddCommentWithFiles(b, author, unixTime, message, nil) +} + +func AddCommentWithFiles(b Interface, author identity.Interface, unixTime int64, message string, files []repository.Hash) (*AddCommentOperation, error) { + addCommentOp := NewAddCommentOp(author, unixTime, message, files) + if err := addCommentOp.Validate(); err != nil { + return nil, err + } + b.Append(addCommentOp) + return addCommentOp, nil +} diff --git a/migration3/after/bug/op_add_comment_test.go b/migration3/after/bug/op_add_comment_test.go new file mode 100644 index 0000000..60083a1 --- /dev/null +++ b/migration3/after/bug/op_add_comment_test.go @@ -0,0 +1,39 @@ +package bug + 
+import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +func TestAddCommentSerialize(t *testing.T) { + repo := repository.NewMockRepoClock() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewAddCommentOp(rene, unix, "message", nil) + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after AddCommentOperation + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + assert.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + assert.Equal(t, before, &after) +} diff --git a/migration3/after/bug/op_create.go b/migration3/after/bug/op_create.go new file mode 100644 index 0000000..967fbba --- /dev/null +++ b/migration3/after/bug/op_create.go @@ -0,0 +1,177 @@ +package bug + +import ( + "crypto/rand" + "encoding/json" + "fmt" + "strings" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/text" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" +) + +var _ Operation = &CreateOperation{} + +// CreateOperation define the initial creation of a bug +type CreateOperation struct { + OpBase + // mandatory random bytes to ensure a better randomness of the data of the first + // operation of a bug, used to later generate the ID + // len(Nonce) should be > 20 and < 64 bytes + Nonce []byte `json:"nonce"` + Title string `json:"title"` + Message string `json:"message"` + 
Files []repository.Hash `json:"files"` +} + +// Sign-post method for gqlgen +func (op *CreateOperation) IsOperation() {} + +func (op *CreateOperation) base() *OpBase { + return &op.OpBase +} + +func (op *CreateOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *CreateOperation) Apply(snapshot *Snapshot) { + snapshot.addActor(op.Author) + snapshot.addParticipant(op.Author) + + snapshot.Title = op.Title + + comment := Comment{ + id: op.Id(), + Message: op.Message, + Author: op.Author, + UnixTime: timestamp.Timestamp(op.UnixTime), + } + + snapshot.Comments = []Comment{comment} + snapshot.Author = op.Author + snapshot.CreateTime = op.Time() + + snapshot.Timeline = []TimelineItem{ + &CreateTimelineItem{ + CommentTimelineItem: NewCommentTimelineItem(op.Id(), comment), + }, + } +} + +func (op *CreateOperation) GetFiles() []repository.Hash { + return op.Files +} + +func (op *CreateOperation) Validate() error { + if err := opBaseValidate(op, CreateOp); err != nil { + return err + } + + if len(op.Nonce) > 64 { + return fmt.Errorf("create nonce is too big") + } + if len(op.Nonce) < 20 { + return fmt.Errorf("create nonce is too small") + } + + if text.Empty(op.Title) { + return fmt.Errorf("title is empty") + } + if strings.Contains(op.Title, "\n") { + return fmt.Errorf("title should be a single line") + } + if !text.Safe(op.Title) { + return fmt.Errorf("title is not fully printable") + } + + if !text.Safe(op.Message) { + return fmt.Errorf("message is not fully printable") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *CreateOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Nonce []byte `json:"nonce"` + Title string `json:"title"` + Message string 
`json:"message"` + Files []repository.Hash `json:"files"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Nonce = aux.Nonce + op.Title = aux.Title + op.Message = aux.Message + op.Files = aux.Files + + return nil +} + +// Sign post method for gqlgen +func (op *CreateOperation) IsAuthored() {} + +func makeNonce(len int) []byte { + result := make([]byte, len) + _, err := rand.Read(result) + if err != nil { + panic(err) + } + return result +} + +func NewCreateOp(author identity.Interface, unixTime int64, title, message string, files []repository.Hash) *CreateOperation { + return &CreateOperation{ + OpBase: newOpBase(CreateOp, author, unixTime), + Nonce: makeNonce(20), + Title: title, + Message: message, + Files: files, + } +} + +// CreateTimelineItem replace a Create operation in the Timeline and hold its edition history +type CreateTimelineItem struct { + CommentTimelineItem +} + +// Sign post method for gqlgen +func (c *CreateTimelineItem) IsAuthored() {} + +// Convenience function to apply the operation +func Create(author identity.Interface, unixTime int64, title, message string) (*Bug, *CreateOperation, error) { + return CreateWithFiles(author, unixTime, title, message, nil) +} + +func CreateWithFiles(author identity.Interface, unixTime int64, title, message string, files []repository.Hash) (*Bug, *CreateOperation, error) { + newBug := NewBug() + createOp := NewCreateOp(author, unixTime, title, message, files) + + if err := createOp.Validate(); err != nil { + return nil, createOp, err + } + + newBug.Append(createOp) + + return newBug, createOp, nil +} diff --git a/migration3/after/bug/op_create_test.go b/migration3/after/bug/op_create_test.go new file mode 100644 index 0000000..f9af5a6 --- /dev/null +++ b/migration3/after/bug/op_create_test.go @@ -0,0 +1,82 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + 
"github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" +) + +func TestCreate(t *testing.T) { + snapshot := Snapshot{} + + repo := repository.NewMockRepoClock() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + + unix := time.Now().Unix() + + create := NewCreateOp(rene, unix, "title", "message", nil) + + create.Apply(&snapshot) + + id := create.Id() + require.NoError(t, id.Validate()) + + comment := Comment{ + id: id, + Author: rene, + Message: "message", + UnixTime: timestamp.Timestamp(create.UnixTime), + } + + expected := Snapshot{ + Title: "title", + Comments: []Comment{ + comment, + }, + Author: rene, + Participants: []identity.Interface{rene}, + Actors: []identity.Interface{rene}, + CreateTime: create.Time(), + Timeline: []TimelineItem{ + &CreateTimelineItem{ + CommentTimelineItem: NewCommentTimelineItem(id, comment), + }, + }, + } + + require.Equal(t, expected, snapshot) +} + +func TestCreateSerialize(t *testing.T) { + repo := repository.NewMockRepoClock() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewCreateOp(rene, unix, "title", "message", nil) + + data, err := json.Marshal(before) + require.NoError(t, err) + + var after CreateOperation + err = json.Unmarshal(data, &after) + require.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + require.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + require.Equal(t, before, &after) +} diff --git a/migration3/after/bug/op_edit_comment.go b/migration3/after/bug/op_edit_comment.go new file mode 100644 index 0000000..eeb5103 --- /dev/null +++ b/migration3/after/bug/op_edit_comment.go @@ -0,0 +1,170 @@ +package 
bug + +import ( + "encoding/json" + "fmt" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" + + "github.com/MichaelMure/git-bug-migration/migration3/after/util/text" +) + +var _ Operation = &EditCommentOperation{} + +// EditCommentOperation will change a comment in the bug +type EditCommentOperation struct { + OpBase + Target entity.Id `json:"target"` + Message string `json:"message"` + Files []repository.Hash `json:"files"` +} + +// Sign-post method for gqlgen +func (op *EditCommentOperation) IsOperation() {} + +func (op *EditCommentOperation) base() *OpBase { + return &op.OpBase +} + +func (op *EditCommentOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *EditCommentOperation) Apply(snapshot *Snapshot) { + // Todo: currently any message can be edited, even by a different author + // crypto signature are needed. 
+ + snapshot.addActor(op.Author) + + var target TimelineItem + + for i, item := range snapshot.Timeline { + if item.Id() == op.Target { + target = snapshot.Timeline[i] + break + } + } + + if target == nil { + // Target not found, edit is a no-op + return + } + + comment := Comment{ + id: op.Target, + Message: op.Message, + Files: op.Files, + UnixTime: timestamp.Timestamp(op.UnixTime), + } + + switch target := target.(type) { + case *CreateTimelineItem: + target.Append(comment) + case *AddCommentTimelineItem: + target.Append(comment) + } + + // Updating the corresponding comment + + for i := range snapshot.Comments { + if snapshot.Comments[i].Id() == op.Target { + snapshot.Comments[i].Message = op.Message + snapshot.Comments[i].Files = op.Files + break + } + } +} + +func (op *EditCommentOperation) GetFiles() []repository.Hash { + return op.Files +} + +func (op *EditCommentOperation) Validate() error { + if err := opBaseValidate(op, EditCommentOp); err != nil { + return err + } + + if err := op.Target.Validate(); err != nil { + return errors.Wrap(err, "target hash is invalid") + } + + if !text.Safe(op.Message) { + return fmt.Errorf("message is not fully printable") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *EditCommentOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Target entity.Id `json:"target"` + Message string `json:"message"` + Files []repository.Hash `json:"files"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Target = aux.Target + op.Message = aux.Message + op.Files = aux.Files + + return nil +} + +// Sign post method for gqlgen +func (op *EditCommentOperation) IsAuthored() {} + +func 
NewEditCommentOp(author identity.Interface, unixTime int64, target entity.Id, message string, files []repository.Hash) *EditCommentOperation { + return &EditCommentOperation{ + OpBase: newOpBase(EditCommentOp, author, unixTime), + Target: target, + Message: message, + Files: files, + } +} + +// Convenience function to apply the operation +func EditComment(b Interface, author identity.Interface, unixTime int64, target entity.Id, message string) (*EditCommentOperation, error) { + return EditCommentWithFiles(b, author, unixTime, target, message, nil) +} + +func EditCommentWithFiles(b Interface, author identity.Interface, unixTime int64, target entity.Id, message string, files []repository.Hash) (*EditCommentOperation, error) { + editCommentOp := NewEditCommentOp(author, unixTime, target, message, files) + if err := editCommentOp.Validate(); err != nil { + return nil, err + } + b.Append(editCommentOp) + return editCommentOp, nil +} + +// Convenience function to edit the body of a bug (the first comment) +func EditCreateComment(b Interface, author identity.Interface, unixTime int64, message string) (*EditCommentOperation, error) { + createOp := b.FirstOp().(*CreateOperation) + return EditComment(b, author, unixTime, createOp.Id(), message) +} + +// Convenience function to edit the body of a bug (the first comment) +func EditCreateCommentWithFiles(b Interface, author identity.Interface, unixTime int64, message string, files []repository.Hash) (*EditCommentOperation, error) { + createOp := b.FirstOp().(*CreateOperation) + return EditCommentWithFiles(b, author, unixTime, createOp.Id(), message, files) +} diff --git a/migration3/after/bug/op_edit_comment_test.go b/migration3/after/bug/op_edit_comment_test.go new file mode 100644 index 0000000..1416862 --- /dev/null +++ b/migration3/after/bug/op_edit_comment_test.go @@ -0,0 +1,108 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + 
"github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +func TestEdit(t *testing.T) { + snapshot := Snapshot{} + + repo := repository.NewMockRepoForTest() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + + create := NewCreateOp(rene, unix, "title", "create", nil) + create.Apply(&snapshot) + + id1 := create.Id() + require.NoError(t, id1.Validate()) + + comment1 := NewAddCommentOp(rene, unix, "comment 1", nil) + comment1.Apply(&snapshot) + + id2 := comment1.Id() + require.NoError(t, id2.Validate()) + + // add another unrelated op in between + setTitle := NewSetTitleOp(rene, unix, "edited title", "title") + setTitle.Apply(&snapshot) + + comment2 := NewAddCommentOp(rene, unix, "comment 2", nil) + comment2.Apply(&snapshot) + + id3 := comment2.Id() + require.NoError(t, id3.Validate()) + + edit := NewEditCommentOp(rene, unix, id1, "create edited", nil) + edit.Apply(&snapshot) + + require.Equal(t, len(snapshot.Timeline), 4) + require.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) + require.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 1) + require.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1) + require.Equal(t, snapshot.Comments[0].Message, "create edited") + require.Equal(t, snapshot.Comments[1].Message, "comment 1") + require.Equal(t, snapshot.Comments[2].Message, "comment 2") + + edit2 := NewEditCommentOp(rene, unix, id2, "comment 1 edited", nil) + edit2.Apply(&snapshot) + + require.Equal(t, len(snapshot.Timeline), 4) + require.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) + require.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2) + require.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1) + 
require.Equal(t, snapshot.Comments[0].Message, "create edited") + require.Equal(t, snapshot.Comments[1].Message, "comment 1 edited") + require.Equal(t, snapshot.Comments[2].Message, "comment 2") + + edit3 := NewEditCommentOp(rene, unix, id3, "comment 2 edited", nil) + edit3.Apply(&snapshot) + + require.Equal(t, len(snapshot.Timeline), 4) + require.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) + require.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2) + require.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 2) + require.Equal(t, snapshot.Comments[0].Message, "create edited") + require.Equal(t, snapshot.Comments[1].Message, "comment 1 edited") + require.Equal(t, snapshot.Comments[2].Message, "comment 2 edited") +} + +func TestEditCommentSerialize(t *testing.T) { + repo := repository.NewMockRepoForTest() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewEditCommentOp(rene, unix, "target", "message", nil) + + data, err := json.Marshal(before) + require.NoError(t, err) + + var after EditCommentOperation + err = json.Unmarshal(data, &after) + require.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + require.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + require.Equal(t, before, &after) +} diff --git a/migration3/after/bug/op_label_change.go b/migration3/after/bug/op_label_change.go new file mode 100644 index 0000000..2c908ad --- /dev/null +++ b/migration3/after/bug/op_label_change.go @@ -0,0 +1,285 @@ +package bug + +import ( + "encoding/json" + "fmt" + "sort" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + 
"github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" +) + +var _ Operation = &LabelChangeOperation{} + +// LabelChangeOperation define a Bug operation to add or remove labels +type LabelChangeOperation struct { + OpBase + Added []Label `json:"added"` + Removed []Label `json:"removed"` +} + +// Sign-post method for gqlgen +func (op *LabelChangeOperation) IsOperation() {} + +func (op *LabelChangeOperation) base() *OpBase { + return &op.OpBase +} + +func (op *LabelChangeOperation) Id() entity.Id { + return idOperation(op) +} + +// Apply apply the operation +func (op *LabelChangeOperation) Apply(snapshot *Snapshot) { + snapshot.addActor(op.Author) + + // Add in the set +AddLoop: + for _, added := range op.Added { + for _, label := range snapshot.Labels { + if label == added { + // Already exist + continue AddLoop + } + } + + snapshot.Labels = append(snapshot.Labels, added) + } + + // Remove in the set + for _, removed := range op.Removed { + for i, label := range snapshot.Labels { + if label == removed { + snapshot.Labels[i] = snapshot.Labels[len(snapshot.Labels)-1] + snapshot.Labels = snapshot.Labels[:len(snapshot.Labels)-1] + } + } + } + + // Sort + sort.Slice(snapshot.Labels, func(i, j int) bool { + return string(snapshot.Labels[i]) < string(snapshot.Labels[j]) + }) + + item := &LabelChangeTimelineItem{ + id: op.Id(), + Author: op.Author, + UnixTime: timestamp.Timestamp(op.UnixTime), + Added: op.Added, + Removed: op.Removed, + } + + snapshot.Timeline = append(snapshot.Timeline, item) +} + +func (op *LabelChangeOperation) Validate() error { + if err := opBaseValidate(op, LabelChangeOp); err != nil { + return err + } + + for _, l := range op.Added { + if err := l.Validate(); err != nil { + return errors.Wrap(err, "added label") + } + } + + for _, l := range op.Removed { + if err := l.Validate(); err != nil { + return errors.Wrap(err, "removed label") + } + } + + if len(op.Added)+len(op.Removed) <= 0 { + return fmt.Errorf("no label change") + } + 
+ return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *LabelChangeOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Added []Label `json:"added"` + Removed []Label `json:"removed"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Added = aux.Added + op.Removed = aux.Removed + + return nil +} + +// Sign post method for gqlgen +func (op *LabelChangeOperation) IsAuthored() {} + +func NewLabelChangeOperation(author identity.Interface, unixTime int64, added, removed []Label) *LabelChangeOperation { + return &LabelChangeOperation{ + OpBase: newOpBase(LabelChangeOp, author, unixTime), + Added: added, + Removed: removed, + } +} + +type LabelChangeTimelineItem struct { + id entity.Id + Author identity.Interface + UnixTime timestamp.Timestamp + Added []Label + Removed []Label +} + +func (l LabelChangeTimelineItem) Id() entity.Id { + return l.id +} + +// Sign post method for gqlgen +func (l *LabelChangeTimelineItem) IsAuthored() {} + +// ChangeLabels is a convenience function to apply the operation +func ChangeLabels(b Interface, author identity.Interface, unixTime int64, add, remove []string) ([]LabelChangeResult, *LabelChangeOperation, error) { + var added, removed []Label + var results []LabelChangeResult + + snap := b.Compile() + + for _, str := range add { + label := Label(str) + + // check for duplicate + if labelExist(added, label) { + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeDuplicateInOp}) + continue + } + + // check that the label doesn't already exist + if labelExist(snap.Labels, label) { + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeAlreadySet}) + continue 
+ } + + added = append(added, label) + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeAdded}) + } + + for _, str := range remove { + label := Label(str) + + // check for duplicate + if labelExist(removed, label) { + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeDuplicateInOp}) + continue + } + + // check that the label actually exist + if !labelExist(snap.Labels, label) { + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeDoesntExist}) + continue + } + + removed = append(removed, label) + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeRemoved}) + } + + if len(added) == 0 && len(removed) == 0 { + return results, nil, fmt.Errorf("no label added or removed") + } + + labelOp := NewLabelChangeOperation(author, unixTime, added, removed) + + if err := labelOp.Validate(); err != nil { + return nil, nil, err + } + + b.Append(labelOp) + + return results, labelOp, nil +} + +// ForceChangeLabels is a convenience function to apply the operation +// The difference with ChangeLabels is that no checks of deduplications are done. You are entirely +// responsible of what you are doing. In the general case, you want to use ChangeLabels instead. +// The intended use of this function is to allow importers to create legal but unexpected label changes, +// like removing a label with no information of when it was added before. 
+func ForceChangeLabels(b Interface, author identity.Interface, unixTime int64, add, remove []string) (*LabelChangeOperation, error) { + added := make([]Label, len(add)) + for i, str := range add { + added[i] = Label(str) + } + + removed := make([]Label, len(remove)) + for i, str := range remove { + removed[i] = Label(str) + } + + labelOp := NewLabelChangeOperation(author, unixTime, added, removed) + + if err := labelOp.Validate(); err != nil { + return nil, err + } + + b.Append(labelOp) + + return labelOp, nil +} + +func labelExist(labels []Label, label Label) bool { + for _, l := range labels { + if l == label { + return true + } + } + + return false +} + +type LabelChangeStatus int + +const ( + _ LabelChangeStatus = iota + LabelChangeAdded + LabelChangeRemoved + LabelChangeDuplicateInOp + LabelChangeAlreadySet + LabelChangeDoesntExist +) + +type LabelChangeResult struct { + Label Label + Status LabelChangeStatus +} + +func (l LabelChangeResult) String() string { + switch l.Status { + case LabelChangeAdded: + return fmt.Sprintf("label %s added", l.Label) + case LabelChangeRemoved: + return fmt.Sprintf("label %s removed", l.Label) + case LabelChangeDuplicateInOp: + return fmt.Sprintf("label %s is a duplicate", l.Label) + case LabelChangeAlreadySet: + return fmt.Sprintf("label %s was already set", l.Label) + case LabelChangeDoesntExist: + return fmt.Sprintf("label %s doesn't exist on this bug", l.Label) + default: + panic(fmt.Sprintf("unknown label change status %v", l.Status)) + } +} diff --git a/migration3/after/bug/op_label_change_test.go b/migration3/after/bug/op_label_change_test.go new file mode 100644 index 0000000..01c6aa6 --- /dev/null +++ b/migration3/after/bug/op_label_change_test.go @@ -0,0 +1,40 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + 
+func TestLabelChangeSerialize(t *testing.T) { + repo := repository.NewMockRepoForTest() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}) + + data, err := json.Marshal(before) + require.NoError(t, err) + + var after LabelChangeOperation + err = json.Unmarshal(data, &after) + require.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + require.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + require.Equal(t, before, &after) +} diff --git a/migration3/after/bug/op_noop.go b/migration3/after/bug/op_noop.go new file mode 100644 index 0000000..570405c --- /dev/null +++ b/migration3/after/bug/op_noop.go @@ -0,0 +1,84 @@ +package bug + +import ( + "encoding/json" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" +) + +var _ Operation = &NoOpOperation{} + +// NoOpOperation is an operation that does not change the bug state. It can +// however be used to store arbitrary metadata in the bug history, for example +// to support a bridge feature. 
+type NoOpOperation struct { + OpBase +} + +// Sign-post method for gqlgen +func (op *NoOpOperation) IsOperation() {} + +func (op *NoOpOperation) base() *OpBase { + return &op.OpBase +} + +func (op *NoOpOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *NoOpOperation) Apply(snapshot *Snapshot) { + // Nothing to do +} + +func (op *NoOpOperation) Validate() error { + return opBaseValidate(op, NoOpOp) +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *NoOpOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct{}{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + + return nil +} + +// Sign post method for gqlgen +func (op *NoOpOperation) IsAuthored() {} + +func NewNoOpOp(author identity.Interface, unixTime int64) *NoOpOperation { + return &NoOpOperation{ + OpBase: newOpBase(NoOpOp, author, unixTime), + } +} + +// Convenience function to apply the operation +func NoOp(b Interface, author identity.Interface, unixTime int64, metadata map[string]string) (*NoOpOperation, error) { + op := NewNoOpOp(author, unixTime) + + for key, value := range metadata { + op.SetMetadata(key, value) + } + + if err := op.Validate(); err != nil { + return nil, err + } + b.Append(op) + return op, nil +} diff --git a/migration3/after/bug/op_noop_test.go b/migration3/after/bug/op_noop_test.go new file mode 100644 index 0000000..aa5e878 --- /dev/null +++ b/migration3/after/bug/op_noop_test.go @@ -0,0 +1,40 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + + 
"github.com/stretchr/testify/assert" +) + +func TestNoopSerialize(t *testing.T) { + repo := repository.NewMockRepoClock() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewNoOpOp(rene, unix) + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after NoOpOperation + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + assert.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + assert.Equal(t, before, &after) +} diff --git a/migration3/after/bug/op_set_metadata.go b/migration3/after/bug/op_set_metadata.go new file mode 100644 index 0000000..23b720d --- /dev/null +++ b/migration3/after/bug/op_set_metadata.go @@ -0,0 +1,113 @@ +package bug + +import ( + "encoding/json" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" +) + +var _ Operation = &SetMetadataOperation{} + +type SetMetadataOperation struct { + OpBase + Target entity.Id `json:"target"` + NewMetadata map[string]string `json:"new_metadata"` +} + +// Sign-post method for gqlgen +func (op *SetMetadataOperation) IsOperation() {} + +func (op *SetMetadataOperation) base() *OpBase { + return &op.OpBase +} + +func (op *SetMetadataOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *SetMetadataOperation) Apply(snapshot *Snapshot) { + for _, target := range snapshot.Operations { + if target.Id() == op.Target { + base := target.base() + + if base.extraMetadata == nil { + base.extraMetadata = make(map[string]string) + } + + // Apply the metadata in an immutable way: if a metadata already + // exist, it's not possible to override it. 
+ for key, val := range op.NewMetadata { + if _, exist := base.extraMetadata[key]; !exist { + base.extraMetadata[key] = val + } + } + + return + } + } +} + +func (op *SetMetadataOperation) Validate() error { + if err := opBaseValidate(op, SetMetadataOp); err != nil { + return err + } + + if err := op.Target.Validate(); err != nil { + return errors.Wrap(err, "target invalid") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *SetMetadataOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Target entity.Id `json:"target"` + NewMetadata map[string]string `json:"new_metadata"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Target = aux.Target + op.NewMetadata = aux.NewMetadata + + return nil +} + +// Sign post method for gqlgen +func (op *SetMetadataOperation) IsAuthored() {} + +func NewSetMetadataOp(author identity.Interface, unixTime int64, target entity.Id, newMetadata map[string]string) *SetMetadataOperation { + return &SetMetadataOperation{ + OpBase: newOpBase(SetMetadataOp, author, unixTime), + Target: target, + NewMetadata: newMetadata, + } +} + +// Convenience function to apply the operation +func SetMetadata(b Interface, author identity.Interface, unixTime int64, target entity.Id, newMetadata map[string]string) (*SetMetadataOperation, error) { + SetMetadataOp := NewSetMetadataOp(author, unixTime, target, newMetadata) + if err := SetMetadataOp.Validate(); err != nil { + return nil, err + } + b.Append(SetMetadataOp) + return SetMetadataOp, nil +} diff --git a/migration3/after/bug/op_set_metadata_test.go b/migration3/after/bug/op_set_metadata_test.go new file mode 100644 index 0000000..353d0d3 --- /dev/null +++ 
b/migration3/after/bug/op_set_metadata_test.go @@ -0,0 +1,127 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + + "github.com/stretchr/testify/require" +) + +func TestSetMetadata(t *testing.T) { + snapshot := Snapshot{} + + repo := repository.NewMockRepoClock() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + + unix := time.Now().Unix() + + create := NewCreateOp(rene, unix, "title", "create", nil) + create.SetMetadata("key", "value") + create.Apply(&snapshot) + snapshot.Operations = append(snapshot.Operations, create) + + id1 := create.Id() + require.NoError(t, id1.Validate()) + + comment := NewAddCommentOp(rene, unix, "comment", nil) + comment.SetMetadata("key2", "value2") + comment.Apply(&snapshot) + snapshot.Operations = append(snapshot.Operations, comment) + + id2 := comment.Id() + require.NoError(t, id2.Validate()) + + op1 := NewSetMetadataOp(rene, unix, id1, map[string]string{ + "key": "override", + "key2": "value", + }) + + op1.Apply(&snapshot) + snapshot.Operations = append(snapshot.Operations, op1) + + createMetadata := snapshot.Operations[0].AllMetadata() + require.Equal(t, len(createMetadata), 2) + // original key is not overridden + require.Equal(t, createMetadata["key"], "value") + // new key is set + require.Equal(t, createMetadata["key2"], "value") + + commentMetadata := snapshot.Operations[1].AllMetadata() + require.Equal(t, len(commentMetadata), 1) + require.Equal(t, commentMetadata["key2"], "value2") + + op2 := NewSetMetadataOp(rene, unix, id2, map[string]string{ + "key2": "value", + "key3": "value3", + }) + + op2.Apply(&snapshot) + snapshot.Operations = append(snapshot.Operations, op2) + + createMetadata = snapshot.Operations[0].AllMetadata() + require.Equal(t, len(createMetadata), 2) + require.Equal(t, 
createMetadata["key"], "value") + require.Equal(t, createMetadata["key2"], "value") + + commentMetadata = snapshot.Operations[1].AllMetadata() + require.Equal(t, len(commentMetadata), 2) + // original key is not overridden + require.Equal(t, commentMetadata["key2"], "value2") + // new key is set + require.Equal(t, commentMetadata["key3"], "value3") + + op3 := NewSetMetadataOp(rene, unix, id1, map[string]string{ + "key": "override", + "key2": "override", + }) + + op3.Apply(&snapshot) + snapshot.Operations = append(snapshot.Operations, op3) + + createMetadata = snapshot.Operations[0].AllMetadata() + require.Equal(t, len(createMetadata), 2) + // original key is not overridden + require.Equal(t, createMetadata["key"], "value") + // previously set key is not overridden + require.Equal(t, createMetadata["key2"], "value") + + commentMetadata = snapshot.Operations[1].AllMetadata() + require.Equal(t, len(commentMetadata), 2) + require.Equal(t, commentMetadata["key2"], "value2") + require.Equal(t, commentMetadata["key3"], "value3") +} + +func TestSetMetadataSerialize(t *testing.T) { + repo := repository.NewMockRepoClock() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewSetMetadataOp(rene, unix, "message", map[string]string{ + "key1": "value1", + "key2": "value2", + }) + + data, err := json.Marshal(before) + require.NoError(t, err) + + var after SetMetadataOperation + err = json.Unmarshal(data, &after) + require.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + require.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + require.Equal(t, before, &after) +} diff --git a/migration3/after/bug/op_set_status.go b/migration3/after/bug/op_set_status.go new file mode 100644 index 0000000..3d17caf --- /dev/null +++ b/migration3/after/bug/op_set_status.go @@ -0,0 +1,127 @@ +package bug + +import ( + 
"encoding/json" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" +) + +var _ Operation = &SetStatusOperation{} + +// SetStatusOperation will change the status of a bug +type SetStatusOperation struct { + OpBase + Status Status `json:"status"` +} + +// Sign-post method for gqlgen +func (op *SetStatusOperation) IsOperation() {} + +func (op *SetStatusOperation) base() *OpBase { + return &op.OpBase +} + +func (op *SetStatusOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *SetStatusOperation) Apply(snapshot *Snapshot) { + snapshot.Status = op.Status + snapshot.addActor(op.Author) + + item := &SetStatusTimelineItem{ + id: op.Id(), + Author: op.Author, + UnixTime: timestamp.Timestamp(op.UnixTime), + Status: op.Status, + } + + snapshot.Timeline = append(snapshot.Timeline, item) +} + +func (op *SetStatusOperation) Validate() error { + if err := opBaseValidate(op, SetStatusOp); err != nil { + return err + } + + if err := op.Status.Validate(); err != nil { + return errors.Wrap(err, "status") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *SetStatusOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Status Status `json:"status"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Status = aux.Status + + return nil +} + +// Sign post method for gqlgen +func (op *SetStatusOperation) IsAuthored() {} + +func NewSetStatusOp(author identity.Interface, unixTime int64, status Status) *SetStatusOperation { + return 
&SetStatusOperation{ + OpBase: newOpBase(SetStatusOp, author, unixTime), + Status: status, + } +} + +type SetStatusTimelineItem struct { + id entity.Id + Author identity.Interface + UnixTime timestamp.Timestamp + Status Status +} + +func (s SetStatusTimelineItem) Id() entity.Id { + return s.id +} + +// Sign post method for gqlgen +func (s *SetStatusTimelineItem) IsAuthored() {} + +// Convenience function to apply the operation +func Open(b Interface, author identity.Interface, unixTime int64) (*SetStatusOperation, error) { + op := NewSetStatusOp(author, unixTime, OpenStatus) + if err := op.Validate(); err != nil { + return nil, err + } + b.Append(op) + return op, nil +} + +// Convenience function to apply the operation +func Close(b Interface, author identity.Interface, unixTime int64) (*SetStatusOperation, error) { + op := NewSetStatusOp(author, unixTime, ClosedStatus) + if err := op.Validate(); err != nil { + return nil, err + } + b.Append(op) + return op, nil +} diff --git a/migration3/after/bug/op_set_status_test.go b/migration3/after/bug/op_set_status_test.go new file mode 100644 index 0000000..56a8bd2 --- /dev/null +++ b/migration3/after/bug/op_set_status_test.go @@ -0,0 +1,38 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +func TestSetStatusSerialize(t *testing.T) { + repo := repository.NewMockRepoClock() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewSetStatusOp(rene, unix, ClosedStatus) + + data, err := json.Marshal(before) + require.NoError(t, err) + + var after SetStatusOperation + err = json.Unmarshal(data, &after) + require.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + 
require.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + require.Equal(t, before, &after) +} diff --git a/migration3/after/bug/op_set_title.go b/migration3/after/bug/op_set_title.go new file mode 100644 index 0000000..35c6a59 --- /dev/null +++ b/migration3/after/bug/op_set_title.go @@ -0,0 +1,160 @@ +package bug + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" + + "github.com/MichaelMure/git-bug-migration/migration3/after/util/text" +) + +var _ Operation = &SetTitleOperation{} + +// SetTitleOperation will change the title of a bug +type SetTitleOperation struct { + OpBase + Title string `json:"title"` + Was string `json:"was"` +} + +// Sign-post method for gqlgen +func (op *SetTitleOperation) IsOperation() {} + +func (op *SetTitleOperation) base() *OpBase { + return &op.OpBase +} + +func (op *SetTitleOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *SetTitleOperation) Apply(snapshot *Snapshot) { + snapshot.Title = op.Title + snapshot.addActor(op.Author) + + item := &SetTitleTimelineItem{ + id: op.Id(), + Author: op.Author, + UnixTime: timestamp.Timestamp(op.UnixTime), + Title: op.Title, + Was: op.Was, + } + + snapshot.Timeline = append(snapshot.Timeline, item) +} + +func (op *SetTitleOperation) Validate() error { + if err := opBaseValidate(op, SetTitleOp); err != nil { + return err + } + + if text.Empty(op.Title) { + return fmt.Errorf("title is empty") + } + + if strings.Contains(op.Title, "\n") { + return fmt.Errorf("title should be a single line") + } + + if !text.Safe(op.Title) { + return fmt.Errorf("title should be fully printable") + } + + if strings.Contains(op.Was, "\n") { + return fmt.Errorf("previous title should be a single line") + } + + if !text.Safe(op.Was) { + return fmt.Errorf("previous title 
should be fully printable") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *SetTitleOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Title string `json:"title"` + Was string `json:"was"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Title = aux.Title + op.Was = aux.Was + + return nil +} + +// Sign post method for gqlgen +func (op *SetTitleOperation) IsAuthored() {} + +func NewSetTitleOp(author identity.Interface, unixTime int64, title string, was string) *SetTitleOperation { + return &SetTitleOperation{ + OpBase: newOpBase(SetTitleOp, author, unixTime), + Title: title, + Was: was, + } +} + +type SetTitleTimelineItem struct { + id entity.Id + Author identity.Interface + UnixTime timestamp.Timestamp + Title string + Was string +} + +func (s SetTitleTimelineItem) Id() entity.Id { + return s.id +} + +// Sign post method for gqlgen +func (s *SetTitleTimelineItem) IsAuthored() {} + +// Convenience function to apply the operation +func SetTitle(b Interface, author identity.Interface, unixTime int64, title string) (*SetTitleOperation, error) { + it := NewOperationIterator(b) + + var lastTitleOp Operation + for it.Next() { + op := it.Value() + if op.base().OperationType == SetTitleOp { + lastTitleOp = op + } + } + + var was string + if lastTitleOp != nil { + was = lastTitleOp.(*SetTitleOperation).Title + } else { + was = b.FirstOp().(*CreateOperation).Title + } + + setTitleOp := NewSetTitleOp(author, unixTime, title, was) + + if err := setTitleOp.Validate(); err != nil { + return nil, err + } + + b.Append(setTitleOp) + return setTitleOp, nil +} diff --git a/migration3/after/bug/op_set_title_test.go 
b/migration3/after/bug/op_set_title_test.go new file mode 100644 index 0000000..26e05bd --- /dev/null +++ b/migration3/after/bug/op_set_title_test.go @@ -0,0 +1,38 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +func TestSetTitleSerialize(t *testing.T) { + repo := repository.NewMockRepoClock() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewSetTitleOp(rene, unix, "title", "was") + + data, err := json.Marshal(before) + require.NoError(t, err) + + var after SetTitleOperation + err = json.Unmarshal(data, &after) + require.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + require.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + require.Equal(t, before, &after) +} diff --git a/migration3/after/bug/operation.go b/migration3/after/bug/operation.go new file mode 100644 index 0000000..d7e0ad4 --- /dev/null +++ b/migration3/after/bug/operation.go @@ -0,0 +1,213 @@ +package bug + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +// OperationType is an operation type identifier +type OperationType int + +const ( + _ OperationType = iota + CreateOp + SetTitleOp + AddCommentOp + SetStatusOp + LabelChangeOp + EditCommentOp + NoOpOp + SetMetadataOp +) + +// Operation define the interface to fulfill for an edit operation of a Bug +type Operation interface { + // base return the OpBase of the Operation, for package internal use + base() 
*OpBase + // Id return the identifier of the operation, to be used for back references + Id() entity.Id + // Time return the time when the operation was added + Time() time.Time + // GetFiles return the files needed by this operation + GetFiles() []repository.Hash + // Apply the operation to a Snapshot to create the final state + Apply(snapshot *Snapshot) + // Validate check if the operation is valid (ex: a title is a single line) + Validate() error + // SetMetadata store arbitrary metadata about the operation + SetMetadata(key string, value string) + // GetMetadata retrieve arbitrary metadata about the operation + GetMetadata(key string) (string, bool) + // AllMetadata return all metadata for this operation + AllMetadata() map[string]string + // GetAuthor return the author identity + GetAuthor() identity.Interface + + // sign-post method for gqlgen + IsOperation() +} + +func idOperation(op Operation) entity.Id { + base := op.base() + + if base.id == "" { + // something went really wrong + panic("op's id not set") + } + if base.id == entity.UnsetId { + // This means we are trying to get the op's Id *before* it has been stored, for instance when + // adding multiple ops in one go in an OperationPack. + // As the Id is computed based on the actual bytes written on the disk, we are going to predict + // those and then get the Id. This is safe as it will be the exact same code writing on disk later. + + data, err := json.Marshal(op) + if err != nil { + panic(err) + } + + base.id = entity.DeriveId(data) + } + return base.id +} + +// OpBase implement the common code for all operations +type OpBase struct { + OperationType OperationType `json:"type"` + Author identity.Interface `json:"author"` + // TODO: part of the data model upgrade, this should eventually be a timestamp + lamport + UnixTime int64 `json:"timestamp"` + Metadata map[string]string `json:"metadata,omitempty"` + // Not serialized. Store the op's id in memory. + id entity.Id + // Not serialized. 
Store the extra metadata in memory, + // compiled from SetMetadataOperation. + extraMetadata map[string]string +} + +// newOpBase is the constructor for an OpBase +func newOpBase(opType OperationType, author identity.Interface, unixTime int64) OpBase { + return OpBase{ + OperationType: opType, + Author: author, + UnixTime: unixTime, + id: entity.UnsetId, + } +} + +func (op *OpBase) UnmarshalJSON(data []byte) error { + // Compute the Id when loading the op from disk. + op.id = entity.DeriveId(data) + + aux := struct { + OperationType OperationType `json:"type"` + Author json.RawMessage `json:"author"` + UnixTime int64 `json:"timestamp"` + Metadata map[string]string `json:"metadata,omitempty"` + }{} + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + // delegate the decoding of the identity + author, err := identity.UnmarshalJSON(aux.Author) + if err != nil { + return err + } + + op.OperationType = aux.OperationType + op.Author = author + op.UnixTime = aux.UnixTime + op.Metadata = aux.Metadata + + return nil +} + +// Time return the time when the operation was added +func (op *OpBase) Time() time.Time { + return time.Unix(op.UnixTime, 0) +} + +// GetFiles return the files needed by this operation +func (op *OpBase) GetFiles() []repository.Hash { + return nil +} + +// Validate check the OpBase for errors +func opBaseValidate(op Operation, opType OperationType) error { + if op.base().OperationType != opType { + return fmt.Errorf("incorrect operation type (expected: %v, actual: %v)", opType, op.base().OperationType) + } + + if op.Time().Unix() == 0 { + return fmt.Errorf("time not set") + } + + if op.base().Author == nil { + return fmt.Errorf("author not set") + } + + if err := op.base().Author.Validate(); err != nil { + return errors.Wrap(err, "author") + } + + for _, hash := range op.GetFiles() { + if !hash.IsValid() { + return fmt.Errorf("file with invalid hash %v", hash) + } + } + + return nil +} + +// SetMetadata store arbitrary metadata 
about the operation +func (op *OpBase) SetMetadata(key string, value string) { + if op.Metadata == nil { + op.Metadata = make(map[string]string) + } + + op.Metadata[key] = value + op.id = entity.UnsetId +} + +// GetMetadata retrieve arbitrary metadata about the operation +func (op *OpBase) GetMetadata(key string) (string, bool) { + val, ok := op.Metadata[key] + + if ok { + return val, true + } + + // extraMetadata can't replace the original operations value if any + val, ok = op.extraMetadata[key] + + return val, ok +} + +// AllMetadata return all metadata for this operation +func (op *OpBase) AllMetadata() map[string]string { + result := make(map[string]string) + + for key, val := range op.extraMetadata { + result[key] = val + } + + // Original metadata take precedence + for key, val := range op.Metadata { + result[key] = val + } + + return result +} + +// GetAuthor return author identity +func (op *OpBase) GetAuthor() identity.Interface { + return op.Author +} diff --git a/migration3/after/bug/operation_iterator.go b/migration3/after/bug/operation_iterator.go new file mode 100644 index 0000000..f42b177 --- /dev/null +++ b/migration3/after/bug/operation_iterator.go @@ -0,0 +1,72 @@ +package bug + +type OperationIterator struct { + bug *Bug + packIndex int + opIndex int +} + +func NewOperationIterator(bug Interface) *OperationIterator { + return &OperationIterator{ + bug: bugFromInterface(bug), + packIndex: 0, + opIndex: -1, + } +} + +func (it *OperationIterator) Next() bool { + // Special case of the staging area + if it.packIndex == len(it.bug.packs) { + pack := it.bug.staging + it.opIndex++ + return it.opIndex < len(pack.Operations) + } + + if it.packIndex >= len(it.bug.packs) { + return false + } + + pack := it.bug.packs[it.packIndex] + + it.opIndex++ + + if it.opIndex < len(pack.Operations) { + return true + } + + // Note: this iterator doesn't handle the empty pack case + it.opIndex = 0 + it.packIndex++ + + // Special case of the non-empty staging area + if 
it.packIndex == len(it.bug.packs) && len(it.bug.staging.Operations) > 0 { + return true + } + + return it.packIndex < len(it.bug.packs) +} + +func (it *OperationIterator) Value() Operation { + // Special case of the staging area + if it.packIndex == len(it.bug.packs) { + pack := it.bug.staging + + if it.opIndex >= len(pack.Operations) { + panic("Iterator is not valid anymore") + } + + return pack.Operations[it.opIndex] + } + + if it.packIndex >= len(it.bug.packs) { + panic("Iterator is not valid anymore") + } + + pack := it.bug.packs[it.packIndex] + + if it.opIndex >= len(pack.Operations) { + panic("Iterator is not valid anymore") + } + + return pack.Operations[it.opIndex] +} diff --git a/migration3/after/bug/operation_iterator_test.go b/migration3/after/bug/operation_iterator_test.go new file mode 100644 index 0000000..f932e1b --- /dev/null +++ b/migration3/after/bug/operation_iterator_test.go @@ -0,0 +1,79 @@ +package bug + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +func ExampleOperationIterator() { + b := NewBug() + + // add operations + + it := NewOperationIterator(b) + + for it.Next() { + // do something with each operations + _ = it.Value() + } +} + +func TestOpIterator(t *testing.T) { + repo := repository.NewMockRepoForTest() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + + createOp := NewCreateOp(rene, unix, "title", "message", nil) + addCommentOp := NewAddCommentOp(rene, unix, "message2", nil) + setStatusOp := NewSetStatusOp(rene, unix, ClosedStatus) + labelChangeOp := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}) + + var i int + genTitleOp := func() Operation { + i++ + return NewSetTitleOp(rene, 
unix, fmt.Sprintf("title%d", i), "") + } + + bug1 := NewBug() + + // first pack + bug1.Append(createOp) + bug1.Append(addCommentOp) + bug1.Append(setStatusOp) + bug1.Append(labelChangeOp) + err = bug1.Commit(repo) + require.NoError(t, err) + + // second pack + bug1.Append(genTitleOp()) + bug1.Append(genTitleOp()) + bug1.Append(genTitleOp()) + err = bug1.Commit(repo) + require.NoError(t, err) + + // staging + bug1.Append(genTitleOp()) + bug1.Append(genTitleOp()) + bug1.Append(genTitleOp()) + + it := NewOperationIterator(bug1) + + counter := 0 + for it.Next() { + _ = it.Value() + counter++ + } + + require.Equal(t, 10, counter) +} diff --git a/migration3/after/bug/operation_pack.go b/migration3/after/bug/operation_pack.go new file mode 100644 index 0000000..cf9a5d4 --- /dev/null +++ b/migration3/after/bug/operation_pack.go @@ -0,0 +1,186 @@ +package bug + +import ( + "encoding/json" + "fmt" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +// 1: original format +// 2: no more legacy identities +// 3: Ids are generated from the create operation serialized data instead of from the first git commit +const formatVersion = 3 + +// OperationPack represent an ordered set of operation to apply +// to a Bug. These operations are stored in a single Git commit. 
+// +// These commits will be linked together in a linear chain of commits +// inside Git to form the complete ordered chain of operation to +// apply to get the final state of the Bug +type OperationPack struct { + Operations []Operation + + // Private field so not serialized + commitHash repository.Hash +} + +func (opp *OperationPack) MarshalJSON() ([]byte, error) { + return json.Marshal(struct { + Version uint `json:"version"` + Operations []Operation `json:"ops"` + }{ + Version: formatVersion, + Operations: opp.Operations, + }) +} + +func (opp *OperationPack) UnmarshalJSON(data []byte) error { + aux := struct { + Version uint `json:"version"` + Operations []json.RawMessage `json:"ops"` + }{} + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + if aux.Version < formatVersion { + return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade") + } + if aux.Version > formatVersion { + return fmt.Errorf("your version of git-bug is too old for this repository (bug format %v), please upgrade to the latest version", aux.Version) + } + + for _, raw := range aux.Operations { + var t struct { + OperationType OperationType `json:"type"` + } + + if err := json.Unmarshal(raw, &t); err != nil { + return err + } + + // delegate to specialized unmarshal function + op, err := opp.unmarshalOp(raw, t.OperationType) + if err != nil { + return err + } + + opp.Operations = append(opp.Operations, op) + } + + return nil +} + +func (opp *OperationPack) unmarshalOp(raw []byte, _type OperationType) (Operation, error) { + switch _type { + case AddCommentOp: + op := &AddCommentOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case CreateOp: + op := &CreateOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case EditCommentOp: + op := &EditCommentOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case LabelChangeOp: + op := &LabelChangeOperation{} + err := 
json.Unmarshal(raw, &op) + return op, err + case NoOpOp: + op := &NoOpOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case SetMetadataOp: + op := &SetMetadataOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case SetStatusOp: + op := &SetStatusOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case SetTitleOp: + op := &SetTitleOperation{} + err := json.Unmarshal(raw, &op) + return op, err + default: + return nil, fmt.Errorf("unknown operation type %v", _type) + } +} + +// Append a new operation to the pack +func (opp *OperationPack) Append(op Operation) { + opp.Operations = append(opp.Operations, op) +} + +// IsEmpty tell if the OperationPack is empty +func (opp *OperationPack) IsEmpty() bool { + return len(opp.Operations) == 0 +} + +// IsValid tell if the OperationPack is considered valid +func (opp *OperationPack) Validate() error { + if opp.IsEmpty() { + return fmt.Errorf("empty") + } + + for _, op := range opp.Operations { + if err := op.Validate(); err != nil { + return errors.Wrap(err, "op") + } + } + + return nil +} + +// Write will serialize and store the OperationPack as a git blob and return +// its hash +func (opp *OperationPack) Write(repo repository.ClockedRepo) (repository.Hash, error) { + // make sure we don't write invalid data + err := opp.Validate() + if err != nil { + return "", errors.Wrap(err, "validation error") + } + + // First, make sure that all the identities are properly Commit as well + // TODO: this might be downgraded to "make sure it exist in git" but then, what make + // sure no data is lost on identities ? 
+ for _, op := range opp.Operations { + if op.base().Author.NeedCommit() { + return "", fmt.Errorf("identity need commit") + } + } + + data, err := json.Marshal(opp) + if err != nil { + return "", err + } + + hash, err := repo.StoreData(data) + if err != nil { + return "", err + } + + return hash, nil +} + +// Make a deep copy +func (opp *OperationPack) Clone() OperationPack { + + clone := OperationPack{ + Operations: make([]Operation, len(opp.Operations)), + commitHash: opp.commitHash, + } + + for i, op := range opp.Operations { + clone.Operations[i] = op + } + + return clone +} diff --git a/migration3/after/bug/operation_pack_test.go b/migration3/after/bug/operation_pack_test.go new file mode 100644 index 0000000..51470f3 --- /dev/null +++ b/migration3/after/bug/operation_pack_test.go @@ -0,0 +1,78 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +func TestOperationPackSerialize(t *testing.T) { + opp := &OperationPack{} + + repo := repository.NewMockRepoClock() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + + createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1") + addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil) + setStatusOp := NewSetStatusOp(rene, time.Now().Unix(), ClosedStatus) + labelChangeOp := NewLabelChangeOperation(rene, time.Now().Unix(), []Label{"added"}, []Label{"removed"}) + + opp.Append(createOp) + opp.Append(setTitleOp) + opp.Append(addCommentOp) + opp.Append(setStatusOp) + opp.Append(labelChangeOp) + + opMeta := NewSetTitleOp(rene, time.Now().Unix(), "title3", "title2") + opMeta.SetMetadata("key", "value") + opp.Append(opMeta) + + require.Equal(t, 1, 
len(opMeta.Metadata)) + + opFile := NewAddCommentOp(rene, time.Now().Unix(), "message", []repository.Hash{ + "abcdef", + "ghijkl", + }) + opp.Append(opFile) + + require.Equal(t, 2, len(opFile.Files)) + + data, err := json.Marshal(opp) + require.NoError(t, err) + + var opp2 *OperationPack + err = json.Unmarshal(data, &opp2) + require.NoError(t, err) + + ensureIds(opp) + ensureAuthors(t, opp, opp2) + + require.Equal(t, opp, opp2) +} + +func ensureIds(opp *OperationPack) { + for _, op := range opp.Operations { + op.Id() + } +} + +func ensureAuthors(t *testing.T, opp1 *OperationPack, opp2 *OperationPack) { + require.Equal(t, len(opp1.Operations), len(opp2.Operations)) + for i := 0; i < len(opp1.Operations); i++ { + op1 := opp1.Operations[i] + op2 := opp2.Operations[i] + + // ensure we have equivalent authors (IdentityStub vs Identity) then + // enforce equality + require.Equal(t, op1.base().Author.Id(), op2.base().Author.Id()) + op1.base().Author = op2.base().Author + } +} diff --git a/migration3/after/bug/operation_test.go b/migration3/after/bug/operation_test.go new file mode 100644 index 0000000..1d4aa65 --- /dev/null +++ b/migration3/after/bug/operation_test.go @@ -0,0 +1,133 @@ +package bug + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +func TestValidate(t *testing.T) { + repo := repository.NewMockRepoClock() + + makeIdentity := func(t *testing.T, name, email string) *identity.Identity { + i, err := identity.NewIdentity(repo, name, email) + require.NoError(t, err) + return i + } + + rene := makeIdentity(t, "René Descartes", "rene@descartes.fr") + + unix := time.Now().Unix() + + good := []Operation{ + NewCreateOp(rene, unix, "title", "message", nil), + NewSetTitleOp(rene, unix, "title2", "title1"), + NewAddCommentOp(rene, unix, "message2", nil), + NewSetStatusOp(rene, unix, ClosedStatus), 
+ NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}), + } + + for _, op := range good { + if err := op.Validate(); err != nil { + t.Fatal(err) + } + } + + bad := []Operation{ + // opbase + NewSetStatusOp(makeIdentity(t, "", "rene@descartes.fr"), unix, ClosedStatus), + NewSetStatusOp(makeIdentity(t, "René Descartes\u001b", "rene@descartes.fr"), unix, ClosedStatus), + NewSetStatusOp(makeIdentity(t, "René Descartes", "rene@descartes.fr\u001b"), unix, ClosedStatus), + NewSetStatusOp(makeIdentity(t, "René \nDescartes", "rene@descartes.fr"), unix, ClosedStatus), + NewSetStatusOp(makeIdentity(t, "René Descartes", "rene@\ndescartes.fr"), unix, ClosedStatus), + &CreateOperation{OpBase: OpBase{ + Author: rene, + UnixTime: 0, + OperationType: CreateOp, + }, + Title: "title", + Message: "message", + }, + + NewCreateOp(rene, unix, "multi\nline", "message", nil), + NewCreateOp(rene, unix, "title", "message", []repository.Hash{repository.Hash("invalid")}), + NewCreateOp(rene, unix, "title\u001b", "message", nil), + NewCreateOp(rene, unix, "title", "message\u001b", nil), + NewSetTitleOp(rene, unix, "multi\nline", "title1"), + NewSetTitleOp(rene, unix, "title", "multi\nline"), + NewSetTitleOp(rene, unix, "title\u001b", "title2"), + NewSetTitleOp(rene, unix, "title", "title2\u001b"), + NewAddCommentOp(rene, unix, "message\u001b", nil), + NewAddCommentOp(rene, unix, "message", []repository.Hash{repository.Hash("invalid")}), + NewSetStatusOp(rene, unix, 1000), + NewSetStatusOp(rene, unix, 0), + NewLabelChangeOperation(rene, unix, []Label{}, []Label{}), + NewLabelChangeOperation(rene, unix, []Label{"multi\nline"}, []Label{}), + } + + for i, op := range bad { + if err := op.Validate(); err == nil { + t.Fatal("validation should have failed", i, op) + } + } +} + +func TestMetadata(t *testing.T) { + repo := repository.NewMockRepoClock() + + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + + op := 
NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + + op.SetMetadata("key", "value") + + val, ok := op.GetMetadata("key") + require.True(t, ok) + require.Equal(t, val, "value") +} + +func TestID(t *testing.T) { + repo := repository.CreateGoGitTestRepo(false) + defer repository.CleanupTestRepos(repo) + + repos := []repository.ClockedRepo{ + repository.NewMockRepoForTest(), + repo, + } + + for _, repo := range repos { + rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") + require.NoError(t, err) + err = rene.Commit(repo) + require.NoError(t, err) + + b, op, err := Create(rene, time.Now().Unix(), "title", "message") + require.NoError(t, err) + + id1 := op.Id() + require.NoError(t, id1.Validate()) + + err = b.Commit(repo) + require.NoError(t, err) + + op2 := b.FirstOp() + + id2 := op2.Id() + require.NoError(t, id2.Validate()) + require.Equal(t, id1, id2) + + b2, err := ReadLocal(repo, b.Id()) + require.NoError(t, err) + + op3 := b2.FirstOp() + + id3 := op3.Id() + require.NoError(t, id3.Validate()) + require.Equal(t, id1, id3) + } +} diff --git a/migration3/after/bug/snapshot.go b/migration3/after/bug/snapshot.go new file mode 100644 index 0000000..16f4362 --- /dev/null +++ b/migration3/after/bug/snapshot.go @@ -0,0 +1,133 @@ +package bug + +import ( + "fmt" + "time" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" +) + +// Snapshot is a compiled form of the Bug data structure used for storage and merge +type Snapshot struct { + id entity.Id + + Status Status + Title string + Comments []Comment + Labels []Label + Author identity.Interface + Actors []identity.Interface + Participants []identity.Interface + CreateTime time.Time + + Timeline []TimelineItem + + Operations []Operation +} + +// Return the Bug identifier +func (snap *Snapshot) Id() entity.Id { + return snap.id +} + +// Return the last time a bug was modified +func (snap 
*Snapshot) EditTime() time.Time { + if len(snap.Operations) == 0 { + return time.Unix(0, 0) + } + + return snap.Operations[len(snap.Operations)-1].Time() +} + +// GetCreateMetadata return the creation metadata +func (snap *Snapshot) GetCreateMetadata(key string) (string, bool) { + return snap.Operations[0].GetMetadata(key) +} + +// SearchTimelineItem will search in the timeline for an item matching the given hash +func (snap *Snapshot) SearchTimelineItem(id entity.Id) (TimelineItem, error) { + for i := range snap.Timeline { + if snap.Timeline[i].Id() == id { + return snap.Timeline[i], nil + } + } + + return nil, fmt.Errorf("timeline item not found") +} + +// SearchComment will search for a comment matching the given hash +func (snap *Snapshot) SearchComment(id entity.Id) (*Comment, error) { + for _, c := range snap.Comments { + if c.id == id { + return &c, nil + } + } + + return nil, fmt.Errorf("comment item not found") +} + +// append the operation author to the actors list +func (snap *Snapshot) addActor(actor identity.Interface) { + for _, a := range snap.Actors { + if actor.Id() == a.Id() { + return + } + } + + snap.Actors = append(snap.Actors, actor) +} + +// append the operation author to the participants list +func (snap *Snapshot) addParticipant(participant identity.Interface) { + for _, p := range snap.Participants { + if participant.Id() == p.Id() { + return + } + } + + snap.Participants = append(snap.Participants, participant) +} + +// HasParticipant return true if the id is a participant +func (snap *Snapshot) HasParticipant(id entity.Id) bool { + for _, p := range snap.Participants { + if p.Id() == id { + return true + } + } + return false +} + +// HasAnyParticipant return true if one of the ids is a participant +func (snap *Snapshot) HasAnyParticipant(ids ...entity.Id) bool { + for _, id := range ids { + if snap.HasParticipant(id) { + return true + } + } + return false +} + +// HasActor return true if the id is a actor +func (snap *Snapshot) 
HasActor(id entity.Id) bool { + for _, p := range snap.Actors { + if p.Id() == id { + return true + } + } + return false +} + +// HasAnyActor return true if one of the ids is a actor +func (snap *Snapshot) HasAnyActor(ids ...entity.Id) bool { + for _, id := range ids { + if snap.HasActor(id) { + return true + } + } + return false +} + +// Sign post method for gqlgen +func (snap *Snapshot) IsAuthored() {} diff --git a/migration3/after/bug/sorting.go b/migration3/after/bug/sorting.go new file mode 100644 index 0000000..d1c370d --- /dev/null +++ b/migration3/after/bug/sorting.go @@ -0,0 +1,57 @@ +package bug + +type BugsByCreationTime []*Bug + +func (b BugsByCreationTime) Len() int { + return len(b) +} + +func (b BugsByCreationTime) Less(i, j int) bool { + if b[i].createTime < b[j].createTime { + return true + } + + if b[i].createTime > b[j].createTime { + return false + } + + // When the logical clocks are identical, that means we had a concurrent + // edition. In this case we rely on the timestamp. While the timestamp might + // be incorrect due to a badly set clock, the drift in sorting is bounded + // by the first sorting using the logical clock. That means that if users + // synchronize their bugs regularly, the timestamp will rarely be used, and + // should still provide a kinda accurate sorting when needed. + return b[i].FirstOp().Time().Before(b[j].FirstOp().Time()) +} + +func (b BugsByCreationTime) Swap(i, j int) { + b[i], b[j] = b[j], b[i] +} + +type BugsByEditTime []*Bug + +func (b BugsByEditTime) Len() int { + return len(b) +} + +func (b BugsByEditTime) Less(i, j int) bool { + if b[i].editTime < b[j].editTime { + return true + } + + if b[i].editTime > b[j].editTime { + return false + } + + // When the logical clocks are identical, that means we had a concurrent + // edition. In this case we rely on the timestamp. 
While the timestamp might + // be incorrect due to a badly set clock, the drift in sorting is bounded + // by the first sorting using the logical clock. That means that if users + // synchronize their bugs regularly, the timestamp will rarely be used, and + // should still provide a kinda accurate sorting when needed. + return b[i].LastOp().Time().Before(b[j].LastOp().Time()) +} + +func (b BugsByEditTime) Swap(i, j int) { + b[i], b[j] = b[j], b[i] +} diff --git a/migration3/after/bug/status.go b/migration3/after/bug/status.go new file mode 100644 index 0000000..9e99803 --- /dev/null +++ b/migration3/after/bug/status.go @@ -0,0 +1,57 @@ +package bug + +import ( + "fmt" + "strings" +) + +type Status int + +const ( + _ Status = iota + OpenStatus + ClosedStatus +) + +func (s Status) String() string { + switch s { + case OpenStatus: + return "open" + case ClosedStatus: + return "closed" + default: + return "unknown status" + } +} + +func (s Status) Action() string { + switch s { + case OpenStatus: + return "opened" + case ClosedStatus: + return "closed" + default: + return "unknown status" + } +} + +func StatusFromString(str string) (Status, error) { + cleaned := strings.ToLower(strings.TrimSpace(str)) + + switch cleaned { + case "open": + return OpenStatus, nil + case "closed": + return ClosedStatus, nil + default: + return 0, fmt.Errorf("unknown status") + } +} + +func (s Status) Validate() error { + if s != OpenStatus && s != ClosedStatus { + return fmt.Errorf("invalid") + } + + return nil +} diff --git a/migration3/after/bug/timeline.go b/migration3/after/bug/timeline.go new file mode 100644 index 0000000..6ba3daf --- /dev/null +++ b/migration3/after/bug/timeline.go @@ -0,0 +1,79 @@ +package bug + +import ( + "strings" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + 
"github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" +) + +type TimelineItem interface { + // ID return the identifier of the item + Id() entity.Id +} + +// CommentHistoryStep hold one version of a message in the history +type CommentHistoryStep struct { + // The author of the edition, not necessarily the same as the author of the + // original comment + Author identity.Interface + // The new message + Message string + UnixTime timestamp.Timestamp +} + +// CommentTimelineItem is a TimelineItem that holds a Comment and its edition history +type CommentTimelineItem struct { + id entity.Id + Author identity.Interface + Message string + Files []repository.Hash + CreatedAt timestamp.Timestamp + LastEdit timestamp.Timestamp + History []CommentHistoryStep +} + +func NewCommentTimelineItem(ID entity.Id, comment Comment) CommentTimelineItem { + return CommentTimelineItem{ + id: ID, + Author: comment.Author, + Message: comment.Message, + Files: comment.Files, + CreatedAt: comment.UnixTime, + LastEdit: comment.UnixTime, + History: []CommentHistoryStep{ + { + Message: comment.Message, + UnixTime: comment.UnixTime, + }, + }, + } +} + +func (c *CommentTimelineItem) Id() entity.Id { + return c.id +} + +// Append will append a new comment in the history and update the other values +func (c *CommentTimelineItem) Append(comment Comment) { + c.Message = comment.Message + c.Files = comment.Files + c.LastEdit = comment.UnixTime + c.History = append(c.History, CommentHistoryStep{ + Author: comment.Author, + Message: comment.Message, + UnixTime: comment.UnixTime, + }) +} + +// Edited say if the comment was edited +func (c *CommentTimelineItem) Edited() bool { + return len(c.History) > 1 +} + +// MessageIsEmpty return true is the message is empty or only made of spaces +func (c *CommentTimelineItem) MessageIsEmpty() bool { + return len(strings.TrimSpace(c.Message)) == 0 +} diff --git a/migration3/after/bug/with_snapshot.go b/migration3/after/bug/with_snapshot.go new 
file mode 100644 index 0000000..17eb089 --- /dev/null +++ b/migration3/after/bug/with_snapshot.go @@ -0,0 +1,58 @@ +package bug + +import "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + +var _ Interface = &WithSnapshot{} + +// WithSnapshot encapsulate a Bug and maintain the corresponding Snapshot efficiently +type WithSnapshot struct { + *Bug + snap *Snapshot +} + +// Snapshot return the current snapshot +func (b *WithSnapshot) Snapshot() *Snapshot { + if b.snap == nil { + snap := b.Bug.Compile() + b.snap = &snap + } + return b.snap +} + +// Append intercept Bug.Append() to update the snapshot efficiently +func (b *WithSnapshot) Append(op Operation) { + b.Bug.Append(op) + + if b.snap == nil { + return + } + + op.Apply(b.snap) + b.snap.Operations = append(b.snap.Operations, op) +} + +// Commit intercept Bug.Commit() to update the snapshot efficiently +func (b *WithSnapshot) Commit(repo repository.ClockedRepo) error { + err := b.Bug.Commit(repo) + + if err != nil { + b.snap = nil + return err + } + + // Commit() shouldn't change anything of the bug state apart from the + // initial ID set + + if b.snap == nil { + return nil + } + + b.snap.id = b.Bug.id + return nil +} + +// Merge intercept Bug.Merge() and clear the snapshot +func (b *WithSnapshot) Merge(repo repository.Repo, other Interface) (bool, error) { + b.snap = nil + return b.Bug.Merge(repo, other) +} diff --git a/migration3/after/entity/doc.go b/migration3/after/entity/doc.go new file mode 100644 index 0000000..4682d54 --- /dev/null +++ b/migration3/after/entity/doc.go @@ -0,0 +1,8 @@ +// Package entity contains the base common code to define an entity stored +// in a chain of git objects, supporting actions like Push, Pull and Merge. +package entity + +// TODO: Bug and Identity are very similar, right ? I expect that this package +// will eventually hold the common code to define an entity and the related +// helpers, errors and so on. 
When this work is done, it will become easier +// to add new entities, for example to support pull requests. diff --git a/migration3/after/entity/err.go b/migration3/after/entity/err.go new file mode 100644 index 0000000..7d6c662 --- /dev/null +++ b/migration3/after/entity/err.go @@ -0,0 +1,32 @@ +package entity + +import ( + "fmt" + "strings" +) + +type ErrMultipleMatch struct { + entityType string + Matching []Id +} + +func NewErrMultipleMatch(entityType string, matching []Id) *ErrMultipleMatch { + return &ErrMultipleMatch{entityType: entityType, Matching: matching} +} + +func (e ErrMultipleMatch) Error() string { + matching := make([]string, len(e.Matching)) + + for i, match := range e.Matching { + matching[i] = match.String() + } + + return fmt.Sprintf("Multiple matching %s found:\n%s", + e.entityType, + strings.Join(matching, "\n")) +} + +func IsErrMultipleMatch(err error) bool { + _, ok := err.(*ErrMultipleMatch) + return ok +} diff --git a/migration3/after/entity/id.go b/migration3/after/entity/id.go new file mode 100644 index 0000000..9e72401 --- /dev/null +++ b/migration3/after/entity/id.go @@ -0,0 +1,99 @@ +package entity + +import ( + "crypto/sha256" + "fmt" + "io" + "strings" + + "github.com/pkg/errors" +) + +// sha-256 +const idLength = 64 +const humanIdLength = 7 + +const UnsetId = Id("unset") + +// Id is an identifier for an entity or part of an entity +type Id string + +// DeriveId generate an Id from some data, taken from a root part of the entity. +func DeriveId(data []byte) Id { + // My understanding is that sha256 is enough to prevent collision (git use that, so ...?) + // If you read this code, I'd be happy to be schooled. 
+ + sum := sha256.Sum256(data) + return Id(fmt.Sprintf("%x", sum)) +} + +// String return the identifier as a string +func (i Id) String() string { + return string(i) +} + +// Human return the identifier, shortened for human consumption +func (i Id) Human() string { + format := fmt.Sprintf("%%.%ds", humanIdLength) + return fmt.Sprintf(format, i) +} + +func (i Id) HasPrefix(prefix string) bool { + return strings.HasPrefix(string(i), prefix) +} + +// UnmarshalGQL implement the Unmarshaler interface for gqlgen +func (i *Id) UnmarshalGQL(v interface{}) error { + _, ok := v.(string) + if !ok { + return fmt.Errorf("IDs must be strings") + } + + *i = v.(Id) + + if err := i.Validate(); err != nil { + return errors.Wrap(err, "invalid ID") + } + + return nil +} + +// MarshalGQL implement the Marshaler interface for gqlgen +func (i Id) MarshalGQL(w io.Writer) { + _, _ = w.Write([]byte(`"` + i.String() + `"`)) +} + +// IsValid tell if the Id is valid +func (i Id) Validate() error { + // Special case to + if len(i) == 40 { + return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade") + } + if len(i) != idLength { + return fmt.Errorf("invalid length") + } + for _, r := range i { + if (r < 'a' || r > 'z') && (r < '0' || r > '9') { + return fmt.Errorf("invalid character") + } + } + return nil +} + +/* + * Sorting + */ + +type Alphabetical []Id + +func (a Alphabetical) Len() int { + return len(a) +} + +func (a Alphabetical) Less(i, j int) bool { + return a[i] < a[j] +} + +func (a Alphabetical) Swap(i, j int) { + a[i], a[j] = a[j], a[i] +} diff --git a/migration3/after/entity/interface.go b/migration3/after/entity/interface.go new file mode 100644 index 0000000..fb4735e --- /dev/null +++ b/migration3/after/entity/interface.go @@ -0,0 +1,12 @@ +package entity + +type Interface interface { + // Id return the Entity identifier + // + // This Id need to be immutable without having to store the entity somewhere (ie, an entity 
only in memory + // should have a valid Id, and it should not change if further edit are done on this entity). + // How to achieve that is up to the entity itself. A common way would be to take a hash of an immutable data at + // the root of the entity. + // It is acceptable to use such a hash and keep mutating that data as long as Id() is not called. + Id() Id +} diff --git a/migration3/after/entity/merge.go b/migration3/after/entity/merge.go new file mode 100644 index 0000000..3ce8eda --- /dev/null +++ b/migration3/after/entity/merge.go @@ -0,0 +1,74 @@ +package entity + +import ( + "fmt" +) + +// MergeStatus represent the result of a merge operation of an entity +type MergeStatus int + +const ( + _ MergeStatus = iota + MergeStatusNew + MergeStatusInvalid + MergeStatusUpdated + MergeStatusNothing + MergeStatusError +) + +type MergeResult struct { + // Err is set when a terminal error occur in the process + Err error + + Id Id + Status MergeStatus + + // Only set for invalid status + Reason string + + // Not set for invalid status + Entity Interface +} + +func (mr MergeResult) String() string { + switch mr.Status { + case MergeStatusNew: + return "new" + case MergeStatusInvalid: + return fmt.Sprintf("invalid data: %s", mr.Reason) + case MergeStatusUpdated: + return "updated" + case MergeStatusNothing: + return "nothing to do" + case MergeStatusError: + return fmt.Sprintf("merge error on %s: %s", mr.Id, mr.Err.Error()) + default: + panic("unknown merge status") + } +} + +func NewMergeError(err error, id Id) MergeResult { + return MergeResult{ + Err: err, + Id: id, + Status: MergeStatusError, + } +} + +func NewMergeStatus(status MergeStatus, id Id, entity Interface) MergeResult { + return MergeResult{ + Id: id, + Status: status, + + // Entity is not set for an invalid merge result + Entity: entity, + } +} + +func NewMergeInvalidStatus(id Id, reason string) MergeResult { + return MergeResult{ + Id: id, + Status: MergeStatusInvalid, + Reason: reason, + } +} diff --git 
a/migration3/after/identity/common.go b/migration3/after/identity/common.go new file mode 100644 index 0000000..4f012ed --- /dev/null +++ b/migration3/after/identity/common.go @@ -0,0 +1,37 @@ +package identity + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" +) + +var ErrIdentityNotExist = errors.New("identity doesn't exist") + +func NewErrMultipleMatch(matching []entity.Id) *entity.ErrMultipleMatch { + return entity.NewErrMultipleMatch("identity", matching) +} + +// Custom unmarshaling function to allow package user to delegate +// the decoding of an Identity and distinguish between an Identity +// and a Bare. +// +// If the given message has a "id" field, it's considered being a proper Identity. +func UnmarshalJSON(raw json.RawMessage) (Interface, error) { + aux := &IdentityStub{} + + // First try to decode and load as a normal Identity + err := json.Unmarshal(raw, &aux) + if err == nil && aux.Id() != "" { + return aux, nil + } + + // abort if we have an error other than the wrong type + if _, ok := err.(*json.UnmarshalTypeError); err != nil && !ok { + return nil, err + } + + return nil, fmt.Errorf("unknown identity type") +} diff --git a/migration3/after/identity/identity.go b/migration3/after/identity/identity.go new file mode 100644 index 0000000..40485c6 --- /dev/null +++ b/migration3/after/identity/identity.go @@ -0,0 +1,542 @@ +// Package identity contains the identity data model and low-level related functions +package identity + +import ( + "encoding/json" + "fmt" + "reflect" + "strings" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" +) + +const identityRefPattern = "refs/identities/" +const 
identityRemoteRefPattern = "refs/remotes/%s/identities/" +const versionEntryName = "version" +const identityConfigKey = "git-bug.identity" + +var ErrNonFastForwardMerge = errors.New("non fast-forward identity merge") +var ErrNoIdentitySet = errors.New("No identity is set.\n" + + "To interact with bugs, an identity first needs to be created using " + + "\"git bug user create\"") +var ErrMultipleIdentitiesSet = errors.New("multiple user identities set") + +var _ Interface = &Identity{} +var _ entity.Interface = &Identity{} + +type Identity struct { + // all the successive version of the identity + versions []*version +} + +func NewIdentity(repo repository.RepoClock, name string, email string) (*Identity, error) { + return NewIdentityFull(repo, name, email, "", "", nil) +} + +func NewIdentityFull(repo repository.RepoClock, name string, email string, login string, avatarUrl string, keys []*Key) (*Identity, error) { + v, err := newVersion(repo, name, email, login, avatarUrl, keys) + if err != nil { + return nil, err + } + return &Identity{ + versions: []*version{v}, + }, nil +} + +// NewFromGitUser will query the repository for user detail and +// build the corresponding Identity +func NewFromGitUser(repo repository.ClockedRepo) (*Identity, error) { + name, err := repo.GetUserName() + if err != nil { + return nil, err + } + if name == "" { + return nil, errors.New("user name is not configured in git yet. Please use `git config --global user.name \"John Doe\"`") + } + + email, err := repo.GetUserEmail() + if err != nil { + return nil, err + } + if email == "" { + return nil, errors.New("user name is not configured in git yet. 
Please use `git config --global user.email johndoe@example.com`") + } + + return NewIdentity(repo, name, email) +} + +// MarshalJSON will only serialize the id +func (i *Identity) MarshalJSON() ([]byte, error) { + return json.Marshal(&IdentityStub{ + id: i.Id(), + }) +} + +// UnmarshalJSON will only read the id +// Users of this package are expected to run Load() to load +// the remaining data from the identities data in git. +func (i *Identity) UnmarshalJSON(data []byte) error { + panic("identity should be loaded with identity.UnmarshalJSON") +} + +// ReadLocal load a local Identity from the identities data available in git +func ReadLocal(repo repository.Repo, id entity.Id) (*Identity, error) { + ref := fmt.Sprintf("%s%s", identityRefPattern, id) + return read(repo, ref) +} + +// ReadRemote load a remote Identity from the identities data available in git +func ReadRemote(repo repository.Repo, remote string, id string) (*Identity, error) { + ref := fmt.Sprintf(identityRemoteRefPattern, remote) + id + return read(repo, ref) +} + +// read will load and parse an identity from git +func read(repo repository.Repo, ref string) (*Identity, error) { + refSplit := strings.Split(ref, "/") + id := entity.Id(refSplit[len(refSplit)-1]) + + if err := id.Validate(); err != nil { + return nil, errors.Wrap(err, "invalid ref") + } + + hashes, err := repo.ListCommits(ref) + if err != nil { + return nil, ErrIdentityNotExist + } + if len(hashes) == 0 { + return nil, fmt.Errorf("empty identity") + } + + i := &Identity{} + + for _, hash := range hashes { + entries, err := repo.ReadTree(hash) + if err != nil { + return nil, errors.Wrap(err, "can't list git tree entries") + } + if len(entries) != 1 { + return nil, fmt.Errorf("invalid identity data at hash %s", hash) + } + + entry := entries[0] + if entry.Name != versionEntryName { + return nil, fmt.Errorf("invalid identity data at hash %s", hash) + } + + data, err := repo.ReadData(entry.Hash) + if err != nil { + return nil, 
errors.Wrap(err, "failed to read git blob data") + } + + var version version + err = json.Unmarshal(data, &version) + if err != nil { + return nil, errors.Wrapf(err, "failed to decode Identity version json %s", hash) + } + + // tag the version with the commit hash + version.commitHash = hash + + i.versions = append(i.versions, &version) + } + + if id != i.versions[0].Id() { + return nil, fmt.Errorf("identity ID doesn't math the first version ID") + } + + return i, nil +} + +type StreamedIdentity struct { + Identity *Identity + Err error +} + +// ReadAllLocal read and parse all local Identity +func ReadAllLocal(repo repository.ClockedRepo) <-chan StreamedIdentity { + return readAll(repo, identityRefPattern) +} + +// ReadAllRemote read and parse all remote Identity for a given remote +func ReadAllRemote(repo repository.ClockedRepo, remote string) <-chan StreamedIdentity { + refPrefix := fmt.Sprintf(identityRemoteRefPattern, remote) + return readAll(repo, refPrefix) +} + +// readAll read and parse all available bug with a given ref prefix +func readAll(repo repository.ClockedRepo, refPrefix string) <-chan StreamedIdentity { + out := make(chan StreamedIdentity) + + go func() { + defer close(out) + + refs, err := repo.ListRefs(refPrefix) + if err != nil { + out <- StreamedIdentity{Err: err} + return + } + + for _, ref := range refs { + b, err := read(repo, ref) + + if err != nil { + out <- StreamedIdentity{Err: err} + return + } + + out <- StreamedIdentity{Identity: b} + } + }() + + return out +} + +type Mutator struct { + Name string + Login string + Email string + AvatarUrl string + Keys []*Key +} + +// Mutate allow to create a new version of the Identity in one go +func (i *Identity) Mutate(repo repository.RepoClock, f func(orig *Mutator)) error { + copyKeys := func(keys []*Key) []*Key { + result := make([]*Key, len(keys)) + for i, key := range keys { + result[i] = key.Clone() + } + return result + } + + orig := Mutator{ + Name: i.Name(), + Email: i.Email(), + Login: 
i.Login(), + AvatarUrl: i.AvatarUrl(), + Keys: copyKeys(i.Keys()), + } + mutated := orig + mutated.Keys = copyKeys(orig.Keys) + + f(&mutated) + + if reflect.DeepEqual(orig, mutated) { + return nil + } + + v, err := newVersion(repo, + mutated.Name, + mutated.Email, + mutated.Login, + mutated.AvatarUrl, + mutated.Keys, + ) + if err != nil { + return err + } + + i.versions = append(i.versions, v) + return nil +} + +// Write the identity into the Repository. In particular, this ensure that +// the Id is properly set. +func (i *Identity) Commit(repo repository.ClockedRepo) error { + if !i.NeedCommit() { + return fmt.Errorf("can't commit an identity with no pending version") + } + + if err := i.Validate(); err != nil { + return errors.Wrap(err, "can't commit an identity with invalid data") + } + + var lastCommit repository.Hash + for _, v := range i.versions { + if v.commitHash != "" { + lastCommit = v.commitHash + // ignore already commit versions + continue + } + + blobHash, err := v.Write(repo) + if err != nil { + return err + } + + // Make a git tree referencing the blob + tree := []repository.TreeEntry{ + {ObjectType: repository.Blob, Hash: blobHash, Name: versionEntryName}, + } + + treeHash, err := repo.StoreTree(tree) + if err != nil { + return err + } + + var commitHash repository.Hash + if lastCommit != "" { + commitHash, err = repo.StoreCommitWithParent(treeHash, lastCommit) + } else { + commitHash, err = repo.StoreCommit(treeHash) + } + if err != nil { + return err + } + + lastCommit = commitHash + v.commitHash = commitHash + } + + ref := fmt.Sprintf("%s%s", identityRefPattern, i.Id().String()) + return repo.UpdateRef(ref, lastCommit) +} + +func (i *Identity) CommitAsNeeded(repo repository.ClockedRepo) error { + if !i.NeedCommit() { + return nil + } + return i.Commit(repo) +} + +func (i *Identity) NeedCommit() bool { + for _, v := range i.versions { + if v.commitHash == "" { + return true + } + } + + return false +} + +// Merge will merge a different version 
of the same Identity +// +// To make sure that an Identity history can't be altered, a strict fast-forward +// only policy is applied here. As an Identity should be tied to a single user, this +// should work in practice but it does leave a possibility that a user would edit his +// Identity from two different repo concurrently and push the changes in a non-centralized +// network of repositories. In this case, it would result in some of the repo accepting one +// version and some other accepting another, preventing the network in general to converge +// to the same result. This would create a sort of partition of the network, and manual +// cleaning would be required. +// +// An alternative approach would be to have a determinist rebase: +// - any commits present in both local and remote version would be kept, never changed. +// - newer commits would be merged in a linear chain of commits, ordered based on the +// Lamport time +// +// However, this approach leave the possibility, in the case of a compromised crypto keys, +// of forging a new version with a bogus Lamport time to be inserted before a legit version, +// invalidating the correct version and hijacking the Identity. There would only be a short +// period of time where this would be possible (before the network converge) but I'm not +// confident enough to implement that. I choose the strict fast-forward only approach, +// despite it's potential problem with two different version as mentioned above. 
+func (i *Identity) Merge(repo repository.Repo, other *Identity) (bool, error) { + if i.Id() != other.Id() { + return false, errors.New("merging unrelated identities is not supported") + } + + modified := false + var lastCommit repository.Hash + for j, otherVersion := range other.versions { + // if there is more version in other, take them + if len(i.versions) == j { + i.versions = append(i.versions, otherVersion) + lastCommit = otherVersion.commitHash + modified = true + } + + // we have a non fast-forward merge. + // as explained in the doc above, refusing to merge + if i.versions[j].commitHash != otherVersion.commitHash { + return false, ErrNonFastForwardMerge + } + } + + if modified { + err := repo.UpdateRef(identityRefPattern+i.Id().String(), lastCommit) + if err != nil { + return false, err + } + } + + return false, nil +} + +// Validate check if the Identity data is valid +func (i *Identity) Validate() error { + lastTimes := make(map[string]lamport.Time) + + if len(i.versions) == 0 { + return fmt.Errorf("no version") + } + + for _, v := range i.versions { + if err := v.Validate(); err != nil { + return err + } + + // check for always increasing lamport time + // check that a new version didn't drop a clock + for name, previous := range lastTimes { + if now, ok := v.times[name]; ok { + if now < previous { + return fmt.Errorf("non-chronological lamport clock %s (%d --> %d)", name, previous, now) + } + } else { + return fmt.Errorf("version has less lamport clocks than before (missing %s)", name) + } + } + + for name, now := range v.times { + lastTimes[name] = now + } + } + + return nil +} + +func (i *Identity) lastVersion() *version { + if len(i.versions) <= 0 { + panic("no version at all") + } + + return i.versions[len(i.versions)-1] +} + +// Id return the Identity identifier +func (i *Identity) Id() entity.Id { + // id is the id of the first version + return i.versions[0].Id() +} + +// Name return the last version of the name +func (i *Identity) Name() string 
{ + return i.lastVersion().name +} + +// DisplayName return a non-empty string to display, representing the +// identity, based on the non-empty values. +func (i *Identity) DisplayName() string { + switch { + case i.Name() == "" && i.Login() != "": + return i.Login() + case i.Name() != "" && i.Login() == "": + return i.Name() + case i.Name() != "" && i.Login() != "": + return fmt.Sprintf("%s (%s)", i.Name(), i.Login()) + } + + panic("invalid person data") +} + +// Email return the last version of the email +func (i *Identity) Email() string { + return i.lastVersion().email +} + +// Login return the last version of the login +func (i *Identity) Login() string { + return i.lastVersion().login +} + +// AvatarUrl return the last version of the Avatar URL +func (i *Identity) AvatarUrl() string { + return i.lastVersion().avatarURL +} + +// Keys return the last version of the valid keys +func (i *Identity) Keys() []*Key { + return i.lastVersion().keys +} + +// ValidKeysAtTime return the set of keys valid at a given lamport time +func (i *Identity) ValidKeysAtTime(clockName string, time lamport.Time) []*Key { + var result []*Key + + var lastTime lamport.Time + for _, v := range i.versions { + refTime, ok := v.times[clockName] + if !ok { + refTime = lastTime + } + lastTime = refTime + + if refTime > time { + return result + } + + result = v.keys + } + + return result +} + +// LastModification return the timestamp at which the last version of the identity became valid. +func (i *Identity) LastModification() timestamp.Timestamp { + return timestamp.Timestamp(i.lastVersion().unixTime) +} + +// LastModificationLamports return the lamport times at which the last version of the identity became valid. +func (i *Identity) LastModificationLamports() map[string]lamport.Time { + return i.lastVersion().times +} + +// IsProtected return true if the chain of git commits started to be signed. +// If that's the case, only signed commit with a valid key for this identity can be added. 
func (i *Identity) IsProtected() bool {
	// Todo
	return false
}

// SetMetadata store arbitrary metadata along the last not-commit version.
// If the version has been commit to git already, a new identical version is added and will need to be
// commit.
func (i *Identity) SetMetadata(key string, value string) {
	// once commit, data is immutable so we create a new version
	if i.lastVersion().commitHash != "" {
		i.versions = append(i.versions, i.lastVersion().Clone())
	}
	// if Id() has been called, we can't change the first version anymore, so we create a new version
	// (the Id is derived from the first version's serialized bytes, so mutating it
	// would silently change the Identity's identifier)
	if len(i.versions) == 1 && i.versions[0].id != entity.UnsetId && i.versions[0].id != "" {
		i.versions = append(i.versions, i.lastVersion().Clone())
	}

	// at this point lastVersion() is guaranteed to be an uncommitted, mutable version
	i.lastVersion().SetMetadata(key, value)
}

// ImmutableMetadata return all metadata for this Identity, accumulated from each version.
// If multiple value are found, the first defined takes precedence.
func (i *Identity) ImmutableMetadata() map[string]string {
	metadata := make(map[string]string)

	// versions are iterated oldest first, and an existing key is never
	// overwritten, so the earliest definition wins
	for _, version := range i.versions {
		for key, value := range version.metadata {
			if _, has := metadata[key]; !has {
				metadata[key] = value
			}
		}
	}

	return metadata
}

// MutableMetadata return all metadata for this Identity, accumulated from each version.
// If multiple value are found, the last defined takes precedence.
+func (i *Identity) MutableMetadata() map[string]string { + metadata := make(map[string]string) + + for _, version := range i.versions { + for key, value := range version.metadata { + metadata[key] = value + } + } + + return metadata +} diff --git a/migration3/after/identity/identity_actions.go b/migration3/after/identity/identity_actions.go new file mode 100644 index 0000000..d5c1d7f --- /dev/null +++ b/migration3/after/identity/identity_actions.go @@ -0,0 +1,132 @@ +package identity + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +// Fetch retrieve updates from a remote +// This does not change the local identities state +func Fetch(repo repository.Repo, remote string) (string, error) { + // "refs/identities/*:refs/remotes//identities/*" + remoteRefSpec := fmt.Sprintf(identityRemoteRefPattern, remote) + fetchRefSpec := fmt.Sprintf("%s*:%s*", identityRefPattern, remoteRefSpec) + + return repo.FetchRefs(remote, fetchRefSpec) +} + +// Push update a remote with the local changes +func Push(repo repository.Repo, remote string) (string, error) { + // "refs/identities/*:refs/identities/*" + refspec := fmt.Sprintf("%s*:%s*", identityRefPattern, identityRefPattern) + + return repo.PushRefs(remote, refspec) +} + +// Pull will do a Fetch + MergeAll +// This function will return an error if a merge fail +func Pull(repo repository.ClockedRepo, remote string) error { + _, err := Fetch(repo, remote) + if err != nil { + return err + } + + for merge := range MergeAll(repo, remote) { + if merge.Err != nil { + return merge.Err + } + if merge.Status == entity.MergeStatusInvalid { + return errors.Errorf("merge failure: %s", merge.Reason) + } + } + + return nil +} + +// MergeAll will merge all the available remote identity +func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeResult { + out := make(chan 
entity.MergeResult) + + go func() { + defer close(out) + + remoteRefSpec := fmt.Sprintf(identityRemoteRefPattern, remote) + remoteRefs, err := repo.ListRefs(remoteRefSpec) + + if err != nil { + out <- entity.MergeResult{Err: err} + return + } + + for _, remoteRef := range remoteRefs { + refSplit := strings.Split(remoteRef, "/") + id := entity.Id(refSplit[len(refSplit)-1]) + + if err := id.Validate(); err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "invalid ref").Error()) + continue + } + + remoteIdentity, err := read(repo, remoteRef) + + if err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote identity is not readable").Error()) + continue + } + + // Check for error in remote data + if err := remoteIdentity.Validate(); err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote identity is invalid").Error()) + continue + } + + localRef := identityRefPattern + remoteIdentity.Id().String() + localExist, err := repo.RefExist(localRef) + + if err != nil { + out <- entity.NewMergeError(err, id) + continue + } + + // the identity is not local yet, simply create the reference + if !localExist { + err := repo.CopyRef(remoteRef, localRef) + + if err != nil { + out <- entity.NewMergeError(err, id) + return + } + + out <- entity.NewMergeStatus(entity.MergeStatusNew, id, remoteIdentity) + continue + } + + localIdentity, err := read(repo, localRef) + + if err != nil { + out <- entity.NewMergeError(errors.Wrap(err, "local identity is not readable"), id) + return + } + + updated, err := localIdentity.Merge(repo, remoteIdentity) + + if err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "merge failed").Error()) + return + } + + if updated { + out <- entity.NewMergeStatus(entity.MergeStatusUpdated, id, localIdentity) + } else { + out <- entity.NewMergeStatus(entity.MergeStatusNothing, id, localIdentity) + } + } + }() + + return out +} diff --git 
a/migration3/after/identity/identity_actions_test.go b/migration3/after/identity/identity_actions_test.go new file mode 100644 index 0000000..2923d48 --- /dev/null +++ b/migration3/after/identity/identity_actions_test.go @@ -0,0 +1,158 @@ +package identity + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +func TestPushPull(t *testing.T) { + repoA, repoB, remote := repository.SetupReposAndRemote() + defer repository.CleanupTestRepos(repoA, repoB, remote) + + identity1, err := NewIdentity(repoA, "name1", "email1") + require.NoError(t, err) + err = identity1.Commit(repoA) + require.NoError(t, err) + + // A --> remote --> B + _, err = Push(repoA, "origin") + require.NoError(t, err) + + err = Pull(repoB, "origin") + require.NoError(t, err) + + identities := allIdentities(t, ReadAllLocal(repoB)) + + if len(identities) != 1 { + t.Fatal("Unexpected number of bugs") + } + + // B --> remote --> A + identity2, err := NewIdentity(repoB, "name2", "email2") + require.NoError(t, err) + err = identity2.Commit(repoB) + require.NoError(t, err) + + _, err = Push(repoB, "origin") + require.NoError(t, err) + + err = Pull(repoA, "origin") + require.NoError(t, err) + + identities = allIdentities(t, ReadAllLocal(repoA)) + + if len(identities) != 2 { + t.Fatal("Unexpected number of bugs") + } + + // Update both + + err = identity1.Mutate(repoA, func(orig *Mutator) { + orig.Name = "name1b" + orig.Email = "email1b" + }) + require.NoError(t, err) + err = identity1.Commit(repoA) + require.NoError(t, err) + + err = identity2.Mutate(repoB, func(orig *Mutator) { + orig.Name = "name2b" + orig.Email = "email2b" + }) + require.NoError(t, err) + err = identity2.Commit(repoB) + require.NoError(t, err) + + // A --> remote --> B + + _, err = Push(repoA, "origin") + require.NoError(t, err) + + err = Pull(repoB, "origin") + require.NoError(t, err) + + identities = allIdentities(t, ReadAllLocal(repoB)) + + if 
len(identities) != 2 { + t.Fatal("Unexpected number of bugs") + } + + // B --> remote --> A + + _, err = Push(repoB, "origin") + require.NoError(t, err) + + err = Pull(repoA, "origin") + require.NoError(t, err) + + identities = allIdentities(t, ReadAllLocal(repoA)) + + if len(identities) != 2 { + t.Fatal("Unexpected number of bugs") + } + + // Concurrent update + + err = identity1.Mutate(repoA, func(orig *Mutator) { + orig.Name = "name1c" + orig.Email = "email1c" + }) + require.NoError(t, err) + err = identity1.Commit(repoA) + require.NoError(t, err) + + identity1B, err := ReadLocal(repoB, identity1.Id()) + require.NoError(t, err) + + err = identity1B.Mutate(repoB, func(orig *Mutator) { + orig.Name = "name1concurrent" + orig.Email = "name1concurrent" + }) + require.NoError(t, err) + err = identity1B.Commit(repoB) + require.NoError(t, err) + + // A --> remote --> B + + _, err = Push(repoA, "origin") + require.NoError(t, err) + + // Pulling a non-fast-forward update should fail + err = Pull(repoB, "origin") + require.Error(t, err) + + identities = allIdentities(t, ReadAllLocal(repoB)) + + if len(identities) != 2 { + t.Fatal("Unexpected number of bugs") + } + + // B --> remote --> A + + // Pushing a non-fast-forward update should fail + _, err = Push(repoB, "origin") + require.Error(t, err) + + err = Pull(repoA, "origin") + require.NoError(t, err) + + identities = allIdentities(t, ReadAllLocal(repoA)) + + if len(identities) != 2 { + t.Fatal("Unexpected number of bugs") + } +} + +func allIdentities(t testing.TB, identities <-chan StreamedIdentity) []*Identity { + var result []*Identity + for streamed := range identities { + if streamed.Err != nil { + t.Fatal(streamed.Err) + } + result = append(result, streamed.Identity) + } + return result +} diff --git a/migration3/after/identity/identity_stub.go b/migration3/after/identity/identity_stub.go new file mode 100644 index 0000000..0eaf449 --- /dev/null +++ b/migration3/after/identity/identity_stub.go @@ -0,0 +1,96 @@ 
+package identity + +import ( + "encoding/json" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" +) + +var _ Interface = &IdentityStub{} + +// IdentityStub is an almost empty Identity, holding only the id. +// When a normal Identity is serialized into JSON, only the id is serialized. +// All the other data are stored in git in a chain of commit + a ref. +// When this JSON is deserialized, an IdentityStub is returned instead, to be replaced +// later by the proper Identity, loaded from the Repo. +type IdentityStub struct { + id entity.Id +} + +func (i *IdentityStub) MarshalJSON() ([]byte, error) { + // TODO: add a type marker + return json.Marshal(struct { + Id entity.Id `json:"id"` + }{ + Id: i.id, + }) +} + +func (i *IdentityStub) UnmarshalJSON(data []byte) error { + aux := struct { + Id entity.Id `json:"id"` + }{} + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + i.id = aux.Id + + return nil +} + +// Id return the Identity identifier +func (i *IdentityStub) Id() entity.Id { + return i.id +} + +func (IdentityStub) Name() string { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) DisplayName() string { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) Email() string { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) Login() string { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) AvatarUrl() string { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) Keys() []*Key { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) ValidKeysAtTime(_ string, _ lamport.Time) 
[]*Key { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (i *IdentityStub) LastModification() timestamp.Timestamp { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (i *IdentityStub) LastModificationLamports() map[string]lamport.Time { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) IsProtected() bool { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) Validate() error { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (i *IdentityStub) NeedCommit() bool { + return false +} diff --git a/migration3/after/identity/identity_stub_test.go b/migration3/after/identity/identity_stub_test.go new file mode 100644 index 0000000..b01a718 --- /dev/null +++ b/migration3/after/identity/identity_stub_test.go @@ -0,0 +1,26 @@ +package identity + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIdentityStubSerialize(t *testing.T) { + before := &IdentityStub{ + id: "id1234", + } + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after IdentityStub + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the Id + before.Id() + + assert.Equal(t, before, &after) +} diff --git a/migration3/after/identity/identity_test.go b/migration3/after/identity/identity_test.go new file mode 100644 index 0000000..fabafde --- /dev/null +++ b/migration3/after/identity/identity_test.go @@ -0,0 +1,248 @@ +package identity + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +// Test the commit and load of an Identity with multiple versions +func TestIdentityCommitLoad(t *testing.T) { + repo := makeIdentityTestRepo(t) + + // single version + + identity, err := NewIdentity(repo, "René 
Descartes", "rene.descartes@example.com") + require.NoError(t, err) + + idBeforeCommit := identity.Id() + + err = identity.Commit(repo) + require.NoError(t, err) + + commitsAreSet(t, identity) + require.NotEmpty(t, identity.Id()) + require.Equal(t, idBeforeCommit, identity.Id()) + require.Equal(t, idBeforeCommit, identity.versions[0].Id()) + + loaded, err := ReadLocal(repo, identity.Id()) + require.NoError(t, err) + commitsAreSet(t, loaded) + require.Equal(t, identity, loaded) + + // multiple versions + + identity, err = NewIdentityFull(repo, "René Descartes", "rene.descartes@example.com", "", "", []*Key{{PubKey: "pubkeyA"}}) + require.NoError(t, err) + + idBeforeCommit = identity.Id() + + err = identity.Mutate(repo, func(orig *Mutator) { + orig.Keys = []*Key{{PubKey: "pubkeyB"}} + }) + require.NoError(t, err) + + err = identity.Mutate(repo, func(orig *Mutator) { + orig.Keys = []*Key{{PubKey: "pubkeyC"}} + }) + require.NoError(t, err) + + require.Equal(t, idBeforeCommit, identity.Id()) + + err = identity.Commit(repo) + require.NoError(t, err) + + commitsAreSet(t, identity) + require.NotEmpty(t, identity.Id()) + require.Equal(t, idBeforeCommit, identity.Id()) + require.Equal(t, idBeforeCommit, identity.versions[0].Id()) + + loaded, err = ReadLocal(repo, identity.Id()) + require.NoError(t, err) + commitsAreSet(t, loaded) + require.Equal(t, identity, loaded) + + // add more version + + err = identity.Mutate(repo, func(orig *Mutator) { + orig.Email = "rene@descartes.com" + orig.Keys = []*Key{{PubKey: "pubkeyD"}} + }) + require.NoError(t, err) + + err = identity.Mutate(repo, func(orig *Mutator) { + orig.Email = "rene@descartes.com" + orig.Keys = []*Key{{PubKey: "pubkeyD"}, {PubKey: "pubkeyE"}} + }) + require.NoError(t, err) + + err = identity.Commit(repo) + require.NoError(t, err) + + commitsAreSet(t, identity) + require.NotEmpty(t, identity.Id()) + require.Equal(t, idBeforeCommit, identity.Id()) + require.Equal(t, idBeforeCommit, identity.versions[0].Id()) + + loaded, 
err = ReadLocal(repo, identity.Id()) + require.NoError(t, err) + commitsAreSet(t, loaded) + require.Equal(t, identity, loaded) +} + +func TestIdentityMutate(t *testing.T) { + repo := makeIdentityTestRepo(t) + + identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com") + require.NoError(t, err) + + require.Len(t, identity.versions, 1) + + err = identity.Mutate(repo, func(orig *Mutator) { + orig.Email = "rene@descartes.fr" + orig.Name = "René" + orig.Login = "rene" + }) + require.NoError(t, err) + + require.Len(t, identity.versions, 2) + require.Equal(t, identity.Email(), "rene@descartes.fr") + require.Equal(t, identity.Name(), "René") + require.Equal(t, identity.Login(), "rene") +} + +func commitsAreSet(t *testing.T, identity *Identity) { + for _, version := range identity.versions { + require.NotEmpty(t, version.commitHash) + } +} + +// Test that the correct crypto keys are returned for a given lamport time +func TestIdentity_ValidKeysAtTime(t *testing.T) { + identity := Identity{ + versions: []*version{ + { + times: map[string]lamport.Time{"foo": 100}, + keys: []*Key{ + {PubKey: "pubkeyA"}, + }, + }, + { + times: map[string]lamport.Time{"foo": 200}, + keys: []*Key{ + {PubKey: "pubkeyB"}, + }, + }, + { + times: map[string]lamport.Time{"foo": 201}, + keys: []*Key{ + {PubKey: "pubkeyC"}, + }, + }, + { + times: map[string]lamport.Time{"foo": 201}, + keys: []*Key{ + {PubKey: "pubkeyD"}, + }, + }, + { + times: map[string]lamport.Time{"foo": 300}, + keys: []*Key{ + {PubKey: "pubkeyE"}, + }, + }, + }, + } + + require.Nil(t, identity.ValidKeysAtTime("foo", 10)) + require.Equal(t, identity.ValidKeysAtTime("foo", 100), []*Key{{PubKey: "pubkeyA"}}) + require.Equal(t, identity.ValidKeysAtTime("foo", 140), []*Key{{PubKey: "pubkeyA"}}) + require.Equal(t, identity.ValidKeysAtTime("foo", 200), []*Key{{PubKey: "pubkeyB"}}) + require.Equal(t, identity.ValidKeysAtTime("foo", 201), []*Key{{PubKey: "pubkeyD"}}) + require.Equal(t, identity.ValidKeysAtTime("foo", 
202), []*Key{{PubKey: "pubkeyD"}}) + require.Equal(t, identity.ValidKeysAtTime("foo", 300), []*Key{{PubKey: "pubkeyE"}}) + require.Equal(t, identity.ValidKeysAtTime("foo", 3000), []*Key{{PubKey: "pubkeyE"}}) +} + +// Test the immutable or mutable metadata search +func TestMetadata(t *testing.T) { + repo := makeIdentityTestRepo(t) + + identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com") + require.NoError(t, err) + + identity.SetMetadata("key1", "value1") + assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") + assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value1") + + err = identity.Commit(repo) + require.NoError(t, err) + + assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") + assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value1") + + // try override + err = identity.Mutate(repo, func(orig *Mutator) { + orig.Email = "rene@descartes.fr" + }) + require.NoError(t, err) + + identity.SetMetadata("key1", "value2") + assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") + assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value2") + + err = identity.Commit(repo) + require.NoError(t, err) + + // reload + loaded, err := ReadLocal(repo, identity.Id()) + require.NoError(t, err) + + assertHasKeyValue(t, loaded.ImmutableMetadata(), "key1", "value1") + assertHasKeyValue(t, loaded.MutableMetadata(), "key1", "value2") + + // set metadata after commit + versionCount := len(identity.versions) + identity.SetMetadata("foo", "bar") + require.True(t, identity.NeedCommit()) + require.Len(t, identity.versions, versionCount+1) + + err = identity.Commit(repo) + require.NoError(t, err) + require.Len(t, identity.versions, versionCount+1) +} + +func assertHasKeyValue(t *testing.T, metadata map[string]string, key, value string) { + val, ok := metadata[key] + require.True(t, ok) + require.Equal(t, val, value) +} + +func TestJSON(t *testing.T) { + repo := makeIdentityTestRepo(t) + + 
identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com") + require.NoError(t, err) + + // commit to make sure we have an Id + err = identity.Commit(repo) + require.NoError(t, err) + require.NotEmpty(t, identity.Id()) + + // serialize + data, err := json.Marshal(identity) + require.NoError(t, err) + + // deserialize, got a IdentityStub with the same id + var i Interface + i, err = UnmarshalJSON(data) + require.NoError(t, err) + require.Equal(t, identity.Id(), i.Id()) + + // make sure we can load the identity properly + i, err = ReadLocal(repo, i.Id()) + require.NoError(t, err) +} diff --git a/migration3/after/identity/identity_user.go b/migration3/after/identity/identity_user.go new file mode 100644 index 0000000..8241666 --- /dev/null +++ b/migration3/after/identity/identity_user.go @@ -0,0 +1,68 @@ +package identity + +import ( + "fmt" + "os" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +// SetUserIdentity store the user identity's id in the git config +func SetUserIdentity(repo repository.RepoConfig, identity *Identity) error { + return repo.LocalConfig().StoreString(identityConfigKey, identity.Id().String()) +} + +// GetUserIdentity read the current user identity, set with a git config entry +func GetUserIdentity(repo repository.Repo) (*Identity, error) { + id, err := GetUserIdentityId(repo) + if err != nil { + return nil, err + } + + i, err := ReadLocal(repo, id) + if err == ErrIdentityNotExist { + innerErr := repo.LocalConfig().RemoveAll(identityConfigKey) + if innerErr != nil { + _, _ = fmt.Fprintln(os.Stderr, errors.Wrap(innerErr, "can't clear user identity").Error()) + } + return nil, err + } + + return i, nil +} + +func GetUserIdentityId(repo repository.Repo) (entity.Id, error) { + val, err := repo.LocalConfig().ReadString(identityConfigKey) + if err == repository.ErrNoConfigEntry { + return 
entity.UnsetId, ErrNoIdentitySet + } + if err == repository.ErrMultipleConfigEntry { + return entity.UnsetId, ErrMultipleIdentitiesSet + } + if err != nil { + return entity.UnsetId, err + } + + var id = entity.Id(val) + + if err := id.Validate(); err != nil { + return entity.UnsetId, err + } + + return id, nil +} + +// IsUserIdentitySet say if the user has set his identity +func IsUserIdentitySet(repo repository.Repo) (bool, error) { + _, err := repo.LocalConfig().ReadString(identityConfigKey) + if err == repository.ErrNoConfigEntry { + return false, nil + } + if err != nil { + return false, err + } + return true, nil +} diff --git a/migration3/after/identity/interface.go b/migration3/after/identity/interface.go new file mode 100644 index 0000000..1ef07be --- /dev/null +++ b/migration3/after/identity/interface.go @@ -0,0 +1,58 @@ +package identity + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" +) + +type Interface interface { + entity.Interface + + // Name return the last version of the name + // Can be empty. + Name() string + + // DisplayName return a non-empty string to display, representing the + // identity, based on the non-empty values. + DisplayName() string + + // Email return the last version of the email + // Can be empty. + Email() string + + // Login return the last version of the login + // Can be empty. + // Warning: this login can be defined when importing from a bridge but should *not* be + // used to identify an identity as multiple bridge with different login can map to the same + // identity. Use the metadata system for that usage instead. + Login() string + + // AvatarUrl return the last version of the Avatar URL + // Can be empty. + AvatarUrl() string + + // Keys return the last version of the valid keys + // Can be empty. 
+ Keys() []*Key + + // ValidKeysAtTime return the set of keys valid at a given lamport time for a given clock of another entity + // Can be empty. + ValidKeysAtTime(clockName string, time lamport.Time) []*Key + + // LastModification return the timestamp at which the last version of the identity became valid. + LastModification() timestamp.Timestamp + + // LastModificationLamports return the lamport times at which the last version of the identity became valid. + LastModificationLamports() map[string]lamport.Time + + // IsProtected return true if the chain of git commits started to be signed. + // If that's the case, only signed commit with a valid key for this identity can be added. + IsProtected() bool + + // Validate check if the Identity data is valid + Validate() error + + // Indicate that the in-memory state changed and need to be commit in the repository + NeedCommit() bool +} diff --git a/migration3/after/identity/key.go b/migration3/after/identity/key.go new file mode 100644 index 0000000..cc94839 --- /dev/null +++ b/migration3/after/identity/key.go @@ -0,0 +1,18 @@ +package identity + +type Key struct { + // The GPG fingerprint of the key + Fingerprint string `json:"fingerprint"` + PubKey string `json:"pub_key"` +} + +func (k *Key) Validate() error { + // Todo + + return nil +} + +func (k *Key) Clone() *Key { + clone := *k + return &clone +} diff --git a/migration3/after/identity/resolver.go b/migration3/after/identity/resolver.go new file mode 100644 index 0000000..e3ec1a2 --- /dev/null +++ b/migration3/after/identity/resolver.go @@ -0,0 +1,36 @@ +package identity + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +// Resolver define the interface of an Identity resolver, able to load +// an identity from, for example, a repo or a cache. 
+type Resolver interface { + ResolveIdentity(id entity.Id) (Interface, error) +} + +// SimpleResolver is a Resolver loading Identities directly from a Repo +type SimpleResolver struct { + repo repository.Repo +} + +func NewSimpleResolver(repo repository.Repo) *SimpleResolver { + return &SimpleResolver{repo: repo} +} + +func (r *SimpleResolver) ResolveIdentity(id entity.Id) (Interface, error) { + return ReadLocal(r.repo, id) +} + +// StubResolver is a Resolver that doesn't load anything, only returning IdentityStub instances +type StubResolver struct{} + +func NewStubResolver() *StubResolver { + return &StubResolver{} +} + +func (s *StubResolver) ResolveIdentity(id entity.Id) (Interface, error) { + return &IdentityStub{id: id}, nil +} diff --git a/migration3/after/identity/version.go b/migration3/after/identity/version.go new file mode 100644 index 0000000..cf5c0f8 --- /dev/null +++ b/migration3/after/identity/version.go @@ -0,0 +1,286 @@ +package identity + +import ( + "crypto/rand" + "encoding/json" + "fmt" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/text" +) + +// 1: original format +// 2: Identity Ids are generated from the first version serialized data instead of from the first git commit +// + Identity hold multiple lamport clocks from other entities, instead of just bug edit +const formatVersion = 2 + +// version is a complete set of information about an Identity at a point in time. 
+type version struct { + name string + email string // as defined in git or from a bridge when importing the identity + login string // from a bridge when importing the identity + avatarURL string + + // The lamport times of the other entities at which this version become effective + times map[string]lamport.Time + unixTime int64 + + // The set of keys valid at that time, from this version onward, until they get removed + // in a new version. This allow to have multiple key for the same identity (e.g. one per + // device) as well as revoke key. + keys []*Key + + // mandatory random bytes to ensure a better randomness of the data of the first + // version of a bug, used to later generate the ID + // len(Nonce) should be > 20 and < 64 bytes + // It has no functional purpose and should be ignored. + // TODO: optional after first version? + nonce []byte + + // A set of arbitrary key/value to store metadata about a version or about an Identity in general. + metadata map[string]string + + // Not serialized. Store the version's id in memory. 
+ id entity.Id + // Not serialized + commitHash repository.Hash +} + +func newVersion(repo repository.RepoClock, name string, email string, login string, avatarURL string, keys []*Key) (*version, error) { + clocks, err := repo.AllClocks() + if err != nil { + return nil, err + } + + times := make(map[string]lamport.Time) + for name, clock := range clocks { + times[name] = clock.Time() + } + + return &version{ + id: entity.UnsetId, + name: name, + email: email, + login: login, + avatarURL: avatarURL, + times: times, + unixTime: time.Now().Unix(), + keys: keys, + nonce: makeNonce(20), + }, nil +} + +type versionJSON struct { + // Additional field to version the data + FormatVersion uint `json:"version"` + + Times map[string]lamport.Time `json:"times"` + UnixTime int64 `json:"unix_time"` + Name string `json:"name,omitempty"` + Email string `json:"email,omitempty"` + Login string `json:"login,omitempty"` + AvatarUrl string `json:"avatar_url,omitempty"` + Keys []*Key `json:"pub_keys,omitempty"` + Nonce []byte `json:"nonce"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// Id return the identifier of the version +func (v *version) Id() entity.Id { + if v.id == "" { + // something went really wrong + panic("version's id not set") + } + if v.id == entity.UnsetId { + // This means we are trying to get the version's Id *before* it has been stored. + // As the Id is computed based on the actual bytes written on the disk, we are going to predict + // those and then get the Id. This is safe as it will be the exact same code writing on disk later. 
+ data, err := json.Marshal(v) + if err != nil { + panic(err) + } + v.id = entity.DeriveId(data) + } + return v.id +} + +// Make a deep copy +func (v *version) Clone() *version { + // copy direct fields + clone := *v + + // reset some fields + clone.commitHash = "" + clone.id = entity.UnsetId + + clone.times = make(map[string]lamport.Time) + for name, t := range v.times { + clone.times[name] = t + } + + clone.keys = make([]*Key, len(v.keys)) + for i, key := range v.keys { + clone.keys[i] = key.Clone() + } + + clone.nonce = make([]byte, len(v.nonce)) + copy(clone.nonce, v.nonce) + + // not copying metadata + + return &clone +} + +func (v *version) MarshalJSON() ([]byte, error) { + return json.Marshal(versionJSON{ + FormatVersion: formatVersion, + Times: v.times, + UnixTime: v.unixTime, + Name: v.name, + Email: v.email, + Login: v.login, + AvatarUrl: v.avatarURL, + Keys: v.keys, + Nonce: v.nonce, + Metadata: v.metadata, + }) +} + +func (v *version) UnmarshalJSON(data []byte) error { + var aux versionJSON + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + if aux.FormatVersion < formatVersion { + return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade") + } + if aux.FormatVersion > formatVersion { + return fmt.Errorf("your version of git-bug is too old for this repository (identity format %v), please upgrade to the latest version", aux.FormatVersion) + } + + v.id = entity.DeriveId(data) + v.times = aux.Times + v.unixTime = aux.UnixTime + v.name = aux.Name + v.email = aux.Email + v.login = aux.Login + v.avatarURL = aux.AvatarUrl + v.keys = aux.Keys + v.nonce = aux.Nonce + v.metadata = aux.Metadata + + return nil +} + +func (v *version) Validate() error { + // time must be set after a commit + if v.commitHash != "" && v.unixTime == 0 { + return fmt.Errorf("unix time not set") + } + + if text.Empty(v.name) && text.Empty(v.login) { + return fmt.Errorf("either name or login should be 
set") + } + if strings.Contains(v.name, "\n") { + return fmt.Errorf("name should be a single line") + } + if !text.Safe(v.name) { + return fmt.Errorf("name is not fully printable") + } + + if strings.Contains(v.login, "\n") { + return fmt.Errorf("login should be a single line") + } + if !text.Safe(v.login) { + return fmt.Errorf("login is not fully printable") + } + + if strings.Contains(v.email, "\n") { + return fmt.Errorf("email should be a single line") + } + if !text.Safe(v.email) { + return fmt.Errorf("email is not fully printable") + } + + if v.avatarURL != "" && !text.ValidUrl(v.avatarURL) { + return fmt.Errorf("avatarUrl is not a valid URL") + } + + if len(v.nonce) > 64 { + return fmt.Errorf("nonce is too big") + } + if len(v.nonce) < 20 { + return fmt.Errorf("nonce is too small") + } + + for _, k := range v.keys { + if err := k.Validate(); err != nil { + return errors.Wrap(err, "invalid key") + } + } + + return nil +} + +// Write will serialize and store the version as a git blob and return +// its hash +func (v *version) Write(repo repository.Repo) (repository.Hash, error) { + // make sure we don't write invalid data + err := v.Validate() + if err != nil { + return "", errors.Wrap(err, "validation error") + } + + data, err := json.Marshal(v) + if err != nil { + return "", err + } + + hash, err := repo.StoreData(data) + if err != nil { + return "", err + } + + // make sure we set the Id when writing in the repo + v.id = entity.DeriveId(data) + + return hash, nil +} + +func makeNonce(len int) []byte { + result := make([]byte, len) + _, err := rand.Read(result) + if err != nil { + panic(err) + } + return result +} + +// SetMetadata store arbitrary metadata about a version or an Identity in general +// If the version has been commit to git already, it won't be overwritten. 
+// Beware: changing the metadata on a version will change its ID +func (v *version) SetMetadata(key string, value string) { + if v.metadata == nil { + v.metadata = make(map[string]string) + } + v.metadata[key] = value +} + +// GetMetadata retrieves arbitrary metadata about the version +func (v *version) GetMetadata(key string) (string, bool) { + val, ok := v.metadata[key] + return val, ok +} + +// AllMetadata returns all metadata for this version +func (v *version) AllMetadata() map[string]string { + return v.metadata +} diff --git a/migration3/after/identity/version_test.go b/migration3/after/identity/version_test.go new file mode 100644 index 0000000..2fa5b8c --- /dev/null +++ b/migration3/after/identity/version_test.go @@ -0,0 +1,84 @@ +package identity + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +func makeIdentityTestRepo(t *testing.T) repository.ClockedRepo { + repo := repository.NewMockRepoForTest() + + clock1, err := repo.GetOrCreateClock("foo") + require.NoError(t, err) + err = clock1.Witness(42) // clock goes to 43 + require.NoError(t, err) + + clock2, err := repo.GetOrCreateClock("bar") + require.NoError(t, err) + err = clock2.Witness(34) // clock goes to 35 + require.NoError(t, err) + + return repo +} + +func TestVersionSerialize(t *testing.T) { + repo := makeIdentityTestRepo(t) + + keys := []*Key{ + { + Fingerprint: "fingerprint1", + PubKey: "pubkey1", + }, + { + Fingerprint: "fingerprint2", + PubKey: "pubkey2", + }, + } + + before, err := newVersion(repo, "name", "email", "login", "avatarUrl", keys) + require.NoError(t, err) + + before.SetMetadata("key1", "value1") + before.SetMetadata("key2", "value2") + + expected := &version{ + id: 
entity.UnsetId, + name: "name", + email: "email", + login: "login", + avatarURL: "avatarUrl", + unixTime: time.Now().Unix(), + times: map[string]lamport.Time{ + "foo": 43, + "bar": 35, + }, + keys: keys, + nonce: before.nonce, + metadata: map[string]string{ + "key1": "value1", + "key2": "value2", + }, + } + + require.Equal(t, expected, before) + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after version + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // make sure we now have an Id + expected.Id() + + assert.Equal(t, expected, &after) +} diff --git a/migration3/after/repository/config.go b/migration3/after/repository/config.go new file mode 100644 index 0000000..4db8d4b --- /dev/null +++ b/migration3/after/repository/config.go @@ -0,0 +1,145 @@ +package repository + +import ( + "errors" + "strconv" + "time" +) + +var ( + ErrNoConfigEntry = errors.New("no config entry for the given key") + ErrMultipleConfigEntry = errors.New("multiple config entry for the given key") +) + +// Config represents the common functions for interacting with the repository config storage +type Config interface { + ConfigRead + ConfigWrite +} + +type ConfigRead interface { + // ReadAll reads all key/value pairs matching the key prefix + ReadAll(keyPrefix string) (map[string]string, error) + + // ReadBool reads a single boolean value from the config + // Return ErrNoConfigEntry or ErrMultipleConfigEntry if + // there is zero or more than one entry for this key + ReadBool(key string) (bool, error) + + // ReadString reads a single string value from the config + // Return ErrNoConfigEntry or ErrMultipleConfigEntry if + // there is zero or more than one entry for this key + ReadString(key string) (string, error) + + // ReadTimestamp reads a single timestamp value from the config + // Return ErrNoConfigEntry or ErrMultipleConfigEntry if + // there is zero or more than one entry for this key + ReadTimestamp(key string) (time.Time, error) +} + +type ConfigWrite 
interface { + // StoreString writes a single key/value pair in the config + StoreString(key, value string) error + + // StoreTimestamp writes a key and timestamp value to the config + StoreTimestamp(key string, value time.Time) error + + // StoreBool writes a key and boolean value to the config + StoreBool(key string, value bool) error + + // RemoveAll removes all key/value pairs matching the key prefix + RemoveAll(keyPrefix string) error +} + +func ParseTimestamp(s string) (time.Time, error) { + timestamp, err := strconv.Atoi(s) + if err != nil { + return time.Time{}, err + } + + return time.Unix(int64(timestamp), 0), nil +} + +// mergeConfig is a helper to easily support RepoConfig.AnyConfig() +// from two separate local and global Config +func mergeConfig(local ConfigRead, global ConfigRead) *mergedConfig { + return &mergedConfig{ + local: local, + global: global, + } +} + +var _ ConfigRead = &mergedConfig{} + +type mergedConfig struct { + local ConfigRead + global ConfigRead } + +func (m *mergedConfig) ReadAll(keyPrefix string) (map[string]string, error) { + values, err := m.global.ReadAll(keyPrefix) + if err != nil { + return nil, err + } + locals, err := m.local.ReadAll(keyPrefix) + if err != nil { + return nil, err + } + for k, val := range locals { + values[k] = val + } + return values, nil +} + +func (m *mergedConfig) ReadBool(key string) (bool, error) { + v, err := m.local.ReadBool(key) + if err == nil { + return v, nil + } + if err != ErrNoConfigEntry && err != ErrMultipleConfigEntry { + return false, err + } + return m.global.ReadBool(key) +} + +func (m *mergedConfig) ReadString(key string) (string, error) { + val, err := m.local.ReadString(key) + if err == nil { + return val, nil + } + if err != ErrNoConfigEntry && err != ErrMultipleConfigEntry { + return "", err + } + return m.global.ReadString(key) +} + +func (m *mergedConfig) ReadTimestamp(key string) (time.Time, error) { + val, err := m.local.ReadTimestamp(key) + if err == nil { + return val, nil + } + if err != 
ErrNoConfigEntry && err != ErrMultipleConfigEntry { + return time.Time{}, err + } + return m.global.ReadTimestamp(key) +} + +var _ ConfigWrite = &configPanicWriter{} + +type configPanicWriter struct{} + +func (c configPanicWriter) StoreString(key, value string) error { + panic("not implemented") +} + +func (c configPanicWriter) StoreTimestamp(key string, value time.Time) error { + panic("not implemented") +} + +func (c configPanicWriter) StoreBool(key string, value bool) error { + panic("not implemented") +} + +func (c configPanicWriter) RemoveAll(keyPrefix string) error { + panic("not implemented") +} diff --git a/migration3/after/repository/config_mem.go b/migration3/after/repository/config_mem.go new file mode 100644 index 0000000..9725e8d --- /dev/null +++ b/migration3/after/repository/config_mem.go @@ -0,0 +1,94 @@ +package repository + +import ( + "fmt" + "strconv" + "strings" + "time" +) + +var _ Config = &MemConfig{} + +type MemConfig struct { + config map[string]string +} + +func NewMemConfig() *MemConfig { + return &MemConfig{ + config: make(map[string]string), + } +} + +func (mc *MemConfig) StoreString(key, value string) error { + mc.config[key] = value + return nil +} + +func (mc *MemConfig) StoreBool(key string, value bool) error { + return mc.StoreString(key, strconv.FormatBool(value)) +} + +func (mc *MemConfig) StoreTimestamp(key string, value time.Time) error { + return mc.StoreString(key, strconv.Itoa(int(value.Unix()))) +} + +func (mc *MemConfig) ReadAll(keyPrefix string) (map[string]string, error) { + result := make(map[string]string) + for key, val := range mc.config { + if strings.HasPrefix(key, keyPrefix) { + result[key] = val + } + } + return result, nil +} + +func (mc *MemConfig) ReadString(key string) (string, error) { + // unlike git, the mock can only store one value for the same key + val, ok := mc.config[key] + if !ok { + return "", ErrNoConfigEntry + } + + return val, nil +} + +func (mc *MemConfig) ReadBool(key string) (bool, error) { 
+ // unlike git, the mock can only store one value for the same key + val, ok := mc.config[key] + if !ok { + return false, ErrNoConfigEntry + } + + return strconv.ParseBool(val) +} + +func (mc *MemConfig) ReadTimestamp(key string) (time.Time, error) { + value, err := mc.ReadString(key) + if err != nil { + return time.Time{}, err + } + + timestamp, err := strconv.Atoi(value) + if err != nil { + return time.Time{}, err + } + + return time.Unix(int64(timestamp), 0), nil +} + +// RmConfigs remove all key/value pair matching the key prefix +func (mc *MemConfig) RemoveAll(keyPrefix string) error { + found := false + for key := range mc.config { + if strings.HasPrefix(key, keyPrefix) { + delete(mc.config, key) + found = true + } + } + + if !found { + return fmt.Errorf("section not found") + } + + return nil +} diff --git a/migration3/after/repository/config_mem_test.go b/migration3/after/repository/config_mem_test.go new file mode 100644 index 0000000..d9c3385 --- /dev/null +++ b/migration3/after/repository/config_mem_test.go @@ -0,0 +1,7 @@ +package repository + +import "testing" + +func TestNewMemConfig(t *testing.T) { + testConfig(t, NewMemConfig()) +} diff --git a/migration3/after/repository/config_test.go b/migration3/after/repository/config_test.go new file mode 100644 index 0000000..2a76354 --- /dev/null +++ b/migration3/after/repository/config_test.go @@ -0,0 +1,54 @@ +package repository + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func TestMergedConfig(t *testing.T) { + local := NewMemConfig() + global := NewMemConfig() + merged := mergeConfig(local, global) + + require.NoError(t, global.StoreBool("bool", true)) + require.NoError(t, global.StoreString("string", "foo")) + require.NoError(t, global.StoreTimestamp("timestamp", time.Unix(1234, 0))) + + val1, err := merged.ReadBool("bool") + require.NoError(t, err) + require.Equal(t, val1, true) + + val2, err := merged.ReadString("string") + require.NoError(t, err) + 
require.Equal(t, val2, "foo") + + val3, err := merged.ReadTimestamp("timestamp") + require.NoError(t, err) + require.Equal(t, val3, time.Unix(1234, 0)) + + require.NoError(t, local.StoreBool("bool", false)) + require.NoError(t, local.StoreString("string", "bar")) + require.NoError(t, local.StoreTimestamp("timestamp", time.Unix(5678, 0))) + + val1, err = merged.ReadBool("bool") + require.NoError(t, err) + require.Equal(t, val1, false) + + val2, err = merged.ReadString("string") + require.NoError(t, err) + require.Equal(t, val2, "bar") + + val3, err = merged.ReadTimestamp("timestamp") + require.NoError(t, err) + require.Equal(t, val3, time.Unix(5678, 0)) + + all, err := merged.ReadAll("") + require.NoError(t, err) + require.Equal(t, all, map[string]string{ + "bool": "false", + "string": "bar", + "timestamp": "5678", + }) +} diff --git a/migration3/after/repository/config_testing.go b/migration3/after/repository/config_testing.go new file mode 100644 index 0000000..445f872 --- /dev/null +++ b/migration3/after/repository/config_testing.go @@ -0,0 +1,116 @@ +package repository + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func testConfig(t *testing.T, config Config) { + // string + err := config.StoreString("section.key", "value") + require.NoError(t, err) + + val, err := config.ReadString("section.key") + require.NoError(t, err) + require.Equal(t, "value", val) + + // bool + err = config.StoreBool("section.true", true) + require.NoError(t, err) + + val2, err := config.ReadBool("section.true") + require.NoError(t, err) + require.Equal(t, true, val2) + + // timestamp + err = config.StoreTimestamp("section.time", time.Unix(1234, 0)) + require.NoError(t, err) + + val3, err := config.ReadTimestamp("section.time") + require.NoError(t, err) + require.Equal(t, time.Unix(1234, 0), val3) + + // ReadAll + configs, err := config.ReadAll("section") + require.NoError(t, err) + require.Equal(t, map[string]string{ + "section.key": "value", + 
"section.true": "true", + "section.time": "1234", + }, configs) + + // RemoveAll + err = config.RemoveAll("section.true") + require.NoError(t, err) + + configs, err = config.ReadAll("section") + require.NoError(t, err) + require.Equal(t, map[string]string{ + "section.key": "value", + "section.time": "1234", + }, configs) + + _, err = config.ReadBool("section.true") + require.Equal(t, ErrNoConfigEntry, err) + + err = config.RemoveAll("section.nonexistingkey") + require.Error(t, err) + + err = config.RemoveAll("section.key") + require.NoError(t, err) + + _, err = config.ReadString("section.key") + require.Equal(t, ErrNoConfigEntry, err) + + err = config.RemoveAll("nonexistingsection") + require.Error(t, err) + + err = config.RemoveAll("section.time") + require.NoError(t, err) + + err = config.RemoveAll("section") + require.Error(t, err) + + _, err = config.ReadString("section.key") + require.Error(t, err) + + err = config.RemoveAll("section.key") + require.Error(t, err) + + // section + subsections + require.NoError(t, config.StoreString("section.opt1", "foo")) + require.NoError(t, config.StoreString("section.opt2", "foo2")) + require.NoError(t, config.StoreString("section.subsection.opt1", "foo3")) + require.NoError(t, config.StoreString("section.subsection.opt2", "foo4")) + require.NoError(t, config.StoreString("section.subsection.subsection.opt1", "foo5")) + require.NoError(t, config.StoreString("section.subsection.subsection.opt2", "foo6")) + + all, err := config.ReadAll("section") + require.NoError(t, err) + require.Equal(t, map[string]string{ + "section.opt1": "foo", + "section.opt2": "foo2", + "section.subsection.opt1": "foo3", + "section.subsection.opt2": "foo4", + "section.subsection.subsection.opt1": "foo5", + "section.subsection.subsection.opt2": "foo6", + }, all) + + all, err = config.ReadAll("section.subsection") + require.NoError(t, err) + require.Equal(t, map[string]string{ + "section.subsection.opt1": "foo3", + "section.subsection.opt2": "foo4", + 
"section.subsection.subsection.opt1": "foo5", + "section.subsection.subsection.opt2": "foo6", + }, all) + + all, err = config.ReadAll("section.subsection.subsection") + require.NoError(t, err) + require.Equal(t, map[string]string{ + "section.subsection.subsection.opt1": "foo5", + "section.subsection.subsection.opt2": "foo6", + }, all) +} diff --git a/migration3/after/repository/git.go b/migration3/after/repository/git.go new file mode 100644 index 0000000..6540fb5 --- /dev/null +++ b/migration3/after/repository/git.go @@ -0,0 +1,461 @@ +// Package repository contains helper methods for working with the Git repo. +package repository + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "path" + "strings" + "sync" + + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +const ( + clockPath = "git-bug/clocks" +) + +var _ ClockedRepo = &GitRepo{} +var _ TestedRepo = &GitRepo{} + +// GitRepo represents an instance of a (local) git repository. +type GitRepo struct { + gitCli + path string + + clocksMutex sync.Mutex + clocks map[string]lamport.Clock + + keyring Keyring +} + +// NewGitRepo determines if the given working directory is inside of a git repository, +// and returns the corresponding GitRepo instance if it is. +func NewGitRepo(path string, clockLoaders []ClockLoader) (*GitRepo, error) { + k, err := defaultKeyring() + if err != nil { + return nil, err + } + + repo := &GitRepo{ + gitCli: gitCli{path: path}, + path: path, + clocks: make(map[string]lamport.Clock), + keyring: k, + } + + // Check the repo and retrieve the root path + stdout, err := repo.runGitCommand("rev-parse", "--absolute-git-dir") + + // Now dir is fetched with "git rev-parse --git-dir". May be it can + // still return nothing in some cases. Then empty stdout check is + // kept. 
+ if err != nil || stdout == "" { + return nil, ErrNotARepo + } + + // Fix the path to be sure we are at the root + repo.path = stdout + repo.gitCli.path = stdout + + for _, loader := range clockLoaders { + allExist := true + for _, name := range loader.Clocks { + if _, err := repo.getClock(name); err != nil { + allExist = false + } + } + + if !allExist { + err = loader.Witnesser(repo) + if err != nil { + return nil, err + } + } + } + + return repo, nil +} + +// InitGitRepo create a new empty git repo at the given path +func InitGitRepo(path string) (*GitRepo, error) { + repo := &GitRepo{ + gitCli: gitCli{path: path}, + path: path + "/.git", + clocks: make(map[string]lamport.Clock), + } + + _, err := repo.runGitCommand("init", path) + if err != nil { + return nil, err + } + + return repo, nil +} + +// InitBareGitRepo create a new --bare empty git repo at the given path +func InitBareGitRepo(path string) (*GitRepo, error) { + repo := &GitRepo{ + gitCli: gitCli{path: path}, + path: path, + clocks: make(map[string]lamport.Clock), + } + + _, err := repo.runGitCommand("init", "--bare", path) + if err != nil { + return nil, err + } + + return repo, nil +} + +// LocalConfig give access to the repository scoped configuration +func (repo *GitRepo) LocalConfig() Config { + return newGitConfig(repo.gitCli, false) +} + +// GlobalConfig give access to the global scoped configuration +func (repo *GitRepo) GlobalConfig() Config { + return newGitConfig(repo.gitCli, true) +} + +// AnyConfig give access to a merged local/global configuration +func (repo *GitRepo) AnyConfig() ConfigRead { + return mergeConfig(repo.LocalConfig(), repo.GlobalConfig()) +} + +// Keyring give access to a user-wide storage for secrets +func (repo *GitRepo) Keyring() Keyring { + return repo.keyring +} + +// GetPath returns the path to the repo. 
+func (repo *GitRepo) GetPath() string { + return repo.path +} + +// GetUserName returns the name the the user has used to configure git +func (repo *GitRepo) GetUserName() (string, error) { + return repo.runGitCommand("config", "user.name") +} + +// GetUserEmail returns the email address that the user has used to configure git. +func (repo *GitRepo) GetUserEmail() (string, error) { + return repo.runGitCommand("config", "user.email") +} + +// GetCoreEditor returns the name of the editor that the user has used to configure git. +func (repo *GitRepo) GetCoreEditor() (string, error) { + return repo.runGitCommand("var", "GIT_EDITOR") +} + +// GetRemotes returns the configured remotes repositories. +func (repo *GitRepo) GetRemotes() (map[string]string, error) { + stdout, err := repo.runGitCommand("remote", "--verbose") + if err != nil { + return nil, err + } + + lines := strings.Split(stdout, "\n") + remotes := make(map[string]string, len(lines)) + + for _, line := range lines { + if strings.TrimSpace(line) == "" { + continue + } + elements := strings.Fields(line) + if len(elements) != 3 { + return nil, fmt.Errorf("git remote: unexpected output format: %s", line) + } + + remotes[elements[0]] = elements[1] + } + + return remotes, nil +} + +// FetchRefs fetch git refs from a remote +func (repo *GitRepo) FetchRefs(remote, refSpec string) (string, error) { + stdout, err := repo.runGitCommand("fetch", remote, refSpec) + + if err != nil { + return stdout, fmt.Errorf("failed to fetch from the remote '%s': %v", remote, err) + } + + return stdout, err +} + +// PushRefs push git refs to a remote +func (repo *GitRepo) PushRefs(remote string, refSpec string) (string, error) { + stdout, stderr, err := repo.runGitCommandRaw(nil, "push", remote, refSpec) + + if err != nil { + return stdout + stderr, fmt.Errorf("failed to push to the remote '%s': %v", remote, stderr) + } + return stdout + stderr, nil +} + +// StoreData will store arbitrary data and return the corresponding hash +func 
(repo *GitRepo) StoreData(data []byte) (Hash, error) { + var stdin = bytes.NewReader(data) + + stdout, err := repo.runGitCommandWithStdin(stdin, "hash-object", "--stdin", "-w") + + return Hash(stdout), err +} + +// ReadData will attempt to read arbitrary data from the given hash +func (repo *GitRepo) ReadData(hash Hash) ([]byte, error) { + var stdout bytes.Buffer + var stderr bytes.Buffer + + err := repo.runGitCommandWithIO(nil, &stdout, &stderr, "cat-file", "-p", string(hash)) + + if err != nil { + return []byte{}, err + } + + return stdout.Bytes(), nil +} + +// StoreTree will store a mapping key-->Hash as a Git tree +func (repo *GitRepo) StoreTree(entries []TreeEntry) (Hash, error) { + buffer := prepareTreeEntries(entries) + + stdout, err := repo.runGitCommandWithStdin(&buffer, "mktree") + + if err != nil { + return "", err + } + + return Hash(stdout), nil +} + +// StoreCommit will store a Git commit with the given Git tree +func (repo *GitRepo) StoreCommit(treeHash Hash) (Hash, error) { + stdout, err := repo.runGitCommand("commit-tree", string(treeHash)) + + if err != nil { + return "", err + } + + return Hash(stdout), nil +} + +// StoreCommitWithParent will store a Git commit with the given Git tree +func (repo *GitRepo) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) { + stdout, err := repo.runGitCommand("commit-tree", string(treeHash), + "-p", string(parent)) + + if err != nil { + return "", err + } + + return Hash(stdout), nil +} + +// UpdateRef will create or update a Git reference +func (repo *GitRepo) UpdateRef(ref string, hash Hash) error { + _, err := repo.runGitCommand("update-ref", ref, string(hash)) + + return err +} + +// RemoveRef will remove a Git reference +func (repo *GitRepo) RemoveRef(ref string) error { + _, err := repo.runGitCommand("update-ref", "-d", ref) + + return err +} + +// ListRefs will return a list of Git ref matching the given refspec +func (repo *GitRepo) ListRefs(refPrefix string) ([]string, error) { + stdout, 
err := repo.runGitCommand("for-each-ref", "--format=%(refname)", refPrefix) + + if err != nil { + return nil, err + } + + split := strings.Split(stdout, "\n") + + if len(split) == 1 && split[0] == "" { + return []string{}, nil + } + + return split, nil +} + +// RefExist will check if a reference exist in Git +func (repo *GitRepo) RefExist(ref string) (bool, error) { + stdout, err := repo.runGitCommand("for-each-ref", ref) + + if err != nil { + return false, err + } + + return stdout != "", nil +} + +// CopyRef will create a new reference with the same value as another one +func (repo *GitRepo) CopyRef(source string, dest string) error { + _, err := repo.runGitCommand("update-ref", dest, source) + + return err +} + +// ListCommits will return the list of commit hashes of a ref, in chronological order +func (repo *GitRepo) ListCommits(ref string) ([]Hash, error) { + stdout, err := repo.runGitCommand("rev-list", "--first-parent", "--reverse", ref) + + if err != nil { + return nil, err + } + + split := strings.Split(stdout, "\n") + + casted := make([]Hash, len(split)) + for i, line := range split { + casted[i] = Hash(line) + } + + return casted, nil + +} + +// ReadTree will return the list of entries in a Git tree +func (repo *GitRepo) ReadTree(hash Hash) ([]TreeEntry, error) { + stdout, err := repo.runGitCommand("ls-tree", string(hash)) + + if err != nil { + return nil, err + } + + return readTreeEntries(stdout) +} + +// FindCommonAncestor will return the last common ancestor of two chain of commit +func (repo *GitRepo) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) { + stdout, err := repo.runGitCommand("merge-base", string(hash1), string(hash2)) + + if err != nil { + return "", err + } + + return Hash(stdout), nil +} + +// GetTreeHash return the git tree hash referenced in a commit +func (repo *GitRepo) GetTreeHash(commit Hash) (Hash, error) { + stdout, err := repo.runGitCommand("rev-parse", string(commit)+"^{tree}") + + if err != nil { + return "", err + } 
+ + return Hash(stdout), nil +} + +func (repo *GitRepo) AllClocks() (map[string]lamport.Clock, error) { + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + + result := make(map[string]lamport.Clock) + + files, err := ioutil.ReadDir(path.Join(repo.path, clockPath)) + if os.IsNotExist(err) { + return nil, nil + } + if err != nil { + return nil, err + } + + for _, file := range files { + name := file.Name() + if c, ok := repo.clocks[name]; ok { + result[name] = c + } else { + c, err := lamport.LoadPersistedClock(path.Join(repo.path, clockPath, name)) + if err != nil { + return nil, err + } + repo.clocks[name] = c + result[name] = c + } + } + + return result, nil +} + +// GetOrCreateClock return a Lamport clock stored in the Repo. +// If the clock doesn't exist, it's created. +func (repo *GitRepo) GetOrCreateClock(name string) (lamport.Clock, error) { + c, err := repo.getClock(name) + if err == nil { + return c, nil + } + if err != ErrClockNotExist { + return nil, err + } + + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + + p := path.Join(repo.path, clockPath, name) + + c, err = lamport.NewPersistedClock(p) + if err != nil { + return nil, err + } + + repo.clocks[name] = c + return c, nil +} + +func (repo *GitRepo) getClock(name string) (lamport.Clock, error) { + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + + if c, ok := repo.clocks[name]; ok { + return c, nil + } + + p := path.Join(repo.path, clockPath, name) + + c, err := lamport.LoadPersistedClock(p) + if err == nil { + repo.clocks[name] = c + return c, nil + } + if err == lamport.ErrClockNotExist { + return nil, ErrClockNotExist + } + return nil, err +} + +// Increment is equivalent to c = GetOrCreateClock(name) + c.Increment() +func (repo *GitRepo) Increment(name string) (lamport.Time, error) { + c, err := repo.GetOrCreateClock(name) + if err != nil { + return lamport.Time(0), err + } + return c.Increment() +} + +// Witness is equivalent to c = GetOrCreateClock(name) + 
c.Witness(time) +func (repo *GitRepo) Witness(name string, time lamport.Time) error { + c, err := repo.GetOrCreateClock(name) + if err != nil { + return err + } + return c.Witness(time) +} + +// AddRemote add a new remote to the repository +// Not in the interface because it's only used for testing +func (repo *GitRepo) AddRemote(name string, url string) error { + _, err := repo.runGitCommand("remote", "add", name, url) + + return err +} diff --git a/migration3/after/repository/git_cli.go b/migration3/after/repository/git_cli.go new file mode 100644 index 0000000..085b1cd --- /dev/null +++ b/migration3/after/repository/git_cli.go @@ -0,0 +1,56 @@ +package repository + +import ( + "bytes" + "fmt" + "io" + "os/exec" + "strings" +) + +// gitCli is a helper to launch CLI git commands +type gitCli struct { + path string +} + +// Run the given git command with the given I/O reader/writers, returning an error if it fails. +func (cli gitCli) runGitCommandWithIO(stdin io.Reader, stdout, stderr io.Writer, args ...string) error { + // make sure that the working directory for the command + // always exist, in particular when running "git init". + path := strings.TrimSuffix(cli.path, ".git") + + // fmt.Printf("[%s] Running git %s\n", path, strings.Join(args, " ")) + + cmd := exec.Command("git", args...) + cmd.Dir = path + cmd.Stdin = stdin + cmd.Stdout = stdout + cmd.Stderr = stderr + + return cmd.Run() +} + +// Run the given git command and return its stdout, or an error if the command fails. +func (cli gitCli) runGitCommandRaw(stdin io.Reader, args ...string) (string, string, error) { + var stdout bytes.Buffer + var stderr bytes.Buffer + err := cli.runGitCommandWithIO(stdin, &stdout, &stderr, args...) + return strings.TrimSpace(stdout.String()), strings.TrimSpace(stderr.String()), err +} + +// Run the given git command and return its stdout, or an error if the command fails. 
+func (cli gitCli) runGitCommandWithStdin(stdin io.Reader, args ...string) (string, error) { + stdout, stderr, err := cli.runGitCommandRaw(stdin, args...) + if err != nil { + if stderr == "" { + stderr = "Error running git command: " + strings.Join(args, " ") + } + err = fmt.Errorf(stderr) + } + return stdout, err +} + +// Run the given git command and return its stdout, or an error if the command fails. +func (cli gitCli) runGitCommand(args ...string) (string, error) { + return cli.runGitCommandWithStdin(nil, args...) +} diff --git a/migration3/after/repository/git_config.go b/migration3/after/repository/git_config.go new file mode 100644 index 0000000..b46cc69 --- /dev/null +++ b/migration3/after/repository/git_config.go @@ -0,0 +1,221 @@ +package repository + +import ( + "fmt" + "regexp" + "strconv" + "strings" + "time" + + "github.com/blang/semver" + "github.com/pkg/errors" +) + +var _ Config = &gitConfig{} + +type gitConfig struct { + cli gitCli + localityFlag string +} + +func newGitConfig(cli gitCli, global bool) *gitConfig { + localityFlag := "--local" + if global { + localityFlag = "--global" + } + return &gitConfig{ + cli: cli, + localityFlag: localityFlag, + } +} + +// StoreString store a single key/value pair in the config of the repo +func (gc *gitConfig) StoreString(key string, value string) error { + _, err := gc.cli.runGitCommand("config", gc.localityFlag, "--replace-all", key, value) + return err +} + +func (gc *gitConfig) StoreBool(key string, value bool) error { + return gc.StoreString(key, strconv.FormatBool(value)) +} + +func (gc *gitConfig) StoreTimestamp(key string, value time.Time) error { + return gc.StoreString(key, strconv.Itoa(int(value.Unix()))) +} + +// ReadAll read all key/value pair matching the key prefix +func (gc *gitConfig) ReadAll(keyPrefix string) (map[string]string, error) { + stdout, err := gc.cli.runGitCommand("config", gc.localityFlag, "--includes", "--get-regexp", keyPrefix) + + // / \ + // / ! 
\ + // ------- + // + // There can be a legitimate error here, but I see no portable way to + // distinguish them from the git error that say "no matching value exist" + if err != nil { + return nil, nil + } + + lines := strings.Split(stdout, "\n") + + result := make(map[string]string, len(lines)) + + for _, line := range lines { + if strings.TrimSpace(line) == "" { + continue + } + + parts := strings.SplitN(line, " ", 2) + result[parts[0]] = parts[1] + } + + return result, nil +} + +func (gc *gitConfig) ReadString(key string) (string, error) { + stdout, err := gc.cli.runGitCommand("config", gc.localityFlag, "--includes", "--get-all", key) + + // / \ + // / ! \ + // ------- + // + // There can be a legitimate error here, but I see no portable way to + // distinguish them from the git error that say "no matching value exist" + if err != nil { + return "", ErrNoConfigEntry + } + + lines := strings.Split(stdout, "\n") + + if len(lines) == 0 { + return "", ErrNoConfigEntry + } + if len(lines) > 1 { + return "", ErrMultipleConfigEntry + } + + return lines[0], nil +} + +func (gc *gitConfig) ReadBool(key string) (bool, error) { + val, err := gc.ReadString(key) + if err != nil { + return false, err + } + + return strconv.ParseBool(val) +} + +func (gc *gitConfig) ReadTimestamp(key string) (time.Time, error) { + value, err := gc.ReadString(key) + if err != nil { + return time.Time{}, err + } + return ParseTimestamp(value) +} + +func (gc *gitConfig) rmSection(keyPrefix string) error { + _, err := gc.cli.runGitCommand("config", gc.localityFlag, "--remove-section", keyPrefix) + return err +} + +func (gc *gitConfig) unsetAll(keyPrefix string) error { + _, err := gc.cli.runGitCommand("config", gc.localityFlag, "--unset-all", keyPrefix) + return err +} + +// return keyPrefix section +// example: sectionFromKey(a.b.c.d) return a.b.c +func sectionFromKey(keyPrefix string) string { + s := strings.Split(keyPrefix, ".") + if len(s) == 1 { + return keyPrefix + } + + return 
strings.Join(s[:len(s)-1], ".") +} + +// rmConfigs with git version lesser than 2.18 +func (gc *gitConfig) rmConfigsGitVersionLT218(keyPrefix string) error { + // try to remove key/value pair by key + err := gc.unsetAll(keyPrefix) + if err != nil { + return gc.rmSection(keyPrefix) + } + + m, err := gc.ReadAll(sectionFromKey(keyPrefix)) + if err != nil { + return err + } + + // if section doesn't have any left key/value remove the section + if len(m) == 0 { + return gc.rmSection(sectionFromKey(keyPrefix)) + } + + return nil +} + +// RmConfigs remove all key/value pair matching the key prefix +func (gc *gitConfig) RemoveAll(keyPrefix string) error { + // starting from git 2.18.0 sections are automatically deleted when the last existing + // key/value is removed. Before 2.18.0 we should remove the section + // see https://github.com/git/git/blob/master/Documentation/RelNotes/2.18.0.txt#L379 + lt218, err := gc.gitVersionLT218() + if err != nil { + return errors.Wrap(err, "getting git version") + } + + if lt218 { + return gc.rmConfigsGitVersionLT218(keyPrefix) + } + + err = gc.unsetAll(keyPrefix) + if err != nil { + return gc.rmSection(keyPrefix) + } + + return nil +} + +func (gc *gitConfig) gitVersion() (*semver.Version, error) { + versionOut, err := gc.cli.runGitCommand("version") + if err != nil { + return nil, err + } + return parseGitVersion(versionOut) +} + +func parseGitVersion(versionOut string) (*semver.Version, error) { + // extract the version and truncate potential bad parts + // ex: 2.23.0.rc1 instead of 2.23.0-rc1 + r := regexp.MustCompile(`(\d+\.){1,2}\d+`) + + extracted := r.FindString(versionOut) + if extracted == "" { + return nil, fmt.Errorf("unreadable git version %s", versionOut) + } + + version, err := semver.Make(extracted) + if err != nil { + return nil, err + } + + return &version, nil +} + +func (gc *gitConfig) gitVersionLT218() (bool, error) { + version, err := gc.gitVersion() + if err != nil { + return false, err + } + + version218string := 
"2.18.0" + gitVersion218, err := semver.Make(version218string) + if err != nil { + return false, err + } + + return version.LT(gitVersion218), nil +} diff --git a/migration3/after/repository/git_test.go b/migration3/after/repository/git_test.go new file mode 100644 index 0000000..1b36fd4 --- /dev/null +++ b/migration3/after/repository/git_test.go @@ -0,0 +1,10 @@ +// Package repository contains helper methods for working with the Git repo. +package repository + +import ( + "testing" +) + +func TestGitRepo(t *testing.T) { + RepoTest(t, CreateTestRepo, CleanupTestRepos) +} diff --git a/migration3/after/repository/git_testing.go b/migration3/after/repository/git_testing.go new file mode 100644 index 0000000..874cc86 --- /dev/null +++ b/migration3/after/repository/git_testing.go @@ -0,0 +1,74 @@ +package repository + +import ( + "io/ioutil" + "log" + + "github.com/99designs/keyring" +) + +// This is intended for testing only + +func CreateTestRepo(bare bool) TestedRepo { + dir, err := ioutil.TempDir("", "") + if err != nil { + log.Fatal(err) + } + + var creator func(string) (*GitRepo, error) + + if bare { + creator = InitBareGitRepo + } else { + creator = InitGitRepo + } + + repo, err := creator(dir) + if err != nil { + log.Fatal(err) + } + + config := repo.LocalConfig() + if err := config.StoreString("user.name", "testuser"); err != nil { + log.Fatal("failed to set user.name for test repository: ", err) + } + if err := config.StoreString("user.email", "testuser@example.com"); err != nil { + log.Fatal("failed to set user.email for test repository: ", err) + } + + // make sure we use a mock keyring for testing to not interact with the global system + return &replaceKeyring{ + TestedRepo: repo, + keyring: keyring.NewArrayKeyring(nil), + } +} + +func SetupReposAndRemote() (repoA, repoB, remote TestedRepo) { + repoA = CreateGoGitTestRepo(false) + repoB = CreateGoGitTestRepo(false) + remote = CreateGoGitTestRepo(true) + + remoteAddr := "file://" + remote.GetPath() + + err 
:= repoA.AddRemote("origin", remoteAddr) + if err != nil { + log.Fatal(err) + } + + err = repoB.AddRemote("origin", remoteAddr) + if err != nil { + log.Fatal(err) + } + + return repoA, repoB, remote +} + +// replaceKeyring allow to replace the Keyring of the underlying repo +type replaceKeyring struct { + TestedRepo + keyring Keyring +} + +func (rk replaceKeyring) Keyring() Keyring { + return rk.keyring +} diff --git a/migration3/after/repository/gogit.go b/migration3/after/repository/gogit.go new file mode 100644 index 0000000..b533409 --- /dev/null +++ b/migration3/after/repository/gogit.go @@ -0,0 +1,704 @@ +package repository + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "os/exec" + stdpath "path" + "path/filepath" + "sort" + "strings" + "sync" + "time" + + gogit "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/config" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/filemode" + "github.com/go-git/go-git/v5/plumbing/object" + + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +var _ ClockedRepo = &GoGitRepo{} + +type GoGitRepo struct { + r *gogit.Repository + path string + + clocksMutex sync.Mutex + clocks map[string]lamport.Clock + + keyring Keyring +} + +func NewGoGitRepo(path string, clockLoaders []ClockLoader) (*GoGitRepo, error) { + path, err := detectGitPath(path) + if err != nil { + return nil, err + } + + r, err := gogit.PlainOpen(path) + if err != nil { + return nil, err + } + + k, err := defaultKeyring() + if err != nil { + return nil, err + } + + repo := &GoGitRepo{ + r: r, + path: path, + clocks: make(map[string]lamport.Clock), + keyring: k, + } + + for _, loader := range clockLoaders { + allExist := true + for _, name := range loader.Clocks { + if _, err := repo.getClock(name); err != nil { + allExist = false + } + } + + if !allExist { + err = loader.Witnesser(repo) + if err != nil { + return nil, err + } + } + } + + return repo, nil +} + +func detectGitPath(path 
string) (string, error) { + // normalize the path + path, err := filepath.Abs(path) + if err != nil { + return "", err + } + + for { + fi, err := os.Stat(stdpath.Join(path, ".git")) + if err == nil { + if !fi.IsDir() { + return "", fmt.Errorf(".git exist but is not a directory") + } + return stdpath.Join(path, ".git"), nil + } + if !os.IsNotExist(err) { + // unknown error + return "", err + } + + // detect bare repo + ok, err := isGitDir(path) + if err != nil { + return "", err + } + if ok { + return path, nil + } + + if parent := filepath.Dir(path); parent == path { + return "", fmt.Errorf(".git not found") + } else { + path = parent + } + } +} + +func isGitDir(path string) (bool, error) { + markers := []string{"HEAD", "objects", "refs"} + + for _, marker := range markers { + _, err := os.Stat(stdpath.Join(path, marker)) + if err == nil { + continue + } + if !os.IsNotExist(err) { + // unknown error + return false, err + } else { + return false, nil + } + } + + return true, nil +} + +// InitGoGitRepo create a new empty git repo at the given path +func InitGoGitRepo(path string) (*GoGitRepo, error) { + r, err := gogit.PlainInit(path, false) + if err != nil { + return nil, err + } + + k, err := defaultKeyring() + if err != nil { + return nil, err + } + + return &GoGitRepo{ + r: r, + path: path + "/.git", + clocks: make(map[string]lamport.Clock), + keyring: k, + }, nil +} + +// InitBareGoGitRepo create a new --bare empty git repo at the given path +func InitBareGoGitRepo(path string) (*GoGitRepo, error) { + r, err := gogit.PlainInit(path, true) + if err != nil { + return nil, err + } + + k, err := defaultKeyring() + if err != nil { + return nil, err + } + + return &GoGitRepo{ + r: r, + path: path, + clocks: make(map[string]lamport.Clock), + keyring: k, + }, nil +} + +// LocalConfig give access to the repository scoped configuration +func (repo *GoGitRepo) LocalConfig() Config { + return newGoGitLocalConfig(repo.r) +} + +// GlobalConfig give access to the global scoped 
configuration +func (repo *GoGitRepo) GlobalConfig() Config { + // TODO: replace that with go-git native implementation once it's supported + // see: https://github.com/go-git/go-git + // see: https://github.com/src-d/go-git/issues/760 + return newGoGitGlobalConfig(repo.r) +} + +// AnyConfig give access to a merged local/global configuration +func (repo *GoGitRepo) AnyConfig() ConfigRead { + return mergeConfig(repo.LocalConfig(), repo.GlobalConfig()) +} + +// Keyring give access to a user-wide storage for secrets +func (repo *GoGitRepo) Keyring() Keyring { + return repo.keyring +} + +// GetPath returns the path to the repo. +func (repo *GoGitRepo) GetPath() string { + return repo.path +} + +// GetUserName returns the name the the user has used to configure git +func (repo *GoGitRepo) GetUserName() (string, error) { + return repo.AnyConfig().ReadString("user.name") +} + +// GetUserEmail returns the email address that the user has used to configure git. +func (repo *GoGitRepo) GetUserEmail() (string, error) { + return repo.AnyConfig().ReadString("user.email") +} + +// GetCoreEditor returns the name of the editor that the user has used to configure git. +func (repo *GoGitRepo) GetCoreEditor() (string, error) { + // See https://git-scm.com/docs/git-var + // The order of preference is the $GIT_EDITOR environment variable, then core.editor configuration, then $VISUAL, then $EDITOR, and then the default chosen at compile time, which is usually vi. 
+ + if val, ok := os.LookupEnv("GIT_EDITOR"); ok { + return val, nil + } + + val, err := repo.AnyConfig().ReadString("core.editor") + if err == nil && val != "" { + return val, nil + } + if err != nil && err != ErrNoConfigEntry { + return "", err + } + + if val, ok := os.LookupEnv("VISUAL"); ok { + return val, nil + } + + if val, ok := os.LookupEnv("EDITOR"); ok { + return val, nil + } + + priorities := []string{ + "editor", + "nano", + "vim", + "vi", + "emacs", + } + + for _, cmd := range priorities { + if _, err = exec.LookPath(cmd); err == nil { + return cmd, nil + } + + } + + return "ed", nil +} + +// GetRemotes returns the configured remotes repositories. +func (repo *GoGitRepo) GetRemotes() (map[string]string, error) { + cfg, err := repo.r.Config() + if err != nil { + return nil, err + } + + result := make(map[string]string, len(cfg.Remotes)) + for name, remote := range cfg.Remotes { + if len(remote.URLs) > 0 { + result[name] = remote.URLs[0] + } + } + + return result, nil +} + +// FetchRefs fetch git refs from a remote +func (repo *GoGitRepo) FetchRefs(remote string, refSpec string) (string, error) { + buf := bytes.NewBuffer(nil) + + err := repo.r.Fetch(&gogit.FetchOptions{ + RemoteName: remote, + RefSpecs: []config.RefSpec{config.RefSpec(refSpec)}, + Progress: buf, + }) + if err == gogit.NoErrAlreadyUpToDate { + return "already up-to-date", nil + } + if err != nil { + return "", err + } + + return buf.String(), nil +} + +// PushRefs push git refs to a remote +func (repo *GoGitRepo) PushRefs(remote string, refSpec string) (string, error) { + buf := bytes.NewBuffer(nil) + + err := repo.r.Push(&gogit.PushOptions{ + RemoteName: remote, + RefSpecs: []config.RefSpec{config.RefSpec(refSpec)}, + Progress: buf, + }) + if err == gogit.NoErrAlreadyUpToDate { + return "already up-to-date", nil + } + if err != nil { + return "", err + } + + return buf.String(), nil +} + +// StoreData will store arbitrary data and return the corresponding hash +func (repo *GoGitRepo) 
StoreData(data []byte) (Hash, error) { + obj := repo.r.Storer.NewEncodedObject() + obj.SetType(plumbing.BlobObject) + + w, err := obj.Writer() + if err != nil { + return "", err + } + + _, err = w.Write(data) + if err != nil { + return "", err + } + + h, err := repo.r.Storer.SetEncodedObject(obj) + if err != nil { + return "", err + } + + return Hash(h.String()), nil +} + +// ReadData will attempt to read arbitrary data from the given hash +func (repo *GoGitRepo) ReadData(hash Hash) ([]byte, error) { + obj, err := repo.r.BlobObject(plumbing.NewHash(hash.String())) + if err != nil { + return nil, err + } + + r, err := obj.Reader() + if err != nil { + return nil, err + } + + return ioutil.ReadAll(r) +} + +// StoreTree will store a mapping key-->Hash as a Git tree +func (repo *GoGitRepo) StoreTree(mapping []TreeEntry) (Hash, error) { + var tree object.Tree + + // TODO: can be removed once https://github.com/go-git/go-git/issues/193 is resolved + sorted := make([]TreeEntry, len(mapping)) + copy(sorted, mapping) + sort.Slice(sorted, func(i, j int) bool { + nameI := sorted[i].Name + if sorted[i].ObjectType == Tree { + nameI += "/" + } + nameJ := sorted[j].Name + if sorted[j].ObjectType == Tree { + nameJ += "/" + } + return nameI < nameJ + }) + + for _, entry := range sorted { + mode := filemode.Regular + if entry.ObjectType == Tree { + mode = filemode.Dir + } + + tree.Entries = append(tree.Entries, object.TreeEntry{ + Name: entry.Name, + Mode: mode, + Hash: plumbing.NewHash(entry.Hash.String()), + }) + } + + obj := repo.r.Storer.NewEncodedObject() + obj.SetType(plumbing.TreeObject) + err := tree.Encode(obj) + if err != nil { + return "", err + } + + hash, err := repo.r.Storer.SetEncodedObject(obj) + if err != nil { + return "", err + } + + return Hash(hash.String()), nil +} + +// ReadTree will return the list of entries in a Git tree +func (repo *GoGitRepo) ReadTree(hash Hash) ([]TreeEntry, error) { + h := plumbing.NewHash(hash.String()) + + // the given hash could be a 
tree or a commit + obj, err := repo.r.Storer.EncodedObject(plumbing.AnyObject, h) + if err != nil { + return nil, err + } + + var tree *object.Tree + switch obj.Type() { + case plumbing.TreeObject: + tree, err = object.DecodeTree(repo.r.Storer, obj) + case plumbing.CommitObject: + var commit *object.Commit + commit, err = object.DecodeCommit(repo.r.Storer, obj) + if err != nil { + return nil, err + } + tree, err = commit.Tree() + default: + return nil, fmt.Errorf("given hash is not a tree") + } + if err != nil { + return nil, err + } + + treeEntries := make([]TreeEntry, len(tree.Entries)) + for i, entry := range tree.Entries { + objType := Blob + if entry.Mode == filemode.Dir { + objType = Tree + } + + treeEntries[i] = TreeEntry{ + ObjectType: objType, + Hash: Hash(entry.Hash.String()), + Name: entry.Name, + } + } + + return treeEntries, nil +} + +// StoreCommit will store a Git commit with the given Git tree +func (repo *GoGitRepo) StoreCommit(treeHash Hash) (Hash, error) { + return repo.StoreCommitWithParent(treeHash, "") +} + +// StoreCommit will store a Git commit with the given Git tree +func (repo *GoGitRepo) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) { + cfg, err := repo.r.Config() + if err != nil { + return "", err + } + + commit := object.Commit{ + Author: object.Signature{ + Name: cfg.Author.Name, + Email: cfg.Author.Email, + When: time.Now(), + }, + Committer: object.Signature{ + Name: cfg.Committer.Name, + Email: cfg.Committer.Email, + When: time.Now(), + }, + Message: "", + TreeHash: plumbing.NewHash(treeHash.String()), + } + + if parent != "" { + commit.ParentHashes = []plumbing.Hash{plumbing.NewHash(parent.String())} + } + + obj := repo.r.Storer.NewEncodedObject() + obj.SetType(plumbing.CommitObject) + err = commit.Encode(obj) + if err != nil { + return "", err + } + + hash, err := repo.r.Storer.SetEncodedObject(obj) + if err != nil { + return "", err + } + + return Hash(hash.String()), nil +} + +// GetTreeHash return the git 
tree hash referenced in a commit +func (repo *GoGitRepo) GetTreeHash(commit Hash) (Hash, error) { + obj, err := repo.r.CommitObject(plumbing.NewHash(commit.String())) + if err != nil { + return "", err + } + + return Hash(obj.TreeHash.String()), nil +} + +// FindCommonAncestor will return the last common ancestor of two chain of commit +func (repo *GoGitRepo) FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, error) { + obj1, err := repo.r.CommitObject(plumbing.NewHash(commit1.String())) + if err != nil { + return "", err + } + obj2, err := repo.r.CommitObject(plumbing.NewHash(commit2.String())) + if err != nil { + return "", err + } + + commits, err := obj1.MergeBase(obj2) + if err != nil { + return "", err + } + + return Hash(commits[0].Hash.String()), nil +} + +// UpdateRef will create or update a Git reference +func (repo *GoGitRepo) UpdateRef(ref string, hash Hash) error { + return repo.r.Storer.SetReference(plumbing.NewHashReference(plumbing.ReferenceName(ref), plumbing.NewHash(hash.String()))) +} + +// RemoveRef will remove a Git reference +func (repo *GoGitRepo) RemoveRef(ref string) error { + return repo.r.Storer.RemoveReference(plumbing.ReferenceName(ref)) +} + +// ListRefs will return a list of Git ref matching the given refspec +func (repo *GoGitRepo) ListRefs(refPrefix string) ([]string, error) { + refIter, err := repo.r.References() + if err != nil { + return nil, err + } + + refs := make([]string, 0) + + err = refIter.ForEach(func(ref *plumbing.Reference) error { + if strings.HasPrefix(ref.Name().String(), refPrefix) { + refs = append(refs, ref.Name().String()) + } + return nil + }) + if err != nil { + return nil, err + } + + return refs, nil +} + +// RefExist will check if a reference exist in Git +func (repo *GoGitRepo) RefExist(ref string) (bool, error) { + _, err := repo.r.Reference(plumbing.ReferenceName(ref), false) + if err == nil { + return true, nil + } else if err == plumbing.ErrReferenceNotFound { + return false, nil + } + return false, 
err +} + +// CopyRef will create a new reference with the same value as another one +func (repo *GoGitRepo) CopyRef(source string, dest string) error { + r, err := repo.r.Reference(plumbing.ReferenceName(source), false) + if err != nil { + return err + } + return repo.r.Storer.SetReference(plumbing.NewHashReference(plumbing.ReferenceName(dest), r.Hash())) +} + +// ListCommits will return the list of tree hashes of a ref, in chronological order +func (repo *GoGitRepo) ListCommits(ref string) ([]Hash, error) { + r, err := repo.r.Reference(plumbing.ReferenceName(ref), false) + if err != nil { + return nil, err + } + + commit, err := repo.r.CommitObject(r.Hash()) + if err != nil { + return nil, err + } + hashes := []Hash{Hash(commit.Hash.String())} + + for { + commit, err = commit.Parent(0) + if err == object.ErrParentNotFound { + break + } + if err != nil { + return nil, err + } + + if commit.NumParents() > 1 { + return nil, fmt.Errorf("multiple parents") + } + + hashes = append([]Hash{Hash(commit.Hash.String())}, hashes...) + } + + return hashes, nil +} + +func (repo *GoGitRepo) AllClocks() (map[string]lamport.Clock, error) { + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + + result := make(map[string]lamport.Clock) + + files, err := ioutil.ReadDir(stdpath.Join(repo.path, clockPath)) + if os.IsNotExist(err) { + return nil, nil + } + if err != nil { + return nil, err + } + + for _, file := range files { + name := file.Name() + if c, ok := repo.clocks[name]; ok { + result[name] = c + } else { + c, err := lamport.LoadPersistedClock(stdpath.Join(repo.path, clockPath, name)) + if err != nil { + return nil, err + } + repo.clocks[name] = c + result[name] = c + } + } + + return result, nil +} + +// GetOrCreateClock return a Lamport clock stored in the Repo. +// If the clock doesn't exist, it's created. 
+func (repo *GoGitRepo) GetOrCreateClock(name string) (lamport.Clock, error) { + c, err := repo.getClock(name) + if err == nil { + return c, nil + } + if err != ErrClockNotExist { + return nil, err + } + + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + + p := stdpath.Join(repo.path, clockPath, name) + + c, err = lamport.NewPersistedClock(p) + if err != nil { + return nil, err + } + + repo.clocks[name] = c + return c, nil +} + +func (repo *GoGitRepo) getClock(name string) (lamport.Clock, error) { + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + + if c, ok := repo.clocks[name]; ok { + return c, nil + } + + p := stdpath.Join(repo.path, clockPath, name) + + c, err := lamport.LoadPersistedClock(p) + if err == nil { + repo.clocks[name] = c + return c, nil + } + if err == lamport.ErrClockNotExist { + return nil, ErrClockNotExist + } + return nil, err +} + +// Increment is equivalent to c = GetOrCreateClock(name) + c.Increment() +func (repo *GoGitRepo) Increment(name string) (lamport.Time, error) { + c, err := repo.GetOrCreateClock(name) + if err != nil { + return lamport.Time(0), err + } + return c.Increment() +} + +// Witness is equivalent to c = GetOrCreateClock(name) + c.Witness(time) +func (repo *GoGitRepo) Witness(name string, time lamport.Time) error { + c, err := repo.GetOrCreateClock(name) + if err != nil { + return err + } + return c.Witness(time) +} + +// AddRemote add a new remote to the repository +// Not in the interface because it's only used for testing +func (repo *GoGitRepo) AddRemote(name string, url string) error { + _, err := repo.r.CreateRemote(&config.RemoteConfig{ + Name: name, + URLs: []string{url}, + }) + + return err +} diff --git a/migration3/after/repository/gogit_config.go b/migration3/after/repository/gogit_config.go new file mode 100644 index 0000000..2f9a4cc --- /dev/null +++ b/migration3/after/repository/gogit_config.go @@ -0,0 +1,236 @@ +package repository + +import ( + "fmt" + "strconv" + "strings" + "time" + + 
gogit "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/config" +) + +var _ Config = &goGitConfig{} + +type goGitConfig struct { + ConfigRead + ConfigWrite +} + +func newGoGitLocalConfig(repo *gogit.Repository) *goGitConfig { + return &goGitConfig{ + ConfigRead: &goGitConfigReader{getConfig: repo.Config}, + ConfigWrite: &goGitConfigWriter{repo: repo}, + } +} + +func newGoGitGlobalConfig(repo *gogit.Repository) *goGitConfig { + return &goGitConfig{ + ConfigRead: &goGitConfigReader{getConfig: func() (*config.Config, error) { + return config.LoadConfig(config.GlobalScope) + }}, + ConfigWrite: &configPanicWriter{}, + } +} + +var _ ConfigRead = &goGitConfigReader{} + +type goGitConfigReader struct { + getConfig func() (*config.Config, error) +} + +func (cr *goGitConfigReader) ReadAll(keyPrefix string) (map[string]string, error) { + cfg, err := cr.getConfig() + if err != nil { + return nil, err + } + + split := strings.Split(keyPrefix, ".") + result := make(map[string]string) + + switch { + case keyPrefix == "": + for _, section := range cfg.Raw.Sections { + for _, option := range section.Options { + result[fmt.Sprintf("%s.%s", section.Name, option.Key)] = option.Value + } + for _, subsection := range section.Subsections { + for _, option := range subsection.Options { + result[fmt.Sprintf("%s.%s.%s", section.Name, subsection.Name, option.Key)] = option.Value + } + } + } + case len(split) == 1: + if !cfg.Raw.HasSection(split[0]) { + return nil, nil + } + section := cfg.Raw.Section(split[0]) + for _, option := range section.Options { + result[fmt.Sprintf("%s.%s", section.Name, option.Key)] = option.Value + } + for _, subsection := range section.Subsections { + for _, option := range subsection.Options { + result[fmt.Sprintf("%s.%s.%s", section.Name, subsection.Name, option.Key)] = option.Value + } + } + default: + if !cfg.Raw.HasSection(split[0]) { + return nil, nil + } + section := cfg.Raw.Section(split[0]) + rest := strings.Join(split[1:], ".") + rest = 
strings.TrimSuffix(rest, ".") + for _, subsection := range section.Subsections { + if strings.HasPrefix(subsection.Name, rest) { + for _, option := range subsection.Options { + result[fmt.Sprintf("%s.%s.%s", section.Name, subsection.Name, option.Key)] = option.Value + } + } + } + } + + return result, nil +} + +func (cr *goGitConfigReader) ReadBool(key string) (bool, error) { + val, err := cr.ReadString(key) + if err != nil { + return false, err + } + + return strconv.ParseBool(val) +} + +func (cr *goGitConfigReader) ReadString(key string) (string, error) { + cfg, err := cr.getConfig() + if err != nil { + return "", err + } + + split := strings.Split(key, ".") + + if len(split) <= 1 { + return "", fmt.Errorf("invalid key") + } + + sectionName := split[0] + if !cfg.Raw.HasSection(sectionName) { + return "", ErrNoConfigEntry + } + section := cfg.Raw.Section(sectionName) + + switch { + case len(split) == 2: + optionName := split[1] + if !section.HasOption(optionName) { + return "", ErrNoConfigEntry + } + if len(section.OptionAll(optionName)) > 1 { + return "", ErrMultipleConfigEntry + } + return section.Option(optionName), nil + default: + subsectionName := strings.Join(split[1:len(split)-2], ".") + optionName := split[len(split)-1] + if !section.HasSubsection(subsectionName) { + return "", ErrNoConfigEntry + } + subsection := section.Subsection(subsectionName) + if !subsection.HasOption(optionName) { + return "", ErrNoConfigEntry + } + if len(subsection.OptionAll(optionName)) > 1 { + return "", ErrMultipleConfigEntry + } + return subsection.Option(optionName), nil + } +} + +func (cr *goGitConfigReader) ReadTimestamp(key string) (time.Time, error) { + value, err := cr.ReadString(key) + if err != nil { + return time.Time{}, err + } + return ParseTimestamp(value) +} + +var _ ConfigWrite = &goGitConfigWriter{} + +// Only works for the local config as go-git only support that +type goGitConfigWriter struct { + repo *gogit.Repository +} + +func (cw *goGitConfigWriter) 
StoreString(key, value string) error { + cfg, err := cw.repo.Config() + if err != nil { + return err + } + + split := strings.Split(key, ".") + + switch { + case len(split) <= 1: + return fmt.Errorf("invalid key") + case len(split) == 2: + cfg.Raw.Section(split[0]).SetOption(split[1], value) + default: + section := split[0] + subsection := strings.Join(split[1:len(split)-1], ".") + option := split[len(split)-1] + cfg.Raw.Section(section).Subsection(subsection).SetOption(option, value) + } + + return cw.repo.SetConfig(cfg) +} + +func (cw *goGitConfigWriter) StoreTimestamp(key string, value time.Time) error { + return cw.StoreString(key, strconv.Itoa(int(value.Unix()))) +} + +func (cw *goGitConfigWriter) StoreBool(key string, value bool) error { + return cw.StoreString(key, strconv.FormatBool(value)) +} + +func (cw *goGitConfigWriter) RemoveAll(keyPrefix string) error { + cfg, err := cw.repo.Config() + if err != nil { + return err + } + + split := strings.Split(keyPrefix, ".") + + switch { + case keyPrefix == "": + cfg.Raw.Sections = nil + // warning: this does not actually remove everything as go-git config hold + // some entries in multiple places (cfg.User ...) 
+ case len(split) == 1: + if cfg.Raw.HasSection(split[0]) { + cfg.Raw.RemoveSection(split[0]) + } else { + return fmt.Errorf("invalid key prefix") + } + default: + if !cfg.Raw.HasSection(split[0]) { + return fmt.Errorf("invalid key prefix") + } + section := cfg.Raw.Section(split[0]) + rest := strings.Join(split[1:], ".") + + ok := false + if section.HasSubsection(rest) { + section.RemoveSubsection(rest) + ok = true + } + if section.HasOption(rest) { + section.RemoveOption(rest) + ok = true + } + if !ok { + return fmt.Errorf("invalid key prefix") + } + } + + return cw.repo.SetConfig(cfg) +} diff --git a/migration3/after/repository/gogit_test.go b/migration3/after/repository/gogit_test.go new file mode 100644 index 0000000..fba990d --- /dev/null +++ b/migration3/after/repository/gogit_test.go @@ -0,0 +1,68 @@ +package repository + +import ( + "io/ioutil" + "os" + "path" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNewGoGitRepo(t *testing.T) { + // Plain + plainRoot, err := ioutil.TempDir("", "") + require.NoError(t, err) + defer os.RemoveAll(plainRoot) + + _, err = InitGoGitRepo(plainRoot) + require.NoError(t, err) + plainGitDir := path.Join(plainRoot, ".git") + + // Bare + bareRoot, err := ioutil.TempDir("", "") + require.NoError(t, err) + defer os.RemoveAll(bareRoot) + + _, err = InitBareGoGitRepo(bareRoot) + require.NoError(t, err) + bareGitDir := bareRoot + + tests := []struct { + inPath string + outPath string + err bool + }{ + // errors + {"/", "", true}, + // parent dir of a repo + {filepath.Dir(plainRoot), "", true}, + + // Plain repo + {plainRoot, plainGitDir, false}, + {plainGitDir, plainGitDir, false}, + {path.Join(plainGitDir, "objects"), plainGitDir, false}, + + // Bare repo + {bareRoot, bareGitDir, false}, + {bareGitDir, bareGitDir, false}, + {path.Join(bareGitDir, "objects"), bareGitDir, false}, + } + + for i, tc := range tests { + r, err := NewGoGitRepo(tc.inPath, nil) + 
+ if tc.err { + require.Error(t, err, i) + } else { + require.NoError(t, err, i) + assert.Equal(t, filepath.ToSlash(tc.outPath), filepath.ToSlash(r.GetPath()), i) + } + } +} + +func TestGoGitRepo(t *testing.T) { + RepoTest(t, CreateGoGitTestRepo, CleanupTestRepos) +} diff --git a/migration3/after/repository/gogit_testing.go b/migration3/after/repository/gogit_testing.go new file mode 100644 index 0000000..f20ff6b --- /dev/null +++ b/migration3/after/repository/gogit_testing.go @@ -0,0 +1,58 @@ +package repository + +import ( + "io/ioutil" + "log" +) + +// This is intended for testing only + +func CreateGoGitTestRepo(bare bool) TestedRepo { + dir, err := ioutil.TempDir("", "") + if err != nil { + log.Fatal(err) + } + + var creator func(string) (*GoGitRepo, error) + + if bare { + creator = InitBareGoGitRepo + } else { + creator = InitGoGitRepo + } + + repo, err := creator(dir) + if err != nil { + log.Fatal(err) + } + + config := repo.LocalConfig() + if err := config.StoreString("user.name", "testuser"); err != nil { + log.Fatal("failed to set user.name for test repository: ", err) + } + if err := config.StoreString("user.email", "testuser@example.com"); err != nil { + log.Fatal("failed to set user.email for test repository: ", err) + } + + return repo +} + +func SetupGoGitReposAndRemote() (repoA, repoB, remote TestedRepo) { + repoA = CreateGoGitTestRepo(false) + repoB = CreateGoGitTestRepo(false) + remote = CreateGoGitTestRepo(true) + + remoteAddr := "file://" + remote.GetPath() + + err := repoA.AddRemote("origin", remoteAddr) + if err != nil { + log.Fatal(err) + } + + err = repoB.AddRemote("origin", remoteAddr) + if err != nil { + log.Fatal(err) + } + + return repoA, repoB, remote +} diff --git a/migration3/after/repository/hash.go b/migration3/after/repository/hash.go new file mode 100644 index 0000000..6a11558 --- /dev/null +++ b/migration3/after/repository/hash.go @@ -0,0 +1,51 @@ +package repository + +import ( + "fmt" + "io" +) + +const idLengthSHA1 = 40 +const 
idLengthSHA256 = 64
+
+// Hash is a git hash
+type Hash string
+
+func (h Hash) String() string {
+	return string(h)
+}
+
+// UnmarshalGQL implements the Unmarshaler interface for gqlgen
+func (h *Hash) UnmarshalGQL(v interface{}) error {
+	_, ok := v.(string)
+	if !ok {
+		return fmt.Errorf("hashes must be strings")
+	}
+
+	*h = Hash(v.(string))
+
+	if !h.IsValid() {
+		return fmt.Errorf("invalid hash")
+	}
+
+	return nil
+}
+
+// MarshalGQL implements the Marshaler interface for gqlgen
+func (h Hash) MarshalGQL(w io.Writer) {
+	_, _ = w.Write([]byte(`"` + h.String() + `"`))
+}
+
+// IsValid tells if the hash is valid
+func (h *Hash) IsValid() bool {
+	// Support for both sha1 and sha256 git hashes
+	if len(*h) != idLengthSHA1 && len(*h) != idLengthSHA256 {
+		return false
+	}
+	for _, r := range *h {
+		if (r < 'a' || r > 'z') && (r < '0' || r > '9') {
+			return false
+		}
+	}
+	return true
+}
diff --git a/migration3/after/repository/keyring.go b/migration3/after/repository/keyring.go
new file mode 100644
index 0000000..f690b0b
--- /dev/null
+++ b/migration3/after/repository/keyring.go
@@ -0,0 +1,50 @@
+package repository
+
+import (
+	"os"
+	"path"
+
+	"github.com/99designs/keyring"
+)
+
+type Item = keyring.Item
+
+var ErrKeyringKeyNotFound = keyring.ErrKeyNotFound
+
+// Keyring provides the uniform interface over the underlying backends
+type Keyring interface {
+	// Returns an Item matching the key or ErrKeyringKeyNotFound
+	Get(key string) (Item, error)
+	// Stores an Item on the keyring
+	Set(item Item) error
+	// Removes the item with matching key
+	Remove(key string) error
+	// Provides a slice of all keys stored on the keyring
+	Keys() ([]string, error)
+}
+
+func defaultKeyring() (Keyring, error) {
+	ucd, err := os.UserConfigDir()
+	if err != nil {
+		return nil, err
+	}
+
+	return keyring.Open(keyring.Config{
+		// only use the file backend until https://github.com/99designs/keyring/issues/74 is resolved
+		AllowedBackends: []keyring.BackendType{
+			keyring.FileBackend,
}, + + ServiceName: "git-bug", + + // Fallback encrypted file + FileDir: path.Join(ucd, "git-bug", "keyring"), + // As we write the file in the user's config directory, this file should already be protected by the OS against + // other user's access. We actually don't terribly need to protect it further and a password prompt across all + // UI's would be a pain. Therefore we use here a constant password so the file will be unreadable by generic file + // scanners if the user's machine get compromised. + FilePasswordFunc: func(string) (string, error) { + return "git-bug", nil + }, + }) +} diff --git a/migration3/after/repository/mock_repo.go b/migration3/after/repository/mock_repo.go new file mode 100644 index 0000000..6d0d388 --- /dev/null +++ b/migration3/after/repository/mock_repo.go @@ -0,0 +1,357 @@ +package repository + +import ( + "crypto/sha1" + "fmt" + "strings" + + "github.com/99designs/keyring" + + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +var _ ClockedRepo = &mockRepoForTest{} +var _ TestedRepo = &mockRepoForTest{} + +// mockRepoForTest defines an instance of Repo that can be used for testing. 
+type mockRepoForTest struct { + *mockRepoConfig + *mockRepoKeyring + *mockRepoCommon + *mockRepoData + *mockRepoClock +} + +func NewMockRepoForTest() *mockRepoForTest { + return &mockRepoForTest{ + mockRepoConfig: NewMockRepoConfig(), + mockRepoKeyring: NewMockRepoKeyring(), + mockRepoCommon: NewMockRepoCommon(), + mockRepoData: NewMockRepoData(), + mockRepoClock: NewMockRepoClock(), + } +} + +var _ RepoConfig = &mockRepoConfig{} + +type mockRepoConfig struct { + localConfig *MemConfig + globalConfig *MemConfig +} + +func NewMockRepoConfig() *mockRepoConfig { + return &mockRepoConfig{ + localConfig: NewMemConfig(), + globalConfig: NewMemConfig(), + } +} + +// LocalConfig give access to the repository scoped configuration +func (r *mockRepoConfig) LocalConfig() Config { + return r.localConfig +} + +// GlobalConfig give access to the git global configuration +func (r *mockRepoConfig) GlobalConfig() Config { + return r.globalConfig +} + +// AnyConfig give access to a merged local/global configuration +func (r *mockRepoConfig) AnyConfig() ConfigRead { + return mergeConfig(r.localConfig, r.globalConfig) +} + +var _ RepoKeyring = &mockRepoKeyring{} + +type mockRepoKeyring struct { + keyring *keyring.ArrayKeyring +} + +func NewMockRepoKeyring() *mockRepoKeyring { + return &mockRepoKeyring{ + keyring: keyring.NewArrayKeyring(nil), + } +} + +// Keyring give access to a user-wide storage for secrets +func (r *mockRepoKeyring) Keyring() Keyring { + return r.keyring +} + +var _ RepoCommon = &mockRepoCommon{} + +type mockRepoCommon struct{} + +func NewMockRepoCommon() *mockRepoCommon { + return &mockRepoCommon{} +} + +// GetPath returns the path to the repo. +func (r *mockRepoCommon) GetPath() string { + return "~/mockRepo/" +} + +func (r *mockRepoCommon) GetUserName() (string, error) { + return "René Descartes", nil +} + +// GetUserEmail returns the email address that the user has used to configure git. 
+func (r *mockRepoCommon) GetUserEmail() (string, error) { + return "user@example.com", nil +} + +// GetCoreEditor returns the name of the editor that the user has used to configure git. +func (r *mockRepoCommon) GetCoreEditor() (string, error) { + return "vi", nil +} + +// GetRemotes returns the configured remotes repositories. +func (r *mockRepoCommon) GetRemotes() (map[string]string, error) { + return map[string]string{ + "origin": "git://github.com/MichaelMure/git-bug", + }, nil +} + +var _ RepoData = &mockRepoData{} + +type commit struct { + treeHash Hash + parent Hash +} + +type mockRepoData struct { + blobs map[Hash][]byte + trees map[Hash]string + commits map[Hash]commit + refs map[string]Hash +} + +func NewMockRepoData() *mockRepoData { + return &mockRepoData{ + blobs: make(map[Hash][]byte), + trees: make(map[Hash]string), + commits: make(map[Hash]commit), + refs: make(map[string]Hash), + } +} + +// PushRefs push git refs to a remote +func (r *mockRepoData) PushRefs(remote string, refSpec string) (string, error) { + return "", nil +} + +func (r *mockRepoData) FetchRefs(remote string, refSpec string) (string, error) { + return "", nil +} + +func (r *mockRepoData) StoreData(data []byte) (Hash, error) { + rawHash := sha1.Sum(data) + hash := Hash(fmt.Sprintf("%x", rawHash)) + r.blobs[hash] = data + return hash, nil +} + +func (r *mockRepoData) ReadData(hash Hash) ([]byte, error) { + data, ok := r.blobs[hash] + + if !ok { + return nil, fmt.Errorf("unknown hash") + } + + return data, nil +} + +func (r *mockRepoData) StoreTree(entries []TreeEntry) (Hash, error) { + buffer := prepareTreeEntries(entries) + rawHash := sha1.Sum(buffer.Bytes()) + hash := Hash(fmt.Sprintf("%x", rawHash)) + r.trees[hash] = buffer.String() + + return hash, nil +} + +func (r *mockRepoData) StoreCommit(treeHash Hash) (Hash, error) { + rawHash := sha1.Sum([]byte(treeHash)) + hash := Hash(fmt.Sprintf("%x", rawHash)) + r.commits[hash] = commit{ + treeHash: treeHash, + } + return hash, nil +} 
+ +func (r *mockRepoData) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) { + rawHash := sha1.Sum([]byte(treeHash + parent)) + hash := Hash(fmt.Sprintf("%x", rawHash)) + r.commits[hash] = commit{ + treeHash: treeHash, + parent: parent, + } + return hash, nil +} + +func (r *mockRepoData) UpdateRef(ref string, hash Hash) error { + r.refs[ref] = hash + return nil +} + +func (r *mockRepoData) RemoveRef(ref string) error { + delete(r.refs, ref) + return nil +} + +func (r *mockRepoData) RefExist(ref string) (bool, error) { + _, exist := r.refs[ref] + return exist, nil +} + +func (r *mockRepoData) CopyRef(source string, dest string) error { + hash, exist := r.refs[source] + + if !exist { + return fmt.Errorf("Unknown ref") + } + + r.refs[dest] = hash + return nil +} + +func (r *mockRepoData) ListRefs(refPrefix string) ([]string, error) { + var keys []string + + for k := range r.refs { + if strings.HasPrefix(k, refPrefix) { + keys = append(keys, k) + } + } + + return keys, nil +} + +func (r *mockRepoData) ListCommits(ref string) ([]Hash, error) { + var hashes []Hash + + hash := r.refs[ref] + + for { + commit, ok := r.commits[hash] + + if !ok { + break + } + + hashes = append([]Hash{hash}, hashes...) 
+		hash = commit.parent
+	}
+
+	return hashes, nil
+}
+
+func (r *mockRepoData) ReadTree(hash Hash) ([]TreeEntry, error) {
+	var data string
+
+	data, ok := r.trees[hash]
+
+	if !ok {
+		// Git will understand a commit hash to reach a tree
+		commit, ok := r.commits[hash]
+
+		if !ok {
+			return nil, fmt.Errorf("unknown hash")
+		}
+
+		data, ok = r.trees[commit.treeHash]
+
+		if !ok {
+			return nil, fmt.Errorf("unknown hash")
+		}
+	}
+
+	return readTreeEntries(data)
+}
+
+func (r *mockRepoData) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) {
+	ancestor1 := []Hash{hash1}
+
+	for hash1 != "" {
+		c, ok := r.commits[hash1]
+		if !ok {
+			return "", fmt.Errorf("unknown commit %v", hash1)
+		}
+		ancestor1 = append(ancestor1, c.parent)
+		hash1 = c.parent
+	}
+
+	for {
+		for _, ancestor := range ancestor1 {
+			if ancestor == hash2 {
+				return ancestor, nil
+			}
+		}
+
+		c, ok := r.commits[hash2]
+		if !ok {
+			return "", fmt.Errorf("unknown commit %v", hash2)
+		}
+
+		if c.parent == "" {
+			return "", fmt.Errorf("no ancestor found")
+		}
+
+		hash2 = c.parent
+	}
+}
+
+func (r *mockRepoData) GetTreeHash(commit Hash) (Hash, error) {
+	c, ok := r.commits[commit]
+	if !ok {
+		return "", fmt.Errorf("unknown commit")
+	}
+
+	return c.treeHash, nil
+}
+
+func (r *mockRepoData) AddRemote(name string, url string) error {
+	panic("implement me")
+}
+
+var _ RepoClock = &mockRepoClock{}
+
+type mockRepoClock struct {
+	clocks map[string]lamport.Clock
+}
+
+func (r *mockRepoClock) AllClocks() (map[string]lamport.Clock, error) {
+	return r.clocks, nil
+}
+
+func NewMockRepoClock() *mockRepoClock {
+	return &mockRepoClock{
+		clocks: make(map[string]lamport.Clock),
+	}
+}
+
+func (r *mockRepoClock) GetOrCreateClock(name string) (lamport.Clock, error) {
+	if c, ok := r.clocks[name]; ok {
+		return c, nil
+	}
+
+	c := lamport.NewMemClock()
+	r.clocks[name] = c
+	return c, nil
+}
+
+func (r *mockRepoClock) Increment(name string) (lamport.Time, error) {
+	c, err := r.GetOrCreateClock(name)
+	if err !=
nil {
+		return lamport.Time(0), err
+	}
+	return c.Increment()
+}
+
+func (r *mockRepoClock) Witness(name string, time lamport.Time) error {
+	c, err := r.GetOrCreateClock(name)
+	if err != nil {
+		return err
+	}
+	return c.Witness(time)
+}
diff --git a/migration3/after/repository/mock_repo_test.go b/migration3/after/repository/mock_repo_test.go
new file mode 100644
index 0000000..b56b94f
--- /dev/null
+++ b/migration3/after/repository/mock_repo_test.go
@@ -0,0 +1,10 @@
+package repository
+
+import "testing"
+
+func TestMockRepo(t *testing.T) {
+	creator := func(bare bool) TestedRepo { return NewMockRepoForTest() }
+	cleaner := func(repos ...Repo) {}
+
+	RepoTest(t, creator, cleaner)
+}
diff --git a/migration3/after/repository/repo.go b/migration3/after/repository/repo.go
new file mode 100644
index 0000000..4d66f21
--- /dev/null
+++ b/migration3/after/repository/repo.go
@@ -0,0 +1,157 @@
+// Package repository contains helper methods for working with a Git repo.
+package repository
+
+import (
+	"errors"
+
+	"github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport"
+)
+
+var (
+	// ErrNotARepo is the error returned when the git repo root can't be found
+	ErrNotARepo = errors.New("not a git repository")
+	// ErrClockNotExist is the error returned when a clock can't be found
+	ErrClockNotExist = errors.New("clock doesn't exist")
+)
+
+// Repo represents a source code repository.
+type Repo interface {
+	RepoConfig
+	RepoKeyring
+	RepoCommon
+	RepoData
+}
+
+// ClockedRepo is a Repo that also has Lamport clocks
+type ClockedRepo interface {
+	Repo
+	RepoClock
+}
+
+// RepoConfig access the configuration of a repository
+type RepoConfig interface {
+	// LocalConfig give access to the repository scoped configuration
+	LocalConfig() Config
+
+	// GlobalConfig give access to the global scoped configuration
+	GlobalConfig() Config
+
+	// AnyConfig give access to a merged local/global configuration
+	AnyConfig() ConfigRead
+}
+
+// RepoKeyring give access to a user-wide storage for secrets
+type RepoKeyring interface {
+	// Keyring give access to a user-wide storage for secrets
+	Keyring() Keyring
+}
+
+// RepoCommon represents the common functions that we want all repos to implement
+type RepoCommon interface {
+	// GetPath returns the path to the repo.
+	GetPath() string
+
+	// GetUserName returns the name that the user has used to configure git
+	GetUserName() (string, error)
+
+	// GetUserEmail returns the email address that the user has used to configure git.
+	GetUserEmail() (string, error)
+
+	// GetCoreEditor returns the name of the editor that the user has used to configure git.
+	GetCoreEditor() (string, error)
+
+	// GetRemotes returns the configured remotes repositories.
+ GetRemotes() (map[string]string, error) +} + +// RepoData give access to the git data storage +type RepoData interface { + // FetchRefs fetch git refs from a remote + FetchRefs(remote string, refSpec string) (string, error) + + // PushRefs push git refs to a remote + PushRefs(remote string, refSpec string) (string, error) + + // StoreData will store arbitrary data and return the corresponding hash + StoreData(data []byte) (Hash, error) + + // ReadData will attempt to read arbitrary data from the given hash + ReadData(hash Hash) ([]byte, error) + + // StoreTree will store a mapping key-->Hash as a Git tree + StoreTree(mapping []TreeEntry) (Hash, error) + + // ReadTree will return the list of entries in a Git tree + // The given hash could be from either a commit or a tree + ReadTree(hash Hash) ([]TreeEntry, error) + + // StoreCommit will store a Git commit with the given Git tree + StoreCommit(treeHash Hash) (Hash, error) + + // StoreCommit will store a Git commit with the given Git tree + StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) + + // GetTreeHash return the git tree hash referenced in a commit + GetTreeHash(commit Hash) (Hash, error) + + // FindCommonAncestor will return the last common ancestor of two chain of commit + FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, error) + + // UpdateRef will create or update a Git reference + UpdateRef(ref string, hash Hash) error + + // RemoveRef will remove a Git reference + RemoveRef(ref string) error + + // ListRefs will return a list of Git ref matching the given refspec + ListRefs(refPrefix string) ([]string, error) + + // RefExist will check if a reference exist in Git + RefExist(ref string) (bool, error) + + // CopyRef will create a new reference with the same value as another one + CopyRef(source string, dest string) error + + // ListCommits will return the list of tree hashes of a ref, in chronological order + ListCommits(ref string) ([]Hash, error) +} + +// RepoClock give access to 
Lamport clocks +type RepoClock interface { + // AllClocks return all the known clocks + AllClocks() (map[string]lamport.Clock, error) + + // GetOrCreateClock return a Lamport clock stored in the Repo. + // If the clock doesn't exist, it's created. + GetOrCreateClock(name string) (lamport.Clock, error) + + // Increment is equivalent to c = GetOrCreateClock(name) + c.Increment() + Increment(name string) (lamport.Time, error) + + // Witness is equivalent to c = GetOrCreateClock(name) + c.Witness(time) + Witness(name string, time lamport.Time) error +} + +// ClockLoader hold which logical clock need to exist for an entity and +// how to create them if they don't. +type ClockLoader struct { + // Clocks hold the name of all the clocks this loader deal with. + // Those clocks will be checked when the repo load. If not present or broken, + // Witnesser will be used to create them. + Clocks []string + // Witnesser is a function that will initialize the clocks of a repo + // from scratch + Witnesser func(repo ClockedRepo) error +} + +// TestedRepo is an extended ClockedRepo with function for testing only +type TestedRepo interface { + ClockedRepo + repoTest +} + +// repoTest give access to test only functions +type repoTest interface { + // AddRemote add a new remote to the repository + AddRemote(name string, url string) error +} diff --git a/migration3/after/repository/repo_testing.go b/migration3/after/repository/repo_testing.go new file mode 100644 index 0000000..a4604ef --- /dev/null +++ b/migration3/after/repository/repo_testing.go @@ -0,0 +1,244 @@ +package repository + +import ( + "log" + "math/rand" + "os" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +func CleanupTestRepos(repos ...Repo) { + var firstErr error + for _, repo := range repos { + path := repo.GetPath() + if strings.HasSuffix(path, "/.git") { + // for a normal repository (not --bare), we want to remove 
everything
+			// including the parent directory where files are checked out
+			path = strings.TrimSuffix(path, "/.git")
+
+			// Testing non-bare repo should also check path is
+			// only .git (i.e. ./.git), but doing so, we should
+			// try to remove the current directory and have some
+			// trouble. In the present case, this case should not
+			// occur.
+			// TODO consider warning or error when path == ".git"
+		}
+		// fmt.Println("Cleaning repo:", path)
+		err := os.RemoveAll(path)
+		if err != nil {
+			log.Println(err)
+			if firstErr == nil {
+				firstErr = err
+			}
+		}
+	}
+
+	if firstErr != nil {
+		log.Fatal(firstErr)
+	}
+}
+
+type RepoCreator func(bare bool) TestedRepo
+type RepoCleaner func(repos ...Repo)
+
+// Test suite for a Repo implementation
+func RepoTest(t *testing.T, creator RepoCreator, cleaner RepoCleaner) {
+	for bare, name := range map[bool]string{
+		false: "Plain",
+		true:  "Bare",
+	} {
+		t.Run(name, func(t *testing.T) {
+			repo := creator(bare)
+			defer cleaner(repo)
+
+			t.Run("Data", func(t *testing.T) {
+				RepoDataTest(t, repo)
+			})
+
+			t.Run("Config", func(t *testing.T) {
+				RepoConfigTest(t, repo)
+			})
+
+			t.Run("Clocks", func(t *testing.T) {
+				RepoClockTest(t, repo)
+			})
+		})
+	}
+}
+
+// helper to test a RepoConfig
+func RepoConfigTest(t *testing.T, repo RepoConfig) {
+	testConfig(t, repo.LocalConfig())
+}
+
+// helper to test a RepoData
+func RepoDataTest(t *testing.T, repo RepoData) {
+	// Blob
+
+	data := randomData()
+
+	blobHash1, err := repo.StoreData(data)
+	require.NoError(t, err)
+	require.True(t, blobHash1.IsValid())
+
+	blob1Read, err := repo.ReadData(blobHash1)
+	require.NoError(t, err)
+	require.Equal(t, data, blob1Read)
+
+	// Tree
+
+	blobHash2, err := repo.StoreData(randomData())
+	require.NoError(t, err)
+	blobHash3, err := repo.StoreData(randomData())
+	require.NoError(t, err)
+
+	tree1 := []TreeEntry{
+		{
+			ObjectType: Blob,
+			Hash:       blobHash1,
+			Name:       "blob1",
+		},
+		{
+			ObjectType: Blob,
+			Hash:       blobHash2,
+			Name:       "blob2",
+		},
+	}
+
+
treeHash1, err := repo.StoreTree(tree1) + require.NoError(t, err) + require.True(t, treeHash1.IsValid()) + + tree1Read, err := repo.ReadTree(treeHash1) + require.NoError(t, err) + require.ElementsMatch(t, tree1, tree1Read) + + tree2 := []TreeEntry{ + { + ObjectType: Tree, + Hash: treeHash1, + Name: "tree1", + }, + { + ObjectType: Blob, + Hash: blobHash3, + Name: "blob3", + }, + } + + treeHash2, err := repo.StoreTree(tree2) + require.NoError(t, err) + require.True(t, treeHash2.IsValid()) + + tree2Read, err := repo.ReadTree(treeHash2) + require.NoError(t, err) + require.ElementsMatch(t, tree2, tree2Read) + + // Commit + + commit1, err := repo.StoreCommit(treeHash1) + require.NoError(t, err) + require.True(t, commit1.IsValid()) + + treeHash1Read, err := repo.GetTreeHash(commit1) + require.NoError(t, err) + require.Equal(t, treeHash1, treeHash1Read) + + commit2, err := repo.StoreCommitWithParent(treeHash2, commit1) + require.NoError(t, err) + require.True(t, commit2.IsValid()) + + treeHash2Read, err := repo.GetTreeHash(commit2) + require.NoError(t, err) + require.Equal(t, treeHash2, treeHash2Read) + + // ReadTree should accept tree and commit hashes + tree1read, err := repo.ReadTree(commit1) + require.NoError(t, err) + require.Equal(t, tree1read, tree1) + + // Ref + + exist1, err := repo.RefExist("refs/bugs/ref1") + require.NoError(t, err) + require.False(t, exist1) + + err = repo.UpdateRef("refs/bugs/ref1", commit2) + require.NoError(t, err) + + exist1, err = repo.RefExist("refs/bugs/ref1") + require.NoError(t, err) + require.True(t, exist1) + + ls, err := repo.ListRefs("refs/bugs") + require.NoError(t, err) + require.ElementsMatch(t, []string{"refs/bugs/ref1"}, ls) + + err = repo.CopyRef("refs/bugs/ref1", "refs/bugs/ref2") + require.NoError(t, err) + + ls, err = repo.ListRefs("refs/bugs") + require.NoError(t, err) + require.ElementsMatch(t, []string{"refs/bugs/ref1", "refs/bugs/ref2"}, ls) + + commits, err := repo.ListCommits("refs/bugs/ref2") + require.NoError(t, 
err) + require.Equal(t, []Hash{commit1, commit2}, commits) + + // Graph + + commit3, err := repo.StoreCommitWithParent(treeHash1, commit1) + require.NoError(t, err) + + ancestorHash, err := repo.FindCommonAncestor(commit2, commit3) + require.NoError(t, err) + require.Equal(t, commit1, ancestorHash) + + err = repo.RemoveRef("refs/bugs/ref1") + require.NoError(t, err) +} + +// helper to test a RepoClock +func RepoClockTest(t *testing.T, repo RepoClock) { + allClocks, err := repo.AllClocks() + require.NoError(t, err) + require.Len(t, allClocks, 0) + + clock, err := repo.GetOrCreateClock("foo") + require.NoError(t, err) + require.Equal(t, lamport.Time(1), clock.Time()) + + time, err := clock.Increment() + require.NoError(t, err) + require.Equal(t, lamport.Time(2), time) + require.Equal(t, lamport.Time(2), clock.Time()) + + clock2, err := repo.GetOrCreateClock("foo") + require.NoError(t, err) + require.Equal(t, lamport.Time(2), clock2.Time()) + + clock3, err := repo.GetOrCreateClock("bar") + require.NoError(t, err) + require.Equal(t, lamport.Time(1), clock3.Time()) + + allClocks, err = repo.AllClocks() + require.NoError(t, err) + require.Equal(t, map[string]lamport.Clock{ + "foo": clock, + "bar": clock3, + }, allClocks) +} + +func randomData() []byte { + var letterRunes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + b := make([]byte, 32) + for i := range b { + b[i] = letterRunes[rand.Intn(len(letterRunes))] + } + return b +} diff --git a/migration3/after/repository/tree_entry.go b/migration3/after/repository/tree_entry.go new file mode 100644 index 0000000..6c5ec1a --- /dev/null +++ b/migration3/after/repository/tree_entry.go @@ -0,0 +1,102 @@ +package repository + +import ( + "bytes" + "fmt" + "strings" +) + +type TreeEntry struct { + ObjectType ObjectType + Hash Hash + Name string +} + +type ObjectType int + +const ( + Unknown ObjectType = iota + Blob + Tree +) + +func ParseTreeEntry(line string) (TreeEntry, error) { + fields := strings.Fields(line) + + if 
len(fields) < 4 { + return TreeEntry{}, fmt.Errorf("Invalid input to parse as a TreeEntry") + } + + objType, err := ParseObjectType(fields[0], fields[1]) + + if err != nil { + return TreeEntry{}, err + } + + hash := Hash(fields[2]) + name := strings.Join(fields[3:], "") + + return TreeEntry{ + ObjectType: objType, + Hash: hash, + Name: name, + }, nil +} + +// Format the entry as a git ls-tree compatible line +func (entry TreeEntry) Format() string { + return fmt.Sprintf("%s %s\t%s\n", entry.ObjectType.Format(), entry.Hash, entry.Name) +} + +func (ot ObjectType) Format() string { + switch ot { + case Blob: + return "100644 blob" + case Tree: + return "040000 tree" + default: + panic("Unknown git object type") + } +} + +func ParseObjectType(mode, objType string) (ObjectType, error) { + switch { + case mode == "100644" && objType == "blob": + return Blob, nil + case mode == "040000" && objType == "tree": + return Tree, nil + default: + return Unknown, fmt.Errorf("Unknown git object type %s %s", mode, objType) + } +} + +func prepareTreeEntries(entries []TreeEntry) bytes.Buffer { + var buffer bytes.Buffer + + for _, entry := range entries { + buffer.WriteString(entry.Format()) + } + + return buffer +} + +func readTreeEntries(s string) ([]TreeEntry, error) { + split := strings.Split(strings.TrimSpace(s), "\n") + + casted := make([]TreeEntry, len(split)) + for i, line := range split { + if line == "" { + continue + } + + entry, err := ParseTreeEntry(line) + + if err != nil { + return nil, err + } + + casted[i] = entry + } + + return casted, nil +} diff --git a/migration3/after/repository/tree_entry_test.go b/migration3/after/repository/tree_entry_test.go new file mode 100644 index 0000000..d57433f --- /dev/null +++ b/migration3/after/repository/tree_entry_test.go @@ -0,0 +1,31 @@ +package repository + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTreeEntryFormat(t *testing.T) { + entries := []TreeEntry{ + {Blob, 
Hash("a85730cf5287d40a1e32d3a671ba2296c73387cb"), "name"}, + {Tree, Hash("a85730cf5287d40a1e32d3a671ba2296c73387cb"), "name"}, + } + + for _, entry := range entries { + _ = entry.Format() + } +} + +func TestTreeEntryParse(t *testing.T) { + lines := []string{ + "100644 blob 1e5ffaffc67049635ba7b01f77143313503f1ca1 .gitignore", + "040000 tree 728421fea4168b874bc1a8aa409d6723ef445a4e bug", + } + + for _, line := range lines { + _, err := ParseTreeEntry(line) + assert.NoError(t, err) + } + +} diff --git a/migration3/after/util/lamport/clock.go b/migration3/after/util/lamport/clock.go new file mode 100644 index 0000000..53b0ac7 --- /dev/null +++ b/migration3/after/util/lamport/clock.go @@ -0,0 +1,15 @@ +package lamport + +// Time is the value of a Clock. +type Time uint64 + +// Clock is a Lamport logical clock +type Clock interface { + // Time is used to return the current value of the lamport clock + Time() Time + // Increment is used to return the value of the lamport clock and increment it afterwards + Increment() (Time, error) + // Witness is called to update our local clock if necessary after + // witnessing a clock value received from another process + Witness(time Time) error +} diff --git a/migration3/after/util/lamport/clock_testing.go b/migration3/after/util/lamport/clock_testing.go new file mode 100644 index 0000000..4bf6d2b --- /dev/null +++ b/migration3/after/util/lamport/clock_testing.go @@ -0,0 +1,28 @@ +package lamport + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func testClock(t *testing.T, c Clock) { + assert.Equal(t, Time(1), c.Time()) + + val, err := c.Increment() + assert.NoError(t, err) + assert.Equal(t, Time(2), val) + assert.Equal(t, Time(2), c.Time()) + + err = c.Witness(41) + assert.NoError(t, err) + assert.Equal(t, Time(42), c.Time()) + + err = c.Witness(41) + assert.NoError(t, err) + assert.Equal(t, Time(42), c.Time()) + + err = c.Witness(30) + assert.NoError(t, err) + assert.Equal(t, Time(42), c.Time()) +} diff 
--git a/migration3/after/util/lamport/mem_clock.go b/migration3/after/util/lamport/mem_clock.go new file mode 100644 index 0000000..f113b50 --- /dev/null +++ b/migration3/after/util/lamport/mem_clock.go @@ -0,0 +1,89 @@ +/* + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this file, + You can obtain one at http://mozilla.org/MPL/2.0/. + + Copyright (c) 2013, Armon Dadgar armon.dadgar@gmail.com + Copyright (c) 2013, Mitchell Hashimoto mitchell.hashimoto@gmail.com + + Alternatively, the contents of this file may be used under the terms + of the GNU General Public License Version 3 or later, as described below: + + This file is free software: you may copy, redistribute and/or modify + it under the terms of the GNU General Public License as published by the + Free Software Foundation, either version 3 of the License, or (at your + option) any later version. + + This file is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General + Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see http://www.gnu.org/licenses/. + +*/ + +package lamport + +import ( + "sync/atomic" +) + +var _ Clock = &MemClock{} + +// MemClock is a thread safe implementation of a lamport clock. It +// uses efficient atomic operations for all of its functions, falling back +// to a heavy lock only if there are enough CAS failures. +type MemClock struct { + counter uint64 +} + +// NewMemClock create a new clock with the value 1. +// Value 0 is considered as invalid. +func NewMemClock() *MemClock { + return &MemClock{ + counter: 1, + } +} + +// NewMemClockWithTime create a new clock with a value. 
+func NewMemClockWithTime(time uint64) *MemClock { + return &MemClock{ + counter: time, + } +} + +// Time is used to return the current value of the lamport clock +func (mc *MemClock) Time() Time { + return Time(atomic.LoadUint64(&mc.counter)) +} + +// Increment is used to return the value of the lamport clock and increment it afterwards +func (mc *MemClock) Increment() (Time, error) { + return Time(atomic.AddUint64(&mc.counter, 1)), nil +} + +// Witness is called to update our local clock if necessary after +// witnessing a clock value received from another process +func (mc *MemClock) Witness(v Time) error { +WITNESS: + // If the other value is old, we do not need to do anything + cur := atomic.LoadUint64(&mc.counter) + other := uint64(v) + if other < cur { + return nil + } + + // Ensure that our local clock is at least one ahead. + if !atomic.CompareAndSwapUint64(&mc.counter, cur, other+1) { + // CAS: CompareAndSwap + // The CAS failed, so we just retry. Eventually our CAS should + // succeed or a future witness will pass us by and our witness + // will end. 
+		goto WITNESS
+	}
+
+	return nil
+}
diff --git a/migration3/after/util/lamport/mem_clock_test.go b/migration3/after/util/lamport/mem_clock_test.go
new file mode 100644
index 0000000..e01d2ec
--- /dev/null
+++ b/migration3/after/util/lamport/mem_clock_test.go
@@ -0,0 +1,8 @@
+package lamport
+
+import "testing"
+
+func TestMemClock(t *testing.T) {
+	c := NewMemClock()
+	testClock(t, c)
+}
diff --git a/migration3/after/util/lamport/persisted_clock.go b/migration3/after/util/lamport/persisted_clock.go
new file mode 100644
index 0000000..e70b01e
--- /dev/null
+++ b/migration3/after/util/lamport/persisted_clock.go
@@ -0,0 +1,100 @@
+package lamport
+
+import (
+	"errors"
+	"fmt"
+	"io/ioutil"
+	"os"
+	"path/filepath"
+)
+
+var ErrClockNotExist = errors.New("clock doesn't exist")
+
+type PersistedClock struct {
+	*MemClock
+	filePath string
+}
+
+// NewPersistedClock creates a new persisted Lamport clock backed by filePath
+func NewPersistedClock(filePath string) (*PersistedClock, error) {
+	clock := &PersistedClock{
+		MemClock: NewMemClock(),
+		filePath: filePath,
+	}
+
+	dir := filepath.Dir(filePath)
+	err := os.MkdirAll(dir, 0777)
+	if err != nil {
+		return nil, err
+	}
+
+	err = clock.Write()
+	if err != nil {
+		return nil, err
+	}
+
+	return clock, nil
+}
+
+// LoadPersistedClock loads a persisted Lamport clock from a file
+func LoadPersistedClock(filePath string) (*PersistedClock, error) {
+	clock := &PersistedClock{
+		filePath: filePath,
+	}
+
+	err := clock.read()
+	if err != nil {
+		return nil, err
+	}
+
+	return clock, nil
+}
+
+// Increment returns the value of the Lamport clock, increments it and persists the new value
+func (pc *PersistedClock) Increment() (Time, error) {
+	time, err := pc.MemClock.Increment()
+	if err != nil {
+		return 0, err
+	}
+	return time, pc.Write()
+}
+
+// Witness is called to update our local clock if necessary after
+// witnessing a clock value received from another process
+func (pc *PersistedClock) Witness(time Time) error {
+	// TODO: rework so that we write only when the clock was actually updated
+	err := pc.MemClock.Witness(time)
+	if err != nil {
+		return err
+	}
+	return pc.Write()
+}
+
+func (pc *PersistedClock) read() error {
+	content, err := ioutil.ReadFile(pc.filePath)
+	if os.IsNotExist(err) {
+		return ErrClockNotExist
+	}
+	if err != nil {
+		return err
+	}
+
+	var value uint64
+	n, err := fmt.Sscanf(string(content), "%d", &value)
+	if err != nil {
+		return err
+	}
+
+	if n != 1 {
+		return fmt.Errorf("could not read the clock")
+	}
+
+	pc.MemClock = NewMemClockWithTime(value)
+
+	return nil
+}
+
+func (pc *PersistedClock) Write() error {
+	data := []byte(fmt.Sprintf("%d", pc.counter))
+	return ioutil.WriteFile(pc.filePath, data, 0644)
+}
diff --git a/migration3/after/util/lamport/persisted_clock_test.go b/migration3/after/util/lamport/persisted_clock_test.go
new file mode 100644
index 0000000..aacec3b
--- /dev/null
+++ b/migration3/after/util/lamport/persisted_clock_test.go
@@ -0,0 +1,19 @@
+package lamport
+
+import (
+	"io/ioutil"
+	"path"
+	"testing"
+
+	"github.com/stretchr/testify/require"
+)
+
+func TestPersistedClock(t *testing.T) {
+	dir, err := ioutil.TempDir("", "")
+	require.NoError(t, err)
+
+	c, err := NewPersistedClock(path.Join(dir, "test-clock"))
+	require.NoError(t, err)
+
+	testClock(t, c)
+}
diff --git a/migration3/after/util/text/transform.go b/migration3/after/util/text/transform.go
new file mode 100644
index 0000000..59dc4e0
--- /dev/null
+++ b/migration3/after/util/text/transform.go
@@ -0,0 +1,31 @@
+package text
+
+import (
+	"strings"
+	"unicode"
+
+	"golang.org/x/text/runes"
+	"golang.org/x/text/transform"
+)
+
+func Cleanup(text string) (string, error) {
+	// normalize windows-style line endings (\r\n) to \n
+	text = strings.Replace(text, "\r\n", "\n", -1)
+
+	// remove all unicode control characters except
+	// '\n', '\r' and '\t'
+	t := runes.Remove(runes.Predicate(func(r rune) bool {
+		switch r {
+		case '\r', '\n', '\t':
+			return false
+		}
+		return unicode.IsControl(r)
+	}))
+	sanitized, _, err := transform.String(t, text)
+	if err != nil {
+		return "", err
+	}
+
+	// trim extra newlines not displayed in the GitHub UI but still present in the data
+	return strings.TrimSpace(sanitized), nil
+}
diff --git a/migration3/after/util/text/validate.go b/migration3/after/util/text/validate.go
new file mode 100644
index 0000000..51e94fb
--- /dev/null
+++ b/migration3/after/util/text/validate.go
@@ -0,0 +1,44 @@
+package text
+
+import (
+	"net/url"
+	"strings"
+	"unicode"
+)
+
+// Empty tells if the string is considered empty once spaces
+// and non-graphic characters are removed
+func Empty(s string) bool {
+	trim := strings.TrimFunc(s, func(r rune) bool {
+		return unicode.IsSpace(r) || !unicode.IsGraphic(r)
+	})
+
+	return trim == ""
+}
+
+// Safe tells if the string contains only safe characters.
+// Currently triggers on unicode control characters except \n, \t and \r
+func Safe(s string) bool {
+	for _, r := range s {
+		switch r {
+		case '\t', '\r', '\n':
+			continue
+		}
+
+		if unicode.IsControl(r) {
+			return false
+		}
+	}
+
+	return true
+}
+
+// ValidUrl tells if the string contains what seems to be a valid URL
+func ValidUrl(s string) bool {
+	if strings.Contains(s, "\n") {
+		return false
+	}
+
+	_, err := url.ParseRequestURI(s)
+	return err == nil
+}
diff --git a/migration3/after/util/timestamp/timestamp.go b/migration3/after/util/timestamp/timestamp.go
new file mode 100644
index 0000000..4f587cb
--- /dev/null
+++ b/migration3/after/util/timestamp/timestamp.go
@@ -0,0 +1,9 @@
+package timestamp
+
+import "time"
+
+// Timestamp is a Unix timestamp in seconds, stored as int64
+type Timestamp int64
+
+func (t Timestamp) Time() time.Time {
+	return time.Unix(int64(t), 0)
+}
diff --git a/migration3/before/bug/bug.go
b/migration3/before/bug/bug.go new file mode 100644 index 0000000..12bc0bf --- /dev/null +++ b/migration3/before/bug/bug.go @@ -0,0 +1,730 @@ +// Package bug contains the bug data model and low-level related functions +package bug + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" +) + +const bugsRefPattern = "refs/bugs/" +const bugsRemoteRefPattern = "refs/remotes/%s/bugs/" + +const opsEntryName = "ops" +const rootEntryName = "root" +const mediaEntryName = "media" + +const createClockEntryPrefix = "create-clock-" +const createClockEntryPattern = "create-clock-%d" +const editClockEntryPrefix = "edit-clock-" +const editClockEntryPattern = "edit-clock-%d" + +const creationClockName = "bug-create" +const editClockName = "bug-edit" + +var ErrBugNotExist = errors.New("bug doesn't exist") + +func NewErrMultipleMatchBug(matching []entity.Id) *entity.ErrMultipleMatch { + return entity.NewErrMultipleMatch("bug", matching) +} + +func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch { + return entity.NewErrMultipleMatch("operation", matching) +} + +var _ Interface = &Bug{} +var _ entity.Interface = &Bug{} + +// Bug hold the data of a bug thread, organized in a way close to +// how it will be persisted inside Git. This is the data structure +// used to merge two different version of the same Bug. +type Bug struct { + + // A Lamport clock is a logical clock that allow to order event + // inside a distributed system. 
+ // It must be the first field in this struct due to https://github.com/golang/go/issues/599 + createTime lamport.Time + editTime lamport.Time + + // Id used as unique identifier + id entity.Id + + lastCommit repository.Hash + rootPack repository.Hash + + // all the committed operations + Packs []OperationPack + + // a temporary pack of operations used for convenience to pile up new operations + // before a commit + staging OperationPack +} + +// NewBug create a new Bug +func NewBug() *Bug { + // No id yet + // No logical clock yet + return &Bug{} +} + +// ReadLocal will read a local bug from its hash +func ReadLocal(repo repository.ClockedRepo, id entity.Id) (*Bug, error) { + ref := bugsRefPattern + id.String() + return read(repo, identity.NewSimpleResolver(repo), ref) +} + +// ReadLocalWithResolver will read a local bug from its hash +func ReadLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, id entity.Id) (*Bug, error) { + ref := bugsRefPattern + id.String() + return read(repo, identityResolver, ref) +} + +// ReadRemote will read a remote bug from its hash +func ReadRemote(repo repository.ClockedRepo, remote string, id entity.Id) (*Bug, error) { + ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String() + return read(repo, identity.NewSimpleResolver(repo), ref) +} + +// ReadRemoteWithResolver will read a remote bug from its hash +func ReadRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string, id entity.Id) (*Bug, error) { + ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String() + return read(repo, identityResolver, ref) +} + +// read will read and parse a Bug from git +func read(repo repository.ClockedRepo, identityResolver identity.Resolver, ref string) (*Bug, error) { + refSplit := strings.Split(ref, "/") + id := entity.Id(refSplit[len(refSplit)-1]) + + if err := id.Validate(); err != nil { + return nil, errors.Wrap(err, "invalid ref ") + } + + hashes, err := 
repo.ListCommits(ref) + + // TODO: this is not perfect, it might be a command invoke error + if err != nil { + return nil, ErrBugNotExist + } + + bug := Bug{ + id: id, + editTime: 0, + } + + // Load each OperationPack + for _, hash := range hashes { + entries, err := repo.ReadTree(hash) + if err != nil { + return nil, errors.Wrap(err, "can't list git tree entries") + } + + bug.lastCommit = hash + + var opsEntry repository.TreeEntry + opsFound := false + var rootEntry repository.TreeEntry + rootFound := false + var createTime uint64 + var editTime uint64 + + for _, entry := range entries { + if entry.Name == opsEntryName { + opsEntry = entry + opsFound = true + continue + } + if entry.Name == rootEntryName { + rootEntry = entry + rootFound = true + } + if strings.HasPrefix(entry.Name, createClockEntryPrefix) { + n, err := fmt.Sscanf(entry.Name, createClockEntryPattern, &createTime) + if err != nil { + return nil, errors.Wrap(err, "can't read create lamport time") + } + if n != 1 { + return nil, fmt.Errorf("could not parse create time lamport value") + } + } + if strings.HasPrefix(entry.Name, editClockEntryPrefix) { + n, err := fmt.Sscanf(entry.Name, editClockEntryPattern, &editTime) + if err != nil { + return nil, errors.Wrap(err, "can't read edit lamport time") + } + if n != 1 { + return nil, fmt.Errorf("could not parse edit time lamport value") + } + } + } + + if !opsFound { + return nil, errors.New("invalid tree, missing the ops entry") + } + if !rootFound { + return nil, errors.New("invalid tree, missing the root entry") + } + + if bug.rootPack == "" { + bug.rootPack = rootEntry.Hash + bug.createTime = lamport.Time(createTime) + } + + // Due to rebase, edit Lamport time are not necessarily ordered + if editTime > uint64(bug.editTime) { + bug.editTime = lamport.Time(editTime) + } + + // Update the clocks + createClock, err := repo.GetOrCreateClock(creationClockName) + if err != nil { + return nil, err + } + if err := createClock.Witness(bug.createTime); err != 
nil { + return nil, errors.Wrap(err, "failed to update create lamport clock") + } + editClock, err := repo.GetOrCreateClock(editClockName) + if err != nil { + return nil, err + } + if err := editClock.Witness(bug.editTime); err != nil { + return nil, errors.Wrap(err, "failed to update edit lamport clock") + } + + data, err := repo.ReadData(opsEntry.Hash) + if err != nil { + return nil, errors.Wrap(err, "failed to read git blob data") + } + + opp := &OperationPack{} + err = json.Unmarshal(data, &opp) + + if err != nil { + return nil, errors.Wrap(err, "failed to decode OperationPack json") + } + + // tag the pack with the commit hash + opp.commitHash = hash + + bug.Packs = append(bug.Packs, *opp) + } + + // Make sure that the identities are properly loaded + err = bug.EnsureIdentities(identityResolver) + if err != nil { + return nil, err + } + + return &bug, nil +} + +// RemoveBug will remove a local bug from its entity.Id +func RemoveBug(repo repository.ClockedRepo, id entity.Id) error { + var fullMatches []string + + refs, err := repo.ListRefs(bugsRefPattern + id.String()) + if err != nil { + return err + } + if len(refs) > 1 { + return NewErrMultipleMatchBug(entity.RefsToIds(refs)) + } + if len(refs) == 1 { + // we have the bug locally + fullMatches = append(fullMatches, refs[0]) + } + + remotes, err := repo.GetRemotes() + if err != nil { + return err + } + + for remote := range remotes { + remotePrefix := fmt.Sprintf(bugsRemoteRefPattern+id.String(), remote) + remoteRefs, err := repo.ListRefs(remotePrefix) + if err != nil { + return err + } + if len(remoteRefs) > 1 { + return NewErrMultipleMatchBug(entity.RefsToIds(refs)) + } + if len(remoteRefs) == 1 { + // found the bug in a remote + fullMatches = append(fullMatches, remoteRefs[0]) + } + } + + if len(fullMatches) == 0 { + return ErrBugNotExist + } + + for _, ref := range fullMatches { + err = repo.RemoveRef(ref) + if err != nil { + return err + } + } + + return nil +} + +type StreamedBug struct { + Bug *Bug + 
Err error +} + +// ReadAllLocal read and parse all local bugs +func ReadAllLocal(repo repository.ClockedRepo) <-chan StreamedBug { + return readAll(repo, identity.NewSimpleResolver(repo), bugsRefPattern) +} + +// ReadAllLocalWithResolver read and parse all local bugs +func ReadAllLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug { + return readAll(repo, identityResolver, bugsRefPattern) +} + +// ReadAllRemote read and parse all remote bugs for a given remote +func ReadAllRemote(repo repository.ClockedRepo, remote string) <-chan StreamedBug { + refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote) + return readAll(repo, identity.NewSimpleResolver(repo), refPrefix) +} + +// ReadAllRemoteWithResolver read and parse all remote bugs for a given remote +func ReadAllRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string) <-chan StreamedBug { + refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote) + return readAll(repo, identityResolver, refPrefix) +} + +// Read and parse all available bug with a given ref prefix +func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver, refPrefix string) <-chan StreamedBug { + out := make(chan StreamedBug) + + go func() { + defer close(out) + + refs, err := repo.ListRefs(refPrefix) + if err != nil { + out <- StreamedBug{Err: err} + return + } + + for _, ref := range refs { + b, err := read(repo, identityResolver, ref) + + if err != nil { + out <- StreamedBug{Err: err} + return + } + + out <- StreamedBug{Bug: b} + } + }() + + return out +} + +// ListLocalIds list all the available local bug ids +func ListLocalIds(repo repository.Repo) ([]entity.Id, error) { + refs, err := repo.ListRefs(bugsRefPattern) + if err != nil { + return nil, err + } + + return entity.RefsToIds(refs), nil +} + +// Validate check if the Bug data is valid +func (bug *Bug) Validate() error { + // non-empty + if len(bug.Packs) == 0 && 
bug.staging.IsEmpty() { + return fmt.Errorf("bug has no operations") + } + + // check if each pack and operations are valid + for _, pack := range bug.Packs { + if err := pack.Validate(); err != nil { + return err + } + } + + // check if staging is valid if needed + if !bug.staging.IsEmpty() { + if err := bug.staging.Validate(); err != nil { + return errors.Wrap(err, "staging") + } + } + + // The very first Op should be a CreateOp + firstOp := bug.FirstOp() + if firstOp == nil || firstOp.base().OperationType != CreateOp { + return fmt.Errorf("first operation should be a Create op") + } + + // The bug Id should be the hash of the first commit + if len(bug.Packs) > 0 && string(bug.Packs[0].commitHash) != bug.id.String() { + return fmt.Errorf("bug id should be the first commit hash") + } + + // Check that there is no more CreateOp op + // Check that there is no colliding operation's ID + it := NewOperationIterator(bug) + createCount := 0 + ids := make(map[entity.Id]struct{}) + for it.Next() { + if it.Value().base().OperationType == CreateOp { + createCount++ + } + if _, ok := ids[it.Value().Id()]; ok { + return fmt.Errorf("id collision: %s", it.Value().Id()) + } + ids[it.Value().Id()] = struct{}{} + } + + if createCount != 1 { + return fmt.Errorf("only one Create op allowed") + } + + return nil +} + +// Append an operation into the staging area, to be committed later +func (bug *Bug) Append(op Operation) { + bug.staging.Append(op) +} + +// Commit write the staging area in Git and move the operations to the Packs +func (bug *Bug) Commit(repo repository.ClockedRepo) error { + + if !bug.NeedCommit() { + return fmt.Errorf("can't commit a bug with no pending operation") + } + + if err := bug.Validate(); err != nil { + return errors.Wrap(err, "can't commit a bug with invalid data") + } + + // Write the Ops as a Git blob containing the serialized array + hash, err := bug.staging.Write(repo) + if err != nil { + return err + } + + if bug.rootPack == "" { + bug.rootPack = hash 
+ } + + // Make a Git tree referencing this blob + tree := []repository.TreeEntry{ + // the last pack of ops + {ObjectType: repository.Blob, Hash: hash, Name: opsEntryName}, + // always the first pack of ops (might be the same) + {ObjectType: repository.Blob, Hash: bug.rootPack, Name: rootEntryName}, + } + + // Reference, if any, all the files required by the ops + // Git will check that they actually exist in the storage and will make sure + // to push/pull them as needed. + mediaTree := makeMediaTree(bug.staging) + if len(mediaTree) > 0 { + mediaTreeHash, err := repo.StoreTree(mediaTree) + if err != nil { + return err + } + tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Tree, + Hash: mediaTreeHash, + Name: mediaEntryName, + }) + } + + // Store the logical clocks as well + // --> edit clock for each OperationPack/commits + // --> create clock only for the first OperationPack/commits + // + // To avoid having one blob for each clock value, clocks are serialized + // directly into the entry name + emptyBlobHash, err := repo.StoreData([]byte{}) + if err != nil { + return err + } + + editClock, err := repo.GetOrCreateClock(editClockName) + if err != nil { + return err + } + bug.editTime, err = editClock.Increment() + if err != nil { + return err + } + + tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Blob, + Hash: emptyBlobHash, + Name: fmt.Sprintf(editClockEntryPattern, bug.editTime), + }) + if bug.lastCommit == "" { + createClock, err := repo.GetOrCreateClock(creationClockName) + if err != nil { + return err + } + bug.createTime, err = createClock.Increment() + if err != nil { + return err + } + + tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Blob, + Hash: emptyBlobHash, + Name: fmt.Sprintf(createClockEntryPattern, bug.createTime), + }) + } + + // Store the tree + hash, err = repo.StoreTree(tree) + if err != nil { + return err + } + + // Write a Git commit referencing the tree, with the previous commit as 
parent + if bug.lastCommit != "" { + hash, err = repo.StoreCommitWithParent(hash, bug.lastCommit) + } else { + hash, err = repo.StoreCommit(hash) + } + + if err != nil { + return err + } + + bug.lastCommit = hash + + // if it was the first commit, use the commit hash as bug id + if bug.id == "" { + bug.id = entity.Id(hash) + } + + // Create or update the Git reference for this bug + // When pushing later, the remote will ensure that this ref update + // is fast-forward, that is no data has been overwritten + ref := fmt.Sprintf("%s%s", bugsRefPattern, bug.id) + err = repo.UpdateRef(ref, hash) + + if err != nil { + return err + } + + bug.staging.commitHash = hash + bug.Packs = append(bug.Packs, bug.staging) + bug.staging = OperationPack{} + + return nil +} + +func (bug *Bug) CommitAsNeeded(repo repository.ClockedRepo) error { + if !bug.NeedCommit() { + return nil + } + return bug.Commit(repo) +} + +func (bug *Bug) NeedCommit() bool { + return !bug.staging.IsEmpty() +} + +func makeMediaTree(pack OperationPack) []repository.TreeEntry { + var tree []repository.TreeEntry + counter := 0 + added := make(map[repository.Hash]interface{}) + + for _, ops := range pack.Operations { + for _, file := range ops.GetFiles() { + if _, has := added[file]; !has { + tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Blob, + Hash: file, + // The name is not important here, we only need to + // reference the blob. + Name: fmt.Sprintf("file%d", counter), + }) + counter++ + added[file] = struct{}{} + } + } + } + + return tree +} + +// Merge a different version of the same bug by rebasing operations of this bug +// that are not present in the other on top of the chain of operations of the +// other version. +func (bug *Bug) Merge(repo repository.Repo, other Interface) (bool, error) { + var otherBug = bugFromInterface(other) + + // Note: a faster merge should be possible without actually reading and parsing + // all operations pack of our side. 
+ // Reading the other side is still necessary to validate remote data, at least + // for new operations + + if bug.id != otherBug.id { + return false, errors.New("merging unrelated bugs is not supported") + } + + if len(otherBug.staging.Operations) > 0 { + return false, errors.New("merging a bug with a non-empty staging is not supported") + } + + if bug.lastCommit == "" || otherBug.lastCommit == "" { + return false, errors.New("can't merge a bug that has never been stored") + } + + ancestor, err := repo.FindCommonAncestor(bug.lastCommit, otherBug.lastCommit) + if err != nil { + return false, errors.Wrap(err, "can't find common ancestor") + } + + ancestorIndex := 0 + newPacks := make([]OperationPack, 0, len(bug.Packs)) + + // Find the root of the rebase + for i, pack := range bug.Packs { + newPacks = append(newPacks, pack) + + if pack.commitHash == ancestor { + ancestorIndex = i + break + } + } + + if len(otherBug.Packs) == ancestorIndex+1 { + // Nothing to rebase, return early + return false, nil + } + + // get other bug's extra Packs + for i := ancestorIndex + 1; i < len(otherBug.Packs); i++ { + // clone is probably not necessary + newPack := otherBug.Packs[i].Clone() + + newPacks = append(newPacks, newPack) + bug.lastCommit = newPack.commitHash + } + + // rebase our extra Packs + for i := ancestorIndex + 1; i < len(bug.Packs); i++ { + pack := bug.Packs[i] + + // get the referenced git tree + treeHash, err := repo.GetTreeHash(pack.commitHash) + + if err != nil { + return false, err + } + + // create a new commit with the correct ancestor + hash, err := repo.StoreCommitWithParent(treeHash, bug.lastCommit) + + if err != nil { + return false, err + } + + // replace the pack + newPack := pack.Clone() + newPack.commitHash = hash + newPacks = append(newPacks, newPack) + + // update the bug + bug.lastCommit = hash + } + + bug.Packs = newPacks + + // Update the git ref + err = repo.UpdateRef(bugsRefPattern+bug.id.String(), bug.lastCommit) + if err != nil { + return 
false, err + } + + return true, nil +} + +// Id return the Bug identifier +func (bug *Bug) Id() entity.Id { + if bug.id == "" { + // simply panic as it would be a coding error + // (using an id of a bug not stored yet) + panic("no id yet") + } + return bug.id +} + +// CreateLamportTime return the Lamport time of creation +func (bug *Bug) CreateLamportTime() lamport.Time { + return bug.createTime +} + +// EditLamportTime return the Lamport time of the last edit +func (bug *Bug) EditLamportTime() lamport.Time { + return bug.editTime +} + +// Lookup for the very first operation of the bug. +// For a valid Bug, this operation should be a CreateOp +func (bug *Bug) FirstOp() Operation { + for _, pack := range bug.Packs { + for _, op := range pack.Operations { + return op + } + } + + if !bug.staging.IsEmpty() { + return bug.staging.Operations[0] + } + + return nil +} + +// Lookup for the very last operation of the bug. +// For a valid Bug, should never be nil +func (bug *Bug) LastOp() Operation { + if !bug.staging.IsEmpty() { + return bug.staging.Operations[len(bug.staging.Operations)-1] + } + + if len(bug.Packs) == 0 { + return nil + } + + lastPack := bug.Packs[len(bug.Packs)-1] + + if len(lastPack.Operations) == 0 { + return nil + } + + return lastPack.Operations[len(lastPack.Operations)-1] +} + +// Compile a bug in a easily usable snapshot +func (bug *Bug) Compile() Snapshot { + snap := Snapshot{ + id: bug.id, + Status: OpenStatus, + } + + it := NewOperationIterator(bug) + + for it.Next() { + op := it.Value() + op.Apply(&snap) + snap.Operations = append(snap.Operations, op) + } + + return snap +} diff --git a/migration3/before/bug/bug_actions.go b/migration3/before/bug/bug_actions.go new file mode 100644 index 0000000..b5e68ea --- /dev/null +++ b/migration3/before/bug/bug_actions.go @@ -0,0 +1,143 @@ +package bug + +import ( + "fmt" + "strings" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + 
"github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/pkg/errors" +) + +// Fetch retrieve updates from a remote +// This does not change the local bugs state +func Fetch(repo repository.Repo, remote string) (string, error) { + // "refs/bugs/*:refs/remotes/>/bugs/*" + remoteRefSpec := fmt.Sprintf(bugsRemoteRefPattern, remote) + fetchRefSpec := fmt.Sprintf("%s*:%s*", bugsRefPattern, remoteRefSpec) + + return repo.FetchRefs(remote, fetchRefSpec) +} + +// Push update a remote with the local changes +func Push(repo repository.Repo, remote string) (string, error) { + // "refs/bugs/*:refs/bugs/*" + refspec := fmt.Sprintf("%s*:%s*", bugsRefPattern, bugsRefPattern) + + return repo.PushRefs(remote, refspec) +} + +// Pull will do a Fetch + MergeAll +// This function will return an error if a merge fail +func Pull(repo repository.ClockedRepo, remote string) error { + _, err := Fetch(repo, remote) + if err != nil { + return err + } + + for merge := range MergeAll(repo, remote) { + if merge.Err != nil { + return merge.Err + } + if merge.Status == entity.MergeStatusInvalid { + return errors.Errorf("merge failure: %s", merge.Reason) + } + } + + return nil +} + +// MergeAll will merge all the available remote bug: +// +// - If the remote has new commit, the local bug is updated to match the same history +// (fast-forward update) +// - if the local bug has new commits but the remote don't, nothing is changed +// - if both local and remote bug have new commits (that is, we have a concurrent edition), +// new local commits are rewritten at the head of the remote history (that is, a rebase) +func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeResult { + out := make(chan entity.MergeResult) + + // no caching for the merge, we load everything from git even if that means multiple + // copy of the same entity in memory. 
The cache layer will intercept the results to + // invalidate entities if necessary. + identityResolver := identity.NewSimpleResolver(repo) + + go func() { + defer close(out) + + remoteRefSpec := fmt.Sprintf(bugsRemoteRefPattern, remote) + remoteRefs, err := repo.ListRefs(remoteRefSpec) + + if err != nil { + out <- entity.MergeResult{Err: err} + return + } + + for _, remoteRef := range remoteRefs { + refSplit := strings.Split(remoteRef, "/") + id := entity.Id(refSplit[len(refSplit)-1]) + + if err := id.Validate(); err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "invalid ref").Error()) + continue + } + + remoteBug, err := read(repo, identityResolver, remoteRef) + + if err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote bug is not readable").Error()) + continue + } + + // Check for error in remote data + if err := remoteBug.Validate(); err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote bug is invalid").Error()) + continue + } + + localRef := bugsRefPattern + remoteBug.Id().String() + localExist, err := repo.RefExist(localRef) + + if err != nil { + out <- entity.NewMergeError(err, id) + continue + } + + // the bug is not local yet, simply create the reference + if !localExist { + err := repo.CopyRef(remoteRef, localRef) + + if err != nil { + out <- entity.NewMergeError(err, id) + return + } + + out <- entity.NewMergeStatus(entity.MergeStatusNew, id, remoteBug) + continue + } + + localBug, err := read(repo, identityResolver, localRef) + + if err != nil { + out <- entity.NewMergeError(errors.Wrap(err, "local bug is not readable"), id) + return + } + + updated, err := localBug.Merge(repo, remoteBug) + + if err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "merge failed").Error()) + return + } + + if updated { + out <- entity.NewMergeStatus(entity.MergeStatusUpdated, id, localBug) + } else { + out <- entity.NewMergeStatus(entity.MergeStatusNothing, id, localBug) + } + } 
+ }() + + return out +} diff --git a/migration3/before/bug/bug_actions_test.go b/migration3/before/bug/bug_actions_test.go new file mode 100644 index 0000000..a6170d5 --- /dev/null +++ b/migration3/before/bug/bug_actions_test.go @@ -0,0 +1,390 @@ +package bug + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +func TestPushPull(t *testing.T) { + repoA, repoB, remote := repository.SetupReposAndRemote() + defer repository.CleanupTestRepos(repoA, repoB, remote) + + reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := reneA.Commit(repoA) + require.NoError(t, err) + + bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") + require.NoError(t, err) + assert.True(t, bug1.NeedCommit()) + err = bug1.Commit(repoA) + require.NoError(t, err) + assert.False(t, bug1.NeedCommit()) + + // distribute the identity + _, err = identity.Push(repoA, "origin") + require.NoError(t, err) + err = identity.Pull(repoB, "origin") + require.NoError(t, err) + + // A --> remote --> B + _, err = Push(repoA, "origin") + require.NoError(t, err) + + err = Pull(repoB, "origin") + require.NoError(t, err) + + bugs := allBugs(t, ReadAllLocal(repoB)) + + if len(bugs) != 1 { + t.Fatal("Unexpected number of bugs") + } + + // B --> remote --> A + reneB, err := identity.ReadLocal(repoA, reneA.Id()) + require.NoError(t, err) + + bug2, _, err := Create(reneB, time.Now().Unix(), "bug2", "message") + require.NoError(t, err) + err = bug2.Commit(repoB) + require.NoError(t, err) + + _, err = Push(repoB, "origin") + require.NoError(t, err) + + err = Pull(repoA, "origin") + require.NoError(t, err) + + bugs = allBugs(t, ReadAllLocal(repoA)) + + if len(bugs) != 2 { + t.Fatal("Unexpected number of bugs") + } +} + +func allBugs(t testing.TB, bugs <-chan StreamedBug) []*Bug 
{ + var result []*Bug + for streamed := range bugs { + if streamed.Err != nil { + t.Fatal(streamed.Err) + } + result = append(result, streamed.Bug) + } + return result +} + +func TestRebaseTheirs(t *testing.T) { + _RebaseTheirs(t) +} + +func BenchmarkRebaseTheirs(b *testing.B) { + for n := 0; n < b.N; n++ { + _RebaseTheirs(b) + } +} + +func _RebaseTheirs(t testing.TB) { + repoA, repoB, remote := repository.SetupReposAndRemote() + defer repository.CleanupTestRepos(repoA, repoB, remote) + + reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := reneA.Commit(repoA) + require.NoError(t, err) + + bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") + require.NoError(t, err) + assert.True(t, bug1.NeedCommit()) + err = bug1.Commit(repoA) + require.NoError(t, err) + assert.False(t, bug1.NeedCommit()) + + // distribute the identity + _, err = identity.Push(repoA, "origin") + require.NoError(t, err) + err = identity.Pull(repoB, "origin") + require.NoError(t, err) + + // A --> remote + + _, err = Push(repoA, "origin") + require.NoError(t, err) + + // remote --> B + err = Pull(repoB, "origin") + require.NoError(t, err) + + bug2, err := ReadLocal(repoB, bug1.Id()) + require.NoError(t, err) + assert.False(t, bug2.NeedCommit()) + + reneB, err := identity.ReadLocal(repoA, reneA.Id()) + require.NoError(t, err) + + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message2") + require.NoError(t, err) + assert.True(t, bug2.NeedCommit()) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message3") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message4") + require.NoError(t, err) + err = bug2.Commit(repoB) + require.NoError(t, err) + assert.False(t, bug2.NeedCommit()) + + // B --> remote + _, err = Push(repoB, "origin") + require.NoError(t, err) + + // remote --> A + err = Pull(repoA, "origin") + require.NoError(t, err) + + bugs := allBugs(t, ReadAllLocal(repoB)) + + if len(bugs) != 1 { + 
t.Fatal("Unexpected number of bugs") + } + + bug3, err := ReadLocal(repoA, bug1.Id()) + require.NoError(t, err) + + if nbOps(bug3) != 4 { + t.Fatal("Unexpected number of operations") + } +} + +func TestRebaseOurs(t *testing.T) { + _RebaseOurs(t) +} + +func BenchmarkRebaseOurs(b *testing.B) { + for n := 0; n < b.N; n++ { + _RebaseOurs(b) + } +} + +func _RebaseOurs(t testing.TB) { + repoA, repoB, remote := repository.SetupReposAndRemote() + defer repository.CleanupTestRepos(repoA, repoB, remote) + + reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := reneA.Commit(repoA) + require.NoError(t, err) + + bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + // distribute the identity + _, err = identity.Push(repoA, "origin") + require.NoError(t, err) + err = identity.Pull(repoB, "origin") + require.NoError(t, err) + + // A --> remote + _, err = Push(repoA, "origin") + require.NoError(t, err) + + // remote --> B + err = Pull(repoB, "origin") + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message2") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message3") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message4") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message5") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message6") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message7") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message8") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message9") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), 
"message10") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + // remote --> A + err = Pull(repoA, "origin") + require.NoError(t, err) + + bugs := allBugs(t, ReadAllLocal(repoA)) + + if len(bugs) != 1 { + t.Fatal("Unexpected number of bugs") + } + + bug2, err := ReadLocal(repoA, bug1.Id()) + require.NoError(t, err) + + if nbOps(bug2) != 10 { + t.Fatal("Unexpected number of operations") + } +} + +func nbOps(b *Bug) int { + it := NewOperationIterator(b) + counter := 0 + for it.Next() { + counter++ + } + return counter +} + +func TestRebaseConflict(t *testing.T) { + _RebaseConflict(t) +} + +func BenchmarkRebaseConflict(b *testing.B) { + for n := 0; n < b.N; n++ { + _RebaseConflict(b) + } +} + +func _RebaseConflict(t testing.TB) { + repoA, repoB, remote := repository.SetupReposAndRemote() + defer repository.CleanupTestRepos(repoA, repoB, remote) + + reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := reneA.Commit(repoA) + require.NoError(t, err) + + bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + // distribute the identity + _, err = identity.Push(repoA, "origin") + require.NoError(t, err) + err = identity.Pull(repoB, "origin") + require.NoError(t, err) + + // A --> remote + _, err = Push(repoA, "origin") + require.NoError(t, err) + + // remote --> B + err = Pull(repoB, "origin") + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message2") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message3") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message4") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message5") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message6") + require.NoError(t, 
err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message7") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message8") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message9") + require.NoError(t, err) + _, err = AddComment(bug1, reneA, time.Now().Unix(), "message10") + require.NoError(t, err) + err = bug1.Commit(repoA) + require.NoError(t, err) + + bug2, err := ReadLocal(repoB, bug1.Id()) + require.NoError(t, err) + + reneB, err := identity.ReadLocal(repoA, reneA.Id()) + require.NoError(t, err) + + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message11") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message12") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message13") + require.NoError(t, err) + err = bug2.Commit(repoB) + require.NoError(t, err) + + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message14") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message15") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message16") + require.NoError(t, err) + err = bug2.Commit(repoB) + require.NoError(t, err) + + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message17") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message18") + require.NoError(t, err) + _, err = AddComment(bug2, reneB, time.Now().Unix(), "message19") + require.NoError(t, err) + err = bug2.Commit(repoB) + require.NoError(t, err) + + // A --> remote + _, err = Push(repoA, "origin") + require.NoError(t, err) + + // remote --> B + err = Pull(repoB, "origin") + require.NoError(t, err) + + bugs := allBugs(t, ReadAllLocal(repoB)) + + if len(bugs) != 1 { + t.Fatal("Unexpected number of bugs") + } + + bug3, err := ReadLocal(repoB, bug1.Id()) + require.NoError(t, err) + + if nbOps(bug3) != 19 
{ + t.Fatal("Unexpected number of operations") + } + + // B --> remote + _, err = Push(repoB, "origin") + require.NoError(t, err) + + // remote --> A + err = Pull(repoA, "origin") + require.NoError(t, err) + + bugs = allBugs(t, ReadAllLocal(repoA)) + + if len(bugs) != 1 { + t.Fatal("Unexpected number of bugs") + } + + bug4, err := ReadLocal(repoA, bug1.Id()) + require.NoError(t, err) + + if nbOps(bug4) != 19 { + t.Fatal("Unexpected number of operations") + } +} diff --git a/migration3/before/bug/bug_test.go b/migration3/before/bug/bug_test.go new file mode 100644 index 0000000..dcc6bb8 --- /dev/null +++ b/migration3/before/bug/bug_test.go @@ -0,0 +1,186 @@ +package bug + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +func TestBugId(t *testing.T) { + mockRepo := repository.NewMockRepoForTest() + + bug1 := NewBug() + + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(mockRepo) + require.NoError(t, err) + + createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + + bug1.Append(createOp) + + err = bug1.Commit(mockRepo) + + if err != nil { + t.Fatal(err) + } + + bug1.Id() +} + +func TestBugValidity(t *testing.T) { + mockRepo := repository.NewMockRepoForTest() + + bug1 := NewBug() + + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(mockRepo) + require.NoError(t, err) + + createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + + if bug1.Validate() == nil { + t.Fatal("Empty bug should be invalid") + } + + bug1.Append(createOp) + + if bug1.Validate() != nil { + t.Fatal("Bug with just a CreateOp should be valid") + } + + err = bug1.Commit(mockRepo) + if err != nil { + t.Fatal(err) + } + + bug1.Append(createOp) + + if bug1.Validate() == nil { + t.Fatal("Bug with multiple 
CreateOp should be invalid") + } + + err = bug1.Commit(mockRepo) + if err == nil { + t.Fatal("Invalid bug should not commit") + } +} + +func TestBugCommitLoad(t *testing.T) { + repo := repository.NewMockRepoForTest() + + bug1 := NewBug() + + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1") + addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil) + + bug1.Append(createOp) + bug1.Append(setTitleOp) + + require.True(t, bug1.NeedCommit()) + + err = bug1.Commit(repo) + require.Nil(t, err) + require.False(t, bug1.NeedCommit()) + + bug2, err := ReadLocal(repo, bug1.Id()) + require.NoError(t, err) + equivalentBug(t, bug1, bug2) + + // add more op + + bug1.Append(addCommentOp) + + require.True(t, bug1.NeedCommit()) + + err = bug1.Commit(repo) + require.Nil(t, err) + require.False(t, bug1.NeedCommit()) + + bug3, err := ReadLocal(repo, bug1.Id()) + require.NoError(t, err) + equivalentBug(t, bug1, bug3) +} + +func equivalentBug(t *testing.T, expected, actual *Bug) { + require.Equal(t, len(expected.Packs), len(actual.Packs)) + + for i := range expected.Packs { + for j := range expected.Packs[i].Operations { + actual.Packs[i].Operations[j].base().id = expected.Packs[i].Operations[j].base().id + } + } + + require.Equal(t, expected, actual) +} + +func TestBugRemove(t *testing.T) { + repo := repository.CreateGoGitTestRepo(false) + remoteA := repository.CreateGoGitTestRepo(true) + remoteB := repository.CreateGoGitTestRepo(true) + defer repository.CleanupTestRepos(repo, remoteA, remoteB) + + err := repo.AddRemote("remoteA", "file://"+remoteA.GetPath()) + require.NoError(t, err) + + err = repo.AddRemote("remoteB", "file://"+remoteB.GetPath()) + require.NoError(t, err) + + // generate a bunch of bugs + rene := identity.NewIdentity("René 
Descartes", "rene@descartes.fr") + err = rene.Commit(repo) + require.NoError(t, err) + + for i := 0; i < 100; i++ { + b := NewBug() + createOp := NewCreateOp(rene, time.Now().Unix(), "title", fmt.Sprintf("message%v", i), nil) + b.Append(createOp) + err = b.Commit(repo) + require.NoError(t, err) + } + + // and one more for testing + b := NewBug() + createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + b.Append(createOp) + err = b.Commit(repo) + require.NoError(t, err) + + _, err = Push(repo, "remoteA") + require.NoError(t, err) + + _, err = Push(repo, "remoteB") + require.NoError(t, err) + + _, err = Fetch(repo, "remoteA") + require.NoError(t, err) + + _, err = Fetch(repo, "remoteB") + require.NoError(t, err) + + err = RemoveBug(repo, b.Id()) + require.NoError(t, err) + + _, err = ReadLocal(repo, b.Id()) + require.Error(t, ErrBugNotExist, err) + + _, err = ReadRemote(repo, "remoteA", b.Id()) + require.Error(t, ErrBugNotExist, err) + + _, err = ReadRemote(repo, "remoteB", b.Id()) + require.Error(t, ErrBugNotExist, err) + + ids, err := ListLocalIds(repo) + require.NoError(t, err) + require.Len(t, ids, 100) +} diff --git a/migration3/before/bug/clocks.go b/migration3/before/bug/clocks.go new file mode 100644 index 0000000..9fe33a0 --- /dev/null +++ b/migration3/before/bug/clocks.go @@ -0,0 +1,40 @@ +package bug + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +// ClockLoader is the repository.ClockLoader for the Bug entity +var ClockLoader = repository.ClockLoader{ + Clocks: []string{creationClockName, editClockName}, + Witnesser: func(repo repository.ClockedRepo) error { + // We don't care about the actual identity so an IdentityStub will do + resolver := identity.NewStubResolver() + for b := range ReadAllLocalWithResolver(repo, resolver) { + if b.Err != nil { + return b.Err + } + + createClock, err := 
repo.GetOrCreateClock(creationClockName) + if err != nil { + return err + } + err = createClock.Witness(b.Bug.createTime) + if err != nil { + return err + } + + editClock, err := repo.GetOrCreateClock(editClockName) + if err != nil { + return err + } + err = editClock.Witness(b.Bug.editTime) + if err != nil { + return err + } + } + + return nil + }, +} diff --git a/migration3/before/bug/comment.go b/migration3/before/bug/comment.go new file mode 100644 index 0000000..57ca853 --- /dev/null +++ b/migration3/before/bug/comment.go @@ -0,0 +1,44 @@ +package bug + +import ( + "github.com/dustin/go-humanize" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" +) + +// Comment represent a comment in a Bug +type Comment struct { + id entity.Id + Author identity.Interface + Message string + Files []repository.Hash + + // Creation time of the comment. + // Should be used only for human display, never for ordering as we can't rely on it in a distributed system. 
+ UnixTime timestamp.Timestamp +} + +// Id return the Comment identifier +func (c Comment) Id() entity.Id { + if c.id == "" { + // simply panic as it would be a coding error + // (using an id of an identity not stored yet) + panic("no id yet") + } + return c.id +} + +// FormatTimeRel format the UnixTime of the comment for human consumption +func (c Comment) FormatTimeRel() string { + return humanize.Time(c.UnixTime.Time()) +} + +func (c Comment) FormatTime() string { + return c.UnixTime.Time().Format("Mon Jan 2 15:04:05 2006 +0200") +} + +// Sign post method for gqlgen +func (c Comment) IsAuthored() {} diff --git a/migration3/before/bug/identity.go b/migration3/before/bug/identity.go new file mode 100644 index 0000000..ac839c9 --- /dev/null +++ b/migration3/before/bug/identity.go @@ -0,0 +1,27 @@ +package bug + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" +) + +// EnsureIdentities walk the graph of operations and make sure that all Identity +// are properly loaded. That is, it replace all the IdentityStub with the full +// Identity, loaded through a Resolver. 
+func (bug *Bug) EnsureIdentities(resolver identity.Resolver) error { + it := NewOperationIterator(bug) + + for it.Next() { + op := it.Value() + base := op.base() + + if stub, ok := base.Author.(*identity.IdentityStub); ok { + i, err := resolver.ResolveIdentity(stub.Id()) + if err != nil { + return err + } + + base.Author = i + } + } + return nil +} diff --git a/migration3/before/bug/interface.go b/migration3/before/bug/interface.go new file mode 100644 index 0000000..6032598 --- /dev/null +++ b/migration3/before/bug/interface.go @@ -0,0 +1,57 @@ +package bug + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" +) + +type Interface interface { + // Id return the Bug identifier + Id() entity.Id + + // Validate check if the Bug data is valid + Validate() error + + // Append an operation into the staging area, to be committed later + Append(op Operation) + + // Indicate that the in-memory state changed and need to be commit in the repository + NeedCommit() bool + + // Commit write the staging area in Git and move the operations to the Packs + Commit(repo repository.ClockedRepo) error + + // Merge a different version of the same bug by rebasing operations of this bug + // that are not present in the other on top of the chain of operations of the + // other version. + Merge(repo repository.Repo, other Interface) (bool, error) + + // Lookup for the very first operation of the bug. + // For a valid Bug, this operation should be a CreateOp + FirstOp() Operation + + // Lookup for the very last operation of the bug. 
+ // For a valid Bug, should never be nil + LastOp() Operation + + // Compile a bug in a easily usable snapshot + Compile() Snapshot + + // CreateLamportTime return the Lamport time of creation + CreateLamportTime() lamport.Time + + // EditLamportTime return the Lamport time of the last edit + EditLamportTime() lamport.Time +} + +func bugFromInterface(bug Interface) *Bug { + switch bug := bug.(type) { + case *Bug: + return bug + case *WithSnapshot: + return bug.Bug + default: + panic("missing type case") + } +} diff --git a/migration3/before/bug/label.go b/migration3/before/bug/label.go new file mode 100644 index 0000000..a621575 --- /dev/null +++ b/migration3/before/bug/label.go @@ -0,0 +1,100 @@ +package bug + +import ( + "crypto/sha256" + "fmt" + "image/color" + "strings" + + fcolor "github.com/fatih/color" + + "github.com/MichaelMure/git-bug-migration/migration3/before/util/text" +) + +type Label string + +func (l Label) String() string { + return string(l) +} + +// RGBA from a Label computed in a deterministic way +func (l Label) Color() LabelColor { + // colors from: https://material-ui.com/style/color/ + colors := []LabelColor{ + {R: 244, G: 67, B: 54, A: 255}, // red + {R: 233, G: 30, B: 99, A: 255}, // pink + {R: 156, G: 39, B: 176, A: 255}, // purple + {R: 103, G: 58, B: 183, A: 255}, // deepPurple + {R: 63, G: 81, B: 181, A: 255}, // indigo + {R: 33, G: 150, B: 243, A: 255}, // blue + {R: 3, G: 169, B: 244, A: 255}, // lightBlue + {R: 0, G: 188, B: 212, A: 255}, // cyan + {R: 0, G: 150, B: 136, A: 255}, // teal + {R: 76, G: 175, B: 80, A: 255}, // green + {R: 139, G: 195, B: 74, A: 255}, // lightGreen + {R: 205, G: 220, B: 57, A: 255}, // lime + {R: 255, G: 235, B: 59, A: 255}, // yellow + {R: 255, G: 193, B: 7, A: 255}, // amber + {R: 255, G: 152, B: 0, A: 255}, // orange + {R: 255, G: 87, B: 34, A: 255}, // deepOrange + {R: 121, G: 85, B: 72, A: 255}, // brown + {R: 158, G: 158, B: 158, A: 255}, // grey + {R: 96, G: 125, B: 139, A: 255}, // blueGrey + 
} + + id := 0 + hash := sha256.Sum256([]byte(l)) + for _, char := range hash { + id = (id + int(char)) % len(colors) + } + + return colors[id] +} + +func (l Label) Validate() error { + str := string(l) + + if text.Empty(str) { + return fmt.Errorf("empty") + } + + if strings.Contains(str, "\n") { + return fmt.Errorf("should be a single line") + } + + if !text.Safe(str) { + return fmt.Errorf("not fully printable") + } + + return nil +} + +type LabelColor color.RGBA + +func (lc LabelColor) RGBA() color.RGBA { + return color.RGBA(lc) +} + +func (lc LabelColor) Term256() Term256 { + red := Term256(lc.R) * 6 / 256 + green := Term256(lc.G) * 6 / 256 + blue := Term256(lc.B) * 6 / 256 + + return red*36 + green*6 + blue + 16 +} + +type Term256 int + +func (t Term256) Escape() string { + if fcolor.NoColor { + return "" + } + return fmt.Sprintf("\x1b[38;5;%dm", t) +} + +func (t Term256) Unescape() string { + if fcolor.NoColor { + return "" + } + return "\x1b[0m" +} diff --git a/migration3/before/bug/label_test.go b/migration3/before/bug/label_test.go new file mode 100644 index 0000000..49401c4 --- /dev/null +++ b/migration3/before/bug/label_test.go @@ -0,0 +1,35 @@ +package bug + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestLabelRGBA(t *testing.T) { + rgba := Label("test1").Color() + expected := LabelColor{R: 0, G: 150, B: 136, A: 255} + + require.Equal(t, expected, rgba) +} + +func TestLabelRGBASimilar(t *testing.T) { + rgba := Label("test2").Color() + expected := LabelColor{R: 3, G: 169, B: 244, A: 255} + + require.Equal(t, expected, rgba) +} + +func TestLabelRGBAReverse(t *testing.T) { + rgba := Label("tset").Color() + expected := LabelColor{R: 63, G: 81, B: 181, A: 255} + + require.Equal(t, expected, rgba) +} + +func TestLabelRGBAEqual(t *testing.T) { + color1 := Label("test").Color() + color2 := Label("test").Color() + + require.Equal(t, color1, color2) +} diff --git a/migration3/before/bug/op_add_comment.go 
b/migration3/before/bug/op_add_comment.go new file mode 100644 index 0000000..1a8f16f --- /dev/null +++ b/migration3/before/bug/op_add_comment.go @@ -0,0 +1,132 @@ +package bug + +import ( + "encoding/json" + "fmt" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/text" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" +) + +var _ Operation = &AddCommentOperation{} + +// AddCommentOperation will add a new comment in the bug +type AddCommentOperation struct { + OpBase + Message string `json:"message"` + // TODO: change for a map[string]util.hash to store the filename ? + Files []repository.Hash `json:"files"` +} + +// Sign-post method for gqlgen +func (op *AddCommentOperation) IsOperation() {} + +func (op *AddCommentOperation) base() *OpBase { + return &op.OpBase +} + +func (op *AddCommentOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *AddCommentOperation) Apply(snapshot *Snapshot) { + snapshot.addActor(op.Author) + snapshot.addParticipant(op.Author) + + comment := Comment{ + id: op.Id(), + Message: op.Message, + Author: op.Author, + Files: op.Files, + UnixTime: timestamp.Timestamp(op.UnixTime), + } + + snapshot.Comments = append(snapshot.Comments, comment) + + item := &AddCommentTimelineItem{ + CommentTimelineItem: NewCommentTimelineItem(op.Id(), comment), + } + + snapshot.Timeline = append(snapshot.Timeline, item) +} + +func (op *AddCommentOperation) GetFiles() []repository.Hash { + return op.Files +} + +func (op *AddCommentOperation) Validate() error { + if err := opBaseValidate(op, AddCommentOp); err != nil { + return err + } + + if !text.Safe(op.Message) { + return fmt.Errorf("message is not fully printable") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshalling 
+// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *AddCommentOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Message string `json:"message"` + Files []repository.Hash `json:"files"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Message = aux.Message + op.Files = aux.Files + + return nil +} + +// Sign post method for gqlgen +func (op *AddCommentOperation) IsAuthored() {} + +func NewAddCommentOp(author identity.Interface, unixTime int64, message string, files []repository.Hash) *AddCommentOperation { + return &AddCommentOperation{ + OpBase: newOpBase(AddCommentOp, author, unixTime), + Message: message, + Files: files, + } +} + +// CreateTimelineItem replace a AddComment operation in the Timeline and hold its edition history +type AddCommentTimelineItem struct { + CommentTimelineItem +} + +// Sign post method for gqlgen +func (a *AddCommentTimelineItem) IsAuthored() {} + +// Convenience function to apply the operation +func AddComment(b Interface, author identity.Interface, unixTime int64, message string) (*AddCommentOperation, error) { + return AddCommentWithFiles(b, author, unixTime, message, nil) +} + +func AddCommentWithFiles(b Interface, author identity.Interface, unixTime int64, message string, files []repository.Hash) (*AddCommentOperation, error) { + addCommentOp := NewAddCommentOp(author, unixTime, message, files) + if err := addCommentOp.Validate(); err != nil { + return nil, err + } + b.Append(addCommentOp) + return addCommentOp, nil +} diff --git a/migration3/before/bug/op_add_comment_test.go b/migration3/before/bug/op_add_comment_test.go new file mode 100644 index 0000000..1743417 --- /dev/null +++ b/migration3/before/bug/op_add_comment_test.go @@ -0,0 +1,39 @@ +package 
bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +func TestAddCommentSerialize(t *testing.T) { + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewAddCommentOp(rene, unix, "message", nil) + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after AddCommentOperation + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + assert.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + assert.Equal(t, before, &after) +} diff --git a/migration3/before/bug/op_create.go b/migration3/before/bug/op_create.go new file mode 100644 index 0000000..c638a2c --- /dev/null +++ b/migration3/before/bug/op_create.go @@ -0,0 +1,155 @@ +package bug + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/text" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" +) + +var _ Operation = &CreateOperation{} + +// CreateOperation define the initial creation of a bug +type CreateOperation struct { + OpBase + Title string `json:"title"` + Message string `json:"message"` + Files []repository.Hash `json:"files"` +} + +// Sign-post method for gqlgen +func (op *CreateOperation) IsOperation() {} + +func (op *CreateOperation) base() *OpBase { + return &op.OpBase +} + +func 
(op *CreateOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *CreateOperation) Apply(snapshot *Snapshot) { + snapshot.addActor(op.Author) + snapshot.addParticipant(op.Author) + + snapshot.Title = op.Title + + comment := Comment{ + id: op.Id(), + Message: op.Message, + Author: op.Author, + UnixTime: timestamp.Timestamp(op.UnixTime), + } + + snapshot.Comments = []Comment{comment} + snapshot.Author = op.Author + snapshot.CreateTime = op.Time() + + snapshot.Timeline = []TimelineItem{ + &CreateTimelineItem{ + CommentTimelineItem: NewCommentTimelineItem(op.Id(), comment), + }, + } +} + +func (op *CreateOperation) GetFiles() []repository.Hash { + return op.Files +} + +func (op *CreateOperation) Validate() error { + if err := opBaseValidate(op, CreateOp); err != nil { + return err + } + + if text.Empty(op.Title) { + return fmt.Errorf("title is empty") + } + + if strings.Contains(op.Title, "\n") { + return fmt.Errorf("title should be a single line") + } + + if !text.Safe(op.Title) { + return fmt.Errorf("title is not fully printable") + } + + if !text.Safe(op.Message) { + return fmt.Errorf("message is not fully printable") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *CreateOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Title string `json:"title"` + Message string `json:"message"` + Files []repository.Hash `json:"files"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Title = aux.Title + op.Message = aux.Message + op.Files = aux.Files + + return nil +} + +// Sign post method for gqlgen +func (op *CreateOperation) IsAuthored() {} + +func NewCreateOp(author identity.Interface, unixTime int64, title, 
message string, files []repository.Hash) *CreateOperation { + return &CreateOperation{ + OpBase: newOpBase(CreateOp, author, unixTime), + Title: title, + Message: message, + Files: files, + } +} + +// CreateTimelineItem replace a Create operation in the Timeline and hold its edition history +type CreateTimelineItem struct { + CommentTimelineItem +} + +// Sign post method for gqlgen +func (c *CreateTimelineItem) IsAuthored() {} + +// Convenience function to apply the operation +func Create(author identity.Interface, unixTime int64, title, message string) (*Bug, *CreateOperation, error) { + return CreateWithFiles(author, unixTime, title, message, nil) +} + +func CreateWithFiles(author identity.Interface, unixTime int64, title, message string, files []repository.Hash) (*Bug, *CreateOperation, error) { + newBug := NewBug() + createOp := NewCreateOp(author, unixTime, title, message, files) + + if err := createOp.Validate(); err != nil { + return nil, createOp, err + } + + newBug.Append(createOp) + + return newBug, createOp, nil +} diff --git a/migration3/before/bug/op_create_test.go b/migration3/before/bug/op_create_test.go new file mode 100644 index 0000000..7ba78d7 --- /dev/null +++ b/migration3/before/bug/op_create_test.go @@ -0,0 +1,78 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCreate(t *testing.T) { + snapshot := Snapshot{} + + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + unix := time.Now().Unix() + + create := NewCreateOp(rene, unix, "title", "message", nil) + + create.Apply(&snapshot) + + id := create.Id() + assert.NoError(t, id.Validate()) + + comment := Comment{ + id: id, + Author: rene, + Message: 
"message", + UnixTime: timestamp.Timestamp(create.UnixTime), + } + + expected := Snapshot{ + Title: "title", + Comments: []Comment{ + comment, + }, + Author: rene, + Participants: []identity.Interface{rene}, + Actors: []identity.Interface{rene}, + CreateTime: create.Time(), + Timeline: []TimelineItem{ + &CreateTimelineItem{ + CommentTimelineItem: NewCommentTimelineItem(id, comment), + }, + }, + } + + assert.Equal(t, expected, snapshot) +} + +func TestCreateSerialize(t *testing.T) { + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewCreateOp(rene, unix, "title", "message", nil) + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after CreateOperation + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + assert.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + assert.Equal(t, before, &after) +} diff --git a/migration3/before/bug/op_edit_comment.go b/migration3/before/bug/op_edit_comment.go new file mode 100644 index 0000000..3021853 --- /dev/null +++ b/migration3/before/bug/op_edit_comment.go @@ -0,0 +1,170 @@ +package bug + +import ( + "encoding/json" + "fmt" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" + + "github.com/MichaelMure/git-bug-migration/migration3/before/util/text" +) + +var _ Operation = &EditCommentOperation{} + +// EditCommentOperation will change a comment in the bug +type EditCommentOperation struct { + OpBase + Target entity.Id `json:"target"` + Message string 
`json:"message"` + Files []repository.Hash `json:"files"` +} + +// Sign-post method for gqlgen +func (op *EditCommentOperation) IsOperation() {} + +func (op *EditCommentOperation) base() *OpBase { + return &op.OpBase +} + +func (op *EditCommentOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *EditCommentOperation) Apply(snapshot *Snapshot) { + // Todo: currently any message can be edited, even by a different author + // crypto signature are needed. + + snapshot.addActor(op.Author) + + var target TimelineItem + + for i, item := range snapshot.Timeline { + if item.Id() == op.Target { + target = snapshot.Timeline[i] + break + } + } + + if target == nil { + // Target not found, edit is a no-op + return + } + + comment := Comment{ + id: op.Target, + Message: op.Message, + Files: op.Files, + UnixTime: timestamp.Timestamp(op.UnixTime), + } + + switch target := target.(type) { + case *CreateTimelineItem: + target.Append(comment) + case *AddCommentTimelineItem: + target.Append(comment) + } + + // Updating the corresponding comment + + for i := range snapshot.Comments { + if snapshot.Comments[i].Id() == op.Target { + snapshot.Comments[i].Message = op.Message + snapshot.Comments[i].Files = op.Files + break + } + } +} + +func (op *EditCommentOperation) GetFiles() []repository.Hash { + return op.Files +} + +func (op *EditCommentOperation) Validate() error { + if err := opBaseValidate(op, EditCommentOp); err != nil { + return err + } + + if err := op.Target.Validate(); err != nil { + return errors.Wrap(err, "target hash is invalid") + } + + if !text.Safe(op.Message) { + return fmt.Errorf("message is not fully printable") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *EditCommentOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := 
json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Target entity.Id `json:"target"` + Message string `json:"message"` + Files []repository.Hash `json:"files"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Target = aux.Target + op.Message = aux.Message + op.Files = aux.Files + + return nil +} + +// Sign post method for gqlgen +func (op *EditCommentOperation) IsAuthored() {} + +func NewEditCommentOp(author identity.Interface, unixTime int64, target entity.Id, message string, files []repository.Hash) *EditCommentOperation { + return &EditCommentOperation{ + OpBase: newOpBase(EditCommentOp, author, unixTime), + Target: target, + Message: message, + Files: files, + } +} + +// Convenience function to apply the operation +func EditComment(b Interface, author identity.Interface, unixTime int64, target entity.Id, message string) (*EditCommentOperation, error) { + return EditCommentWithFiles(b, author, unixTime, target, message, nil) +} + +func EditCommentWithFiles(b Interface, author identity.Interface, unixTime int64, target entity.Id, message string, files []repository.Hash) (*EditCommentOperation, error) { + editCommentOp := NewEditCommentOp(author, unixTime, target, message, files) + if err := editCommentOp.Validate(); err != nil { + return nil, err + } + b.Append(editCommentOp) + return editCommentOp, nil +} + +// Convenience function to edit the body of a bug (the first comment) +func EditCreateComment(b Interface, author identity.Interface, unixTime int64, message string) (*EditCommentOperation, error) { + createOp := b.FirstOp().(*CreateOperation) + return EditComment(b, author, unixTime, createOp.Id(), message) +} + +// Convenience function to edit the body of a bug (the first comment) +func EditCreateCommentWithFiles(b Interface, author identity.Interface, unixTime int64, message string, files []repository.Hash) (*EditCommentOperation, error) { + createOp := 
b.FirstOp().(*CreateOperation) + return EditCommentWithFiles(b, author, unixTime, createOp.Id(), message, files) +} diff --git a/migration3/before/bug/op_edit_comment_test.go b/migration3/before/bug/op_edit_comment_test.go new file mode 100644 index 0000000..754590f --- /dev/null +++ b/migration3/before/bug/op_edit_comment_test.go @@ -0,0 +1,105 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +func TestEdit(t *testing.T) { + snapshot := Snapshot{} + + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + + create := NewCreateOp(rene, unix, "title", "create", nil) + create.Apply(&snapshot) + + id1 := create.Id() + require.NoError(t, id1.Validate()) + + comment1 := NewAddCommentOp(rene, unix, "comment 1", nil) + comment1.Apply(&snapshot) + + id2 := comment1.Id() + require.NoError(t, id2.Validate()) + + // add another unrelated op in between + setTitle := NewSetTitleOp(rene, unix, "edited title", "title") + setTitle.Apply(&snapshot) + + comment2 := NewAddCommentOp(rene, unix, "comment 2", nil) + comment2.Apply(&snapshot) + + id3 := comment2.Id() + require.NoError(t, id3.Validate()) + + edit := NewEditCommentOp(rene, unix, id1, "create edited", nil) + edit.Apply(&snapshot) + + assert.Equal(t, len(snapshot.Timeline), 4) + assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) + assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 1) + assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1) + assert.Equal(t, snapshot.Comments[0].Message, "create edited") + assert.Equal(t, snapshot.Comments[1].Message, "comment 1") + 
assert.Equal(t, snapshot.Comments[2].Message, "comment 2") + + edit2 := NewEditCommentOp(rene, unix, id2, "comment 1 edited", nil) + edit2.Apply(&snapshot) + + assert.Equal(t, len(snapshot.Timeline), 4) + assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) + assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2) + assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1) + assert.Equal(t, snapshot.Comments[0].Message, "create edited") + assert.Equal(t, snapshot.Comments[1].Message, "comment 1 edited") + assert.Equal(t, snapshot.Comments[2].Message, "comment 2") + + edit3 := NewEditCommentOp(rene, unix, id3, "comment 2 edited", nil) + edit3.Apply(&snapshot) + + assert.Equal(t, len(snapshot.Timeline), 4) + assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) + assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2) + assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 2) + assert.Equal(t, snapshot.Comments[0].Message, "create edited") + assert.Equal(t, snapshot.Comments[1].Message, "comment 1 edited") + assert.Equal(t, snapshot.Comments[2].Message, "comment 2 edited") +} + +func TestEditCommentSerialize(t *testing.T) { + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewEditCommentOp(rene, unix, "target", "message", nil) + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after EditCommentOperation + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + assert.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + assert.Equal(t, before, &after) +} diff --git a/migration3/before/bug/op_label_change.go 
b/migration3/before/bug/op_label_change.go new file mode 100644 index 0000000..4093836 --- /dev/null +++ b/migration3/before/bug/op_label_change.go @@ -0,0 +1,285 @@ +package bug + +import ( + "encoding/json" + "fmt" + "sort" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" +) + +var _ Operation = &LabelChangeOperation{} + +// LabelChangeOperation define a Bug operation to add or remove labels +type LabelChangeOperation struct { + OpBase + Added []Label `json:"added"` + Removed []Label `json:"removed"` +} + +// Sign-post method for gqlgen +func (op *LabelChangeOperation) IsOperation() {} + +func (op *LabelChangeOperation) base() *OpBase { + return &op.OpBase +} + +func (op *LabelChangeOperation) Id() entity.Id { + return idOperation(op) +} + +// Apply apply the operation +func (op *LabelChangeOperation) Apply(snapshot *Snapshot) { + snapshot.addActor(op.Author) + + // Add in the set +AddLoop: + for _, added := range op.Added { + for _, label := range snapshot.Labels { + if label == added { + // Already exist + continue AddLoop + } + } + + snapshot.Labels = append(snapshot.Labels, added) + } + + // Remove in the set + for _, removed := range op.Removed { + for i, label := range snapshot.Labels { + if label == removed { + snapshot.Labels[i] = snapshot.Labels[len(snapshot.Labels)-1] + snapshot.Labels = snapshot.Labels[:len(snapshot.Labels)-1] + } + } + } + + // Sort + sort.Slice(snapshot.Labels, func(i, j int) bool { + return string(snapshot.Labels[i]) < string(snapshot.Labels[j]) + }) + + item := &LabelChangeTimelineItem{ + id: op.Id(), + Author: op.Author, + UnixTime: timestamp.Timestamp(op.UnixTime), + Added: op.Added, + Removed: op.Removed, + } + + snapshot.Timeline = append(snapshot.Timeline, item) +} + +func (op *LabelChangeOperation) Validate() error { + if err := 
opBaseValidate(op, LabelChangeOp); err != nil { + return err + } + + for _, l := range op.Added { + if err := l.Validate(); err != nil { + return errors.Wrap(err, "added label") + } + } + + for _, l := range op.Removed { + if err := l.Validate(); err != nil { + return errors.Wrap(err, "removed label") + } + } + + if len(op.Added)+len(op.Removed) <= 0 { + return fmt.Errorf("no label change") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *LabelChangeOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Added []Label `json:"added"` + Removed []Label `json:"removed"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Added = aux.Added + op.Removed = aux.Removed + + return nil +} + +// Sign post method for gqlgen +func (op *LabelChangeOperation) IsAuthored() {} + +func NewLabelChangeOperation(author identity.Interface, unixTime int64, added, removed []Label) *LabelChangeOperation { + return &LabelChangeOperation{ + OpBase: newOpBase(LabelChangeOp, author, unixTime), + Added: added, + Removed: removed, + } +} + +type LabelChangeTimelineItem struct { + id entity.Id + Author identity.Interface + UnixTime timestamp.Timestamp + Added []Label + Removed []Label +} + +func (l LabelChangeTimelineItem) Id() entity.Id { + return l.id +} + +// Sign post method for gqlgen +func (l *LabelChangeTimelineItem) IsAuthored() {} + +// ChangeLabels is a convenience function to apply the operation +func ChangeLabels(b Interface, author identity.Interface, unixTime int64, add, remove []string) ([]LabelChangeResult, *LabelChangeOperation, error) { + var added, removed []Label + var results []LabelChangeResult + + snap := b.Compile() + + for _, 
str := range add { + label := Label(str) + + // check for duplicate + if labelExist(added, label) { + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeDuplicateInOp}) + continue + } + + // check that the label doesn't already exist + if labelExist(snap.Labels, label) { + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeAlreadySet}) + continue + } + + added = append(added, label) + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeAdded}) + } + + for _, str := range remove { + label := Label(str) + + // check for duplicate + if labelExist(removed, label) { + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeDuplicateInOp}) + continue + } + + // check that the label actually exist + if !labelExist(snap.Labels, label) { + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeDoesntExist}) + continue + } + + removed = append(removed, label) + results = append(results, LabelChangeResult{Label: label, Status: LabelChangeRemoved}) + } + + if len(added) == 0 && len(removed) == 0 { + return results, nil, fmt.Errorf("no label added or removed") + } + + labelOp := NewLabelChangeOperation(author, unixTime, added, removed) + + if err := labelOp.Validate(); err != nil { + return nil, nil, err + } + + b.Append(labelOp) + + return results, labelOp, nil +} + +// ForceChangeLabels is a convenience function to apply the operation +// The difference with ChangeLabels is that no checks of deduplications are done. You are entirely +// responsible of what you are doing. In the general case, you want to use ChangeLabels instead. +// The intended use of this function is to allow importers to create legal but unexpected label changes, +// like removing a label with no information of when it was added before. 
+func ForceChangeLabels(b Interface, author identity.Interface, unixTime int64, add, remove []string) (*LabelChangeOperation, error) { + added := make([]Label, len(add)) + for i, str := range add { + added[i] = Label(str) + } + + removed := make([]Label, len(remove)) + for i, str := range remove { + removed[i] = Label(str) + } + + labelOp := NewLabelChangeOperation(author, unixTime, added, removed) + + if err := labelOp.Validate(); err != nil { + return nil, err + } + + b.Append(labelOp) + + return labelOp, nil +} + +func labelExist(labels []Label, label Label) bool { + for _, l := range labels { + if l == label { + return true + } + } + + return false +} + +type LabelChangeStatus int + +const ( + _ LabelChangeStatus = iota + LabelChangeAdded + LabelChangeRemoved + LabelChangeDuplicateInOp + LabelChangeAlreadySet + LabelChangeDoesntExist +) + +type LabelChangeResult struct { + Label Label + Status LabelChangeStatus +} + +func (l LabelChangeResult) String() string { + switch l.Status { + case LabelChangeAdded: + return fmt.Sprintf("label %s added", l.Label) + case LabelChangeRemoved: + return fmt.Sprintf("label %s removed", l.Label) + case LabelChangeDuplicateInOp: + return fmt.Sprintf("label %s is a duplicate", l.Label) + case LabelChangeAlreadySet: + return fmt.Sprintf("label %s was already set", l.Label) + case LabelChangeDoesntExist: + return fmt.Sprintf("label %s doesn't exist on this bug", l.Label) + default: + panic(fmt.Sprintf("unknown label change status %v", l.Status)) + } +} diff --git a/migration3/before/bug/op_label_change_test.go b/migration3/before/bug/op_label_change_test.go new file mode 100644 index 0000000..0e4ef06 --- /dev/null +++ b/migration3/before/bug/op_label_change_test.go @@ -0,0 +1,40 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + 
+ "github.com/stretchr/testify/assert" +) + +func TestLabelChangeSerialize(t *testing.T) { + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}) + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after LabelChangeOperation + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + assert.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + assert.Equal(t, before, &after) +} diff --git a/migration3/before/bug/op_noop.go b/migration3/before/bug/op_noop.go new file mode 100644 index 0000000..65097a8 --- /dev/null +++ b/migration3/before/bug/op_noop.go @@ -0,0 +1,84 @@ +package bug + +import ( + "encoding/json" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" +) + +var _ Operation = &NoOpOperation{} + +// NoOpOperation is an operation that does not change the bug state. It can +// however be used to store arbitrary metadata in the bug history, for example +// to support a bridge feature. 
+type NoOpOperation struct { + OpBase +} + +// Sign-post method for gqlgen +func (op *NoOpOperation) IsOperation() {} + +func (op *NoOpOperation) base() *OpBase { + return &op.OpBase +} + +func (op *NoOpOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *NoOpOperation) Apply(snapshot *Snapshot) { + // Nothing to do +} + +func (op *NoOpOperation) Validate() error { + return opBaseValidate(op, NoOpOp) +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *NoOpOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct{}{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + + return nil +} + +// Sign post method for gqlgen +func (op *NoOpOperation) IsAuthored() {} + +func NewNoOpOp(author identity.Interface, unixTime int64) *NoOpOperation { + return &NoOpOperation{ + OpBase: newOpBase(NoOpOp, author, unixTime), + } +} + +// Convenience function to apply the operation +func NoOp(b Interface, author identity.Interface, unixTime int64, metadata map[string]string) (*NoOpOperation, error) { + op := NewNoOpOp(author, unixTime) + + for key, value := range metadata { + op.SetMetadata(key, value) + } + + if err := op.Validate(); err != nil { + return nil, err + } + b.Append(op) + return op, nil +} diff --git a/migration3/before/bug/op_noop_test.go b/migration3/before/bug/op_noop_test.go new file mode 100644 index 0000000..3cbdee4 --- /dev/null +++ b/migration3/before/bug/op_noop_test.go @@ -0,0 +1,40 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + + 
"github.com/stretchr/testify/assert" +) + +func TestNoopSerialize(t *testing.T) { + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewNoOpOp(rene, unix) + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after NoOpOperation + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + assert.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + assert.Equal(t, before, &after) +} diff --git a/migration3/before/bug/op_set_metadata.go b/migration3/before/bug/op_set_metadata.go new file mode 100644 index 0000000..b6f19bb --- /dev/null +++ b/migration3/before/bug/op_set_metadata.go @@ -0,0 +1,113 @@ +package bug + +import ( + "encoding/json" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" +) + +var _ Operation = &SetMetadataOperation{} + +type SetMetadataOperation struct { + OpBase + Target entity.Id `json:"target"` + NewMetadata map[string]string `json:"new_metadata"` +} + +// Sign-post method for gqlgen +func (op *SetMetadataOperation) IsOperation() {} + +func (op *SetMetadataOperation) base() *OpBase { + return &op.OpBase +} + +func (op *SetMetadataOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *SetMetadataOperation) Apply(snapshot *Snapshot) { + for _, target := range snapshot.Operations { + if target.Id() == op.Target { + base := target.base() + + if base.extraMetadata == nil { + base.extraMetadata = make(map[string]string) + } + + // Apply the metadata in an immutable way: if a metadata already + // exist, it's not possible to override it. 
+ for key, val := range op.NewMetadata { + if _, exist := base.extraMetadata[key]; !exist { + base.extraMetadata[key] = val + } + } + + return + } + } +} + +func (op *SetMetadataOperation) Validate() error { + if err := opBaseValidate(op, SetMetadataOp); err != nil { + return err + } + + if err := op.Target.Validate(); err != nil { + return errors.Wrap(err, "target invalid") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *SetMetadataOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Target entity.Id `json:"target"` + NewMetadata map[string]string `json:"new_metadata"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Target = aux.Target + op.NewMetadata = aux.NewMetadata + + return nil +} + +// Sign post method for gqlgen +func (op *SetMetadataOperation) IsAuthored() {} + +func NewSetMetadataOp(author identity.Interface, unixTime int64, target entity.Id, newMetadata map[string]string) *SetMetadataOperation { + return &SetMetadataOperation{ + OpBase: newOpBase(SetMetadataOp, author, unixTime), + Target: target, + NewMetadata: newMetadata, + } +} + +// Convenience function to apply the operation +func SetMetadata(b Interface, author identity.Interface, unixTime int64, target entity.Id, newMetadata map[string]string) (*SetMetadataOperation, error) { + SetMetadataOp := NewSetMetadataOp(author, unixTime, target, newMetadata) + if err := SetMetadataOp.Validate(); err != nil { + return nil, err + } + b.Append(SetMetadataOp) + return SetMetadataOp, nil +} diff --git a/migration3/before/bug/op_set_metadata_test.go b/migration3/before/bug/op_set_metadata_test.go new file mode 100644 index 0000000..d099f9e --- /dev/null +++ 
b/migration3/before/bug/op_set_metadata_test.go @@ -0,0 +1,128 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSetMetadata(t *testing.T) { + snapshot := Snapshot{} + + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + + create := NewCreateOp(rene, unix, "title", "create", nil) + create.SetMetadata("key", "value") + create.Apply(&snapshot) + snapshot.Operations = append(snapshot.Operations, create) + + id1 := create.Id() + require.NoError(t, id1.Validate()) + + comment := NewAddCommentOp(rene, unix, "comment", nil) + comment.SetMetadata("key2", "value2") + comment.Apply(&snapshot) + snapshot.Operations = append(snapshot.Operations, comment) + + id2 := comment.Id() + require.NoError(t, id2.Validate()) + + op1 := NewSetMetadataOp(rene, unix, id1, map[string]string{ + "key": "override", + "key2": "value", + }) + + op1.Apply(&snapshot) + snapshot.Operations = append(snapshot.Operations, op1) + + createMetadata := snapshot.Operations[0].AllMetadata() + assert.Equal(t, len(createMetadata), 2) + // original key is not overrided + assert.Equal(t, createMetadata["key"], "value") + // new key is set + assert.Equal(t, createMetadata["key2"], "value") + + commentMetadata := snapshot.Operations[1].AllMetadata() + assert.Equal(t, len(commentMetadata), 1) + assert.Equal(t, commentMetadata["key2"], "value2") + + op2 := NewSetMetadataOp(rene, unix, id2, map[string]string{ + "key2": "value", + "key3": "value3", + }) + + op2.Apply(&snapshot) + snapshot.Operations = append(snapshot.Operations, op2) + + createMetadata = snapshot.Operations[0].AllMetadata() + assert.Equal(t, 
len(createMetadata), 2) + assert.Equal(t, createMetadata["key"], "value") + assert.Equal(t, createMetadata["key2"], "value") + + commentMetadata = snapshot.Operations[1].AllMetadata() + assert.Equal(t, len(commentMetadata), 2) + // original key is not overrided + assert.Equal(t, commentMetadata["key2"], "value2") + // new key is set + assert.Equal(t, commentMetadata["key3"], "value3") + + op3 := NewSetMetadataOp(rene, unix, id1, map[string]string{ + "key": "override", + "key2": "override", + }) + + op3.Apply(&snapshot) + snapshot.Operations = append(snapshot.Operations, op3) + + createMetadata = snapshot.Operations[0].AllMetadata() + assert.Equal(t, len(createMetadata), 2) + // original key is not overrided + assert.Equal(t, createMetadata["key"], "value") + // previously set key is not overrided + assert.Equal(t, createMetadata["key2"], "value") + + commentMetadata = snapshot.Operations[1].AllMetadata() + assert.Equal(t, len(commentMetadata), 2) + assert.Equal(t, commentMetadata["key2"], "value2") + assert.Equal(t, commentMetadata["key3"], "value3") +} + +func TestSetMetadataSerialize(t *testing.T) { + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewSetMetadataOp(rene, unix, "message", map[string]string{ + "key1": "value1", + "key2": "value2", + }) + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after SetMetadataOperation + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + assert.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + assert.Equal(t, before, &after) +} diff --git a/migration3/before/bug/op_set_status.go b/migration3/before/bug/op_set_status.go new file mode 100644 index 0000000..5fac28c --- /dev/null +++ b/migration3/before/bug/op_set_status.go 
@@ -0,0 +1,127 @@ +package bug + +import ( + "encoding/json" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" +) + +var _ Operation = &SetStatusOperation{} + +// SetStatusOperation will change the status of a bug +type SetStatusOperation struct { + OpBase + Status Status `json:"status"` +} + +// Sign-post method for gqlgen +func (op *SetStatusOperation) IsOperation() {} + +func (op *SetStatusOperation) base() *OpBase { + return &op.OpBase +} + +func (op *SetStatusOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *SetStatusOperation) Apply(snapshot *Snapshot) { + snapshot.Status = op.Status + snapshot.addActor(op.Author) + + item := &SetStatusTimelineItem{ + id: op.Id(), + Author: op.Author, + UnixTime: timestamp.Timestamp(op.UnixTime), + Status: op.Status, + } + + snapshot.Timeline = append(snapshot.Timeline, item) +} + +func (op *SetStatusOperation) Validate() error { + if err := opBaseValidate(op, SetStatusOp); err != nil { + return err + } + + if err := op.Status.Validate(); err != nil { + return errors.Wrap(err, "status") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *SetStatusOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Status Status `json:"status"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Status = aux.Status + + return nil +} + +// Sign post method for gqlgen +func (op *SetStatusOperation) IsAuthored() {} + +func NewSetStatusOp(author identity.Interface, unixTime int64, status 
Status) *SetStatusOperation { + return &SetStatusOperation{ + OpBase: newOpBase(SetStatusOp, author, unixTime), + Status: status, + } +} + +type SetStatusTimelineItem struct { + id entity.Id + Author identity.Interface + UnixTime timestamp.Timestamp + Status Status +} + +func (s SetStatusTimelineItem) Id() entity.Id { + return s.id +} + +// Sign post method for gqlgen +func (s *SetStatusTimelineItem) IsAuthored() {} + +// Convenience function to apply the operation +func Open(b Interface, author identity.Interface, unixTime int64) (*SetStatusOperation, error) { + op := NewSetStatusOp(author, unixTime, OpenStatus) + if err := op.Validate(); err != nil { + return nil, err + } + b.Append(op) + return op, nil +} + +// Convenience function to apply the operation +func Close(b Interface, author identity.Interface, unixTime int64) (*SetStatusOperation, error) { + op := NewSetStatusOp(author, unixTime, ClosedStatus) + if err := op.Validate(); err != nil { + return nil, err + } + b.Append(op) + return op, nil +} diff --git a/migration3/before/bug/op_set_status_test.go b/migration3/before/bug/op_set_status_test.go new file mode 100644 index 0000000..18af44f --- /dev/null +++ b/migration3/before/bug/op_set_status_test.go @@ -0,0 +1,40 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + + "github.com/stretchr/testify/assert" +) + +func TestSetStatusSerialize(t *testing.T) { + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewSetStatusOp(rene, unix, ClosedStatus) + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after SetStatusOperation + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + 
+ // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + assert.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + assert.Equal(t, before, &after) +} diff --git a/migration3/before/bug/op_set_title.go b/migration3/before/bug/op_set_title.go new file mode 100644 index 0000000..dbb3f28 --- /dev/null +++ b/migration3/before/bug/op_set_title.go @@ -0,0 +1,160 @@ +package bug + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" + + "github.com/MichaelMure/git-bug-migration/migration3/before/util/text" +) + +var _ Operation = &SetTitleOperation{} + +// SetTitleOperation will change the title of a bug +type SetTitleOperation struct { + OpBase + Title string `json:"title"` + Was string `json:"was"` +} + +// Sign-post method for gqlgen +func (op *SetTitleOperation) IsOperation() {} + +func (op *SetTitleOperation) base() *OpBase { + return &op.OpBase +} + +func (op *SetTitleOperation) Id() entity.Id { + return idOperation(op) +} + +func (op *SetTitleOperation) Apply(snapshot *Snapshot) { + snapshot.Title = op.Title + snapshot.addActor(op.Author) + + item := &SetTitleTimelineItem{ + id: op.Id(), + Author: op.Author, + UnixTime: timestamp.Timestamp(op.UnixTime), + Title: op.Title, + Was: op.Was, + } + + snapshot.Timeline = append(snapshot.Timeline, item) +} + +func (op *SetTitleOperation) Validate() error { + if err := opBaseValidate(op, SetTitleOp); err != nil { + return err + } + + if text.Empty(op.Title) { + return fmt.Errorf("title is empty") + } + + if strings.Contains(op.Title, "\n") { + return fmt.Errorf("title should be a single line") + } + + if !text.Safe(op.Title) { + return fmt.Errorf("title should be fully printable") + } + + if strings.Contains(op.Was, "\n") { + return 
fmt.Errorf("previous title should be a single line") + } + + if !text.Safe(op.Was) { + return fmt.Errorf("previous title should be fully printable") + } + + return nil +} + +// UnmarshalJSON is a two step JSON unmarshaling +// This workaround is necessary to avoid the inner OpBase.MarshalJSON +// overriding the outer op's MarshalJSON +func (op *SetTitleOperation) UnmarshalJSON(data []byte) error { + // Unmarshal OpBase and the op separately + + base := OpBase{} + err := json.Unmarshal(data, &base) + if err != nil { + return err + } + + aux := struct { + Title string `json:"title"` + Was string `json:"was"` + }{} + + err = json.Unmarshal(data, &aux) + if err != nil { + return err + } + + op.OpBase = base + op.Title = aux.Title + op.Was = aux.Was + + return nil +} + +// Sign post method for gqlgen +func (op *SetTitleOperation) IsAuthored() {} + +func NewSetTitleOp(author identity.Interface, unixTime int64, title string, was string) *SetTitleOperation { + return &SetTitleOperation{ + OpBase: newOpBase(SetTitleOp, author, unixTime), + Title: title, + Was: was, + } +} + +type SetTitleTimelineItem struct { + id entity.Id + Author identity.Interface + UnixTime timestamp.Timestamp + Title string + Was string +} + +func (s SetTitleTimelineItem) Id() entity.Id { + return s.id +} + +// Sign post method for gqlgen +func (s *SetTitleTimelineItem) IsAuthored() {} + +// Convenience function to apply the operation +func SetTitle(b Interface, author identity.Interface, unixTime int64, title string) (*SetTitleOperation, error) { + it := NewOperationIterator(b) + + var lastTitleOp Operation + for it.Next() { + op := it.Value() + if op.base().OperationType == SetTitleOp { + lastTitleOp = op + } + } + + var was string + if lastTitleOp != nil { + was = lastTitleOp.(*SetTitleOperation).Title + } else { + was = b.FirstOp().(*CreateOperation).Title + } + + setTitleOp := NewSetTitleOp(author, unixTime, title, was) + + if err := setTitleOp.Validate(); err != nil { + return nil, err + } + + 
b.Append(setTitleOp) + return setTitleOp, nil +} diff --git a/migration3/before/bug/op_set_title_test.go b/migration3/before/bug/op_set_title_test.go new file mode 100644 index 0000000..d3e1480 --- /dev/null +++ b/migration3/before/bug/op_set_title_test.go @@ -0,0 +1,40 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + + "github.com/stretchr/testify/assert" +) + +func TestSetTitleSerialize(t *testing.T) { + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + unix := time.Now().Unix() + before := NewSetTitleOp(rene, unix, "title", "was") + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after SetTitleOperation + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the ID + before.Id() + + // Replace the identity stub with the real thing + assert.Equal(t, rene.Id(), after.base().Author.Id()) + after.Author = rene + + assert.Equal(t, before, &after) +} diff --git a/migration3/before/bug/operation.go b/migration3/before/bug/operation.go new file mode 100644 index 0000000..8118526 --- /dev/null +++ b/migration3/before/bug/operation.go @@ -0,0 +1,219 @@ +package bug + +import ( + "crypto/sha256" + "encoding/json" + "fmt" + "time" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +// OperationType is an operation type identifier +type OperationType int + +const ( + _ OperationType = iota + CreateOp + SetTitleOp + AddCommentOp + SetStatusOp + LabelChangeOp + EditCommentOp + NoOpOp + SetMetadataOp +) + +// 
Operation define the interface to fulfill for an edit operation of a Bug +type Operation interface { + // base return the OpBase of the Operation, for package internal use + base() *OpBase + // Id return the identifier of the operation, to be used for back references + Id() entity.Id + // Time return the time when the operation was added + Time() time.Time + // GetFiles return the files needed by this operation + GetFiles() []repository.Hash + // Apply the operation to a Snapshot to create the final state + Apply(snapshot *Snapshot) + // Validate check if the operation is valid (ex: a title is a single line) + Validate() error + // SetMetadata store arbitrary metadata about the operation + SetMetadata(key string, value string) + // GetMetadata retrieve arbitrary metadata about the operation + GetMetadata(key string) (string, bool) + // AllMetadata return all metadata for this operation + AllMetadata() map[string]string + // GetAuthor return the author identity + GetAuthor() identity.Interface + + // sign-post method for gqlgen + IsOperation() +} + +func deriveId(data []byte) entity.Id { + sum := sha256.Sum256(data) + return entity.Id(fmt.Sprintf("%x", sum)) +} + +func idOperation(op Operation) entity.Id { + base := op.base() + + if base.id == "" { + // something went really wrong + panic("op's id not set") + } + if base.id == entity.UnsetId { + // This means we are trying to get the op's Id *before* it has been stored, for instance when + // adding multiple ops in one go in an OperationPack. + // As the Id is computed based on the actual bytes written on the disk, we are going to predict + // those and then get the Id. This is safe as it will be the exact same code writing on disk later. 
+ + data, err := json.Marshal(op) + if err != nil { + panic(err) + } + + base.id = deriveId(data) + } + return base.id +} + +// OpBase implement the common code for all operations +type OpBase struct { + OperationType OperationType `json:"type"` + Author identity.Interface `json:"author"` + // TODO: part of the data model upgrade, this should eventually be a timestamp + lamport + UnixTime int64 `json:"timestamp"` + Metadata map[string]string `json:"metadata,omitempty"` + // Not serialized. Store the op's id in memory. + id entity.Id + // Not serialized. Store the extra metadata in memory, + // compiled from SetMetadataOperation. + extraMetadata map[string]string +} + +// newOpBase is the constructor for an OpBase +func newOpBase(opType OperationType, author identity.Interface, unixTime int64) OpBase { + return OpBase{ + OperationType: opType, + Author: author, + UnixTime: unixTime, + id: entity.UnsetId, + } +} + +func (op *OpBase) UnmarshalJSON(data []byte) error { + // Compute the Id when loading the op from disk. 
+ op.id = deriveId(data) + + aux := struct { + OperationType OperationType `json:"type"` + Author json.RawMessage `json:"author"` + UnixTime int64 `json:"timestamp"` + Metadata map[string]string `json:"metadata,omitempty"` + }{} + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + // delegate the decoding of the identity + author, err := identity.UnmarshalJSON(aux.Author) + if err != nil { + return err + } + + op.OperationType = aux.OperationType + op.Author = author + op.UnixTime = aux.UnixTime + op.Metadata = aux.Metadata + + return nil +} + +// Time return the time when the operation was added +func (op *OpBase) Time() time.Time { + return time.Unix(op.UnixTime, 0) +} + +// GetFiles return the files needed by this operation +func (op *OpBase) GetFiles() []repository.Hash { + return nil +} + +// Validate check the OpBase for errors +func opBaseValidate(op Operation, opType OperationType) error { + if op.base().OperationType != opType { + return fmt.Errorf("incorrect operation type (expected: %v, actual: %v)", opType, op.base().OperationType) + } + + if op.Time().Unix() == 0 { + return fmt.Errorf("time not set") + } + + if op.base().Author == nil { + return fmt.Errorf("author not set") + } + + if err := op.base().Author.Validate(); err != nil { + return errors.Wrap(err, "author") + } + + for _, hash := range op.GetFiles() { + if !hash.IsValid() { + return fmt.Errorf("file with invalid hash %v", hash) + } + } + + return nil +} + +// SetMetadata store arbitrary metadata about the operation +func (op *OpBase) SetMetadata(key string, value string) { + if op.Metadata == nil { + op.Metadata = make(map[string]string) + } + + op.Metadata[key] = value + op.id = entity.UnsetId +} + +// GetMetadata retrieve arbitrary metadata about the operation +func (op *OpBase) GetMetadata(key string) (string, bool) { + val, ok := op.Metadata[key] + + if ok { + return val, true + } + + // extraMetadata can't replace the original operations value if any + val, ok = 
op.extraMetadata[key] + + return val, ok +} + +// AllMetadata return all metadata for this operation +func (op *OpBase) AllMetadata() map[string]string { + result := make(map[string]string) + + for key, val := range op.extraMetadata { + result[key] = val + } + + // Original metadata take precedence + for key, val := range op.Metadata { + result[key] = val + } + + return result +} + +// GetAuthor return author identity +func (op *OpBase) GetAuthor() identity.Interface { + return op.Author +} diff --git a/migration3/before/bug/operation_iterator.go b/migration3/before/bug/operation_iterator.go new file mode 100644 index 0000000..05c1007 --- /dev/null +++ b/migration3/before/bug/operation_iterator.go @@ -0,0 +1,72 @@ +package bug + +type OperationIterator struct { + bug *Bug + packIndex int + opIndex int +} + +func NewOperationIterator(bug Interface) *OperationIterator { + return &OperationIterator{ + bug: bugFromInterface(bug), + packIndex: 0, + opIndex: -1, + } +} + +func (it *OperationIterator) Next() bool { + // Special case of the staging area + if it.packIndex == len(it.bug.Packs) { + pack := it.bug.staging + it.opIndex++ + return it.opIndex < len(pack.Operations) + } + + if it.packIndex >= len(it.bug.Packs) { + return false + } + + pack := it.bug.Packs[it.packIndex] + + it.opIndex++ + + if it.opIndex < len(pack.Operations) { + return true + } + + // Note: this iterator doesn't handle the empty pack case + it.opIndex = 0 + it.packIndex++ + + // Special case of the non-empty staging area + if it.packIndex == len(it.bug.Packs) && len(it.bug.staging.Operations) > 0 { + return true + } + + return it.packIndex < len(it.bug.Packs) +} + +func (it *OperationIterator) Value() Operation { + // Special case of the staging area + if it.packIndex == len(it.bug.Packs) { + pack := it.bug.staging + + if it.opIndex >= len(pack.Operations) { + panic("Iterator is not valid anymore") + } + + return pack.Operations[it.opIndex] + } + + if it.packIndex >= len(it.bug.Packs) { + 
panic("Iterator is not valid anymore") + } + + pack := it.bug.Packs[it.packIndex] + + if it.opIndex >= len(pack.Operations) { + panic("Iterator is not valid anymore") + } + + return pack.Operations[it.opIndex] +} diff --git a/migration3/before/bug/operation_iterator_test.go b/migration3/before/bug/operation_iterator_test.go new file mode 100644 index 0000000..3661f16 --- /dev/null +++ b/migration3/before/bug/operation_iterator_test.go @@ -0,0 +1,78 @@ +package bug + +import ( + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +func ExampleOperationIterator() { + b := NewBug() + + // add operations + + it := NewOperationIterator(b) + + for it.Next() { + // do something with each operations + _ = it.Value() + } +} + +func TestOpIterator(t *testing.T) { + mockRepo := repository.NewMockRepoForTest() + + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(mockRepo) + require.NoError(t, err) + + unix := time.Now().Unix() + + createOp := NewCreateOp(rene, unix, "title", "message", nil) + addCommentOp := NewAddCommentOp(rene, unix, "message2", nil) + setStatusOp := NewSetStatusOp(rene, unix, ClosedStatus) + labelChangeOp := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}) + + var i int + genTitleOp := func() Operation { + i++ + return NewSetTitleOp(rene, unix, fmt.Sprintf("title%d", i), "") + } + + bug1 := NewBug() + + // first pack + bug1.Append(createOp) + bug1.Append(addCommentOp) + bug1.Append(setStatusOp) + bug1.Append(labelChangeOp) + err = bug1.Commit(mockRepo) + require.NoError(t, err) + + // second pack + bug1.Append(genTitleOp()) + bug1.Append(genTitleOp()) + bug1.Append(genTitleOp()) + err = bug1.Commit(mockRepo) + require.NoError(t, err) + + // staging + bug1.Append(genTitleOp()) + bug1.Append(genTitleOp()) + 
bug1.Append(genTitleOp()) + + it := NewOperationIterator(bug1) + + counter := 0 + for it.Next() { + _ = it.Value() + counter++ + } + + require.Equal(t, 10, counter) +} diff --git a/migration3/before/bug/operation_pack.go b/migration3/before/bug/operation_pack.go new file mode 100644 index 0000000..576e868 --- /dev/null +++ b/migration3/before/bug/operation_pack.go @@ -0,0 +1,188 @@ +package bug + +import ( + "encoding/json" + "fmt" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +// 1: original format +// 2: no more legacy identities +const formatVersion = 2 + +// OperationPack represent an ordered set of operation to apply +// to a Bug. These operations are stored in a single Git commit. +// +// These commits will be linked together in a linear chain of commits +// inside Git to form the complete ordered chain of operation to +// apply to get the final state of the Bug +type OperationPack struct { + Operations []Operation + + // Private field so not serialized + commitHash repository.Hash + FormatVersion uint +} + +func (opp *OperationPack) MarshalJSON() ([]byte, error) { + return json.Marshal(struct { + Version uint `json:"version"` + Operations []Operation `json:"ops"` + }{ + Version: formatVersion, + Operations: opp.Operations, + }) +} + +func (opp *OperationPack) UnmarshalJSON(data []byte) error { + aux := struct { + Version uint `json:"version"` + Operations []json.RawMessage `json:"ops"` + }{} + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + if aux.Version < formatVersion { + return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade") + } + if aux.Version > formatVersion { + return fmt.Errorf("your version of git-bug is too old for this repository (version %v), please upgrade to the latest version", aux.Version) + } + + for _, raw := range aux.Operations { + var t struct { + OperationType OperationType 
`json:"type"` + } + + if err := json.Unmarshal(raw, &t); err != nil { + return err + } + + // delegate to specialized unmarshal function + op, err := opp.unmarshalOp(raw, t.OperationType) + if err != nil { + return err + } + + opp.Operations = append(opp.Operations, op) + } + + return nil +} + +func (opp *OperationPack) unmarshalOp(raw []byte, _type OperationType) (Operation, error) { + switch _type { + case AddCommentOp: + op := &AddCommentOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case CreateOp: + op := &CreateOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case EditCommentOp: + op := &EditCommentOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case LabelChangeOp: + op := &LabelChangeOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case NoOpOp: + op := &NoOpOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case SetMetadataOp: + op := &SetMetadataOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case SetStatusOp: + op := &SetStatusOperation{} + err := json.Unmarshal(raw, &op) + return op, err + case SetTitleOp: + op := &SetTitleOperation{} + err := json.Unmarshal(raw, &op) + return op, err + default: + return nil, fmt.Errorf("unknown operation type %v", _type) + } +} + +// Append a new operation to the pack +func (opp *OperationPack) Append(op Operation) { + opp.Operations = append(opp.Operations, op) +} + +// IsEmpty tell if the OperationPack is empty +func (opp *OperationPack) IsEmpty() bool { + return len(opp.Operations) == 0 +} + +// IsValid tell if the OperationPack is considered valid +func (opp *OperationPack) Validate() error { + if opp.IsEmpty() { + return fmt.Errorf("empty") + } + + for _, op := range opp.Operations { + if err := op.Validate(); err != nil { + return errors.Wrap(err, "op") + } + } + + return nil +} + +// Write will serialize and store the OperationPack as a git blob and return +// its hash +func (opp *OperationPack) Write(repo 
repository.ClockedRepo) (repository.Hash, error) { + // make sure we don't write invalid data + err := opp.Validate() + if err != nil { + return "", errors.Wrap(err, "validation error") + } + + // First, make sure that all the identities are properly Commit as well + // TODO: this might be downgraded to "make sure it exist in git" but then, what make + // sure no data is lost on identities ? + for _, op := range opp.Operations { + if op.base().Author.NeedCommit() { + return "", fmt.Errorf("identity need commmit") + } + } + + data, err := json.Marshal(opp) + + if err != nil { + return "", err + } + + hash, err := repo.StoreData(data) + + if err != nil { + return "", err + } + + return hash, nil +} + +// Make a deep copy +func (opp *OperationPack) Clone() OperationPack { + + clone := OperationPack{ + Operations: make([]Operation, len(opp.Operations)), + commitHash: opp.commitHash, + } + + for i, op := range opp.Operations { + clone.Operations[i] = op + } + + return clone +} diff --git a/migration3/before/bug/operation_pack_test.go b/migration3/before/bug/operation_pack_test.go new file mode 100644 index 0000000..23545c9 --- /dev/null +++ b/migration3/before/bug/operation_pack_test.go @@ -0,0 +1,79 @@ +package bug + +import ( + "encoding/json" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +func TestOperationPackSerialize(t *testing.T) { + opp := &OperationPack{} + + repo := repository.NewMockRepoForTest() + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1") + addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil) + 
setStatusOp := NewSetStatusOp(rene, time.Now().Unix(), ClosedStatus) + labelChangeOp := NewLabelChangeOperation(rene, time.Now().Unix(), []Label{"added"}, []Label{"removed"}) + + opp.Append(createOp) + opp.Append(setTitleOp) + opp.Append(addCommentOp) + opp.Append(setStatusOp) + opp.Append(labelChangeOp) + + opMeta := NewSetTitleOp(rene, time.Now().Unix(), "title3", "title2") + opMeta.SetMetadata("key", "value") + opp.Append(opMeta) + + assert.Equal(t, 1, len(opMeta.Metadata)) + + opFile := NewAddCommentOp(rene, time.Now().Unix(), "message", []repository.Hash{ + "abcdef", + "ghijkl", + }) + opp.Append(opFile) + + assert.Equal(t, 2, len(opFile.Files)) + + data, err := json.Marshal(opp) + assert.NoError(t, err) + + var opp2 *OperationPack + err = json.Unmarshal(data, &opp2) + assert.NoError(t, err) + + ensureIds(opp) + ensureAuthors(t, opp, opp2) + + assert.Equal(t, opp, opp2) +} + +func ensureIds(opp *OperationPack) { + for _, op := range opp.Operations { + op.Id() + } +} + +func ensureAuthors(t *testing.T, opp1 *OperationPack, opp2 *OperationPack) { + require.Equal(t, len(opp1.Operations), len(opp2.Operations)) + for i := 0; i < len(opp1.Operations); i++ { + op1 := opp1.Operations[i] + op2 := opp2.Operations[i] + + // ensure we have equivalent authors (IdentityStub vs Identity) then + // enforce equality + require.Equal(t, op1.base().Author.Id(), op2.base().Author.Id()) + op1.base().Author = op2.base().Author + } +} diff --git a/migration3/before/bug/operation_test.go b/migration3/before/bug/operation_test.go new file mode 100644 index 0000000..cdf120d --- /dev/null +++ b/migration3/before/bug/operation_test.go @@ -0,0 +1,119 @@ +package bug + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +func TestValidate(t *testing.T) { + rene := identity.NewIdentity("René Descartes", 
"rene@descartes.fr") + unix := time.Now().Unix() + + good := []Operation{ + NewCreateOp(rene, unix, "title", "message", nil), + NewSetTitleOp(rene, unix, "title2", "title1"), + NewAddCommentOp(rene, unix, "message2", nil), + NewSetStatusOp(rene, unix, ClosedStatus), + NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}), + } + + for _, op := range good { + if err := op.Validate(); err != nil { + t.Fatal(err) + } + } + + bad := []Operation{ + // opbase + NewSetStatusOp(identity.NewIdentity("", "rene@descartes.fr"), unix, ClosedStatus), + NewSetStatusOp(identity.NewIdentity("René Descartes\u001b", "rene@descartes.fr"), unix, ClosedStatus), + NewSetStatusOp(identity.NewIdentity("René Descartes", "rene@descartes.fr\u001b"), unix, ClosedStatus), + NewSetStatusOp(identity.NewIdentity("René \nDescartes", "rene@descartes.fr"), unix, ClosedStatus), + NewSetStatusOp(identity.NewIdentity("René Descartes", "rene@\ndescartes.fr"), unix, ClosedStatus), + &CreateOperation{OpBase: OpBase{ + Author: rene, + UnixTime: 0, + OperationType: CreateOp, + }, + Title: "title", + Message: "message", + }, + + NewCreateOp(rene, unix, "multi\nline", "message", nil), + NewCreateOp(rene, unix, "title", "message", []repository.Hash{repository.Hash("invalid")}), + NewCreateOp(rene, unix, "title\u001b", "message", nil), + NewCreateOp(rene, unix, "title", "message\u001b", nil), + NewSetTitleOp(rene, unix, "multi\nline", "title1"), + NewSetTitleOp(rene, unix, "title", "multi\nline"), + NewSetTitleOp(rene, unix, "title\u001b", "title2"), + NewSetTitleOp(rene, unix, "title", "title2\u001b"), + NewAddCommentOp(rene, unix, "message\u001b", nil), + NewAddCommentOp(rene, unix, "message", []repository.Hash{repository.Hash("invalid")}), + NewSetStatusOp(rene, unix, 1000), + NewSetStatusOp(rene, unix, 0), + NewLabelChangeOperation(rene, unix, []Label{}, []Label{}), + NewLabelChangeOperation(rene, unix, []Label{"multi\nline"}, []Label{}), + } + + for i, op := range bad { + if err := 
op.Validate(); err == nil { + t.Fatal("validation should have failed", i, op) + } + } +} + +func TestMetadata(t *testing.T) { + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + op := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) + + op.SetMetadata("key", "value") + + val, ok := op.GetMetadata("key") + require.True(t, ok) + require.Equal(t, val, "value") +} + +func TestID(t *testing.T) { + repo := repository.CreateGoGitTestRepo(false) + defer repository.CleanupTestRepos(repo) + + repos := []repository.ClockedRepo{ + repository.NewMockRepoForTest(), + repo, + } + + for _, repo := range repos { + rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") + err := rene.Commit(repo) + require.NoError(t, err) + + b, op, err := Create(rene, time.Now().Unix(), "title", "message") + require.NoError(t, err) + + id1 := op.Id() + require.NoError(t, id1.Validate()) + + err = b.Commit(repo) + require.NoError(t, err) + + op2 := b.FirstOp() + + id2 := op2.Id() + require.NoError(t, id2.Validate()) + require.Equal(t, id1, id2) + + b2, err := ReadLocal(repo, b.Id()) + require.NoError(t, err) + + op3 := b2.FirstOp() + + id3 := op3.Id() + require.NoError(t, id3.Validate()) + require.Equal(t, id1, id3) + } +} diff --git a/migration3/before/bug/snapshot.go b/migration3/before/bug/snapshot.go new file mode 100644 index 0000000..e43db25 --- /dev/null +++ b/migration3/before/bug/snapshot.go @@ -0,0 +1,133 @@ +package bug + +import ( + "fmt" + "time" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" +) + +// Snapshot is a compiled form of the Bug data structure used for storage and merge +type Snapshot struct { + id entity.Id + + Status Status + Title string + Comments []Comment + Labels []Label + Author identity.Interface + Actors []identity.Interface + Participants []identity.Interface + CreateTime time.Time + + Timeline []TimelineItem + + Operations 
[]Operation +} + +// Return the Bug identifier +func (snap *Snapshot) Id() entity.Id { + return snap.id +} + +// Return the last time a bug was modified +func (snap *Snapshot) EditTime() time.Time { + if len(snap.Operations) == 0 { + return time.Unix(0, 0) + } + + return snap.Operations[len(snap.Operations)-1].Time() +} + +// GetCreateMetadata return the creation metadata +func (snap *Snapshot) GetCreateMetadata(key string) (string, bool) { + return snap.Operations[0].GetMetadata(key) +} + +// SearchTimelineItem will search in the timeline for an item matching the given hash +func (snap *Snapshot) SearchTimelineItem(id entity.Id) (TimelineItem, error) { + for i := range snap.Timeline { + if snap.Timeline[i].Id() == id { + return snap.Timeline[i], nil + } + } + + return nil, fmt.Errorf("timeline item not found") +} + +// SearchComment will search for a comment matching the given hash +func (snap *Snapshot) SearchComment(id entity.Id) (*Comment, error) { + for _, c := range snap.Comments { + if c.id == id { + return &c, nil + } + } + + return nil, fmt.Errorf("comment item not found") +} + +// append the operation author to the actors list +func (snap *Snapshot) addActor(actor identity.Interface) { + for _, a := range snap.Actors { + if actor.Id() == a.Id() { + return + } + } + + snap.Actors = append(snap.Actors, actor) +} + +// append the operation author to the participants list +func (snap *Snapshot) addParticipant(participant identity.Interface) { + for _, p := range snap.Participants { + if participant.Id() == p.Id() { + return + } + } + + snap.Participants = append(snap.Participants, participant) +} + +// HasParticipant return true if the id is a participant +func (snap *Snapshot) HasParticipant(id entity.Id) bool { + for _, p := range snap.Participants { + if p.Id() == id { + return true + } + } + return false +} + +// HasAnyParticipant return true if one of the ids is a participant +func (snap *Snapshot) HasAnyParticipant(ids ...entity.Id) bool { + for _, id 
:= range ids { + if snap.HasParticipant(id) { + return true + } + } + return false +} + +// HasActor return true if the id is a actor +func (snap *Snapshot) HasActor(id entity.Id) bool { + for _, p := range snap.Actors { + if p.Id() == id { + return true + } + } + return false +} + +// HasAnyActor return true if one of the ids is a actor +func (snap *Snapshot) HasAnyActor(ids ...entity.Id) bool { + for _, id := range ids { + if snap.HasActor(id) { + return true + } + } + return false +} + +// Sign post method for gqlgen +func (snap *Snapshot) IsAuthored() {} diff --git a/migration3/before/bug/sorting.go b/migration3/before/bug/sorting.go new file mode 100644 index 0000000..d1c370d --- /dev/null +++ b/migration3/before/bug/sorting.go @@ -0,0 +1,57 @@ +package bug + +type BugsByCreationTime []*Bug + +func (b BugsByCreationTime) Len() int { + return len(b) +} + +func (b BugsByCreationTime) Less(i, j int) bool { + if b[i].createTime < b[j].createTime { + return true + } + + if b[i].createTime > b[j].createTime { + return false + } + + // When the logical clocks are identical, that means we had a concurrent + // edition. In this case we rely on the timestamp. While the timestamp might + // be incorrect due to a badly set clock, the drift in sorting is bounded + // by the first sorting using the logical clock. That means that if users + // synchronize their bugs regularly, the timestamp will rarely be used, and + // should still provide a kinda accurate sorting when needed. + return b[i].FirstOp().Time().Before(b[j].FirstOp().Time()) +} + +func (b BugsByCreationTime) Swap(i, j int) { + b[i], b[j] = b[j], b[i] +} + +type BugsByEditTime []*Bug + +func (b BugsByEditTime) Len() int { + return len(b) +} + +func (b BugsByEditTime) Less(i, j int) bool { + if b[i].editTime < b[j].editTime { + return true + } + + if b[i].editTime > b[j].editTime { + return false + } + + // When the logical clocks are identical, that means we had a concurrent + // edition. 
In this case we rely on the timestamp. While the timestamp might + // be incorrect due to a badly set clock, the drift in sorting is bounded + // by the first sorting using the logical clock. That means that if users + // synchronize their bugs regularly, the timestamp will rarely be used, and + // should still provide a kinda accurate sorting when needed. + return b[i].LastOp().Time().Before(b[j].LastOp().Time()) +} + +func (b BugsByEditTime) Swap(i, j int) { + b[i], b[j] = b[j], b[i] +} diff --git a/migration3/before/bug/status.go b/migration3/before/bug/status.go new file mode 100644 index 0000000..9e99803 --- /dev/null +++ b/migration3/before/bug/status.go @@ -0,0 +1,57 @@ +package bug + +import ( + "fmt" + "strings" +) + +type Status int + +const ( + _ Status = iota + OpenStatus + ClosedStatus +) + +func (s Status) String() string { + switch s { + case OpenStatus: + return "open" + case ClosedStatus: + return "closed" + default: + return "unknown status" + } +} + +func (s Status) Action() string { + switch s { + case OpenStatus: + return "opened" + case ClosedStatus: + return "closed" + default: + return "unknown status" + } +} + +func StatusFromString(str string) (Status, error) { + cleaned := strings.ToLower(strings.TrimSpace(str)) + + switch cleaned { + case "open": + return OpenStatus, nil + case "closed": + return ClosedStatus, nil + default: + return 0, fmt.Errorf("unknown status") + } +} + +func (s Status) Validate() error { + if s != OpenStatus && s != ClosedStatus { + return fmt.Errorf("invalid") + } + + return nil +} diff --git a/migration3/before/bug/timeline.go b/migration3/before/bug/timeline.go new file mode 100644 index 0000000..e9b227e --- /dev/null +++ b/migration3/before/bug/timeline.go @@ -0,0 +1,79 @@ +package bug + +import ( + "strings" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + 
"github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" +) + +type TimelineItem interface { + // ID return the identifier of the item + Id() entity.Id +} + +// CommentHistoryStep hold one version of a message in the history +type CommentHistoryStep struct { + // The author of the edition, not necessarily the same as the author of the + // original comment + Author identity.Interface + // The new message + Message string + UnixTime timestamp.Timestamp +} + +// CommentTimelineItem is a TimelineItem that holds a Comment and its edition history +type CommentTimelineItem struct { + id entity.Id + Author identity.Interface + Message string + Files []repository.Hash + CreatedAt timestamp.Timestamp + LastEdit timestamp.Timestamp + History []CommentHistoryStep +} + +func NewCommentTimelineItem(ID entity.Id, comment Comment) CommentTimelineItem { + return CommentTimelineItem{ + id: ID, + Author: comment.Author, + Message: comment.Message, + Files: comment.Files, + CreatedAt: comment.UnixTime, + LastEdit: comment.UnixTime, + History: []CommentHistoryStep{ + { + Message: comment.Message, + UnixTime: comment.UnixTime, + }, + }, + } +} + +func (c *CommentTimelineItem) Id() entity.Id { + return c.id +} + +// Append will append a new comment in the history and update the other values +func (c *CommentTimelineItem) Append(comment Comment) { + c.Message = comment.Message + c.Files = comment.Files + c.LastEdit = comment.UnixTime + c.History = append(c.History, CommentHistoryStep{ + Author: comment.Author, + Message: comment.Message, + UnixTime: comment.UnixTime, + }) +} + +// Edited say if the comment was edited +func (c *CommentTimelineItem) Edited() bool { + return len(c.History) > 1 +} + +// MessageIsEmpty return true is the message is empty or only made of spaces +func (c *CommentTimelineItem) MessageIsEmpty() bool { + return len(strings.TrimSpace(c.Message)) == 0 +} diff --git 
a/migration3/before/bug/with_snapshot.go b/migration3/before/bug/with_snapshot.go new file mode 100644 index 0000000..d04acb7 --- /dev/null +++ b/migration3/before/bug/with_snapshot.go @@ -0,0 +1,58 @@ +package bug + +import "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + +var _ Interface = &WithSnapshot{} + +// WithSnapshot encapsulate a Bug and maintain the corresponding Snapshot efficiently +type WithSnapshot struct { + *Bug + snap *Snapshot +} + +// Snapshot return the current snapshot +func (b *WithSnapshot) Snapshot() *Snapshot { + if b.snap == nil { + snap := b.Bug.Compile() + b.snap = &snap + } + return b.snap +} + +// Append intercept Bug.Append() to update the snapshot efficiently +func (b *WithSnapshot) Append(op Operation) { + b.Bug.Append(op) + + if b.snap == nil { + return + } + + op.Apply(b.snap) + b.snap.Operations = append(b.snap.Operations, op) +} + +// Commit intercept Bug.Commit() to update the snapshot efficiently +func (b *WithSnapshot) Commit(repo repository.ClockedRepo) error { + err := b.Bug.Commit(repo) + + if err != nil { + b.snap = nil + return err + } + + // Commit() shouldn't change anything of the bug state apart from the + // initial ID set + + if b.snap == nil { + return nil + } + + b.snap.id = b.Bug.id + return nil +} + +// Merge intercept Bug.Merge() and clear the snapshot +func (b *WithSnapshot) Merge(repo repository.Repo, other Interface) (bool, error) { + b.snap = nil + return b.Bug.Merge(repo, other) +} diff --git a/migration3/before/entity/doc.go b/migration3/before/entity/doc.go new file mode 100644 index 0000000..4682d54 --- /dev/null +++ b/migration3/before/entity/doc.go @@ -0,0 +1,8 @@ +// Package entity contains the base common code to define an entity stored +// in a chain of git objects, supporting actions like Push, Pull and Merge. +package entity + +// TODO: Bug and Identity are very similar, right ? 
I expect that this package +// will eventually hold the common code to define an entity and the related +// helpers, errors and so on. When this work is done, it will become easier +// to add new entities, for example to support pull requests. diff --git a/migration3/before/entity/err.go b/migration3/before/entity/err.go new file mode 100644 index 0000000..7d6c662 --- /dev/null +++ b/migration3/before/entity/err.go @@ -0,0 +1,32 @@ +package entity + +import ( + "fmt" + "strings" +) + +type ErrMultipleMatch struct { + entityType string + Matching []Id +} + +func NewErrMultipleMatch(entityType string, matching []Id) *ErrMultipleMatch { + return &ErrMultipleMatch{entityType: entityType, Matching: matching} +} + +func (e ErrMultipleMatch) Error() string { + matching := make([]string, len(e.Matching)) + + for i, match := range e.Matching { + matching[i] = match.String() + } + + return fmt.Sprintf("Multiple matching %s found:\n%s", + e.entityType, + strings.Join(matching, "\n")) +} + +func IsErrMultipleMatch(err error) bool { + _, ok := err.(*ErrMultipleMatch) + return ok +} diff --git a/migration3/before/entity/id.go b/migration3/before/entity/id.go new file mode 100644 index 0000000..5c67b52 --- /dev/null +++ b/migration3/before/entity/id.go @@ -0,0 +1,100 @@ +package entity + +import ( + "fmt" + "io" + "strings" + + "github.com/pkg/errors" +) + +const IdLengthSHA1 = 40 +const IdLengthSHA256 = 64 +const humanIdLength = 7 + +const UnsetId = Id("unset") + +// Id is an identifier for an entity or part of an entity +type Id string + +// String return the identifier as a string +func (i Id) String() string { + return string(i) +} + +// Human return the identifier, shortened for human consumption +func (i Id) Human() string { + format := fmt.Sprintf("%%.%ds", humanIdLength) + return fmt.Sprintf(format, i) +} + +func (i Id) HasPrefix(prefix string) bool { + return strings.HasPrefix(string(i), prefix) +} + +// UnmarshalGQL implement the Unmarshaler interface for gqlgen +func 
(i *Id) UnmarshalGQL(v interface{}) error { + _, ok := v.(string) + if !ok { + return fmt.Errorf("IDs must be strings") + } + + *i = v.(Id) + + if err := i.Validate(); err != nil { + return errors.Wrap(err, "invalid ID") + } + + return nil +} + +// MarshalGQL implement the Marshaler interface for gqlgen +func (i Id) MarshalGQL(w io.Writer) { + _, _ = w.Write([]byte(`"` + i.String() + `"`)) +} + +// IsValid tell if the Id is valid +func (i Id) Validate() error { + if len(i) != IdLengthSHA1 && len(i) != IdLengthSHA256 { + return fmt.Errorf("invalid length") + } + for _, r := range i { + if (r < 'a' || r > 'z') && (r < '0' || r > '9') { + return fmt.Errorf("invalid character") + } + } + return nil +} + +/* + * Sorting + */ + +type Alphabetical []Id + +func (a Alphabetical) Len() int { + return len(a) +} + +func (a Alphabetical) Less(i, j int) bool { + return a[i] < a[j] +} + +func (a Alphabetical) Swap(i, j int) { + a[i], a[j] = a[j], a[i] +} + +func RefsToIds(refs []string) []Id { + ids := make([]Id, len(refs)) + + for i, ref := range refs { + ids[i] = refToId(ref) + } + + return ids +} + +func refToId(ref string) Id { + split := strings.Split(ref, "/") + return Id(split[len(split)-1]) +} diff --git a/migration3/before/entity/interface.go b/migration3/before/entity/interface.go new file mode 100644 index 0000000..dd5d69b --- /dev/null +++ b/migration3/before/entity/interface.go @@ -0,0 +1,6 @@ +package entity + +type Interface interface { + // Id return the Entity identifier + Id() Id +} diff --git a/migration3/before/entity/merge.go b/migration3/before/entity/merge.go new file mode 100644 index 0000000..3ce8eda --- /dev/null +++ b/migration3/before/entity/merge.go @@ -0,0 +1,74 @@ +package entity + +import ( + "fmt" +) + +// MergeStatus represent the result of a merge operation of an entity +type MergeStatus int + +const ( + _ MergeStatus = iota + MergeStatusNew + MergeStatusInvalid + MergeStatusUpdated + MergeStatusNothing + MergeStatusError +) + +type MergeResult 
struct { + // Err is set when a terminal error occur in the process + Err error + + Id Id + Status MergeStatus + + // Only set for invalid status + Reason string + + // Not set for invalid status + Entity Interface +} + +func (mr MergeResult) String() string { + switch mr.Status { + case MergeStatusNew: + return "new" + case MergeStatusInvalid: + return fmt.Sprintf("invalid data: %s", mr.Reason) + case MergeStatusUpdated: + return "updated" + case MergeStatusNothing: + return "nothing to do" + case MergeStatusError: + return fmt.Sprintf("merge error on %s: %s", mr.Id, mr.Err.Error()) + default: + panic("unknown merge status") + } +} + +func NewMergeError(err error, id Id) MergeResult { + return MergeResult{ + Err: err, + Id: id, + Status: MergeStatusError, + } +} + +func NewMergeStatus(status MergeStatus, id Id, entity Interface) MergeResult { + return MergeResult{ + Id: id, + Status: status, + + // Entity is not set for an invalid merge result + Entity: entity, + } +} + +func NewMergeInvalidStatus(id Id, reason string) MergeResult { + return MergeResult{ + Id: id, + Status: MergeStatusInvalid, + Reason: reason, + } +} diff --git a/migration3/before/identity/common.go b/migration3/before/identity/common.go new file mode 100644 index 0000000..f1c698a --- /dev/null +++ b/migration3/before/identity/common.go @@ -0,0 +1,37 @@ +package identity + +import ( + "encoding/json" + "errors" + "fmt" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" +) + +var ErrIdentityNotExist = errors.New("identity doesn't exist") + +func NewErrMultipleMatch(matching []entity.Id) *entity.ErrMultipleMatch { + return entity.NewErrMultipleMatch("identity", matching) +} + +// Custom unmarshaling function to allow package user to delegate +// the decoding of an Identity and distinguish between an Identity +// and a Bare. +// +// If the given message has a "id" field, it's considered being a proper Identity. 
+func UnmarshalJSON(raw json.RawMessage) (Interface, error) { + aux := &IdentityStub{} + + // First try to decode and load as a normal Identity + err := json.Unmarshal(raw, &aux) + if err == nil && aux.Id() != "" { + return aux, nil + } + + // abort if we have an error other than the wrong type + if _, ok := err.(*json.UnmarshalTypeError); err != nil && !ok { + return nil, err + } + + return nil, fmt.Errorf("unknown identity type") +} diff --git a/migration3/before/identity/identity.go b/migration3/before/identity/identity.go new file mode 100644 index 0000000..d776582 --- /dev/null +++ b/migration3/before/identity/identity.go @@ -0,0 +1,632 @@ +// Package identity contains the identity data model and low-level related functions +package identity + +import ( + "encoding/json" + "fmt" + "reflect" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" +) + +const identityRefPattern = "refs/identities/" +const identityRemoteRefPattern = "refs/remotes/%s/identities/" +const versionEntryName = "version" +const identityConfigKey = "git-bug.identity" + +var ErrNonFastForwardMerge = errors.New("non fast-forward identity merge") +var ErrNoIdentitySet = errors.New("No identity is set.\n" + + "To interact with bugs, an identity first needs to be created using " + + "\"git bug user create\"") +var ErrMultipleIdentitiesSet = errors.New("multiple user identities set") + +func NewErrMultipleMatchIdentity(matching []entity.Id) *entity.ErrMultipleMatch { + return entity.NewErrMultipleMatch("identity", matching) +} + +var _ Interface = &Identity{} +var _ entity.Interface = &Identity{} + +type Identity struct { + // Id used as unique identifier + id entity.Id + + // all the successive version of 
the identity + versions []*Version + + // not serialized + lastCommit repository.Hash +} + +func NewIdentity(name string, email string) *Identity { + return &Identity{ + id: entity.UnsetId, + versions: []*Version{ + { + name: name, + email: email, + nonce: makeNonce(20), + }, + }, + } +} + +func NewIdentityFull(name string, email string, login string, avatarUrl string) *Identity { + return &Identity{ + id: entity.UnsetId, + versions: []*Version{ + { + name: name, + email: email, + login: login, + avatarURL: avatarUrl, + nonce: makeNonce(20), + }, + }, + } +} + +// NewFromGitUser will query the repository for user detail and +// build the corresponding Identity +func NewFromGitUser(repo repository.Repo) (*Identity, error) { + name, err := repo.GetUserName() + if err != nil { + return nil, err + } + if name == "" { + return nil, errors.New("user name is not configured in git yet. Please use `git config --global user.name \"John Doe\"`") + } + + email, err := repo.GetUserEmail() + if err != nil { + return nil, err + } + if email == "" { + return nil, errors.New("user name is not configured in git yet. Please use `git config --global user.email johndoe@example.com`") + } + + return NewIdentity(name, email), nil +} + +// MarshalJSON will only serialize the id +func (i *Identity) MarshalJSON() ([]byte, error) { + return json.Marshal(&IdentityStub{ + id: i.id, + }) +} + +// UnmarshalJSON will only read the id +// Users of this package are expected to run Load() to load +// the remaining data from the identities data in git. 
+func (i *Identity) UnmarshalJSON(data []byte) error { + panic("identity should be loaded with identity.UnmarshalJSON") +} + +// ReadLocal load a local Identity from the identities data available in git +func ReadLocal(repo repository.Repo, id entity.Id) (*Identity, error) { + ref := fmt.Sprintf("%s%s", identityRefPattern, id) + return read(repo, ref) +} + +// ReadRemote load a remote Identity from the identities data available in git +func ReadRemote(repo repository.Repo, remote string, id string) (*Identity, error) { + ref := fmt.Sprintf(identityRemoteRefPattern, remote) + id + return read(repo, ref) +} + +// read will load and parse an identity from git +func read(repo repository.Repo, ref string) (*Identity, error) { + refSplit := strings.Split(ref, "/") + id := entity.Id(refSplit[len(refSplit)-1]) + + if err := id.Validate(); err != nil { + return nil, errors.Wrap(err, "invalid ref") + } + + hashes, err := repo.ListCommits(ref) + + // TODO: this is not perfect, it might be a command invoke error + if err != nil { + return nil, ErrIdentityNotExist + } + + i := &Identity{ + id: id, + } + + for _, hash := range hashes { + entries, err := repo.ReadTree(hash) + if err != nil { + return nil, errors.Wrap(err, "can't list git tree entries") + } + + if len(entries) != 1 { + return nil, fmt.Errorf("invalid identity data at hash %s", hash) + } + + entry := entries[0] + + if entry.Name != versionEntryName { + return nil, fmt.Errorf("invalid identity data at hash %s", hash) + } + + data, err := repo.ReadData(entry.Hash) + if err != nil { + return nil, errors.Wrap(err, "failed to read git blob data") + } + + var version Version + err = json.Unmarshal(data, &version) + + if err != nil { + return nil, errors.Wrapf(err, "failed to decode Identity version json %s", hash) + } + + // tag the version with the commit hash + version.commitHash = hash + i.lastCommit = hash + + i.versions = append(i.versions, &version) + } + + return i, nil +} + +// ListLocalIds list all the available 
local identity ids +func ListLocalIds(repo repository.Repo) ([]entity.Id, error) { + refs, err := repo.ListRefs(identityRefPattern) + if err != nil { + return nil, err + } + + return entity.RefsToIds(refs), nil +} + +// RemoveIdentity will remove a local identity from its entity.Id +func RemoveIdentity(repo repository.ClockedRepo, id entity.Id) error { + var fullMatches []string + + refs, err := repo.ListRefs(identityRefPattern + id.String()) + if err != nil { + return err + } + if len(refs) > 1 { + return NewErrMultipleMatchIdentity(entity.RefsToIds(refs)) + } + if len(refs) == 1 { + // we have the identity locally + fullMatches = append(fullMatches, refs[0]) + } + + remotes, err := repo.GetRemotes() + if err != nil { + return err + } + + for remote := range remotes { + remotePrefix := fmt.Sprintf(identityRemoteRefPattern+id.String(), remote) + remoteRefs, err := repo.ListRefs(remotePrefix) + if err != nil { + return err + } + if len(remoteRefs) > 1 { + return NewErrMultipleMatchIdentity(entity.RefsToIds(refs)) + } + if len(remoteRefs) == 1 { + // found the identity in a remote + fullMatches = append(fullMatches, remoteRefs[0]) + } + } + + if len(fullMatches) == 0 { + return ErrIdentityNotExist + } + + for _, ref := range fullMatches { + err = repo.RemoveRef(ref) + if err != nil { + return err + } + } + + return nil +} + +type StreamedIdentity struct { + Identity *Identity + Err error +} + +// ReadAllLocal read and parse all local Identity +func ReadAllLocal(repo repository.ClockedRepo) <-chan StreamedIdentity { + return readAll(repo, identityRefPattern) +} + +// ReadAllRemote read and parse all remote Identity for a given remote +func ReadAllRemote(repo repository.ClockedRepo, remote string) <-chan StreamedIdentity { + refPrefix := fmt.Sprintf(identityRemoteRefPattern, remote) + return readAll(repo, refPrefix) +} + +// readAll read and parse all available bug with a given ref prefix +func readAll(repo repository.ClockedRepo, refPrefix string) <-chan 
StreamedIdentity { + out := make(chan StreamedIdentity) + + go func() { + defer close(out) + + refs, err := repo.ListRefs(refPrefix) + if err != nil { + out <- StreamedIdentity{Err: err} + return + } + + for _, ref := range refs { + b, err := read(repo, ref) + + if err != nil { + out <- StreamedIdentity{Err: err} + return + } + + out <- StreamedIdentity{Identity: b} + } + }() + + return out +} + +type Mutator struct { + Name string + Login string + Email string + AvatarUrl string + Keys []*Key +} + +// Mutate allow to create a new version of the Identity in one go +func (i *Identity) Mutate(f func(orig Mutator) Mutator) { + orig := Mutator{ + Name: i.Name(), + Email: i.Email(), + Login: i.Login(), + AvatarUrl: i.AvatarUrl(), + Keys: i.Keys(), + } + mutated := f(orig) + if reflect.DeepEqual(orig, mutated) { + return + } + i.versions = append(i.versions, &Version{ + name: mutated.Name, + email: mutated.Email, + login: mutated.Login, + avatarURL: mutated.AvatarUrl, + keys: mutated.Keys, + }) +} + +// Write the identity into the Repository. In particular, this ensure that +// the Id is properly set. +func (i *Identity) Commit(repo repository.ClockedRepo) error { + // Todo: check for mismatch between memory and commit data + + if !i.NeedCommit() { + return fmt.Errorf("can't commit an identity with no pending version") + } + + if err := i.Validate(); err != nil { + return errors.Wrap(err, "can't commit an identity with invalid data") + } + + for _, v := range i.versions { + if v.commitHash != "" { + i.lastCommit = v.commitHash + // ignore already commit versions + continue + } + + // get the times where new versions starts to be valid + // TODO: instead of this hardcoded clock for bugs only, this need to be + // a vector of edit clock, one for each entity (bug, PR, config ..) 
+ bugEditClock, err := repo.GetOrCreateClock("bug-edit") + if err != nil { + return err + } + + v.time = bugEditClock.Time() + v.unixTime = time.Now().Unix() + + blobHash, err := v.Write(repo) + if err != nil { + return err + } + + // Make a git tree referencing the blob + tree := []repository.TreeEntry{ + {ObjectType: repository.Blob, Hash: blobHash, Name: versionEntryName}, + } + + treeHash, err := repo.StoreTree(tree) + if err != nil { + return err + } + + var commitHash repository.Hash + if i.lastCommit != "" { + commitHash, err = repo.StoreCommitWithParent(treeHash, i.lastCommit) + } else { + commitHash, err = repo.StoreCommit(treeHash) + } + + if err != nil { + return err + } + + i.lastCommit = commitHash + v.commitHash = commitHash + + // if it was the first commit, use the commit hash as the Identity id + if i.id == "" || i.id == entity.UnsetId { + i.id = entity.Id(commitHash) + } + } + + if i.id == "" { + panic("identity with no id") + } + + ref := fmt.Sprintf("%s%s", identityRefPattern, i.id) + err := repo.UpdateRef(ref, i.lastCommit) + + if err != nil { + return err + } + + return nil +} + +func (i *Identity) CommitAsNeeded(repo repository.ClockedRepo) error { + if !i.NeedCommit() { + return nil + } + return i.Commit(repo) +} + +func (i *Identity) NeedCommit() bool { + for _, v := range i.versions { + if v.commitHash == "" { + return true + } + } + + return false +} + +// Merge will merge a different version of the same Identity +// +// To make sure that an Identity history can't be altered, a strict fast-forward +// only policy is applied here. As an Identity should be tied to a single user, this +// should work in practice but it does leave a possibility that a user would edit his +// Identity from two different repo concurrently and push the changes in a non-centralized +// network of repositories. 
In this case, it would result in some of the repo accepting one +// version and some other accepting another, preventing the network in general to converge +// to the same result. This would create a sort of partition of the network, and manual +// cleaning would be required. +// +// An alternative approach would be to have a determinist rebase: +// - any commits present in both local and remote version would be kept, never changed. +// - newer commits would be merged in a linear chain of commits, ordered based on the +// Lamport time +// +// However, this approach leave the possibility, in the case of a compromised crypto keys, +// of forging a new version with a bogus Lamport time to be inserted before a legit version, +// invalidating the correct version and hijacking the Identity. There would only be a short +// period of time where this would be possible (before the network converge) but I'm not +// confident enough to implement that. I choose the strict fast-forward only approach, +// despite it's potential problem with two different version as mentioned above. +func (i *Identity) Merge(repo repository.Repo, other *Identity) (bool, error) { + if i.id != other.id { + return false, errors.New("merging unrelated identities is not supported") + } + + if i.lastCommit == "" || other.lastCommit == "" { + return false, errors.New("can't merge identities that has never been stored") + } + + modified := false + for j, otherVersion := range other.versions { + // if there is more version in other, take them + if len(i.versions) == j { + i.versions = append(i.versions, otherVersion) + i.lastCommit = otherVersion.commitHash + modified = true + } + + // we have a non fast-forward merge. 
+ // as explained in the doc above, refusing to merge + if i.versions[j].commitHash != otherVersion.commitHash { + return false, ErrNonFastForwardMerge + } + } + + if modified { + err := repo.UpdateRef(identityRefPattern+i.id.String(), i.lastCommit) + if err != nil { + return false, err + } + } + + return false, nil +} + +// Validate check if the Identity data is valid +func (i *Identity) Validate() error { + lastTime := lamport.Time(0) + + if len(i.versions) == 0 { + return fmt.Errorf("no version") + } + + for _, v := range i.versions { + if err := v.Validate(); err != nil { + return err + } + + if v.commitHash != "" && v.time < lastTime { + return fmt.Errorf("non-chronological version (%d --> %d)", lastTime, v.time) + } + + lastTime = v.time + } + + // The identity Id should be the hash of the first commit + if i.versions[0].commitHash != "" && string(i.versions[0].commitHash) != i.id.String() { + return fmt.Errorf("identity id should be the first commit hash") + } + + return nil +} + +func (i *Identity) lastVersion() *Version { + if len(i.versions) <= 0 { + panic("no version at all") + } + + return i.versions[len(i.versions)-1] +} + +// Id return the Identity identifier +func (i *Identity) Id() entity.Id { + if i.id == "" || i.id == entity.UnsetId { + // simply panic as it would be a coding error + // (using an id of an identity not stored yet) + panic("no id yet") + } + return i.id +} + +// Name return the last version of the name +func (i *Identity) Name() string { + return i.lastVersion().name +} + +// Email return the last version of the email +func (i *Identity) Email() string { + return i.lastVersion().email +} + +// Login return the last version of the login +func (i *Identity) Login() string { + return i.lastVersion().login +} + +// AvatarUrl return the last version of the Avatar URL +func (i *Identity) AvatarUrl() string { + return i.lastVersion().avatarURL +} + +// Keys return the last version of the valid keys +func (i *Identity) Keys() []*Key { + 
return i.lastVersion().keys +} + +// ValidKeysAtTime return the set of keys valid at a given lamport time +func (i *Identity) ValidKeysAtTime(time lamport.Time) []*Key { + var result []*Key + + for _, v := range i.versions { + if v.time > time { + return result + } + + result = v.keys + } + + return result +} + +// DisplayName return a non-empty string to display, representing the +// identity, based on the non-empty values. +func (i *Identity) DisplayName() string { + switch { + case i.Name() == "" && i.Login() != "": + return i.Login() + case i.Name() != "" && i.Login() == "": + return i.Name() + case i.Name() != "" && i.Login() != "": + return fmt.Sprintf("%s (%s)", i.Name(), i.Login()) + } + + panic("invalid person data") +} + +// IsProtected return true if the chain of git commits started to be signed. +// If that's the case, only signed commit with a valid key for this identity can be added. +func (i *Identity) IsProtected() bool { + // Todo + return false +} + +// LastModificationLamportTime return the Lamport time at which the last version of the identity became valid. +func (i *Identity) LastModificationLamport() lamport.Time { + return i.lastVersion().time +} + +// LastModification return the timestamp at which the last version of the identity became valid. +func (i *Identity) LastModification() timestamp.Timestamp { + return timestamp.Timestamp(i.lastVersion().unixTime) +} + +// SetMetadata store arbitrary metadata along the last not-commit Version. +// If the Version has been commit to git already, a new identical version is added and will need to be +// commit. +func (i *Identity) SetMetadata(key string, value string) { + if i.lastVersion().commitHash != "" { + i.versions = append(i.versions, i.lastVersion().Clone()) + } + i.lastVersion().SetMetadata(key, value) +} + +// ImmutableMetadata return all metadata for this Identity, accumulated from each Version. +// If multiple value are found, the first defined takes precedence. 
+func (i *Identity) ImmutableMetadata() map[string]string { + metadata := make(map[string]string) + + for _, version := range i.versions { + for key, value := range version.metadata { + if _, has := metadata[key]; !has { + metadata[key] = value + } + } + } + + return metadata +} + +// MutableMetadata return all metadata for this Identity, accumulated from each Version. +// If multiple value are found, the last defined takes precedence. +func (i *Identity) MutableMetadata() map[string]string { + metadata := make(map[string]string) + + for _, version := range i.versions { + for key, value := range version.metadata { + metadata[key] = value + } + } + + return metadata +} + +// addVersionForTest add a new version to the identity +// Only for testing ! +func (i *Identity) addVersionForTest(version *Version) { + i.versions = append(i.versions, version) +} diff --git a/migration3/before/identity/identity_actions.go b/migration3/before/identity/identity_actions.go new file mode 100644 index 0000000..51fc3ad --- /dev/null +++ b/migration3/before/identity/identity_actions.go @@ -0,0 +1,132 @@ +package identity + +import ( + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +// Fetch retrieve updates from a remote +// This does not change the local identities state +func Fetch(repo repository.Repo, remote string) (string, error) { + // "refs/identities/*:refs/remotes//identities/*" + remoteRefSpec := fmt.Sprintf(identityRemoteRefPattern, remote) + fetchRefSpec := fmt.Sprintf("%s*:%s*", identityRefPattern, remoteRefSpec) + + return repo.FetchRefs(remote, fetchRefSpec) +} + +// Push update a remote with the local changes +func Push(repo repository.Repo, remote string) (string, error) { + // "refs/identities/*:refs/identities/*" + refspec := fmt.Sprintf("%s*:%s*", identityRefPattern, identityRefPattern) + + return 
repo.PushRefs(remote, refspec) +} + +// Pull will do a Fetch + MergeAll +// This function will return an error if a merge fail +func Pull(repo repository.ClockedRepo, remote string) error { + _, err := Fetch(repo, remote) + if err != nil { + return err + } + + for merge := range MergeAll(repo, remote) { + if merge.Err != nil { + return merge.Err + } + if merge.Status == entity.MergeStatusInvalid { + return errors.Errorf("merge failure: %s", merge.Reason) + } + } + + return nil +} + +// MergeAll will merge all the available remote identity +func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeResult { + out := make(chan entity.MergeResult) + + go func() { + defer close(out) + + remoteRefSpec := fmt.Sprintf(identityRemoteRefPattern, remote) + remoteRefs, err := repo.ListRefs(remoteRefSpec) + + if err != nil { + out <- entity.MergeResult{Err: err} + return + } + + for _, remoteRef := range remoteRefs { + refSplit := strings.Split(remoteRef, "/") + id := entity.Id(refSplit[len(refSplit)-1]) + + if err := id.Validate(); err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "invalid ref").Error()) + continue + } + + remoteIdentity, err := read(repo, remoteRef) + + if err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote identity is not readable").Error()) + continue + } + + // Check for error in remote data + if err := remoteIdentity.Validate(); err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote identity is invalid").Error()) + continue + } + + localRef := identityRefPattern + remoteIdentity.Id().String() + localExist, err := repo.RefExist(localRef) + + if err != nil { + out <- entity.NewMergeError(err, id) + continue + } + + // the identity is not local yet, simply create the reference + if !localExist { + err := repo.CopyRef(remoteRef, localRef) + + if err != nil { + out <- entity.NewMergeError(err, id) + return + } + + out <- entity.NewMergeStatus(entity.MergeStatusNew, 
id, remoteIdentity) + continue + } + + localIdentity, err := read(repo, localRef) + + if err != nil { + out <- entity.NewMergeError(errors.Wrap(err, "local identity is not readable"), id) + return + } + + updated, err := localIdentity.Merge(repo, remoteIdentity) + + if err != nil { + out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "merge failed").Error()) + return + } + + if updated { + out <- entity.NewMergeStatus(entity.MergeStatusUpdated, id, localIdentity) + } else { + out <- entity.NewMergeStatus(entity.MergeStatusNothing, id, localIdentity) + } + } + }() + + return out +} diff --git a/migration3/before/identity/identity_actions_test.go b/migration3/before/identity/identity_actions_test.go new file mode 100644 index 0000000..c17e248 --- /dev/null +++ b/migration3/before/identity/identity_actions_test.go @@ -0,0 +1,152 @@ +package identity + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +func TestPushPull(t *testing.T) { + repoA, repoB, remote := repository.SetupReposAndRemote() + defer repository.CleanupTestRepos(repoA, repoB, remote) + + identity1 := NewIdentity("name1", "email1") + err := identity1.Commit(repoA) + require.NoError(t, err) + + // A --> remote --> B + _, err = Push(repoA, "origin") + require.NoError(t, err) + + err = Pull(repoB, "origin") + require.NoError(t, err) + + identities := allIdentities(t, ReadAllLocal(repoB)) + + if len(identities) != 1 { + t.Fatal("Unexpected number of bugs") + } + + // B --> remote --> A + identity2 := NewIdentity("name2", "email2") + err = identity2.Commit(repoB) + require.NoError(t, err) + + _, err = Push(repoB, "origin") + require.NoError(t, err) + + err = Pull(repoA, "origin") + require.NoError(t, err) + + identities = allIdentities(t, ReadAllLocal(repoA)) + + if len(identities) != 2 { + t.Fatal("Unexpected number of bugs") + } + + // Update both + + identity1.addVersionForTest(&Version{ + name: "name1b", 
+ email: "email1b", + }) + err = identity1.Commit(repoA) + require.NoError(t, err) + + identity2.addVersionForTest(&Version{ + name: "name2b", + email: "email2b", + }) + err = identity2.Commit(repoB) + require.NoError(t, err) + + // A --> remote --> B + + _, err = Push(repoA, "origin") + require.NoError(t, err) + + err = Pull(repoB, "origin") + require.NoError(t, err) + + identities = allIdentities(t, ReadAllLocal(repoB)) + + if len(identities) != 2 { + t.Fatal("Unexpected number of bugs") + } + + // B --> remote --> A + + _, err = Push(repoB, "origin") + require.NoError(t, err) + + err = Pull(repoA, "origin") + require.NoError(t, err) + + identities = allIdentities(t, ReadAllLocal(repoA)) + + if len(identities) != 2 { + t.Fatal("Unexpected number of bugs") + } + + // Concurrent update + + identity1.addVersionForTest(&Version{ + name: "name1c", + email: "email1c", + }) + err = identity1.Commit(repoA) + require.NoError(t, err) + + identity1B, err := ReadLocal(repoB, identity1.Id()) + require.NoError(t, err) + + identity1B.addVersionForTest(&Version{ + name: "name1concurrent", + email: "email1concurrent", + }) + err = identity1B.Commit(repoB) + require.NoError(t, err) + + // A --> remote --> B + + _, err = Push(repoA, "origin") + require.NoError(t, err) + + // Pulling a non-fast-forward update should fail + err = Pull(repoB, "origin") + require.Error(t, err) + + identities = allIdentities(t, ReadAllLocal(repoB)) + + if len(identities) != 2 { + t.Fatal("Unexpected number of bugs") + } + + // B --> remote --> A + + // Pushing a non-fast-forward update should fail + _, err = Push(repoB, "origin") + require.Error(t, err) + + err = Pull(repoA, "origin") + require.NoError(t, err) + + identities = allIdentities(t, ReadAllLocal(repoA)) + + if len(identities) != 2 { + t.Fatal("Unexpected number of bugs") + } +} + +func allIdentities(t testing.TB, identities <-chan StreamedIdentity) []*Identity { + var result []*Identity + for streamed := range identities { + if streamed.Err 
!= nil { + t.Fatal(streamed.Err) + } + result = append(result, streamed.Identity) + } + return result +} diff --git a/migration3/before/identity/identity_stub.go b/migration3/before/identity/identity_stub.go new file mode 100644 index 0000000..4e46183 --- /dev/null +++ b/migration3/before/identity/identity_stub.go @@ -0,0 +1,105 @@ +package identity + +import ( + "encoding/json" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" +) + +var _ Interface = &IdentityStub{} + +// IdentityStub is an almost empty Identity, holding only the id. +// When a normal Identity is serialized into JSON, only the id is serialized. +// All the other data are stored in git in a chain of commit + a ref. +// When this JSON is deserialized, an IdentityStub is returned instead, to be replaced +// later by the proper Identity, loaded from the Repo. 
+type IdentityStub struct { + id entity.Id +} + +func (i *IdentityStub) MarshalJSON() ([]byte, error) { + // TODO: add a type marker + return json.Marshal(struct { + Id entity.Id `json:"id"` + }{ + Id: i.id, + }) +} + +func (i *IdentityStub) UnmarshalJSON(data []byte) error { + aux := struct { + Id entity.Id `json:"id"` + }{} + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + i.id = aux.Id + + return nil +} + +// Id return the Identity identifier +func (i *IdentityStub) Id() entity.Id { + return i.id +} + +func (IdentityStub) Name() string { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) Email() string { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) Login() string { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) AvatarUrl() string { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) Keys() []*Key { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) ValidKeysAtTime(_ lamport.Time) []*Key { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) DisplayName() string { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) Validate() error { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) CommitWithRepo(repo repository.ClockedRepo) error { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (i *IdentityStub) CommitAsNeededWithRepo(repo repository.ClockedRepo) error { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (IdentityStub) IsProtected() bool { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (i *IdentityStub) 
LastModificationLamport() lamport.Time { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (i *IdentityStub) LastModification() timestamp.Timestamp { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + +func (i *IdentityStub) NeedCommit() bool { + return false +} diff --git a/migration3/before/identity/identity_stub_test.go b/migration3/before/identity/identity_stub_test.go new file mode 100644 index 0000000..b01a718 --- /dev/null +++ b/migration3/before/identity/identity_stub_test.go @@ -0,0 +1,26 @@ +package identity + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIdentityStubSerialize(t *testing.T) { + before := &IdentityStub{ + id: "id1234", + } + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after IdentityStub + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + // enforce creating the Id + before.Id() + + assert.Equal(t, before, &after) +} diff --git a/migration3/before/identity/identity_test.go b/migration3/before/identity/identity_test.go new file mode 100644 index 0000000..ff41862 --- /dev/null +++ b/migration3/before/identity/identity_test.go @@ -0,0 +1,316 @@ +package identity + +import ( + "encoding/json" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +// Test the commit and load of an Identity with multiple versions +func TestIdentityCommitLoad(t *testing.T) { + mockRepo := repository.NewMockRepoForTest() + + // single version + + identity := &Identity{ + id: entity.UnsetId, + versions: []*Version{ + { + name: "René Descartes", + email: "rene.descartes@example.com", + }, + }, + } + + err := identity.Commit(mockRepo) + + assert.Nil(t, err) + assert.NotEmpty(t, identity.id) + + loaded, err := 
ReadLocal(mockRepo, identity.id) + assert.Nil(t, err) + commitsAreSet(t, loaded) + assert.Equal(t, identity, loaded) + + // multiple version + + identity = &Identity{ + id: entity.UnsetId, + versions: []*Version{ + { + time: 100, + name: "René Descartes", + email: "rene.descartes@example.com", + keys: []*Key{ + {PubKey: "pubkeyA"}, + }, + }, + { + time: 200, + name: "René Descartes", + email: "rene.descartes@example.com", + keys: []*Key{ + {PubKey: "pubkeyB"}, + }, + }, + { + time: 201, + name: "René Descartes", + email: "rene.descartes@example.com", + keys: []*Key{ + {PubKey: "pubkeyC"}, + }, + }, + }, + } + + err = identity.Commit(mockRepo) + + assert.Nil(t, err) + assert.NotEmpty(t, identity.id) + + loaded, err = ReadLocal(mockRepo, identity.id) + assert.Nil(t, err) + commitsAreSet(t, loaded) + assert.Equal(t, identity, loaded) + + // add more version + + identity.addVersionForTest(&Version{ + time: 201, + name: "René Descartes", + email: "rene.descartes@example.com", + keys: []*Key{ + {PubKey: "pubkeyD"}, + }, + }) + + identity.addVersionForTest(&Version{ + time: 300, + name: "René Descartes", + email: "rene.descartes@example.com", + keys: []*Key{ + {PubKey: "pubkeyE"}, + }, + }) + + err = identity.Commit(mockRepo) + + assert.Nil(t, err) + assert.NotEmpty(t, identity.id) + + loaded, err = ReadLocal(mockRepo, identity.id) + assert.Nil(t, err) + commitsAreSet(t, loaded) + assert.Equal(t, identity, loaded) +} + +func TestIdentityMutate(t *testing.T) { + identity := NewIdentity("René Descartes", "rene.descartes@example.com") + + assert.Len(t, identity.versions, 1) + + identity.Mutate(func(orig Mutator) Mutator { + orig.Email = "rene@descartes.fr" + orig.Name = "René" + orig.Login = "rene" + return orig + }) + + assert.Len(t, identity.versions, 2) + assert.Equal(t, identity.Email(), "rene@descartes.fr") + assert.Equal(t, identity.Name(), "René") + assert.Equal(t, identity.Login(), "rene") +} + +func commitsAreSet(t *testing.T, identity *Identity) { + for _, version 
:= range identity.versions { + assert.NotEmpty(t, version.commitHash) + } +} + +// Test that the correct crypto keys are returned for a given lamport time +func TestIdentity_ValidKeysAtTime(t *testing.T) { + identity := Identity{ + id: entity.UnsetId, + versions: []*Version{ + { + time: 100, + name: "René Descartes", + email: "rene.descartes@example.com", + keys: []*Key{ + {PubKey: "pubkeyA"}, + }, + }, + { + time: 200, + name: "René Descartes", + email: "rene.descartes@example.com", + keys: []*Key{ + {PubKey: "pubkeyB"}, + }, + }, + { + time: 201, + name: "René Descartes", + email: "rene.descartes@example.com", + keys: []*Key{ + {PubKey: "pubkeyC"}, + }, + }, + { + time: 201, + name: "René Descartes", + email: "rene.descartes@example.com", + keys: []*Key{ + {PubKey: "pubkeyD"}, + }, + }, + { + time: 300, + name: "René Descartes", + email: "rene.descartes@example.com", + keys: []*Key{ + {PubKey: "pubkeyE"}, + }, + }, + }, + } + + assert.Nil(t, identity.ValidKeysAtTime(10)) + assert.Equal(t, identity.ValidKeysAtTime(100), []*Key{{PubKey: "pubkeyA"}}) + assert.Equal(t, identity.ValidKeysAtTime(140), []*Key{{PubKey: "pubkeyA"}}) + assert.Equal(t, identity.ValidKeysAtTime(200), []*Key{{PubKey: "pubkeyB"}}) + assert.Equal(t, identity.ValidKeysAtTime(201), []*Key{{PubKey: "pubkeyD"}}) + assert.Equal(t, identity.ValidKeysAtTime(202), []*Key{{PubKey: "pubkeyD"}}) + assert.Equal(t, identity.ValidKeysAtTime(300), []*Key{{PubKey: "pubkeyE"}}) + assert.Equal(t, identity.ValidKeysAtTime(3000), []*Key{{PubKey: "pubkeyE"}}) +} + +// Test the immutable or mutable metadata search +func TestMetadata(t *testing.T) { + mockRepo := repository.NewMockRepoForTest() + + identity := NewIdentity("René Descartes", "rene.descartes@example.com") + + identity.SetMetadata("key1", "value1") + assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") + assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value1") + + err := identity.Commit(mockRepo) + assert.NoError(t, err) + + 
assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") + assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value1") + + // try override + identity.addVersionForTest(&Version{ + name: "René Descartes", + email: "rene.descartes@example.com", + }) + + identity.SetMetadata("key1", "value2") + assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") + assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value2") + + err = identity.Commit(mockRepo) + assert.NoError(t, err) + + // reload + loaded, err := ReadLocal(mockRepo, identity.id) + assert.Nil(t, err) + + assertHasKeyValue(t, loaded.ImmutableMetadata(), "key1", "value1") + assertHasKeyValue(t, loaded.MutableMetadata(), "key1", "value2") +} + +func assertHasKeyValue(t *testing.T, metadata map[string]string, key, value string) { + val, ok := metadata[key] + assert.True(t, ok) + assert.Equal(t, val, value) +} + +func TestJSON(t *testing.T) { + mockRepo := repository.NewMockRepoForTest() + + identity := &Identity{ + id: entity.UnsetId, + versions: []*Version{ + { + name: "René Descartes", + email: "rene.descartes@example.com", + }, + }, + } + + // commit to make sure we have an Id + err := identity.Commit(mockRepo) + assert.Nil(t, err) + assert.NotEmpty(t, identity.id) + + // serialize + data, err := json.Marshal(identity) + assert.NoError(t, err) + + // deserialize, got a IdentityStub with the same id + var i Interface + i, err = UnmarshalJSON(data) + assert.NoError(t, err) + assert.Equal(t, identity.id, i.Id()) + + // make sure we can load the identity properly + i, err = ReadLocal(mockRepo, i.Id()) + assert.NoError(t, err) +} + +func TestIdentityRemove(t *testing.T) { + repo := repository.CreateGoGitTestRepo(false) + remoteA := repository.CreateGoGitTestRepo(true) + remoteB := repository.CreateGoGitTestRepo(true) + defer repository.CleanupTestRepos(repo, remoteA, remoteB) + + print(filepath.Join("file://", remoteA.GetPath())) + err := repo.AddRemote("remoteA", 
filepath.Join("file://", remoteA.GetPath())) + require.NoError(t, err) + + err = repo.AddRemote("remoteB", "file://"+remoteB.GetPath()) + require.NoError(t, err) + + // generate an identity for testing + rene := NewIdentity("René Descartes", "rene@descartes.fr") + err = rene.Commit(repo) + require.NoError(t, err) + + _, err = Push(repo, "remoteA") + require.NoError(t, err) + + _, err = Push(repo, "remoteB") + require.NoError(t, err) + + _, err = Fetch(repo, "remoteA") + require.NoError(t, err) + + _, err = Fetch(repo, "remoteB") + require.NoError(t, err) + + err = RemoveIdentity(repo, rene.Id()) + require.NoError(t, err) + + _, err = ReadLocal(repo, rene.Id()) + require.Error(t, ErrIdentityNotExist, err) + + _, err = ReadRemote(repo, "remoteA", string(rene.Id())) + require.Error(t, ErrIdentityNotExist, err) + + _, err = ReadRemote(repo, "remoteB", string(rene.Id())) + require.Error(t, ErrIdentityNotExist, err) + + ids := ReadAllLocal(repo) + require.Len(t, ids, 0) +} diff --git a/migration3/before/identity/identity_user.go b/migration3/before/identity/identity_user.go new file mode 100644 index 0000000..c0b277d --- /dev/null +++ b/migration3/before/identity/identity_user.go @@ -0,0 +1,68 @@ +package identity + +import ( + "fmt" + "os" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +// SetUserIdentity store the user identity's id in the git config +func SetUserIdentity(repo repository.RepoConfig, identity *Identity) error { + return repo.LocalConfig().StoreString(identityConfigKey, identity.Id().String()) +} + +// GetUserIdentity read the current user identity, set with a git config entry +func GetUserIdentity(repo repository.Repo) (*Identity, error) { + id, err := GetUserIdentityId(repo) + if err != nil { + return nil, err + } + + i, err := ReadLocal(repo, id) + if err == ErrIdentityNotExist { + innerErr := 
repo.LocalConfig().RemoveAll(identityConfigKey) + if innerErr != nil { + _, _ = fmt.Fprintln(os.Stderr, errors.Wrap(innerErr, "can't clear user identity").Error()) + } + return nil, err + } + + return i, nil +} + +func GetUserIdentityId(repo repository.Repo) (entity.Id, error) { + val, err := repo.LocalConfig().ReadString(identityConfigKey) + if err == repository.ErrNoConfigEntry { + return entity.UnsetId, ErrNoIdentitySet + } + if err == repository.ErrMultipleConfigEntry { + return entity.UnsetId, ErrMultipleIdentitiesSet + } + if err != nil { + return entity.UnsetId, err + } + + var id = entity.Id(val) + + if err := id.Validate(); err != nil { + return entity.UnsetId, err + } + + return id, nil +} + +// IsUserIdentitySet say if the user has set his identity +func IsUserIdentitySet(repo repository.Repo) (bool, error) { + _, err := repo.LocalConfig().ReadString(identityConfigKey) + if err == repository.ErrNoConfigEntry { + return false, nil + } + if err != nil { + return false, err + } + return true, nil +} diff --git a/migration3/before/identity/interface.go b/migration3/before/identity/interface.go new file mode 100644 index 0000000..1b0ac0d --- /dev/null +++ b/migration3/before/identity/interface.go @@ -0,0 +1,58 @@ +package identity + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" +) + +type Interface interface { + entity.Interface + + // Name return the last version of the name + // Can be empty. + Name() string + + // Email return the last version of the email + // Can be empty. + Email() string + + // Login return the last version of the login + // Can be empty. + // Warning: this login can be defined when importing from a bridge but should *not* be + // used to identify an identity as multiple bridge with different login can map to the same + // identity. 
Use the metadata system for that usage instead. + Login() string + + // AvatarUrl return the last version of the Avatar URL + // Can be empty. + AvatarUrl() string + + // Keys return the last version of the valid keys + // Can be empty. + Keys() []*Key + + // ValidKeysAtTime return the set of keys valid at a given lamport time + // Can be empty. + ValidKeysAtTime(time lamport.Time) []*Key + + // DisplayName return a non-empty string to display, representing the + // identity, based on the non-empty values. + DisplayName() string + + // Validate check if the Identity data is valid + Validate() error + + // IsProtected return true if the chain of git commits started to be signed. + // If that's the case, only signed commit with a valid key for this identity can be added. + IsProtected() bool + + // LastModificationLamportTime return the Lamport time at which the last version of the identity became valid. + LastModificationLamport() lamport.Time + + // LastModification return the timestamp at which the last version of the identity became valid. 
+ LastModification() timestamp.Timestamp + + // Indicate that the in-memory state changed and need to be commit in the repository + NeedCommit() bool +} diff --git a/migration3/before/identity/key.go b/migration3/before/identity/key.go new file mode 100644 index 0000000..cc94839 --- /dev/null +++ b/migration3/before/identity/key.go @@ -0,0 +1,18 @@ +package identity + +type Key struct { + // The GPG fingerprint of the key + Fingerprint string `json:"fingerprint"` + PubKey string `json:"pub_key"` +} + +func (k *Key) Validate() error { + // Todo + + return nil +} + +func (k *Key) Clone() *Key { + clone := *k + return &clone +} diff --git a/migration3/before/identity/resolver.go b/migration3/before/identity/resolver.go new file mode 100644 index 0000000..61d09eb --- /dev/null +++ b/migration3/before/identity/resolver.go @@ -0,0 +1,36 @@ +package identity + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +// Resolver define the interface of an Identity resolver, able to load +// an identity from, for example, a repo or a cache. 
+type Resolver interface { + ResolveIdentity(id entity.Id) (Interface, error) +} + +// SimpleResolver is a Resolver loading Identities directly from a Repo +type SimpleResolver struct { + repo repository.Repo +} + +func NewSimpleResolver(repo repository.Repo) *SimpleResolver { + return &SimpleResolver{repo: repo} +} + +func (r *SimpleResolver) ResolveIdentity(id entity.Id) (Interface, error) { + return ReadLocal(r.repo, id) +} + +// StubResolver is a Resolver that doesn't load anything, only returning IdentityStub instances +type StubResolver struct{} + +func NewStubResolver() *StubResolver { + return &StubResolver{} +} + +func (s *StubResolver) ResolveIdentity(id entity.Id) (Interface, error) { + return &IdentityStub{id: id}, nil +} diff --git a/migration3/before/identity/version.go b/migration3/before/identity/version.go new file mode 100644 index 0000000..550999e --- /dev/null +++ b/migration3/before/identity/version.go @@ -0,0 +1,228 @@ +package identity + +import ( + "crypto/rand" + "encoding/json" + "fmt" + "strings" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/repository" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" + "github.com/MichaelMure/git-bug-migration/migration3/before/util/text" +) + +// 1: original format +const formatVersion = 1 + +// Version is a complete set of information about an Identity at a point in time. +type Version struct { + // The lamport time at which this version become effective + // The reference time is the bug edition lamport clock + // It must be the first field in this struct due to https://github.com/golang/go/issues/599 + // + // TODO: BREAKING CHANGE - this need to actually be one edition lamport time **per entity** + // This is not a problem right now but will be when more entities are added (pull-request, config ...) 
+ time lamport.Time + unixTime int64 + + name string + email string // as defined in git or from a bridge when importing the identity + login string // from a bridge when importing the identity + avatarURL string + + // The set of keys valid at that time, from this version onward, until they get removed + // in a new version. This allow to have multiple key for the same identity (e.g. one per + // device) as well as revoke key. + keys []*Key + + // This optional array is here to ensure a better randomness of the identity id to avoid collisions. + // It has no functional purpose and should be ignored. + // It is advised to fill this array if there is not enough entropy, e.g. if there is no keys. + nonce []byte + + // A set of arbitrary key/value to store metadata about a version or about an Identity in general. + metadata map[string]string + + // Not serialized + commitHash repository.Hash +} + +type VersionJSON struct { + // Additional field to version the data + FormatVersion uint `json:"version"` + + Time lamport.Time `json:"time"` + UnixTime int64 `json:"unix_time"` + Name string `json:"name,omitempty"` + Email string `json:"email,omitempty"` + Login string `json:"login,omitempty"` + AvatarUrl string `json:"avatar_url,omitempty"` + Keys []*Key `json:"pub_keys,omitempty"` + Nonce []byte `json:"nonce,omitempty"` + Metadata map[string]string `json:"metadata,omitempty"` +} + +// Make a deep copy +func (v *Version) Clone() *Version { + clone := &Version{ + name: v.name, + email: v.email, + avatarURL: v.avatarURL, + keys: make([]*Key, len(v.keys)), + } + + for i, key := range v.keys { + clone.keys[i] = key.Clone() + } + + return clone +} + +func (v *Version) MarshalJSON() ([]byte, error) { + return json.Marshal(VersionJSON{ + FormatVersion: formatVersion, + Time: v.time, + UnixTime: v.unixTime, + Name: v.name, + Email: v.email, + Login: v.login, + AvatarUrl: v.avatarURL, + Keys: v.keys, + Nonce: v.nonce, + Metadata: v.metadata, + }) +} + +func (v *Version) 
UnmarshalJSON(data []byte) error { + var aux VersionJSON + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + if aux.FormatVersion != formatVersion { + return fmt.Errorf("unknown format version %v", aux.FormatVersion) + } + + v.time = aux.Time + v.unixTime = aux.UnixTime + v.name = aux.Name + v.email = aux.Email + v.login = aux.Login + v.avatarURL = aux.AvatarUrl + v.keys = aux.Keys + v.nonce = aux.Nonce + v.metadata = aux.Metadata + + return nil +} + +func (v *Version) Validate() error { + // time must be set after a commit + if v.commitHash != "" && v.unixTime == 0 { + return fmt.Errorf("unix time not set") + } + if v.commitHash != "" && v.time == 0 { + return fmt.Errorf("lamport time not set") + } + + if text.Empty(v.name) && text.Empty(v.login) { + return fmt.Errorf("either name or login should be set") + } + + if strings.Contains(v.name, "\n") { + return fmt.Errorf("name should be a single line") + } + + if !text.Safe(v.name) { + return fmt.Errorf("name is not fully printable") + } + + if strings.Contains(v.login, "\n") { + return fmt.Errorf("login should be a single line") + } + + if !text.Safe(v.login) { + return fmt.Errorf("login is not fully printable") + } + + if strings.Contains(v.email, "\n") { + return fmt.Errorf("email should be a single line") + } + + if !text.Safe(v.email) { + return fmt.Errorf("email is not fully printable") + } + + if v.avatarURL != "" && !text.ValidUrl(v.avatarURL) { + return fmt.Errorf("avatarUrl is not a valid URL") + } + + if len(v.nonce) > 64 { + return fmt.Errorf("nonce is too big") + } + + for _, k := range v.keys { + if err := k.Validate(); err != nil { + return errors.Wrap(err, "invalid key") + } + } + + return nil +} + +// Write will serialize and store the Version as a git blob and return +// its hash +func (v *Version) Write(repo repository.Repo) (repository.Hash, error) { + // make sure we don't write invalid data + err := v.Validate() + if err != nil { + return "", errors.Wrap(err, "validation 
error") + } + + data, err := json.Marshal(v) + + if err != nil { + return "", err + } + + hash, err := repo.StoreData(data) + + if err != nil { + return "", err + } + + return hash, nil +} + +func makeNonce(len int) []byte { + result := make([]byte, len) + _, err := rand.Read(result) + if err != nil { + panic(err) + } + return result +} + +// SetMetadata store arbitrary metadata about a version or an Identity in general +// If the Version has been commit to git already, it won't be overwritten. +func (v *Version) SetMetadata(key string, value string) { + if v.metadata == nil { + v.metadata = make(map[string]string) + } + + v.metadata[key] = value +} + +// GetMetadata retrieve arbitrary metadata about the Version +func (v *Version) GetMetadata(key string) (string, bool) { + val, ok := v.metadata[key] + return val, ok +} + +// AllMetadata return all metadata for this Version +func (v *Version) AllMetadata() map[string]string { + return v.metadata +} diff --git a/migration3/before/identity/version_test.go b/migration3/before/identity/version_test.go new file mode 100644 index 0000000..25848eb --- /dev/null +++ b/migration3/before/identity/version_test.go @@ -0,0 +1,41 @@ +package identity + +import ( + "encoding/json" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestVersionSerialize(t *testing.T) { + before := &Version{ + name: "name", + email: "email", + avatarURL: "avatarUrl", + keys: []*Key{ + { + Fingerprint: "fingerprint1", + PubKey: "pubkey1", + }, + { + Fingerprint: "fingerprint2", + PubKey: "pubkey2", + }, + }, + nonce: makeNonce(20), + metadata: map[string]string{ + "key1": "value1", + "key2": "value2", + }, + time: 3, + } + + data, err := json.Marshal(before) + assert.NoError(t, err) + + var after Version + err = json.Unmarshal(data, &after) + assert.NoError(t, err) + + assert.Equal(t, before, &after) +} diff --git a/migration3/before/repository/config.go b/migration3/before/repository/config.go new file mode 100644 index 0000000..4db8d4b 
--- /dev/null +++ b/migration3/before/repository/config.go @@ -0,0 +1,145 @@ +package repository + +import ( + "errors" + "strconv" + "time" +) + +var ( + ErrNoConfigEntry = errors.New("no config entry for the given key") + ErrMultipleConfigEntry = errors.New("multiple config entry for the given key") +) + +// Config represent the common function interacting with the repository config storage +type Config interface { + ConfigRead + ConfigWrite +} + +type ConfigRead interface { + // ReadAll reads all key/value pair matching the key prefix + ReadAll(keyPrefix string) (map[string]string, error) + + // ReadBool read a single boolean value from the config + // Return ErrNoConfigEntry or ErrMultipleConfigEntry if + // there is zero or more than one entry for this key + ReadBool(key string) (bool, error) + + // ReadBool read a single string value from the config + // Return ErrNoConfigEntry or ErrMultipleConfigEntry if + // there is zero or more than one entry for this key + ReadString(key string) (string, error) + + // ReadTimestamp read a single timestamp value from the config + // Return ErrNoConfigEntry or ErrMultipleConfigEntry if + // there is zero or more than one entry for this key + ReadTimestamp(key string) (time.Time, error) +} + +type ConfigWrite interface { + // Store writes a single key/value pair in the config + StoreString(key, value string) error + + // Store writes a key and timestamp value to the config + StoreTimestamp(key string, value time.Time) error + + // Store writes a key and boolean value to the config + StoreBool(key string, value bool) error + + // RemoveAll removes all key/value pair matching the key prefix + RemoveAll(keyPrefix string) error +} + +func ParseTimestamp(s string) (time.Time, error) { + timestamp, err := strconv.Atoi(s) + if err != nil { + return time.Time{}, err + } + + return time.Unix(int64(timestamp), 0), nil +} + +// mergeConfig is a helper to easily support RepoConfig.AnyConfig() +// from two separate local and global 
Config +func mergeConfig(local ConfigRead, global ConfigRead) *mergedConfig { + return &mergedConfig{ + local: local, + global: global, + } +} + +var _ ConfigRead = &mergedConfig{} + +type mergedConfig struct { + local ConfigRead + global ConfigRead +} + +func (m *mergedConfig) ReadAll(keyPrefix string) (map[string]string, error) { + values, err := m.global.ReadAll(keyPrefix) + if err != nil { + return nil, err + } + locals, err := m.local.ReadAll(keyPrefix) + if err != nil { + return nil, err + } + for k, val := range locals { + values[k] = val + } + return values, nil +} + +func (m *mergedConfig) ReadBool(key string) (bool, error) { + v, err := m.local.ReadBool(key) + if err == nil { + return v, nil + } + if err != ErrNoConfigEntry && err != ErrMultipleConfigEntry { + return false, err + } + return m.global.ReadBool(key) +} + +func (m *mergedConfig) ReadString(key string) (string, error) { + val, err := m.local.ReadString(key) + if err == nil { + return val, nil + } + if err != ErrNoConfigEntry && err != ErrMultipleConfigEntry { + return "", err + } + return m.global.ReadString(key) +} + +func (m *mergedConfig) ReadTimestamp(key string) (time.Time, error) { + val, err := m.local.ReadTimestamp(key) + if err == nil { + return val, nil + } + if err != ErrNoConfigEntry && err != ErrMultipleConfigEntry { + return time.Time{}, err + } + return m.global.ReadTimestamp(key) +} + +var _ ConfigWrite = &configPanicWriter{} + +type configPanicWriter struct{} + +func (c configPanicWriter) StoreString(key, value string) error { + panic("not implemented") +} + +func (c configPanicWriter) StoreTimestamp(key string, value time.Time) error { + panic("not implemented") +} + +func (c configPanicWriter) StoreBool(key string, value bool) error { + panic("not implemented") +} + +func (c configPanicWriter) RemoveAll(keyPrefix string) error { + panic("not implemented") +} diff --git a/migration3/before/repository/config_mem.go b/migration3/before/repository/config_mem.go new file mode 
100644 index 0000000..9725e8d --- /dev/null +++ b/migration3/before/repository/config_mem.go @@ -0,0 +1,94 @@ +package repository + +import ( + "fmt" + "strconv" + "strings" + "time" +) + +var _ Config = &MemConfig{} + +type MemConfig struct { + config map[string]string +} + +func NewMemConfig() *MemConfig { + return &MemConfig{ + config: make(map[string]string), + } +} + +func (mc *MemConfig) StoreString(key, value string) error { + mc.config[key] = value + return nil +} + +func (mc *MemConfig) StoreBool(key string, value bool) error { + return mc.StoreString(key, strconv.FormatBool(value)) +} + +func (mc *MemConfig) StoreTimestamp(key string, value time.Time) error { + return mc.StoreString(key, strconv.Itoa(int(value.Unix()))) +} + +func (mc *MemConfig) ReadAll(keyPrefix string) (map[string]string, error) { + result := make(map[string]string) + for key, val := range mc.config { + if strings.HasPrefix(key, keyPrefix) { + result[key] = val + } + } + return result, nil +} + +func (mc *MemConfig) ReadString(key string) (string, error) { + // unlike git, the mock can only store one value for the same key + val, ok := mc.config[key] + if !ok { + return "", ErrNoConfigEntry + } + + return val, nil +} + +func (mc *MemConfig) ReadBool(key string) (bool, error) { + // unlike git, the mock can only store one value for the same key + val, ok := mc.config[key] + if !ok { + return false, ErrNoConfigEntry + } + + return strconv.ParseBool(val) +} + +func (mc *MemConfig) ReadTimestamp(key string) (time.Time, error) { + value, err := mc.ReadString(key) + if err != nil { + return time.Time{}, err + } + + timestamp, err := strconv.Atoi(value) + if err != nil { + return time.Time{}, err + } + + return time.Unix(int64(timestamp), 0), nil +} + +// RmConfigs remove all key/value pair matching the key prefix +func (mc *MemConfig) RemoveAll(keyPrefix string) error { + found := false + for key := range mc.config { + if strings.HasPrefix(key, keyPrefix) { + delete(mc.config, key) + found = 
true + } + } + + if !found { + return fmt.Errorf("section not found") + } + + return nil +} diff --git a/migration3/before/repository/config_mem_test.go b/migration3/before/repository/config_mem_test.go new file mode 100644 index 0000000..d9c3385 --- /dev/null +++ b/migration3/before/repository/config_mem_test.go @@ -0,0 +1,7 @@ +package repository + +import "testing" + +func TestNewMemConfig(t *testing.T) { + testConfig(t, NewMemConfig()) +} diff --git a/migration3/before/repository/config_test.go b/migration3/before/repository/config_test.go new file mode 100644 index 0000000..2a76354 --- /dev/null +++ b/migration3/before/repository/config_test.go @@ -0,0 +1,54 @@ +package repository + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func TestMergedConfig(t *testing.T) { + local := NewMemConfig() + global := NewMemConfig() + merged := mergeConfig(local, global) + + require.NoError(t, global.StoreBool("bool", true)) + require.NoError(t, global.StoreString("string", "foo")) + require.NoError(t, global.StoreTimestamp("timestamp", time.Unix(1234, 0))) + + val1, err := merged.ReadBool("bool") + require.NoError(t, err) + require.Equal(t, val1, true) + + val2, err := merged.ReadString("string") + require.NoError(t, err) + require.Equal(t, val2, "foo") + + val3, err := merged.ReadTimestamp("timestamp") + require.NoError(t, err) + require.Equal(t, val3, time.Unix(1234, 0)) + + require.NoError(t, local.StoreBool("bool", false)) + require.NoError(t, local.StoreString("string", "bar")) + require.NoError(t, local.StoreTimestamp("timestamp", time.Unix(5678, 0))) + + val1, err = merged.ReadBool("bool") + require.NoError(t, err) + require.Equal(t, val1, false) + + val2, err = merged.ReadString("string") + require.NoError(t, err) + require.Equal(t, val2, "bar") + + val3, err = merged.ReadTimestamp("timestamp") + require.NoError(t, err) + require.Equal(t, val3, time.Unix(5678, 0)) + + all, err := merged.ReadAll("") + require.NoError(t, err) + 
require.Equal(t, all, map[string]string{ + "bool": "false", + "string": "bar", + "timestamp": "5678", + }) +} diff --git a/migration3/before/repository/config_testing.go b/migration3/before/repository/config_testing.go new file mode 100644 index 0000000..445f872 --- /dev/null +++ b/migration3/before/repository/config_testing.go @@ -0,0 +1,116 @@ +package repository + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" +) + +func testConfig(t *testing.T, config Config) { + // string + err := config.StoreString("section.key", "value") + require.NoError(t, err) + + val, err := config.ReadString("section.key") + require.NoError(t, err) + require.Equal(t, "value", val) + + // bool + err = config.StoreBool("section.true", true) + require.NoError(t, err) + + val2, err := config.ReadBool("section.true") + require.NoError(t, err) + require.Equal(t, true, val2) + + // timestamp + err = config.StoreTimestamp("section.time", time.Unix(1234, 0)) + require.NoError(t, err) + + val3, err := config.ReadTimestamp("section.time") + require.NoError(t, err) + require.Equal(t, time.Unix(1234, 0), val3) + + // ReadAll + configs, err := config.ReadAll("section") + require.NoError(t, err) + require.Equal(t, map[string]string{ + "section.key": "value", + "section.true": "true", + "section.time": "1234", + }, configs) + + // RemoveAll + err = config.RemoveAll("section.true") + require.NoError(t, err) + + configs, err = config.ReadAll("section") + require.NoError(t, err) + require.Equal(t, map[string]string{ + "section.key": "value", + "section.time": "1234", + }, configs) + + _, err = config.ReadBool("section.true") + require.Equal(t, ErrNoConfigEntry, err) + + err = config.RemoveAll("section.nonexistingkey") + require.Error(t, err) + + err = config.RemoveAll("section.key") + require.NoError(t, err) + + _, err = config.ReadString("section.key") + require.Equal(t, ErrNoConfigEntry, err) + + err = config.RemoveAll("nonexistingsection") + require.Error(t, err) + + err = 
config.RemoveAll("section.time") + require.NoError(t, err) + + err = config.RemoveAll("section") + require.Error(t, err) + + _, err = config.ReadString("section.key") + require.Error(t, err) + + err = config.RemoveAll("section.key") + require.Error(t, err) + + // section + subsections + require.NoError(t, config.StoreString("section.opt1", "foo")) + require.NoError(t, config.StoreString("section.opt2", "foo2")) + require.NoError(t, config.StoreString("section.subsection.opt1", "foo3")) + require.NoError(t, config.StoreString("section.subsection.opt2", "foo4")) + require.NoError(t, config.StoreString("section.subsection.subsection.opt1", "foo5")) + require.NoError(t, config.StoreString("section.subsection.subsection.opt2", "foo6")) + + all, err := config.ReadAll("section") + require.NoError(t, err) + require.Equal(t, map[string]string{ + "section.opt1": "foo", + "section.opt2": "foo2", + "section.subsection.opt1": "foo3", + "section.subsection.opt2": "foo4", + "section.subsection.subsection.opt1": "foo5", + "section.subsection.subsection.opt2": "foo6", + }, all) + + all, err = config.ReadAll("section.subsection") + require.NoError(t, err) + require.Equal(t, map[string]string{ + "section.subsection.opt1": "foo3", + "section.subsection.opt2": "foo4", + "section.subsection.subsection.opt1": "foo5", + "section.subsection.subsection.opt2": "foo6", + }, all) + + all, err = config.ReadAll("section.subsection.subsection") + require.NoError(t, err) + require.Equal(t, map[string]string{ + "section.subsection.subsection.opt1": "foo5", + "section.subsection.subsection.opt2": "foo6", + }, all) +} diff --git a/migration3/before/repository/git.go b/migration3/before/repository/git.go new file mode 100644 index 0000000..78503e1 --- /dev/null +++ b/migration3/before/repository/git.go @@ -0,0 +1,410 @@ +// Package repository contains helper methods for working with the Git repo. 
+package repository + +import ( + "bytes" + "fmt" + "path" + "strings" + "sync" + + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" +) + +const ( + clockPath = "git-bug" +) + +var _ ClockedRepo = &GitRepo{} +var _ TestedRepo = &GitRepo{} + +// GitRepo represents an instance of a (local) git repository. +type GitRepo struct { + gitCli + path string + + clocksMutex sync.Mutex + clocks map[string]lamport.Clock + + keyring Keyring +} + +// NewGitRepo determines if the given working directory is inside of a git repository, +// and returns the corresponding GitRepo instance if it is. +func NewGitRepo(path string, clockLoaders []ClockLoader) (*GitRepo, error) { + k, err := defaultKeyring() + if err != nil { + return nil, err + } + + repo := &GitRepo{ + gitCli: gitCli{path: path}, + path: path, + clocks: make(map[string]lamport.Clock), + keyring: k, + } + + // Check the repo and retrieve the root path + stdout, err := repo.runGitCommand("rev-parse", "--absolute-git-dir") + + // Now dir is fetched with "git rev-parse --git-dir". May be it can + // still return nothing in some cases. Then empty stdout check is + // kept. 
+ if err != nil || stdout == "" { + return nil, ErrNotARepo + } + + // Fix the path to be sure we are at the root + repo.path = stdout + repo.gitCli.path = stdout + + for _, loader := range clockLoaders { + allExist := true + for _, name := range loader.Clocks { + if _, err := repo.getClock(name); err != nil { + allExist = false + } + } + + if !allExist { + err = loader.Witnesser(repo) + if err != nil { + return nil, err + } + } + } + + return repo, nil +} + +// InitGitRepo create a new empty git repo at the given path +func InitGitRepo(path string) (*GitRepo, error) { + repo := &GitRepo{ + gitCli: gitCli{path: path}, + path: path + "/.git", + clocks: make(map[string]lamport.Clock), + } + + _, err := repo.runGitCommand("init", path) + if err != nil { + return nil, err + } + + return repo, nil +} + +// InitBareGitRepo create a new --bare empty git repo at the given path +func InitBareGitRepo(path string) (*GitRepo, error) { + repo := &GitRepo{ + gitCli: gitCli{path: path}, + path: path, + clocks: make(map[string]lamport.Clock), + } + + _, err := repo.runGitCommand("init", "--bare", path) + if err != nil { + return nil, err + } + + return repo, nil +} + +// LocalConfig give access to the repository scoped configuration +func (repo *GitRepo) LocalConfig() Config { + return newGitConfig(repo.gitCli, false) +} + +// GlobalConfig give access to the global scoped configuration +func (repo *GitRepo) GlobalConfig() Config { + return newGitConfig(repo.gitCli, true) +} + +// AnyConfig give access to a merged local/global configuration +func (repo *GitRepo) AnyConfig() ConfigRead { + return mergeConfig(repo.LocalConfig(), repo.GlobalConfig()) +} + +// Keyring give access to a user-wide storage for secrets +func (repo *GitRepo) Keyring() Keyring { + return repo.keyring +} + +// GetPath returns the path to the repo. 
+func (repo *GitRepo) GetPath() string { + return repo.path +} + +// GetUserName returns the name the the user has used to configure git +func (repo *GitRepo) GetUserName() (string, error) { + return repo.runGitCommand("config", "user.name") +} + +// GetUserEmail returns the email address that the user has used to configure git. +func (repo *GitRepo) GetUserEmail() (string, error) { + return repo.runGitCommand("config", "user.email") +} + +// GetCoreEditor returns the name of the editor that the user has used to configure git. +func (repo *GitRepo) GetCoreEditor() (string, error) { + return repo.runGitCommand("var", "GIT_EDITOR") +} + +// GetRemotes returns the configured remotes repositories. +func (repo *GitRepo) GetRemotes() (map[string]string, error) { + stdout, err := repo.runGitCommand("remote", "--verbose") + if err != nil { + return nil, err + } + + lines := strings.Split(stdout, "\n") + remotes := make(map[string]string, len(lines)) + + for _, line := range lines { + if strings.TrimSpace(line) == "" { + continue + } + elements := strings.Fields(line) + if len(elements) != 3 { + return nil, fmt.Errorf("git remote: unexpected output format: %s", line) + } + + remotes[elements[0]] = elements[1] + } + + return remotes, nil +} + +// FetchRefs fetch git refs from a remote +func (repo *GitRepo) FetchRefs(remote, refSpec string) (string, error) { + stdout, err := repo.runGitCommand("fetch", remote, refSpec) + + if err != nil { + return stdout, fmt.Errorf("failed to fetch from the remote '%s': %v", remote, err) + } + + return stdout, err +} + +// PushRefs push git refs to a remote +func (repo *GitRepo) PushRefs(remote string, refSpec string) (string, error) { + stdout, stderr, err := repo.runGitCommandRaw(nil, "push", remote, refSpec) + + if err != nil { + return stdout + stderr, fmt.Errorf("failed to push to the remote '%s': %v", remote, stderr) + } + return stdout + stderr, nil +} + +// StoreData will store arbitrary data and return the corresponding hash +func 
(repo *GitRepo) StoreData(data []byte) (Hash, error) { + var stdin = bytes.NewReader(data) + + stdout, err := repo.runGitCommandWithStdin(stdin, "hash-object", "--stdin", "-w") + + return Hash(stdout), err +} + +// ReadData will attempt to read arbitrary data from the given hash +func (repo *GitRepo) ReadData(hash Hash) ([]byte, error) { + var stdout bytes.Buffer + var stderr bytes.Buffer + + err := repo.runGitCommandWithIO(nil, &stdout, &stderr, "cat-file", "-p", string(hash)) + + if err != nil { + return []byte{}, err + } + + return stdout.Bytes(), nil +} + +// StoreTree will store a mapping key-->Hash as a Git tree +func (repo *GitRepo) StoreTree(entries []TreeEntry) (Hash, error) { + buffer := prepareTreeEntries(entries) + + stdout, err := repo.runGitCommandWithStdin(&buffer, "mktree") + + if err != nil { + return "", err + } + + return Hash(stdout), nil +} + +// StoreCommit will store a Git commit with the given Git tree +func (repo *GitRepo) StoreCommit(treeHash Hash) (Hash, error) { + stdout, err := repo.runGitCommand("commit-tree", string(treeHash)) + + if err != nil { + return "", err + } + + return Hash(stdout), nil +} + +// StoreCommitWithParent will store a Git commit with the given Git tree +func (repo *GitRepo) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) { + stdout, err := repo.runGitCommand("commit-tree", string(treeHash), + "-p", string(parent)) + + if err != nil { + return "", err + } + + return Hash(stdout), nil +} + +// UpdateRef will create or update a Git reference +func (repo *GitRepo) UpdateRef(ref string, hash Hash) error { + _, err := repo.runGitCommand("update-ref", ref, string(hash)) + + return err +} + +// RemoveRef will remove a Git reference +func (repo *GitRepo) RemoveRef(ref string) error { + _, err := repo.runGitCommand("update-ref", "-d", ref) + + return err +} + +// ListRefs will return a list of Git ref matching the given refspec +func (repo *GitRepo) ListRefs(refPrefix string) ([]string, error) { + stdout, 
err := repo.runGitCommand("for-each-ref", "--format=%(refname)", refPrefix) + + if err != nil { + return nil, err + } + + split := strings.Split(stdout, "\n") + + if len(split) == 1 && split[0] == "" { + return []string{}, nil + } + + return split, nil +} + +// RefExist will check if a reference exist in Git +func (repo *GitRepo) RefExist(ref string) (bool, error) { + stdout, err := repo.runGitCommand("for-each-ref", ref) + + if err != nil { + return false, err + } + + return stdout != "", nil +} + +// CopyRef will create a new reference with the same value as another one +func (repo *GitRepo) CopyRef(source string, dest string) error { + _, err := repo.runGitCommand("update-ref", dest, source) + + return err +} + +// ListCommits will return the list of commit hashes of a ref, in chronological order +func (repo *GitRepo) ListCommits(ref string) ([]Hash, error) { + stdout, err := repo.runGitCommand("rev-list", "--first-parent", "--reverse", ref) + + if err != nil { + return nil, err + } + + split := strings.Split(stdout, "\n") + + casted := make([]Hash, len(split)) + for i, line := range split { + casted[i] = Hash(line) + } + + return casted, nil + +} + +// ReadTree will return the list of entries in a Git tree +func (repo *GitRepo) ReadTree(hash Hash) ([]TreeEntry, error) { + stdout, err := repo.runGitCommand("ls-tree", string(hash)) + + if err != nil { + return nil, err + } + + return readTreeEntries(stdout) +} + +// FindCommonAncestor will return the last common ancestor of two chain of commit +func (repo *GitRepo) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) { + stdout, err := repo.runGitCommand("merge-base", string(hash1), string(hash2)) + + if err != nil { + return "", err + } + + return Hash(stdout), nil +} + +// GetTreeHash return the git tree hash referenced in a commit +func (repo *GitRepo) GetTreeHash(commit Hash) (Hash, error) { + stdout, err := repo.runGitCommand("rev-parse", string(commit)+"^{tree}") + + if err != nil { + return "", err + } 
+ + return Hash(stdout), nil +} + +// GetOrCreateClock return a Lamport clock stored in the Repo. +// If the clock doesn't exist, it's created. +func (repo *GitRepo) GetOrCreateClock(name string) (lamport.Clock, error) { + c, err := repo.getClock(name) + if err == nil { + return c, nil + } + if err != ErrClockNotExist { + return nil, err + } + + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + + p := path.Join(repo.path, clockPath, name+"-clock") + + c, err = lamport.NewPersistedClock(p) + if err != nil { + return nil, err + } + + repo.clocks[name] = c + return c, nil +} + +func (repo *GitRepo) getClock(name string) (lamport.Clock, error) { + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + + if c, ok := repo.clocks[name]; ok { + return c, nil + } + + p := path.Join(repo.path, clockPath, name+"-clock") + + c, err := lamport.LoadPersistedClock(p) + if err == nil { + repo.clocks[name] = c + return c, nil + } + if err == lamport.ErrClockNotExist { + return nil, ErrClockNotExist + } + return nil, err +} + +// AddRemote add a new remote to the repository +// Not in the interface because it's only used for testing +func (repo *GitRepo) AddRemote(name string, url string) error { + _, err := repo.runGitCommand("remote", "add", name, url) + + return err +} diff --git a/migration3/before/repository/git_cli.go b/migration3/before/repository/git_cli.go new file mode 100644 index 0000000..085b1cd --- /dev/null +++ b/migration3/before/repository/git_cli.go @@ -0,0 +1,56 @@ +package repository + +import ( + "bytes" + "fmt" + "io" + "os/exec" + "strings" +) + +// gitCli is a helper to launch CLI git commands +type gitCli struct { + path string +} + +// Run the given git command with the given I/O reader/writers, returning an error if it fails. 
+func (cli gitCli) runGitCommandWithIO(stdin io.Reader, stdout, stderr io.Writer, args ...string) error { + // make sure that the working directory for the command + // always exist, in particular when running "git init". + path := strings.TrimSuffix(cli.path, ".git") + + // fmt.Printf("[%s] Running git %s\n", path, strings.Join(args, " ")) + + cmd := exec.Command("git", args...) + cmd.Dir = path + cmd.Stdin = stdin + cmd.Stdout = stdout + cmd.Stderr = stderr + + return cmd.Run() +} + +// Run the given git command and return its stdout, or an error if the command fails. +func (cli gitCli) runGitCommandRaw(stdin io.Reader, args ...string) (string, string, error) { + var stdout bytes.Buffer + var stderr bytes.Buffer + err := cli.runGitCommandWithIO(stdin, &stdout, &stderr, args...) + return strings.TrimSpace(stdout.String()), strings.TrimSpace(stderr.String()), err +} + +// Run the given git command and return its stdout, or an error if the command fails. +func (cli gitCli) runGitCommandWithStdin(stdin io.Reader, args ...string) (string, error) { + stdout, stderr, err := cli.runGitCommandRaw(stdin, args...) + if err != nil { + if stderr == "" { + stderr = "Error running git command: " + strings.Join(args, " ") + } + err = fmt.Errorf(stderr) + } + return stdout, err +} + +// Run the given git command and return its stdout, or an error if the command fails. +func (cli gitCli) runGitCommand(args ...string) (string, error) { + return cli.runGitCommandWithStdin(nil, args...) 
+} diff --git a/migration3/before/repository/git_config.go b/migration3/before/repository/git_config.go new file mode 100644 index 0000000..b46cc69 --- /dev/null +++ b/migration3/before/repository/git_config.go @@ -0,0 +1,221 @@ +package repository + +import ( + "fmt" + "regexp" + "strconv" + "strings" + "time" + + "github.com/blang/semver" + "github.com/pkg/errors" +) + +var _ Config = &gitConfig{} + +type gitConfig struct { + cli gitCli + localityFlag string +} + +func newGitConfig(cli gitCli, global bool) *gitConfig { + localityFlag := "--local" + if global { + localityFlag = "--global" + } + return &gitConfig{ + cli: cli, + localityFlag: localityFlag, + } +} + +// StoreString store a single key/value pair in the config of the repo +func (gc *gitConfig) StoreString(key string, value string) error { + _, err := gc.cli.runGitCommand("config", gc.localityFlag, "--replace-all", key, value) + return err +} + +func (gc *gitConfig) StoreBool(key string, value bool) error { + return gc.StoreString(key, strconv.FormatBool(value)) +} + +func (gc *gitConfig) StoreTimestamp(key string, value time.Time) error { + return gc.StoreString(key, strconv.Itoa(int(value.Unix()))) +} + +// ReadAll read all key/value pair matching the key prefix +func (gc *gitConfig) ReadAll(keyPrefix string) (map[string]string, error) { + stdout, err := gc.cli.runGitCommand("config", gc.localityFlag, "--includes", "--get-regexp", keyPrefix) + + // / \ + // / ! 
\ + // ------- + // + // There can be a legitimate error here, but I see no portable way to + // distinguish them from the git error that say "no matching value exist" + if err != nil { + return nil, nil + } + + lines := strings.Split(stdout, "\n") + + result := make(map[string]string, len(lines)) + + for _, line := range lines { + if strings.TrimSpace(line) == "" { + continue + } + + parts := strings.SplitN(line, " ", 2) + result[parts[0]] = parts[1] + } + + return result, nil +} + +func (gc *gitConfig) ReadString(key string) (string, error) { + stdout, err := gc.cli.runGitCommand("config", gc.localityFlag, "--includes", "--get-all", key) + + // / \ + // / ! \ + // ------- + // + // There can be a legitimate error here, but I see no portable way to + // distinguish them from the git error that say "no matching value exist" + if err != nil { + return "", ErrNoConfigEntry + } + + lines := strings.Split(stdout, "\n") + + if len(lines) == 0 { + return "", ErrNoConfigEntry + } + if len(lines) > 1 { + return "", ErrMultipleConfigEntry + } + + return lines[0], nil +} + +func (gc *gitConfig) ReadBool(key string) (bool, error) { + val, err := gc.ReadString(key) + if err != nil { + return false, err + } + + return strconv.ParseBool(val) +} + +func (gc *gitConfig) ReadTimestamp(key string) (time.Time, error) { + value, err := gc.ReadString(key) + if err != nil { + return time.Time{}, err + } + return ParseTimestamp(value) +} + +func (gc *gitConfig) rmSection(keyPrefix string) error { + _, err := gc.cli.runGitCommand("config", gc.localityFlag, "--remove-section", keyPrefix) + return err +} + +func (gc *gitConfig) unsetAll(keyPrefix string) error { + _, err := gc.cli.runGitCommand("config", gc.localityFlag, "--unset-all", keyPrefix) + return err +} + +// return keyPrefix section +// example: sectionFromKey(a.b.c.d) return a.b.c +func sectionFromKey(keyPrefix string) string { + s := strings.Split(keyPrefix, ".") + if len(s) == 1 { + return keyPrefix + } + + return 
strings.Join(s[:len(s)-1], ".") +} + +// rmConfigs with git version lesser than 2.18 +func (gc *gitConfig) rmConfigsGitVersionLT218(keyPrefix string) error { + // try to remove key/value pair by key + err := gc.unsetAll(keyPrefix) + if err != nil { + return gc.rmSection(keyPrefix) + } + + m, err := gc.ReadAll(sectionFromKey(keyPrefix)) + if err != nil { + return err + } + + // if section doesn't have any left key/value remove the section + if len(m) == 0 { + return gc.rmSection(sectionFromKey(keyPrefix)) + } + + return nil +} + +// RmConfigs remove all key/value pair matching the key prefix +func (gc *gitConfig) RemoveAll(keyPrefix string) error { + // starting from git 2.18.0 sections are automatically deleted when the last existing + // key/value is removed. Before 2.18.0 we should remove the section + // see https://github.com/git/git/blob/master/Documentation/RelNotes/2.18.0.txt#L379 + lt218, err := gc.gitVersionLT218() + if err != nil { + return errors.Wrap(err, "getting git version") + } + + if lt218 { + return gc.rmConfigsGitVersionLT218(keyPrefix) + } + + err = gc.unsetAll(keyPrefix) + if err != nil { + return gc.rmSection(keyPrefix) + } + + return nil +} + +func (gc *gitConfig) gitVersion() (*semver.Version, error) { + versionOut, err := gc.cli.runGitCommand("version") + if err != nil { + return nil, err + } + return parseGitVersion(versionOut) +} + +func parseGitVersion(versionOut string) (*semver.Version, error) { + // extract the version and truncate potential bad parts + // ex: 2.23.0.rc1 instead of 2.23.0-rc1 + r := regexp.MustCompile(`(\d+\.){1,2}\d+`) + + extracted := r.FindString(versionOut) + if extracted == "" { + return nil, fmt.Errorf("unreadable git version %s", versionOut) + } + + version, err := semver.Make(extracted) + if err != nil { + return nil, err + } + + return &version, nil +} + +func (gc *gitConfig) gitVersionLT218() (bool, error) { + version, err := gc.gitVersion() + if err != nil { + return false, err + } + + version218string := 
"2.18.0" + gitVersion218, err := semver.Make(version218string) + if err != nil { + return false, err + } + + return version.LT(gitVersion218), nil +} diff --git a/migration3/before/repository/git_test.go b/migration3/before/repository/git_test.go new file mode 100644 index 0000000..1b36fd4 --- /dev/null +++ b/migration3/before/repository/git_test.go @@ -0,0 +1,10 @@ +// Package repository contains helper methods for working with the Git repo. +package repository + +import ( + "testing" +) + +func TestGitRepo(t *testing.T) { + RepoTest(t, CreateTestRepo, CleanupTestRepos) +} diff --git a/migration3/before/repository/git_testing.go b/migration3/before/repository/git_testing.go new file mode 100644 index 0000000..874cc86 --- /dev/null +++ b/migration3/before/repository/git_testing.go @@ -0,0 +1,74 @@ +package repository + +import ( + "io/ioutil" + "log" + + "github.com/99designs/keyring" +) + +// This is intended for testing only + +func CreateTestRepo(bare bool) TestedRepo { + dir, err := ioutil.TempDir("", "") + if err != nil { + log.Fatal(err) + } + + var creator func(string) (*GitRepo, error) + + if bare { + creator = InitBareGitRepo + } else { + creator = InitGitRepo + } + + repo, err := creator(dir) + if err != nil { + log.Fatal(err) + } + + config := repo.LocalConfig() + if err := config.StoreString("user.name", "testuser"); err != nil { + log.Fatal("failed to set user.name for test repository: ", err) + } + if err := config.StoreString("user.email", "testuser@example.com"); err != nil { + log.Fatal("failed to set user.email for test repository: ", err) + } + + // make sure we use a mock keyring for testing to not interact with the global system + return &replaceKeyring{ + TestedRepo: repo, + keyring: keyring.NewArrayKeyring(nil), + } +} + +func SetupReposAndRemote() (repoA, repoB, remote TestedRepo) { + repoA = CreateGoGitTestRepo(false) + repoB = CreateGoGitTestRepo(false) + remote = CreateGoGitTestRepo(true) + + remoteAddr := "file://" + remote.GetPath() + + 
err := repoA.AddRemote("origin", remoteAddr) + if err != nil { + log.Fatal(err) + } + + err = repoB.AddRemote("origin", remoteAddr) + if err != nil { + log.Fatal(err) + } + + return repoA, repoB, remote +} + +// replaceKeyring allow to replace the Keyring of the underlying repo +type replaceKeyring struct { + TestedRepo + keyring Keyring +} + +func (rk replaceKeyring) Keyring() Keyring { + return rk.keyring +} diff --git a/migration3/before/repository/gogit.go b/migration3/before/repository/gogit.go new file mode 100644 index 0000000..dfd0848 --- /dev/null +++ b/migration3/before/repository/gogit.go @@ -0,0 +1,655 @@ +package repository + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "os/exec" + stdpath "path" + "path/filepath" + "sort" + "strings" + "sync" + "time" + + gogit "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/config" + "github.com/go-git/go-git/v5/plumbing" + "github.com/go-git/go-git/v5/plumbing/filemode" + "github.com/go-git/go-git/v5/plumbing/object" + + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" +) + +var _ ClockedRepo = &GoGitRepo{} + +type GoGitRepo struct { + r *gogit.Repository + path string + + clocksMutex sync.Mutex + clocks map[string]lamport.Clock + + keyring Keyring +} + +func NewGoGitRepo(path string, clockLoaders []ClockLoader) (*GoGitRepo, error) { + path, err := detectGitPath(path) + if err != nil { + return nil, err + } + + r, err := gogit.PlainOpen(path) + if err != nil { + return nil, err + } + + k, err := defaultKeyring() + if err != nil { + return nil, err + } + + repo := &GoGitRepo{ + r: r, + path: path, + clocks: make(map[string]lamport.Clock), + keyring: k, + } + + for _, loader := range clockLoaders { + allExist := true + for _, name := range loader.Clocks { + if _, err := repo.getClock(name); err != nil { + allExist = false + } + } + + if !allExist { + err = loader.Witnesser(repo) + if err != nil { + return nil, err + } + } + } + + return repo, nil +} + +func 
detectGitPath(path string) (string, error) { + // normalize the path + path, err := filepath.Abs(path) + if err != nil { + return "", err + } + + for { + fi, err := os.Stat(stdpath.Join(path, ".git")) + if err == nil { + if !fi.IsDir() { + return "", fmt.Errorf(".git exist but is not a directory") + } + return stdpath.Join(path, ".git"), nil + } + if !os.IsNotExist(err) { + // unknown error + return "", err + } + + // detect bare repo + ok, err := isGitDir(path) + if err != nil { + return "", err + } + if ok { + return path, nil + } + + if parent := filepath.Dir(path); parent == path { + return "", fmt.Errorf(".git not found") + } else { + path = parent + } + } +} + +func isGitDir(path string) (bool, error) { + markers := []string{"HEAD", "objects", "refs"} + + for _, marker := range markers { + _, err := os.Stat(stdpath.Join(path, marker)) + if err == nil { + continue + } + if !os.IsNotExist(err) { + // unknown error + return false, err + } else { + return false, nil + } + } + + return true, nil +} + +// InitGoGitRepo create a new empty git repo at the given path +func InitGoGitRepo(path string) (*GoGitRepo, error) { + r, err := gogit.PlainInit(path, false) + if err != nil { + return nil, err + } + + k, err := defaultKeyring() + if err != nil { + return nil, err + } + + return &GoGitRepo{ + r: r, + path: path + "/.git", + clocks: make(map[string]lamport.Clock), + keyring: k, + }, nil +} + +// InitBareGoGitRepo create a new --bare empty git repo at the given path +func InitBareGoGitRepo(path string) (*GoGitRepo, error) { + r, err := gogit.PlainInit(path, true) + if err != nil { + return nil, err + } + + k, err := defaultKeyring() + if err != nil { + return nil, err + } + + return &GoGitRepo{ + r: r, + path: path, + clocks: make(map[string]lamport.Clock), + keyring: k, + }, nil +} + +// LocalConfig give access to the repository scoped configuration +func (repo *GoGitRepo) LocalConfig() Config { + return newGoGitLocalConfig(repo.r) +} + +// GlobalConfig give access 
to the global scoped configuration +func (repo *GoGitRepo) GlobalConfig() Config { + // TODO: replace that with go-git native implementation once it's supported + // see: https://github.com/go-git/go-git + // see: https://github.com/src-d/go-git/issues/760 + return newGoGitGlobalConfig(repo.r) +} + +// AnyConfig give access to a merged local/global configuration +func (repo *GoGitRepo) AnyConfig() ConfigRead { + return mergeConfig(repo.LocalConfig(), repo.GlobalConfig()) +} + +// Keyring give access to a user-wide storage for secrets +func (repo *GoGitRepo) Keyring() Keyring { + return repo.keyring +} + +// GetPath returns the path to the repo. +func (repo *GoGitRepo) GetPath() string { + return repo.path +} + +// GetUserName returns the name the the user has used to configure git +func (repo *GoGitRepo) GetUserName() (string, error) { + return repo.AnyConfig().ReadString("user.name") +} + +// GetUserEmail returns the email address that the user has used to configure git. +func (repo *GoGitRepo) GetUserEmail() (string, error) { + return repo.AnyConfig().ReadString("user.email") +} + +// GetCoreEditor returns the name of the editor that the user has used to configure git. +func (repo *GoGitRepo) GetCoreEditor() (string, error) { + // See https://git-scm.com/docs/git-var + // The order of preference is the $GIT_EDITOR environment variable, then core.editor configuration, then $VISUAL, then $EDITOR, and then the default chosen at compile time, which is usually vi. 
+ + if val, ok := os.LookupEnv("GIT_EDITOR"); ok { + return val, nil + } + + val, err := repo.AnyConfig().ReadString("core.editor") + if err == nil && val != "" { + return val, nil + } + if err != nil && err != ErrNoConfigEntry { + return "", err + } + + if val, ok := os.LookupEnv("VISUAL"); ok { + return val, nil + } + + if val, ok := os.LookupEnv("EDITOR"); ok { + return val, nil + } + + priorities := []string{ + "editor", + "nano", + "vim", + "vi", + "emacs", + } + + for _, cmd := range priorities { + if _, err = exec.LookPath(cmd); err == nil { + return cmd, nil + } + + } + + return "ed", nil +} + +// GetRemotes returns the configured remotes repositories. +func (repo *GoGitRepo) GetRemotes() (map[string]string, error) { + cfg, err := repo.r.Config() + if err != nil { + return nil, err + } + + result := make(map[string]string, len(cfg.Remotes)) + for name, remote := range cfg.Remotes { + if len(remote.URLs) > 0 { + result[name] = remote.URLs[0] + } + } + + return result, nil +} + +// FetchRefs fetch git refs from a remote +func (repo *GoGitRepo) FetchRefs(remote string, refSpec string) (string, error) { + buf := bytes.NewBuffer(nil) + + err := repo.r.Fetch(&gogit.FetchOptions{ + RemoteName: remote, + RefSpecs: []config.RefSpec{config.RefSpec(refSpec)}, + Progress: buf, + }) + if err == gogit.NoErrAlreadyUpToDate { + return "already up-to-date", nil + } + if err != nil { + return "", err + } + + return buf.String(), nil +} + +// PushRefs push git refs to a remote +func (repo *GoGitRepo) PushRefs(remote string, refSpec string) (string, error) { + buf := bytes.NewBuffer(nil) + + err := repo.r.Push(&gogit.PushOptions{ + RemoteName: remote, + RefSpecs: []config.RefSpec{config.RefSpec(refSpec)}, + Progress: buf, + }) + if err == gogit.NoErrAlreadyUpToDate { + return "already up-to-date", nil + } + if err != nil { + return "", err + } + + return buf.String(), nil +} + +// StoreData will store arbitrary data and return the corresponding hash +func (repo *GoGitRepo) 
StoreData(data []byte) (Hash, error) { + obj := repo.r.Storer.NewEncodedObject() + obj.SetType(plumbing.BlobObject) + + w, err := obj.Writer() + if err != nil { + return "", err + } + + _, err = w.Write(data) + if err != nil { + return "", err + } + + h, err := repo.r.Storer.SetEncodedObject(obj) + if err != nil { + return "", err + } + + return Hash(h.String()), nil +} + +// ReadData will attempt to read arbitrary data from the given hash +func (repo *GoGitRepo) ReadData(hash Hash) ([]byte, error) { + obj, err := repo.r.BlobObject(plumbing.NewHash(hash.String())) + if err != nil { + return nil, err + } + + r, err := obj.Reader() + if err != nil { + return nil, err + } + + return ioutil.ReadAll(r) +} + +// StoreTree will store a mapping key-->Hash as a Git tree +func (repo *GoGitRepo) StoreTree(mapping []TreeEntry) (Hash, error) { + var tree object.Tree + + // TODO: can be removed once https://github.com/go-git/go-git/issues/193 is resolved + sorted := make([]TreeEntry, len(mapping)) + copy(sorted, mapping) + sort.Slice(sorted, func(i, j int) bool { + nameI := sorted[i].Name + if sorted[i].ObjectType == Tree { + nameI += "/" + } + nameJ := sorted[j].Name + if sorted[j].ObjectType == Tree { + nameJ += "/" + } + return nameI < nameJ + }) + + for _, entry := range sorted { + mode := filemode.Regular + if entry.ObjectType == Tree { + mode = filemode.Dir + } + + tree.Entries = append(tree.Entries, object.TreeEntry{ + Name: entry.Name, + Mode: mode, + Hash: plumbing.NewHash(entry.Hash.String()), + }) + } + + obj := repo.r.Storer.NewEncodedObject() + obj.SetType(plumbing.TreeObject) + err := tree.Encode(obj) + if err != nil { + return "", err + } + + hash, err := repo.r.Storer.SetEncodedObject(obj) + if err != nil { + return "", err + } + + return Hash(hash.String()), nil +} + +// ReadTree will return the list of entries in a Git tree +func (repo *GoGitRepo) ReadTree(hash Hash) ([]TreeEntry, error) { + h := plumbing.NewHash(hash.String()) + + // the given hash could be a 
tree or a commit + obj, err := repo.r.Storer.EncodedObject(plumbing.AnyObject, h) + if err != nil { + return nil, err + } + + var tree *object.Tree + switch obj.Type() { + case plumbing.TreeObject: + tree, err = object.DecodeTree(repo.r.Storer, obj) + case plumbing.CommitObject: + var commit *object.Commit + commit, err = object.DecodeCommit(repo.r.Storer, obj) + if err != nil { + return nil, err + } + tree, err = commit.Tree() + default: + return nil, fmt.Errorf("given hash is not a tree") + } + if err != nil { + return nil, err + } + + treeEntries := make([]TreeEntry, len(tree.Entries)) + for i, entry := range tree.Entries { + objType := Blob + if entry.Mode == filemode.Dir { + objType = Tree + } + + treeEntries[i] = TreeEntry{ + ObjectType: objType, + Hash: Hash(entry.Hash.String()), + Name: entry.Name, + } + } + + return treeEntries, nil +} + +// StoreCommit will store a Git commit with the given Git tree +func (repo *GoGitRepo) StoreCommit(treeHash Hash) (Hash, error) { + return repo.StoreCommitWithParent(treeHash, "") +} + +// StoreCommit will store a Git commit with the given Git tree +func (repo *GoGitRepo) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) { + cfg, err := repo.r.Config() + if err != nil { + return "", err + } + + commit := object.Commit{ + Author: object.Signature{ + Name: cfg.Author.Name, + Email: cfg.Author.Email, + When: time.Now(), + }, + Committer: object.Signature{ + Name: cfg.Committer.Name, + Email: cfg.Committer.Email, + When: time.Now(), + }, + Message: "", + TreeHash: plumbing.NewHash(treeHash.String()), + } + + if parent != "" { + commit.ParentHashes = []plumbing.Hash{plumbing.NewHash(parent.String())} + } + + obj := repo.r.Storer.NewEncodedObject() + obj.SetType(plumbing.CommitObject) + err = commit.Encode(obj) + if err != nil { + return "", err + } + + hash, err := repo.r.Storer.SetEncodedObject(obj) + if err != nil { + return "", err + } + + return Hash(hash.String()), nil +} + +// GetTreeHash return the git 
tree hash referenced in a commit +func (repo *GoGitRepo) GetTreeHash(commit Hash) (Hash, error) { + obj, err := repo.r.CommitObject(plumbing.NewHash(commit.String())) + if err != nil { + return "", err + } + + return Hash(obj.TreeHash.String()), nil +} + +// FindCommonAncestor will return the last common ancestor of two chain of commit +func (repo *GoGitRepo) FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, error) { + obj1, err := repo.r.CommitObject(plumbing.NewHash(commit1.String())) + if err != nil { + return "", err + } + obj2, err := repo.r.CommitObject(plumbing.NewHash(commit2.String())) + if err != nil { + return "", err + } + + commits, err := obj1.MergeBase(obj2) + if err != nil { + return "", err + } + + return Hash(commits[0].Hash.String()), nil +} + +// UpdateRef will create or update a Git reference +func (repo *GoGitRepo) UpdateRef(ref string, hash Hash) error { + return repo.r.Storer.SetReference(plumbing.NewHashReference(plumbing.ReferenceName(ref), plumbing.NewHash(hash.String()))) +} + +// RemoveRef will remove a Git reference +func (repo *GoGitRepo) RemoveRef(ref string) error { + return repo.r.Storer.RemoveReference(plumbing.ReferenceName(ref)) +} + +// ListRefs will return a list of Git ref matching the given refspec +func (repo *GoGitRepo) ListRefs(refPrefix string) ([]string, error) { + refIter, err := repo.r.References() + if err != nil { + return nil, err + } + + refs := make([]string, 0) + + err = refIter.ForEach(func(ref *plumbing.Reference) error { + if strings.HasPrefix(ref.Name().String(), refPrefix) { + refs = append(refs, ref.Name().String()) + } + return nil + }) + if err != nil { + return nil, err + } + + return refs, nil +} + +// RefExist will check if a reference exist in Git +func (repo *GoGitRepo) RefExist(ref string) (bool, error) { + _, err := repo.r.Reference(plumbing.ReferenceName(ref), false) + if err == nil { + return true, nil + } else if err == plumbing.ErrReferenceNotFound { + return false, nil + } + return false, 
err +} + +// CopyRef will create a new reference with the same value as another one +func (repo *GoGitRepo) CopyRef(source string, dest string) error { + r, err := repo.r.Reference(plumbing.ReferenceName(source), false) + if err != nil { + return err + } + return repo.r.Storer.SetReference(plumbing.NewHashReference(plumbing.ReferenceName(dest), r.Hash())) +} + +// ListCommits will return the list of tree hashes of a ref, in chronological order +func (repo *GoGitRepo) ListCommits(ref string) ([]Hash, error) { + r, err := repo.r.Reference(plumbing.ReferenceName(ref), false) + if err != nil { + return nil, err + } + + commit, err := repo.r.CommitObject(r.Hash()) + if err != nil { + return nil, err + } + hashes := []Hash{Hash(commit.Hash.String())} + + for { + commit, err = commit.Parent(0) + if err == object.ErrParentNotFound { + break + } + if err != nil { + return nil, err + } + + if commit.NumParents() > 1 { + return nil, fmt.Errorf("multiple parents") + } + + hashes = append([]Hash{Hash(commit.Hash.String())}, hashes...) + } + + return hashes, nil +} + +// GetOrCreateClock return a Lamport clock stored in the Repo. +// If the clock doesn't exist, it's created. 
+func (repo *GoGitRepo) GetOrCreateClock(name string) (lamport.Clock, error) { + c, err := repo.getClock(name) + if err == nil { + return c, nil + } + if err != ErrClockNotExist { + return nil, err + } + + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + + p := stdpath.Join(repo.path, clockPath, name+"-clock") + + c, err = lamport.NewPersistedClock(p) + if err != nil { + return nil, err + } + + repo.clocks[name] = c + return c, nil +} + +func (repo *GoGitRepo) getClock(name string) (lamport.Clock, error) { + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + + if c, ok := repo.clocks[name]; ok { + return c, nil + } + + p := stdpath.Join(repo.path, clockPath, name+"-clock") + + c, err := lamport.LoadPersistedClock(p) + if err == nil { + repo.clocks[name] = c + return c, nil + } + if err == lamport.ErrClockNotExist { + return nil, ErrClockNotExist + } + return nil, err +} + +// AddRemote add a new remote to the repository +// Not in the interface because it's only used for testing +func (repo *GoGitRepo) AddRemote(name string, url string) error { + _, err := repo.r.CreateRemote(&config.RemoteConfig{ + Name: name, + URLs: []string{url}, + }) + + return err +} diff --git a/migration3/before/repository/gogit_config.go b/migration3/before/repository/gogit_config.go new file mode 100644 index 0000000..2f9a4cc --- /dev/null +++ b/migration3/before/repository/gogit_config.go @@ -0,0 +1,236 @@ +package repository + +import ( + "fmt" + "strconv" + "strings" + "time" + + gogit "github.com/go-git/go-git/v5" + "github.com/go-git/go-git/v5/config" +) + +var _ Config = &goGitConfig{} + +type goGitConfig struct { + ConfigRead + ConfigWrite +} + +func newGoGitLocalConfig(repo *gogit.Repository) *goGitConfig { + return &goGitConfig{ + ConfigRead: &goGitConfigReader{getConfig: repo.Config}, + ConfigWrite: &goGitConfigWriter{repo: repo}, + } +} + +func newGoGitGlobalConfig(repo *gogit.Repository) *goGitConfig { + return &goGitConfig{ + ConfigRead: 
&goGitConfigReader{getConfig: func() (*config.Config, error) { + return config.LoadConfig(config.GlobalScope) + }}, + ConfigWrite: &configPanicWriter{}, + } +} + +var _ ConfigRead = &goGitConfigReader{} + +type goGitConfigReader struct { + getConfig func() (*config.Config, error) +} + +func (cr *goGitConfigReader) ReadAll(keyPrefix string) (map[string]string, error) { + cfg, err := cr.getConfig() + if err != nil { + return nil, err + } + + split := strings.Split(keyPrefix, ".") + result := make(map[string]string) + + switch { + case keyPrefix == "": + for _, section := range cfg.Raw.Sections { + for _, option := range section.Options { + result[fmt.Sprintf("%s.%s", section.Name, option.Key)] = option.Value + } + for _, subsection := range section.Subsections { + for _, option := range subsection.Options { + result[fmt.Sprintf("%s.%s.%s", section.Name, subsection.Name, option.Key)] = option.Value + } + } + } + case len(split) == 1: + if !cfg.Raw.HasSection(split[0]) { + return nil, nil + } + section := cfg.Raw.Section(split[0]) + for _, option := range section.Options { + result[fmt.Sprintf("%s.%s", section.Name, option.Key)] = option.Value + } + for _, subsection := range section.Subsections { + for _, option := range subsection.Options { + result[fmt.Sprintf("%s.%s.%s", section.Name, subsection.Name, option.Key)] = option.Value + } + } + default: + if !cfg.Raw.HasSection(split[0]) { + return nil, nil + } + section := cfg.Raw.Section(split[0]) + rest := strings.Join(split[1:], ".") + rest = strings.TrimSuffix(rest, ".") + for _, subsection := range section.Subsections { + if strings.HasPrefix(subsection.Name, rest) { + for _, option := range subsection.Options { + result[fmt.Sprintf("%s.%s.%s", section.Name, subsection.Name, option.Key)] = option.Value + } + } + } + } + + return result, nil +} + +func (cr *goGitConfigReader) ReadBool(key string) (bool, error) { + val, err := cr.ReadString(key) + if err != nil { + return false, err + } + + return 
strconv.ParseBool(val) +} + +func (cr *goGitConfigReader) ReadString(key string) (string, error) { + cfg, err := cr.getConfig() + if err != nil { + return "", err + } + + split := strings.Split(key, ".") + + if len(split) <= 1 { + return "", fmt.Errorf("invalid key") + } + + sectionName := split[0] + if !cfg.Raw.HasSection(sectionName) { + return "", ErrNoConfigEntry + } + section := cfg.Raw.Section(sectionName) + + switch { + case len(split) == 2: + optionName := split[1] + if !section.HasOption(optionName) { + return "", ErrNoConfigEntry + } + if len(section.OptionAll(optionName)) > 1 { + return "", ErrMultipleConfigEntry + } + return section.Option(optionName), nil + default: + subsectionName := strings.Join(split[1:len(split)-2], ".") + optionName := split[len(split)-1] + if !section.HasSubsection(subsectionName) { + return "", ErrNoConfigEntry + } + subsection := section.Subsection(subsectionName) + if !subsection.HasOption(optionName) { + return "", ErrNoConfigEntry + } + if len(subsection.OptionAll(optionName)) > 1 { + return "", ErrMultipleConfigEntry + } + return subsection.Option(optionName), nil + } +} + +func (cr *goGitConfigReader) ReadTimestamp(key string) (time.Time, error) { + value, err := cr.ReadString(key) + if err != nil { + return time.Time{}, err + } + return ParseTimestamp(value) +} + +var _ ConfigWrite = &goGitConfigWriter{} + +// Only works for the local config as go-git only support that +type goGitConfigWriter struct { + repo *gogit.Repository +} + +func (cw *goGitConfigWriter) StoreString(key, value string) error { + cfg, err := cw.repo.Config() + if err != nil { + return err + } + + split := strings.Split(key, ".") + + switch { + case len(split) <= 1: + return fmt.Errorf("invalid key") + case len(split) == 2: + cfg.Raw.Section(split[0]).SetOption(split[1], value) + default: + section := split[0] + subsection := strings.Join(split[1:len(split)-1], ".") + option := split[len(split)-1] + 
cfg.Raw.Section(section).Subsection(subsection).SetOption(option, value) + } + + return cw.repo.SetConfig(cfg) +} + +func (cw *goGitConfigWriter) StoreTimestamp(key string, value time.Time) error { + return cw.StoreString(key, strconv.Itoa(int(value.Unix()))) +} + +func (cw *goGitConfigWriter) StoreBool(key string, value bool) error { + return cw.StoreString(key, strconv.FormatBool(value)) +} + +func (cw *goGitConfigWriter) RemoveAll(keyPrefix string) error { + cfg, err := cw.repo.Config() + if err != nil { + return err + } + + split := strings.Split(keyPrefix, ".") + + switch { + case keyPrefix == "": + cfg.Raw.Sections = nil + // warning: this does not actually remove everything as go-git config hold + // some entries in multiple places (cfg.User ...) + case len(split) == 1: + if cfg.Raw.HasSection(split[0]) { + cfg.Raw.RemoveSection(split[0]) + } else { + return fmt.Errorf("invalid key prefix") + } + default: + if !cfg.Raw.HasSection(split[0]) { + return fmt.Errorf("invalid key prefix") + } + section := cfg.Raw.Section(split[0]) + rest := strings.Join(split[1:], ".") + + ok := false + if section.HasSubsection(rest) { + section.RemoveSubsection(rest) + ok = true + } + if section.HasOption(rest) { + section.RemoveOption(rest) + ok = true + } + if !ok { + return fmt.Errorf("invalid key prefix") + } + } + + return cw.repo.SetConfig(cfg) +} diff --git a/migration3/before/repository/gogit_test.go b/migration3/before/repository/gogit_test.go new file mode 100644 index 0000000..fba990d --- /dev/null +++ b/migration3/before/repository/gogit_test.go @@ -0,0 +1,68 @@ +package repository + +import ( + "io/ioutil" + "os" + "path" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNewGoGitRepo(t *testing.T) { + // Plain + plainRoot, err := ioutil.TempDir("", "") + require.NoError(t, err) + defer os.RemoveAll(plainRoot) + + _, err = InitGoGitRepo(plainRoot) + require.NoError(t, err) + plainGitDir := 
path.Join(plainRoot, ".git") + + // Bare + bareRoot, err := ioutil.TempDir("", "") + require.NoError(t, err) + defer os.RemoveAll(bareRoot) + + _, err = InitBareGoGitRepo(bareRoot) + require.NoError(t, err) + bareGitDir := bareRoot + + tests := []struct { + inPath string + outPath string + err bool + }{ + // errors + {"/", "", true}, + // parent dir of a repo + {filepath.Dir(plainRoot), "", true}, + + // Plain repo + {plainRoot, plainGitDir, false}, + {plainGitDir, plainGitDir, false}, + {path.Join(plainGitDir, "objects"), plainGitDir, false}, + + // Bare repo + {bareRoot, bareGitDir, false}, + {bareGitDir, bareGitDir, false}, + {path.Join(bareGitDir, "objects"), bareGitDir, false}, + } + + for i, tc := range tests { + r, err := NewGoGitRepo(tc.inPath, nil) + + if tc.err { + require.Error(t, err, i) + } else { + require.NoError(t, err, i) + assert.Equal(t, filepath.ToSlash(tc.outPath), filepath.ToSlash(r.GetPath()), i) + } + } +} + +func TestGoGitRepo(t *testing.T) { + RepoTest(t, CreateGoGitTestRepo, CleanupTestRepos) +} diff --git a/migration3/before/repository/gogit_testing.go b/migration3/before/repository/gogit_testing.go new file mode 100644 index 0000000..f20ff6b --- /dev/null +++ b/migration3/before/repository/gogit_testing.go @@ -0,0 +1,58 @@ +package repository + +import ( + "io/ioutil" + "log" +) + +// This is intended for testing only + +func CreateGoGitTestRepo(bare bool) TestedRepo { + dir, err := ioutil.TempDir("", "") + if err != nil { + log.Fatal(err) + } + + var creator func(string) (*GoGitRepo, error) + + if bare { + creator = InitBareGoGitRepo + } else { + creator = InitGoGitRepo + } + + repo, err := creator(dir) + if err != nil { + log.Fatal(err) + } + + config := repo.LocalConfig() + if err := config.StoreString("user.name", "testuser"); err != nil { + log.Fatal("failed to set user.name for test repository: ", err) + } + if err := config.StoreString("user.email", "testuser@example.com"); err != nil { + log.Fatal("failed to set user.email for 
test repository: ", err) + } + + return repo +} + +func SetupGoGitReposAndRemote() (repoA, repoB, remote TestedRepo) { + repoA = CreateGoGitTestRepo(false) + repoB = CreateGoGitTestRepo(false) + remote = CreateGoGitTestRepo(true) + + remoteAddr := "file://" + remote.GetPath() + + err := repoA.AddRemote("origin", remoteAddr) + if err != nil { + log.Fatal(err) + } + + err = repoB.AddRemote("origin", remoteAddr) + if err != nil { + log.Fatal(err) + } + + return repoA, repoB, remote +} diff --git a/migration3/before/repository/hash.go b/migration3/before/repository/hash.go new file mode 100644 index 0000000..6a11558 --- /dev/null +++ b/migration3/before/repository/hash.go @@ -0,0 +1,51 @@ +package repository + +import ( + "fmt" + "io" +) + +const idLengthSHA1 = 40 +const idLengthSHA256 = 64 + +// Hash is a git hash +type Hash string + +func (h Hash) String() string { + return string(h) +} + +// UnmarshalGQL implement the Unmarshaler interface for gqlgen +func (h *Hash) UnmarshalGQL(v interface{}) error { + _, ok := v.(string) + if !ok { + return fmt.Errorf("hashes must be strings") + } + + *h = v.(Hash) + + if !h.IsValid() { + return fmt.Errorf("invalid hash") + } + + return nil +} + +// MarshalGQL implement the Marshaler interface for gqlgen +func (h Hash) MarshalGQL(w io.Writer) { + _, _ = w.Write([]byte(`"` + h.String() + `"`)) +} + +// IsValid tell if the hash is valid +func (h *Hash) IsValid() bool { + // Support for both sha1 and sha256 git hashes + if len(*h) != idLengthSHA1 && len(*h) != idLengthSHA256 { + return false + } + for _, r := range *h { + if (r < 'a' || r > 'z') && (r < '0' || r > '9') { + return false + } + } + return true +} diff --git a/migration3/before/repository/keyring.go b/migration3/before/repository/keyring.go new file mode 100644 index 0000000..f690b0b --- /dev/null +++ b/migration3/before/repository/keyring.go @@ -0,0 +1,50 @@ +package repository + +import ( + "os" + "path" + + "github.com/99designs/keyring" +) + +type Item = keyring.Item 
+ +var ErrKeyringKeyNotFound = keyring.ErrKeyNotFound + +// Keyring provides the uniform interface over the underlying backends +type Keyring interface { + // Returns an Item matching the key or ErrKeyringKeyNotFound + Get(key string) (Item, error) + // Stores an Item on the keyring + Set(item Item) error + // Removes the item with matching key + Remove(key string) error + // Provides a slice of all keys stored on the keyring + Keys() ([]string, error) +} + +func defaultKeyring() (Keyring, error) { + ucd, err := os.UserConfigDir() + if err != nil { + return nil, err + } + + return keyring.Open(keyring.Config{ + // only use the file backend until https://github.com/99designs/keyring/issues/74 is resolved + AllowedBackends: []keyring.BackendType{ + keyring.FileBackend, + }, + + ServiceName: "git-bug", + + // Fallback encrypted file + FileDir: path.Join(ucd, "git-bug", "keyring"), + // As we write the file in the user's config directory, this file should already be protected by the OS against + // other user's access. We actually don't terribly need to protect it further and a password prompt across all + // UI's would be a pain. Therefore we use here a constant password so the file will be unreadable by generic file + // scanners if the user's machine get compromised. + FilePasswordFunc: func(string) (string, error) { + return "git-bug", nil + }, + }) +} diff --git a/migration3/before/repository/mock_repo.go b/migration3/before/repository/mock_repo.go new file mode 100644 index 0000000..4c6e9cc --- /dev/null +++ b/migration3/before/repository/mock_repo.go @@ -0,0 +1,335 @@ +package repository + +import ( + "crypto/sha1" + "fmt" + "strings" + + "github.com/99designs/keyring" + + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" +) + +var _ ClockedRepo = &mockRepoForTest{} +var _ TestedRepo = &mockRepoForTest{} + +// mockRepoForTest defines an instance of Repo that can be used for testing. 
+type mockRepoForTest struct { + *mockRepoConfig + *mockRepoKeyring + *mockRepoCommon + *mockRepoData + *mockRepoClock +} + +func NewMockRepoForTest() *mockRepoForTest { + return &mockRepoForTest{ + mockRepoConfig: NewMockRepoConfig(), + mockRepoKeyring: NewMockRepoKeyring(), + mockRepoCommon: NewMockRepoCommon(), + mockRepoData: NewMockRepoData(), + mockRepoClock: NewMockRepoClock(), + } +} + +var _ RepoConfig = &mockRepoConfig{} + +type mockRepoConfig struct { + localConfig *MemConfig + globalConfig *MemConfig +} + +func NewMockRepoConfig() *mockRepoConfig { + return &mockRepoConfig{ + localConfig: NewMemConfig(), + globalConfig: NewMemConfig(), + } +} + +// LocalConfig give access to the repository scoped configuration +func (r *mockRepoConfig) LocalConfig() Config { + return r.localConfig +} + +// GlobalConfig give access to the git global configuration +func (r *mockRepoConfig) GlobalConfig() Config { + return r.globalConfig +} + +// AnyConfig give access to a merged local/global configuration +func (r *mockRepoConfig) AnyConfig() ConfigRead { + return mergeConfig(r.localConfig, r.globalConfig) +} + +var _ RepoKeyring = &mockRepoKeyring{} + +type mockRepoKeyring struct { + keyring *keyring.ArrayKeyring +} + +func NewMockRepoKeyring() *mockRepoKeyring { + return &mockRepoKeyring{ + keyring: keyring.NewArrayKeyring(nil), + } +} + +// Keyring give access to a user-wide storage for secrets +func (r *mockRepoKeyring) Keyring() Keyring { + return r.keyring +} + +var _ RepoCommon = &mockRepoCommon{} + +type mockRepoCommon struct{} + +func NewMockRepoCommon() *mockRepoCommon { + return &mockRepoCommon{} +} + +// GetPath returns the path to the repo. +func (r *mockRepoCommon) GetPath() string { + return "~/mockRepo/" +} + +func (r *mockRepoCommon) GetUserName() (string, error) { + return "René Descartes", nil +} + +// GetUserEmail returns the email address that the user has used to configure git. 
+func (r *mockRepoCommon) GetUserEmail() (string, error) { + return "user@example.com", nil +} + +// GetCoreEditor returns the name of the editor that the user has used to configure git. +func (r *mockRepoCommon) GetCoreEditor() (string, error) { + return "vi", nil +} + +// GetRemotes returns the configured remotes repositories. +func (r *mockRepoCommon) GetRemotes() (map[string]string, error) { + return map[string]string{ + "origin": "git://github.com/MichaelMure/git-bug", + }, nil +} + +var _ RepoData = &mockRepoData{} + +type commit struct { + treeHash Hash + parent Hash +} + +type mockRepoData struct { + blobs map[Hash][]byte + trees map[Hash]string + commits map[Hash]commit + refs map[string]Hash +} + +func NewMockRepoData() *mockRepoData { + return &mockRepoData{ + blobs: make(map[Hash][]byte), + trees: make(map[Hash]string), + commits: make(map[Hash]commit), + refs: make(map[string]Hash), + } +} + +// PushRefs push git refs to a remote +func (r *mockRepoData) PushRefs(remote string, refSpec string) (string, error) { + return "", nil +} + +func (r *mockRepoData) FetchRefs(remote string, refSpec string) (string, error) { + return "", nil +} + +func (r *mockRepoData) StoreData(data []byte) (Hash, error) { + rawHash := sha1.Sum(data) + hash := Hash(fmt.Sprintf("%x", rawHash)) + r.blobs[hash] = data + return hash, nil +} + +func (r *mockRepoData) ReadData(hash Hash) ([]byte, error) { + data, ok := r.blobs[hash] + + if !ok { + return nil, fmt.Errorf("unknown hash") + } + + return data, nil +} + +func (r *mockRepoData) StoreTree(entries []TreeEntry) (Hash, error) { + buffer := prepareTreeEntries(entries) + rawHash := sha1.Sum(buffer.Bytes()) + hash := Hash(fmt.Sprintf("%x", rawHash)) + r.trees[hash] = buffer.String() + + return hash, nil +} + +func (r *mockRepoData) StoreCommit(treeHash Hash) (Hash, error) { + rawHash := sha1.Sum([]byte(treeHash)) + hash := Hash(fmt.Sprintf("%x", rawHash)) + r.commits[hash] = commit{ + treeHash: treeHash, + } + return hash, nil +} 
+ +func (r *mockRepoData) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) { + rawHash := sha1.Sum([]byte(treeHash + parent)) + hash := Hash(fmt.Sprintf("%x", rawHash)) + r.commits[hash] = commit{ + treeHash: treeHash, + parent: parent, + } + return hash, nil +} + +func (r *mockRepoData) UpdateRef(ref string, hash Hash) error { + r.refs[ref] = hash + return nil +} + +func (r *mockRepoData) RemoveRef(ref string) error { + delete(r.refs, ref) + return nil +} + +func (r *mockRepoData) RefExist(ref string) (bool, error) { + _, exist := r.refs[ref] + return exist, nil +} + +func (r *mockRepoData) CopyRef(source string, dest string) error { + hash, exist := r.refs[source] + + if !exist { + return fmt.Errorf("Unknown ref") + } + + r.refs[dest] = hash + return nil +} + +func (r *mockRepoData) ListRefs(refPrefix string) ([]string, error) { + var keys []string + + for k := range r.refs { + if strings.HasPrefix(k, refPrefix) { + keys = append(keys, k) + } + } + + return keys, nil +} + +func (r *mockRepoData) ListCommits(ref string) ([]Hash, error) { + var hashes []Hash + + hash := r.refs[ref] + + for { + commit, ok := r.commits[hash] + + if !ok { + break + } + + hashes = append([]Hash{hash}, hashes...) 
+ hash = commit.parent + } + + return hashes, nil +} + +func (r *mockRepoData) ReadTree(hash Hash) ([]TreeEntry, error) { + var data string + + data, ok := r.trees[hash] + + if !ok { + // Git will understand a commit hash to reach a tree + commit, ok := r.commits[hash] + + if !ok { + return nil, fmt.Errorf("unknown hash") + } + + data, ok = r.trees[commit.treeHash] + + if !ok { + return nil, fmt.Errorf("unknown hash") + } + } + + return readTreeEntries(data) +} + +func (r *mockRepoData) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) { + ancestor1 := []Hash{hash1} + + for hash1 != "" { + c, ok := r.commits[hash1] + if !ok { + return "", fmt.Errorf("unknown commit %v", hash1) + } + ancestor1 = append(ancestor1, c.parent) + hash1 = c.parent + } + + for { + for _, ancestor := range ancestor1 { + if ancestor == hash2 { + return ancestor, nil + } + } + + c, ok := r.commits[hash2] + if !ok { + return "", fmt.Errorf("unknown commit %v", hash1) + } + + if c.parent == "" { + return "", fmt.Errorf("no ancestor found") + } + + hash2 = c.parent + } +} + +func (r *mockRepoData) GetTreeHash(commit Hash) (Hash, error) { + c, ok := r.commits[commit] + if !ok { + return "", fmt.Errorf("unknown commit") + } + + return c.treeHash, nil +} + +func (r *mockRepoData) AddRemote(name string, url string) error { + panic("implement me") +} + +type mockRepoClock struct { + clocks map[string]lamport.Clock +} + +func NewMockRepoClock() *mockRepoClock { + return &mockRepoClock{ + clocks: make(map[string]lamport.Clock), + } +} + +func (r *mockRepoClock) GetOrCreateClock(name string) (lamport.Clock, error) { + if c, ok := r.clocks[name]; ok { + return c, nil + } + + c := lamport.NewMemClock() + r.clocks[name] = c + return c, nil +} diff --git a/migration3/before/repository/mock_repo_test.go b/migration3/before/repository/mock_repo_test.go new file mode 100644 index 0000000..b56b94f --- /dev/null +++ b/migration3/before/repository/mock_repo_test.go @@ -0,0 +1,10 @@ +package repository + 
+import "testing" + +func TestMockRepo(t *testing.T) { + creator := func(bare bool) TestedRepo { return NewMockRepoForTest() } + cleaner := func(repos ...Repo) {} + + RepoTest(t, creator, cleaner) +} diff --git a/migration3/before/repository/repo.go b/migration3/before/repository/repo.go new file mode 100644 index 0000000..6f7cb65 --- /dev/null +++ b/migration3/before/repository/repo.go @@ -0,0 +1,148 @@ +// Package repository contains helper methods for working with a Git repo. +package repository + +import ( + "errors" + + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" +) + +var ( + // ErrNotARepo is the error returned when the git repo root wan't be found + ErrNotARepo = errors.New("not a git repository") + // ErrClockNotExist is the error returned when a clock can't be found + ErrClockNotExist = errors.New("clock doesn't exist") +) + +// Repo represents a source code repository. +type Repo interface { + RepoConfig + RepoKeyring + RepoCommon + RepoData +} + +// ClockedRepo is a Repo that also has Lamport clocks +type ClockedRepo interface { + Repo + RepoClock +} + +// RepoConfig access the configuration of a repository +type RepoConfig interface { + // LocalConfig give access to the repository scoped configuration + LocalConfig() Config + + // GlobalConfig give access to the global scoped configuration + GlobalConfig() Config + + // AnyConfig give access to a merged local/global configuration + AnyConfig() ConfigRead +} + +// RepoKeyring give access to a user-wide storage for secrets +type RepoKeyring interface { + // Keyring give access to a user-wide storage for secrets + Keyring() Keyring +} + +// RepoCommon represent the common function the we want all the repo to implement +type RepoCommon interface { + // GetPath returns the path to the repo. 
+ GetPath() string + + // GetUserName returns the name the the user has used to configure git + GetUserName() (string, error) + + // GetUserEmail returns the email address that the user has used to configure git. + GetUserEmail() (string, error) + + // GetCoreEditor returns the name of the editor that the user has used to configure git. + GetCoreEditor() (string, error) + + // GetRemotes returns the configured remotes repositories. + GetRemotes() (map[string]string, error) +} + +// RepoData give access to the git data storage +type RepoData interface { + // FetchRefs fetch git refs from a remote + FetchRefs(remote string, refSpec string) (string, error) + + // PushRefs push git refs to a remote + PushRefs(remote string, refSpec string) (string, error) + + // StoreData will store arbitrary data and return the corresponding hash + StoreData(data []byte) (Hash, error) + + // ReadData will attempt to read arbitrary data from the given hash + ReadData(hash Hash) ([]byte, error) + + // StoreTree will store a mapping key-->Hash as a Git tree + StoreTree(mapping []TreeEntry) (Hash, error) + + // ReadTree will return the list of entries in a Git tree + // The given hash could be from either a commit or a tree + ReadTree(hash Hash) ([]TreeEntry, error) + + // StoreCommit will store a Git commit with the given Git tree + StoreCommit(treeHash Hash) (Hash, error) + + // StoreCommit will store a Git commit with the given Git tree + StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) + + // GetTreeHash return the git tree hash referenced in a commit + GetTreeHash(commit Hash) (Hash, error) + + // FindCommonAncestor will return the last common ancestor of two chain of commit + FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, error) + + // UpdateRef will create or update a Git reference + UpdateRef(ref string, hash Hash) error + + // RemoveRef will remove a Git reference + RemoveRef(ref string) error + + // ListRefs will return a list of Git ref matching the given 
refspec + ListRefs(refPrefix string) ([]string, error) + + // RefExist will check if a reference exist in Git + RefExist(ref string) (bool, error) + + // CopyRef will create a new reference with the same value as another one + CopyRef(source string, dest string) error + + // ListCommits will return the list of tree hashes of a ref, in chronological order + ListCommits(ref string) ([]Hash, error) +} + +// RepoClock give access to Lamport clocks +type RepoClock interface { + // GetOrCreateClock return a Lamport clock stored in the Repo. + // If the clock doesn't exist, it's created. + GetOrCreateClock(name string) (lamport.Clock, error) +} + +// ClockLoader hold which logical clock need to exist for an entity and +// how to create them if they don't. +type ClockLoader struct { + // Clocks hold the name of all the clocks this loader deal with. + // Those clocks will be checked when the repo load. If not present or broken, + // Witnesser will be used to create them. + Clocks []string + // Witnesser is a function that will initialize the clocks of a repo + // from scratch + Witnesser func(repo ClockedRepo) error +} + +// TestedRepo is an extended ClockedRepo with function for testing only +type TestedRepo interface { + ClockedRepo + repoTest +} + +// repoTest give access to test only functions +type repoTest interface { + // AddRemote add a new remote to the repository + AddRemote(name string, url string) error +} diff --git a/migration3/before/repository/repo_testing.go b/migration3/before/repository/repo_testing.go new file mode 100644 index 0000000..b501d64 --- /dev/null +++ b/migration3/before/repository/repo_testing.go @@ -0,0 +1,233 @@ +package repository + +import ( + "log" + "math/rand" + "os" + "strings" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/MichaelMure/git-bug-migration/migration3/before/util/lamport" +) + +func CleanupTestRepos(repos ...Repo) { + var firstErr error + for _, repo := range repos { + path := repo.GetPath() + if 
strings.HasSuffix(path, "/.git") { + // for a normal repository (not --bare), we want to remove everything + // including the parent directory where files are checked out + path = strings.TrimSuffix(path, "/.git") + + // Testing non-bare repo should also check path is + // only .git (i.e. ./.git), but doing so, we should + // try to remove the current directory and hav some + // trouble. In the present case, this case should not + // occur. + // TODO consider warning or error when path == ".git" + } + // fmt.Println("Cleaning repo:", path) + err := os.RemoveAll(path) + if err != nil { + log.Println(err) + if firstErr == nil { + firstErr = err + } + } + } + + if firstErr != nil { + log.Fatal(firstErr) + } +} + +type RepoCreator func(bare bool) TestedRepo +type RepoCleaner func(repos ...Repo) + +// Test suite for a Repo implementation +func RepoTest(t *testing.T, creator RepoCreator, cleaner RepoCleaner) { + for bare, name := range map[bool]string{ + false: "Plain", + true: "Bare", + } { + t.Run(name, func(t *testing.T) { + repo := creator(bare) + defer cleaner(repo) + + t.Run("Data", func(t *testing.T) { + RepoDataTest(t, repo) + }) + + t.Run("Config", func(t *testing.T) { + RepoConfigTest(t, repo) + }) + + t.Run("Clocks", func(t *testing.T) { + RepoClockTest(t, repo) + }) + }) + } +} + +// helper to test a RepoConfig +func RepoConfigTest(t *testing.T, repo RepoConfig) { + testConfig(t, repo.LocalConfig()) +} + +// helper to test a RepoData +func RepoDataTest(t *testing.T, repo RepoData) { + // Blob + + data := randomData() + + blobHash1, err := repo.StoreData(data) + require.NoError(t, err) + require.True(t, blobHash1.IsValid()) + + blob1Read, err := repo.ReadData(blobHash1) + require.NoError(t, err) + require.Equal(t, data, blob1Read) + + // Tree + + blobHash2, err := repo.StoreData(randomData()) + require.NoError(t, err) + blobHash3, err := repo.StoreData(randomData()) + require.NoError(t, err) + + tree1 := []TreeEntry{ + { + ObjectType: Blob, + Hash: blobHash1, 
+ Name: "blob1", + }, + { + ObjectType: Blob, + Hash: blobHash2, + Name: "blob2", + }, + } + + treeHash1, err := repo.StoreTree(tree1) + require.NoError(t, err) + require.True(t, treeHash1.IsValid()) + + tree1Read, err := repo.ReadTree(treeHash1) + require.NoError(t, err) + require.ElementsMatch(t, tree1, tree1Read) + + tree2 := []TreeEntry{ + { + ObjectType: Tree, + Hash: treeHash1, + Name: "tree1", + }, + { + ObjectType: Blob, + Hash: blobHash3, + Name: "blob3", + }, + } + + treeHash2, err := repo.StoreTree(tree2) + require.NoError(t, err) + require.True(t, treeHash2.IsValid()) + + tree2Read, err := repo.ReadTree(treeHash2) + require.NoError(t, err) + require.ElementsMatch(t, tree2, tree2Read) + + // Commit + + commit1, err := repo.StoreCommit(treeHash1) + require.NoError(t, err) + require.True(t, commit1.IsValid()) + + treeHash1Read, err := repo.GetTreeHash(commit1) + require.NoError(t, err) + require.Equal(t, treeHash1, treeHash1Read) + + commit2, err := repo.StoreCommitWithParent(treeHash2, commit1) + require.NoError(t, err) + require.True(t, commit2.IsValid()) + + treeHash2Read, err := repo.GetTreeHash(commit2) + require.NoError(t, err) + require.Equal(t, treeHash2, treeHash2Read) + + // ReadTree should accept tree and commit hashes + tree1read, err := repo.ReadTree(commit1) + require.NoError(t, err) + require.Equal(t, tree1read, tree1) + + // Ref + + exist1, err := repo.RefExist("refs/bugs/ref1") + require.NoError(t, err) + require.False(t, exist1) + + err = repo.UpdateRef("refs/bugs/ref1", commit2) + require.NoError(t, err) + + exist1, err = repo.RefExist("refs/bugs/ref1") + require.NoError(t, err) + require.True(t, exist1) + + ls, err := repo.ListRefs("refs/bugs") + require.NoError(t, err) + require.ElementsMatch(t, []string{"refs/bugs/ref1"}, ls) + + err = repo.CopyRef("refs/bugs/ref1", "refs/bugs/ref2") + require.NoError(t, err) + + ls, err = repo.ListRefs("refs/bugs") + require.NoError(t, err) + require.ElementsMatch(t, []string{"refs/bugs/ref1", 
"refs/bugs/ref2"}, ls) + + commits, err := repo.ListCommits("refs/bugs/ref2") + require.NoError(t, err) + require.Equal(t, []Hash{commit1, commit2}, commits) + + // Graph + + commit3, err := repo.StoreCommitWithParent(treeHash1, commit1) + require.NoError(t, err) + + ancestorHash, err := repo.FindCommonAncestor(commit2, commit3) + require.NoError(t, err) + require.Equal(t, commit1, ancestorHash) + + err = repo.RemoveRef("refs/bugs/ref1") + require.NoError(t, err) +} + +// helper to test a RepoClock +func RepoClockTest(t *testing.T, repo RepoClock) { + clock, err := repo.GetOrCreateClock("foo") + require.NoError(t, err) + require.Equal(t, lamport.Time(1), clock.Time()) + + time, err := clock.Increment() + require.NoError(t, err) + require.Equal(t, lamport.Time(1), time) + require.Equal(t, lamport.Time(2), clock.Time()) + + clock2, err := repo.GetOrCreateClock("foo") + require.NoError(t, err) + require.Equal(t, lamport.Time(2), clock2.Time()) + + clock3, err := repo.GetOrCreateClock("bar") + require.NoError(t, err) + require.Equal(t, lamport.Time(1), clock3.Time()) +} + +func randomData() []byte { + var letterRunes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" + b := make([]byte, 32) + for i := range b { + b[i] = letterRunes[rand.Intn(len(letterRunes))] + } + return b +} diff --git a/migration3/before/repository/tree_entry.go b/migration3/before/repository/tree_entry.go new file mode 100644 index 0000000..6c5ec1a --- /dev/null +++ b/migration3/before/repository/tree_entry.go @@ -0,0 +1,102 @@ +package repository + +import ( + "bytes" + "fmt" + "strings" +) + +type TreeEntry struct { + ObjectType ObjectType + Hash Hash + Name string +} + +type ObjectType int + +const ( + Unknown ObjectType = iota + Blob + Tree +) + +func ParseTreeEntry(line string) (TreeEntry, error) { + fields := strings.Fields(line) + + if len(fields) < 4 { + return TreeEntry{}, fmt.Errorf("Invalid input to parse as a TreeEntry") + } + + objType, err := ParseObjectType(fields[0], 
fields[1]) + + if err != nil { + return TreeEntry{}, err + } + + hash := Hash(fields[2]) + name := strings.Join(fields[3:], "") + + return TreeEntry{ + ObjectType: objType, + Hash: hash, + Name: name, + }, nil +} + +// Format the entry as a git ls-tree compatible line +func (entry TreeEntry) Format() string { + return fmt.Sprintf("%s %s\t%s\n", entry.ObjectType.Format(), entry.Hash, entry.Name) +} + +func (ot ObjectType) Format() string { + switch ot { + case Blob: + return "100644 blob" + case Tree: + return "040000 tree" + default: + panic("Unknown git object type") + } +} + +func ParseObjectType(mode, objType string) (ObjectType, error) { + switch { + case mode == "100644" && objType == "blob": + return Blob, nil + case mode == "040000" && objType == "tree": + return Tree, nil + default: + return Unknown, fmt.Errorf("Unknown git object type %s %s", mode, objType) + } +} + +func prepareTreeEntries(entries []TreeEntry) bytes.Buffer { + var buffer bytes.Buffer + + for _, entry := range entries { + buffer.WriteString(entry.Format()) + } + + return buffer +} + +func readTreeEntries(s string) ([]TreeEntry, error) { + split := strings.Split(strings.TrimSpace(s), "\n") + + casted := make([]TreeEntry, len(split)) + for i, line := range split { + if line == "" { + continue + } + + entry, err := ParseTreeEntry(line) + + if err != nil { + return nil, err + } + + casted[i] = entry + } + + return casted, nil +} diff --git a/migration3/before/repository/tree_entry_test.go b/migration3/before/repository/tree_entry_test.go new file mode 100644 index 0000000..d57433f --- /dev/null +++ b/migration3/before/repository/tree_entry_test.go @@ -0,0 +1,31 @@ +package repository + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTreeEntryFormat(t *testing.T) { + entries := []TreeEntry{ + {Blob, Hash("a85730cf5287d40a1e32d3a671ba2296c73387cb"), "name"}, + {Tree, Hash("a85730cf5287d40a1e32d3a671ba2296c73387cb"), "name"}, + } + + for _, entry := range entries { + 
_ = entry.Format() + } +} + +func TestTreeEntryParse(t *testing.T) { + lines := []string{ + "100644 blob 1e5ffaffc67049635ba7b01f77143313503f1ca1 .gitignore", + "040000 tree 728421fea4168b874bc1a8aa409d6723ef445a4e bug", + } + + for _, line := range lines { + _, err := ParseTreeEntry(line) + assert.NoError(t, err) + } + +} diff --git a/migration3/before/util/lamport/clock.go b/migration3/before/util/lamport/clock.go new file mode 100644 index 0000000..53b0ac7 --- /dev/null +++ b/migration3/before/util/lamport/clock.go @@ -0,0 +1,15 @@ +package lamport + +// Time is the value of a Clock. +type Time uint64 + +// Clock is a Lamport logical clock +type Clock interface { + // Time is used to return the current value of the lamport clock + Time() Time + // Increment is used to return the value of the lamport clock and increment it afterwards + Increment() (Time, error) + // Witness is called to update our local clock if necessary after + // witnessing a clock value received from another process + Witness(time Time) error +} diff --git a/migration3/before/util/lamport/clock_testing.go b/migration3/before/util/lamport/clock_testing.go new file mode 100644 index 0000000..fc59afb --- /dev/null +++ b/migration3/before/util/lamport/clock_testing.go @@ -0,0 +1,28 @@ +package lamport + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func testClock(t *testing.T, c Clock) { + assert.Equal(t, Time(1), c.Time()) + + val, err := c.Increment() + assert.NoError(t, err) + assert.Equal(t, Time(1), val) + assert.Equal(t, Time(2), c.Time()) + + err = c.Witness(41) + assert.NoError(t, err) + assert.Equal(t, Time(42), c.Time()) + + err = c.Witness(41) + assert.NoError(t, err) + assert.Equal(t, Time(42), c.Time()) + + err = c.Witness(30) + assert.NoError(t, err) + assert.Equal(t, Time(42), c.Time()) +} diff --git a/migration3/before/util/lamport/mem_clock.go b/migration3/before/util/lamport/mem_clock.go new file mode 100644 index 0000000..ce6f2d4 --- /dev/null +++ 
b/migration3/before/util/lamport/mem_clock.go @@ -0,0 +1,89 @@ +/* + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this file, + You can obtain one at http://mozilla.org/MPL/2.0/. + + Copyright (c) 2013, Armon Dadgar armon.dadgar@gmail.com + Copyright (c) 2013, Mitchell Hashimoto mitchell.hashimoto@gmail.com + + Alternatively, the contents of this file may be used under the terms + of the GNU General Public License Version 3 or later, as described below: + + This file is free software: you may copy, redistribute and/or modify + it under the terms of the GNU General Public License as published by the + Free Software Foundation, either version 3 of the License, or (at your + option) any later version. + + This file is distributed in the hope that it will be useful, but + WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General + Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see http://www.gnu.org/licenses/. + +*/ + +package lamport + +import ( + "sync/atomic" +) + +var _ Clock = &MemClock{} + +// MemClock is a thread safe implementation of a lamport clock. It +// uses efficient atomic operations for all of its functions, falling back +// to a heavy lock only if there are enough CAS failures. +type MemClock struct { + counter uint64 +} + +// NewMemClock create a new clock with the value 1. +// Value 0 is considered as invalid. +func NewMemClock() *MemClock { + return &MemClock{ + counter: 1, + } +} + +// NewMemClockWithTime create a new clock with a value. 
+func NewMemClockWithTime(time uint64) *MemClock { + return &MemClock{ + counter: time, + } +} + +// Time is used to return the current value of the lamport clock +func (mc *MemClock) Time() Time { + return Time(atomic.LoadUint64(&mc.counter)) +} + +// Increment is used to return the value of the lamport clock and increment it afterwards +func (mc *MemClock) Increment() (Time, error) { + return Time(atomic.AddUint64(&mc.counter, 1) - 1), nil +} + +// Witness is called to update our local clock if necessary after +// witnessing a clock value received from another process +func (mc *MemClock) Witness(v Time) error { +WITNESS: + // If the other value is old, we do not need to do anything + cur := atomic.LoadUint64(&mc.counter) + other := uint64(v) + if other < cur { + return nil + } + + // Ensure that our local clock is at least one ahead. + if !atomic.CompareAndSwapUint64(&mc.counter, cur, other+1) { + // CAS: CompareAndSwap + // The CAS failed, so we just retry. Eventually our CAS should + // succeed or a future witness will pass us by and our witness + // will end. 
+ goto WITNESS + } + + return nil +} diff --git a/migration3/before/util/lamport/mem_clock_test.go b/migration3/before/util/lamport/mem_clock_test.go new file mode 100644 index 0000000..e01d2ec --- /dev/null +++ b/migration3/before/util/lamport/mem_clock_test.go @@ -0,0 +1,8 @@ +package lamport + +import "testing" + +func TestMemClock(t *testing.T) { + c := NewMemClock() + testClock(t, c) +} diff --git a/migration3/before/util/lamport/persisted_clock.go b/migration3/before/util/lamport/persisted_clock.go new file mode 100644 index 0000000..e70b01e --- /dev/null +++ b/migration3/before/util/lamport/persisted_clock.go @@ -0,0 +1,100 @@ +package lamport + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "path/filepath" +) + +var ErrClockNotExist = errors.New("clock doesn't exist") + +type PersistedClock struct { + *MemClock + filePath string +} + +// NewPersistedClock create a new persisted Lamport clock +func NewPersistedClock(filePath string) (*PersistedClock, error) { + clock := &PersistedClock{ + MemClock: NewMemClock(), + filePath: filePath, + } + + dir := filepath.Dir(filePath) + err := os.MkdirAll(dir, 0777) + if err != nil { + return nil, err + } + + err = clock.Write() + if err != nil { + return nil, err + } + + return clock, nil +} + +// LoadPersistedClock load a persisted Lamport clock from a file +func LoadPersistedClock(filePath string) (*PersistedClock, error) { + clock := &PersistedClock{ + filePath: filePath, + } + + err := clock.read() + if err != nil { + return nil, err + } + + return clock, nil +} + +// Increment is used to return the value of the lamport clock and increment it afterwards +func (pc *PersistedClock) Increment() (Time, error) { + time, err := pc.MemClock.Increment() + if err != nil { + return 0, err + } + return time, pc.Write() +} + +// Witness is called to update our local clock if necessary after +// witnessing a clock value received from another process +func (pc *PersistedClock) Witness(time Time) error { + // TODO: rework so 
that we write only when the clock was actually updated + err := pc.MemClock.Witness(time) + if err != nil { + return err + } + return pc.Write() +} + +func (pc *PersistedClock) read() error { + content, err := ioutil.ReadFile(pc.filePath) + if os.IsNotExist(err) { + return ErrClockNotExist + } + if err != nil { + return err + } + + var value uint64 + n, err := fmt.Sscanf(string(content), "%d", &value) + if err != nil { + return err + } + + if n != 1 { + return fmt.Errorf("could not read the clock") + } + + pc.MemClock = NewMemClockWithTime(value) + + return nil +} + +func (pc *PersistedClock) Write() error { + data := []byte(fmt.Sprintf("%d", pc.counter)) + return ioutil.WriteFile(pc.filePath, data, 0644) +} diff --git a/migration3/before/util/lamport/persisted_clock_test.go b/migration3/before/util/lamport/persisted_clock_test.go new file mode 100644 index 0000000..aacec3b --- /dev/null +++ b/migration3/before/util/lamport/persisted_clock_test.go @@ -0,0 +1,19 @@ +package lamport + +import ( + "io/ioutil" + "path" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestPersistedClock(t *testing.T) { + dir, err := ioutil.TempDir("", "") + require.NoError(t, err) + + c, err := NewPersistedClock(path.Join(dir, "test-clock")) + require.NoError(t, err) + + testClock(t, c) +} diff --git a/migration3/before/util/text/transform.go b/migration3/before/util/text/transform.go new file mode 100644 index 0000000..59dc4e0 --- /dev/null +++ b/migration3/before/util/text/transform.go @@ -0,0 +1,31 @@ +package text + +import ( + "strings" + "unicode" + + "golang.org/x/text/runes" + "golang.org/x/text/transform" +) + +func Cleanup(text string) (string, error) { + // windows new line, Github, really ? 
+ text = strings.Replace(text, "\r\n", "\n", -1) + + // remove all unicode control characters except + // '\n', '\r' and '\t' + t := runes.Remove(runes.Predicate(func(r rune) bool { + switch r { + case '\r', '\n', '\t': + return false + } + return unicode.IsControl(r) + })) + sanitized, _, err := transform.String(t, text) + if err != nil { + return "", err + } + + // trim extra new line not displayed in the github UI but still present in the data + return strings.TrimSpace(sanitized), nil +} diff --git a/migration3/before/util/text/validate.go b/migration3/before/util/text/validate.go new file mode 100644 index 0000000..51e94fb --- /dev/null +++ b/migration3/before/util/text/validate.go @@ -0,0 +1,44 @@ +package text + +import ( + "net/url" + "strings" + "unicode" +) + +// Empty tell if the string is considered empty once space +// and not graphics characters are removed +func Empty(s string) bool { + trim := strings.TrimFunc(s, func(r rune) bool { + return unicode.IsSpace(r) || !unicode.IsGraphic(r) + }) + + return trim == "" +} + +// Safe will tell if a character in the string is considered unsafe +// Currently trigger on unicode control character except \n, \t and \r +func Safe(s string) bool { + for _, r := range s { + switch r { + case '\t', '\r', '\n': + continue + } + + if unicode.IsControl(r) { + return false + } + } + + return true +} + +// ValidUrl will tell if the string contains what seems to be a valid URL +func ValidUrl(s string) bool { + if strings.Contains(s, "\n") { + return false + } + + _, err := url.ParseRequestURI(s) + return err == nil +} diff --git a/migration3/before/util/timestamp/timestamp.go b/migration3/before/util/timestamp/timestamp.go new file mode 100644 index 0000000..4f587cb --- /dev/null +++ b/migration3/before/util/timestamp/timestamp.go @@ -0,0 +1,9 @@ +package timestamp + +import "time" + +type Timestamp int64 + +func (t Timestamp) Time() time.Time { + return time.Unix(int64(t), 0) +} diff --git a/migration3/migration3.go 
b/migration3/migration3.go new file mode 100644 index 0000000..e9ba54c --- /dev/null +++ b/migration3/migration3.go @@ -0,0 +1,185 @@ +package migration3 + +import ( + "fmt" + + afterbug "github.com/MichaelMure/git-bug-migration/migration3/after/bug" + afterentity "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + afteridentity "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + afterrepo "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + + beforebug "github.com/MichaelMure/git-bug-migration/migration3/before/bug" + beforeentity "github.com/MichaelMure/git-bug-migration/migration3/before/entity" + beforeidentity "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + beforerepo "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +type Migration3 struct{} + +func (m *Migration3) Description() string { + return "Migrate bridge credentials from the global git config to a keyring" +} + +func (m *Migration3) Run(repoPath string) error { + oldRepo, err := beforerepo.NewGitRepo(repoPath, nil) + if err != nil { + return err + } + + newRepo, err := afterrepo.NewGitRepo(repoPath, nil) + if err != nil { + return err + } + + return m.migrate(oldRepo, newRepo) +} + +func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.ClockedRepo) error { + identities := beforeidentity.ReadAllLocal(oldRepo) + bugs := beforebug.ReadAllLocal(oldRepo) + + migratedIdentities := map[beforeentity.Id]*afteridentity.Identity{} + + for streamedIdentity := range identities { + oldIdentity := streamedIdentity.Identity + fmt.Printf("identity %s:\n", oldIdentity.Id().Human()) + newIdentity, err := afteridentity.NewIdentityFull( + newRepo, + oldIdentity.Name(), + oldIdentity.Email(), + oldIdentity.Login(), + oldIdentity.AvatarUrl(), + nil, + ) + if err != nil { + return err + } + + migratedIdentities[oldIdentity.Id()] = newIdentity + if err := newIdentity.Commit(newRepo); err != nil { 
+ return err + } + fmt.Printf("migrated to %s\n", newIdentity.Id().Human()) + } + + for streamedBug := range bugs { + oldBug := streamedBug.Bug + fmt.Printf("bug %s:\n", oldBug.Id().Human()) + newBug, err := migrateBug(oldBug, migratedIdentities) + if err != nil { + return err + } + if err := newBug.Commit(newRepo); err != nil { + return err + } + fmt.Printf("migrated to %s\n", oldBug.Id().Human(), newBug.Id().Human()) + if err := beforebug.RemoveBug(oldRepo, oldBug.Id()); err != nil { + return err + } + } + + for oldIdentity := range migratedIdentities { + if err := beforeidentity.RemoveIdentity(oldRepo, oldIdentity); err != nil { + return err + } + } + + return nil +} + +func migrateBug(oldBug *beforebug.Bug, migratedIdentities map[beforeentity.Id]*afteridentity.Identity) (*afterbug.Bug, error) { + if oldBug.Packs[0].FormatVersion != 2 { + return nil, nil + } + + // Making a new bug + newBug := afterbug.NewBug() + + migratedOperations := map[beforeentity.Id]afterentity.Id{} + + // Iterating over each operation in the bug + it := beforebug.NewOperationIterator(oldBug) + for it.Next() { + oldOperation := it.Value() + + var newOperation afterbug.Operation + switch operation := oldOperation.(type) { + case *beforebug.AddCommentOperation: + newOperation = afterbug.NewAddCommentOp( + migratedIdentities[operation.Author.Id()], + operation.Time().Unix(), + operation.Message, + migrateHashes(operation.Files), + ) + case *beforebug.CreateOperation: + newOperation = afterbug.NewCreateOp( + migratedIdentities[operation.Author.Id()], + operation.Time().Unix(), + operation.Title, + operation.Message, + migrateHashes(operation.Files), + ) + case *beforebug.EditCommentOperation: + newOperation = afterbug.NewEditCommentOp( + migratedIdentities[operation.Author.Id()], + operation.Time().Unix(), + migratedOperations[operation.Target], + operation.Message, + migrateHashes(operation.Files), + ) + case *beforebug.LabelChangeOperation: + newOperation = afterbug.NewLabelChangeOperation( 
+ migratedIdentities[operation.Author.Id()], + operation.Time().Unix(), + migrateLabels(operation.Added), + migrateLabels(operation.Removed), + ) + case *beforebug.NoOpOperation: + newOperation = afterbug.NewNoOpOp( + migratedIdentities[operation.Author.Id()], + operation.Time().Unix(), + ) + case *beforebug.SetMetadataOperation: + newOperation = afterbug.NewSetMetadataOp( + migratedIdentities[operation.Author.Id()], + operation.Time().Unix(), + migratedOperations[operation.Target], + operation.Metadata, + ) + case *beforebug.SetStatusOperation: + newOperation = afterbug.NewSetStatusOp( + migratedIdentities[operation.Author.Id()], + operation.Time().Unix(), + afterbug.Status(operation.Status), + ) + case *beforebug.SetTitleOperation: + newOperation = afterbug.NewSetTitleOp( + migratedIdentities[operation.Author.Id()], + operation.Time().Unix(), + operation.Title, + operation.Was, + ) + default: + return nil, fmt.Errorf("Unknown oldOperation type: %T\n", operation) + } + + newBug.Append(newOperation) + migratedOperations[oldOperation.Id()] = newOperation.Id() + } + + return newBug, nil +} + +func migrateHashes(oldHashes []beforerepo.Hash) (newHashes []afterrepo.Hash) { + for _, hash := range oldHashes { + newHashes = append(newHashes, afterrepo.Hash(hash)) + } + return +} + +func migrateLabels(oldLabels []beforebug.Label) (newLabels []afterbug.Label) { + for _, label := range oldLabels { + newLabels = append(newLabels, afterbug.Label(label)) + } + return +} diff --git a/root.go b/root.go index 6b47379..583883d 100644 --- a/root.go +++ b/root.go @@ -8,6 +8,7 @@ import ( "github.com/MichaelMure/git-bug-migration/migration1" "github.com/MichaelMure/git-bug-migration/migration2" + "github.com/MichaelMure/git-bug-migration/migration3" ) const rootCommandName = "git-bug-migration" @@ -18,6 +19,7 @@ func NewRootCommand() *cobra.Command { migrations := []Migration{ &migration1.Migration1{}, &migration2.Migration2{}, + &migration3.Migration3{}, } cmd := &cobra.Command{ From 
2ab9fbd68c60cef5ff3ab1255a4dcd6bbcbf1270 Mon Sep 17 00:00:00 2001 From: vince Date: Tue, 24 Nov 2020 21:33:40 +0800 Subject: [PATCH 2/9] add testing to migration3 also fixes some bugs --- migration3/before/bug/operation_pack.go | 2 + migration3/migration3.go | 3 +- migration3/migration3_test.go | 92 +++++++++++++++++++++++++ 3 files changed, 96 insertions(+), 1 deletion(-) create mode 100644 migration3/migration3_test.go diff --git a/migration3/before/bug/operation_pack.go b/migration3/before/bug/operation_pack.go index 576e868..7ec85fd 100644 --- a/migration3/before/bug/operation_pack.go +++ b/migration3/before/bug/operation_pack.go @@ -47,6 +47,8 @@ func (opp *OperationPack) UnmarshalJSON(data []byte) error { return err } + opp.FormatVersion = aux.Version + if aux.Version < formatVersion { return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade") } diff --git a/migration3/migration3.go b/migration3/migration3.go index e9ba54c..969b202 100644 --- a/migration3/migration3.go +++ b/migration3/migration3.go @@ -69,10 +69,11 @@ func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.C if err != nil { return err } + fmt.Println(newBug) if err := newBug.Commit(newRepo); err != nil { return err } - fmt.Printf("migrated to %s\n", oldBug.Id().Human(), newBug.Id().Human()) + fmt.Printf("migrated to %s\n", newBug.Id().Human()) if err := beforebug.RemoveBug(oldRepo, oldBug.Id()); err != nil { return err } diff --git a/migration3/migration3_test.go b/migration3/migration3_test.go new file mode 100644 index 0000000..84bc0da --- /dev/null +++ b/migration3/migration3_test.go @@ -0,0 +1,92 @@ +package migration3 + +import ( + "fmt" + "io/ioutil" + "os" + "testing" + "time" + + "github.com/stretchr/testify/require" + + afterbug "github.com/MichaelMure/git-bug-migration/migration3/after/bug" + afterrepo "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + beforebug 
"github.com/MichaelMure/git-bug-migration/migration3/before/bug" + beforeidentity "github.com/MichaelMure/git-bug-migration/migration3/before/identity" + beforerepo "github.com/MichaelMure/git-bug-migration/migration3/before/repository" +) + +func createFolder() (string, error) { + dir, err := ioutil.TempDir("", "") + return dir, err +} + +func removeFolder(path string) error { + return os.RemoveAll(path) +} + +func TestMigrate23(t *testing.T) { + cwd, err := os.Getwd() + require.Nil(t, err, "got error when attempting to access the current working directory") + + var unix = time.Now().Unix() + + dir, err := createFolder() + require.Nil(t, err, "got error when creating temporary repository dir with version 0") + err = os.Chdir(dir) + require.Nil(t, err, "got error when opening temporary repository folder") + + oldRepo, err := beforerepo.InitGitRepo(dir) + require.Nil(t, err, "got error when initializing old repository") + newRepo, err := afterrepo.InitGitRepo(dir) + require.Nil(t, err, "got error when initializing new repository") + + oldVinc := beforeidentity.NewIdentityFull( + "Vincent Tiu", + "vincetiu8@gmail.com", + "invincibot", + "https://avatars2.githubusercontent.com/u/46623413?s=460&u=56824597898bc22464222f5c33e8eae6d72def5b&v=4", + ) + err = oldVinc.Commit(oldRepo) + require.NoError(t, err) + + title := "bug0" + message := "beep bop bug" + bug0, _, err := beforebug.Create(oldVinc, unix, title, message) + require.Nil(t, err, "got error when creating bug") + + err = bug0.Commit(oldRepo) + require.Nil(t, err, "got error when committing bug") + + m := Migration3{} + err = m.migrate(oldRepo, newRepo) + require.Nil(t, err, "got error when migrating repository") + + bugs1 := afterbug.ReadAllLocal(newRepo) + bug1 := (<-bugs1).Bug + + operations := afterbug.NewOperationIterator(bug1) + require.Equal(t, true, operations.Next(), "unable to get first operation") + + operation := operations.Value() + createOperation, ok := operation.(*afterbug.CreateOperation) + 
require.True(t, ok) + require.Equal(t, title, createOperation.Title) + require.Equal(t, unix, createOperation.UnixTime) + require.Equal(t, message, createOperation.Message) + + author := operation.GetAuthor() + require.Equal(t, oldVinc.Name(), author.Name()) + require.Equal(t, oldVinc.Login(), author.Login()) + require.Equal(t, oldVinc.Email(), author.Email()) + require.Equal(t, oldVinc.AvatarUrl(), author.AvatarUrl()) + + var bug afterbug.StreamedBug + require.Equal(t, bug, <-bugs1, "got additional bug when getting bugs in repository") + + err = os.Chdir(cwd) + err = removeFolder(dir) + if err != nil { + fmt.Printf("got error when removing temporary folder: %q", err) + } +} From 64ca4ccacc88cb2e35b8a8a1b382d6bf72138d05 Mon Sep 17 00:00:00 2001 From: vince Date: Thu, 26 Nov 2020 20:06:47 +0800 Subject: [PATCH 3/9] remove tests --- migration3/after/bug/bug_actions_test.go | 394 ------------------ migration3/after/bug/bug_test.go | 190 --------- migration3/after/bug/label_test.go | 35 -- migration3/after/bug/op_add_comment_test.go | 39 -- migration3/after/bug/op_create_test.go | 82 ---- migration3/after/bug/op_edit_comment_test.go | 108 ----- migration3/after/bug/op_label_change_test.go | 40 -- migration3/after/bug/op_noop_test.go | 40 -- migration3/after/bug/op_set_metadata_test.go | 127 ------ migration3/after/bug/op_set_status_test.go | 38 -- migration3/after/bug/op_set_title_test.go | 38 -- .../after/bug/operation_iterator_test.go | 79 ---- migration3/after/bug/operation_pack_test.go | 78 ---- migration3/after/bug/operation_test.go | 133 ------ .../after/identity/identity_actions_test.go | 158 ------- .../after/identity/identity_stub_test.go | 26 -- migration3/after/identity/identity_test.go | 248 ----------- migration3/after/identity/version_test.go | 84 ---- .../after/repository/config_mem_test.go | 7 - migration3/after/repository/config_test.go | 54 --- migration3/after/repository/git_test.go | 10 - migration3/after/repository/gogit_test.go | 68 --- 
migration3/after/repository/mock_repo_test.go | 10 - .../after/repository/tree_entry_test.go | 31 -- .../after/util/lamport/mem_clock_test.go | 8 - .../util/lamport/persisted_clock_test.go | 19 - migration3/before/bug/bug_actions_test.go | 390 ----------------- migration3/before/bug/bug_test.go | 186 --------- migration3/before/bug/label_test.go | 35 -- migration3/before/bug/op_add_comment_test.go | 39 -- migration3/before/bug/op_create_test.go | 78 ---- migration3/before/bug/op_edit_comment_test.go | 105 ----- migration3/before/bug/op_label_change_test.go | 40 -- migration3/before/bug/op_noop_test.go | 40 -- migration3/before/bug/op_set_metadata_test.go | 128 ------ migration3/before/bug/op_set_status_test.go | 40 -- migration3/before/bug/op_set_title_test.go | 40 -- .../before/bug/operation_iterator_test.go | 78 ---- migration3/before/bug/operation_pack_test.go | 79 ---- migration3/before/bug/operation_test.go | 119 ------ .../before/identity/identity_actions_test.go | 152 ------- .../before/identity/identity_stub_test.go | 26 -- migration3/before/identity/identity_test.go | 316 -------------- migration3/before/identity/version_test.go | 41 -- .../before/repository/config_mem_test.go | 7 - migration3/before/repository/config_test.go | 54 --- migration3/before/repository/git_test.go | 10 - migration3/before/repository/gogit_test.go | 68 --- .../before/repository/mock_repo_test.go | 10 - .../before/repository/tree_entry_test.go | 31 -- .../before/util/lamport/mem_clock_test.go | 8 - .../util/lamport/persisted_clock_test.go | 19 - 52 files changed, 4283 deletions(-) delete mode 100644 migration3/after/bug/bug_actions_test.go delete mode 100644 migration3/after/bug/bug_test.go delete mode 100644 migration3/after/bug/label_test.go delete mode 100644 migration3/after/bug/op_add_comment_test.go delete mode 100644 migration3/after/bug/op_create_test.go delete mode 100644 migration3/after/bug/op_edit_comment_test.go delete mode 100644 
migration3/after/bug/op_label_change_test.go delete mode 100644 migration3/after/bug/op_noop_test.go delete mode 100644 migration3/after/bug/op_set_metadata_test.go delete mode 100644 migration3/after/bug/op_set_status_test.go delete mode 100644 migration3/after/bug/op_set_title_test.go delete mode 100644 migration3/after/bug/operation_iterator_test.go delete mode 100644 migration3/after/bug/operation_pack_test.go delete mode 100644 migration3/after/bug/operation_test.go delete mode 100644 migration3/after/identity/identity_actions_test.go delete mode 100644 migration3/after/identity/identity_stub_test.go delete mode 100644 migration3/after/identity/identity_test.go delete mode 100644 migration3/after/identity/version_test.go delete mode 100644 migration3/after/repository/config_mem_test.go delete mode 100644 migration3/after/repository/config_test.go delete mode 100644 migration3/after/repository/git_test.go delete mode 100644 migration3/after/repository/gogit_test.go delete mode 100644 migration3/after/repository/mock_repo_test.go delete mode 100644 migration3/after/repository/tree_entry_test.go delete mode 100644 migration3/after/util/lamport/mem_clock_test.go delete mode 100644 migration3/after/util/lamport/persisted_clock_test.go delete mode 100644 migration3/before/bug/bug_actions_test.go delete mode 100644 migration3/before/bug/bug_test.go delete mode 100644 migration3/before/bug/label_test.go delete mode 100644 migration3/before/bug/op_add_comment_test.go delete mode 100644 migration3/before/bug/op_create_test.go delete mode 100644 migration3/before/bug/op_edit_comment_test.go delete mode 100644 migration3/before/bug/op_label_change_test.go delete mode 100644 migration3/before/bug/op_noop_test.go delete mode 100644 migration3/before/bug/op_set_metadata_test.go delete mode 100644 migration3/before/bug/op_set_status_test.go delete mode 100644 migration3/before/bug/op_set_title_test.go delete mode 100644 migration3/before/bug/operation_iterator_test.go delete 
mode 100644 migration3/before/bug/operation_pack_test.go delete mode 100644 migration3/before/bug/operation_test.go delete mode 100644 migration3/before/identity/identity_actions_test.go delete mode 100644 migration3/before/identity/identity_stub_test.go delete mode 100644 migration3/before/identity/identity_test.go delete mode 100644 migration3/before/identity/version_test.go delete mode 100644 migration3/before/repository/config_mem_test.go delete mode 100644 migration3/before/repository/config_test.go delete mode 100644 migration3/before/repository/git_test.go delete mode 100644 migration3/before/repository/gogit_test.go delete mode 100644 migration3/before/repository/mock_repo_test.go delete mode 100644 migration3/before/repository/tree_entry_test.go delete mode 100644 migration3/before/util/lamport/mem_clock_test.go delete mode 100644 migration3/before/util/lamport/persisted_clock_test.go diff --git a/migration3/after/bug/bug_actions_test.go b/migration3/after/bug/bug_actions_test.go deleted file mode 100644 index 8f7a2b8..0000000 --- a/migration3/after/bug/bug_actions_test.go +++ /dev/null @@ -1,394 +0,0 @@ -package bug - -import ( - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func TestPushPull(t *testing.T) { - repoA, repoB, remote := repository.SetupReposAndRemote() - defer repository.CleanupTestRepos(repoA, repoB, remote) - - reneA, err := identity.NewIdentity(repoA, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = reneA.Commit(repoA) - require.NoError(t, err) - - bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") - require.NoError(t, err) - assert.True(t, bug1.NeedCommit()) - err = bug1.Commit(repoA) - require.NoError(t, err) - assert.False(t, bug1.NeedCommit()) - - // distribute the identity - _, err = 
identity.Push(repoA, "origin") - require.NoError(t, err) - err = identity.Pull(repoB, "origin") - require.NoError(t, err) - - // A --> remote --> B - _, err = Push(repoA, "origin") - require.NoError(t, err) - - err = Pull(repoB, "origin") - require.NoError(t, err) - - bugs := allBugs(t, ReadAllLocal(repoB)) - - if len(bugs) != 1 { - t.Fatal("Unexpected number of bugs") - } - - // B --> remote --> A - reneB, err := identity.ReadLocal(repoA, reneA.Id()) - require.NoError(t, err) - - bug2, _, err := Create(reneB, time.Now().Unix(), "bug2", "message") - require.NoError(t, err) - err = bug2.Commit(repoB) - require.NoError(t, err) - - _, err = Push(repoB, "origin") - require.NoError(t, err) - - err = Pull(repoA, "origin") - require.NoError(t, err) - - bugs = allBugs(t, ReadAllLocal(repoA)) - - if len(bugs) != 2 { - t.Fatal("Unexpected number of bugs") - } -} - -func allBugs(t testing.TB, bugs <-chan StreamedBug) []*Bug { - var result []*Bug - for streamed := range bugs { - if streamed.Err != nil { - t.Fatal(streamed.Err) - } - result = append(result, streamed.Bug) - } - return result -} - -func TestRebaseTheirs(t *testing.T) { - _RebaseTheirs(t) -} - -func BenchmarkRebaseTheirs(b *testing.B) { - for n := 0; n < b.N; n++ { - _RebaseTheirs(b) - } -} - -func _RebaseTheirs(t testing.TB) { - repoA, repoB, remote := repository.SetupReposAndRemote() - defer repository.CleanupTestRepos(repoA, repoB, remote) - - reneA, err := identity.NewIdentity(repoA, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = reneA.Commit(repoA) - require.NoError(t, err) - - bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") - require.NoError(t, err) - assert.True(t, bug1.NeedCommit()) - err = bug1.Commit(repoA) - require.NoError(t, err) - assert.False(t, bug1.NeedCommit()) - - // distribute the identity - _, err = identity.Push(repoA, "origin") - require.NoError(t, err) - err = identity.Pull(repoB, "origin") - require.NoError(t, err) - - // A --> remote - - _, 
err = Push(repoA, "origin") - require.NoError(t, err) - - // remote --> B - err = Pull(repoB, "origin") - require.NoError(t, err) - - bug2, err := ReadLocal(repoB, bug1.Id()) - require.NoError(t, err) - assert.False(t, bug2.NeedCommit()) - - reneB, err := identity.ReadLocal(repoA, reneA.Id()) - require.NoError(t, err) - - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message2") - require.NoError(t, err) - assert.True(t, bug2.NeedCommit()) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message3") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message4") - require.NoError(t, err) - err = bug2.Commit(repoB) - require.NoError(t, err) - assert.False(t, bug2.NeedCommit()) - - // B --> remote - _, err = Push(repoB, "origin") - require.NoError(t, err) - - // remote --> A - err = Pull(repoA, "origin") - require.NoError(t, err) - - bugs := allBugs(t, ReadAllLocal(repoB)) - - if len(bugs) != 1 { - t.Fatal("Unexpected number of bugs") - } - - bug3, err := ReadLocal(repoA, bug1.Id()) - require.NoError(t, err) - - if nbOps(bug3) != 4 { - t.Fatal("Unexpected number of operations") - } -} - -func TestRebaseOurs(t *testing.T) { - _RebaseOurs(t) -} - -func BenchmarkRebaseOurs(b *testing.B) { - for n := 0; n < b.N; n++ { - _RebaseOurs(b) - } -} - -func _RebaseOurs(t testing.TB) { - repoA, repoB, remote := repository.SetupReposAndRemote() - defer repository.CleanupTestRepos(repoA, repoB, remote) - - reneA, err := identity.NewIdentity(repoA, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = reneA.Commit(repoA) - require.NoError(t, err) - - bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - // distribute the identity - _, err = identity.Push(repoA, "origin") - require.NoError(t, err) - err = identity.Pull(repoB, "origin") - require.NoError(t, err) - - // A --> remote - _, err = Push(repoA, "origin") - 
require.NoError(t, err) - - // remote --> B - err = Pull(repoB, "origin") - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message2") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message3") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message4") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message5") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message6") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message7") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message8") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message9") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message10") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - // remote --> A - err = Pull(repoA, "origin") - require.NoError(t, err) - - bugs := allBugs(t, ReadAllLocal(repoA)) - - if len(bugs) != 1 { - t.Fatal("Unexpected number of bugs") - } - - bug2, err := ReadLocal(repoA, bug1.Id()) - require.NoError(t, err) - - if nbOps(bug2) != 10 { - t.Fatal("Unexpected number of operations") - } -} - -func nbOps(b *Bug) int { - it := NewOperationIterator(b) - counter := 0 - for it.Next() { - counter++ - } - return counter -} - -func TestRebaseConflict(t *testing.T) { - _RebaseConflict(t) -} - -func BenchmarkRebaseConflict(b *testing.B) { - for n := 0; n < b.N; n++ { - _RebaseConflict(b) - } -} - -func _RebaseConflict(t testing.TB) { - repoA, repoB, remote := repository.SetupReposAndRemote() - defer repository.CleanupTestRepos(repoA, repoB, remote) - - reneA, err := identity.NewIdentity(repoA, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) 
- err = reneA.Commit(repoA) - require.NoError(t, err) - - bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - // distribute the identity - _, err = identity.Push(repoA, "origin") - require.NoError(t, err) - err = identity.Pull(repoB, "origin") - require.NoError(t, err) - - // A --> remote - _, err = Push(repoA, "origin") - require.NoError(t, err) - - // remote --> B - err = Pull(repoB, "origin") - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message2") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message3") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message4") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message5") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message6") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message7") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message8") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message9") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message10") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - bug2, err := ReadLocal(repoB, bug1.Id()) - require.NoError(t, err) - - reneB, err := identity.ReadLocal(repoA, reneA.Id()) - require.NoError(t, err) - - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message11") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message12") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message13") - require.NoError(t, err) - err = bug2.Commit(repoB) - require.NoError(t, 
err) - - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message14") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message15") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message16") - require.NoError(t, err) - err = bug2.Commit(repoB) - require.NoError(t, err) - - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message17") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message18") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message19") - require.NoError(t, err) - err = bug2.Commit(repoB) - require.NoError(t, err) - - // A --> remote - _, err = Push(repoA, "origin") - require.NoError(t, err) - - // remote --> B - err = Pull(repoB, "origin") - require.NoError(t, err) - - bugs := allBugs(t, ReadAllLocal(repoB)) - - if len(bugs) != 1 { - t.Fatal("Unexpected number of bugs") - } - - bug3, err := ReadLocal(repoB, bug1.Id()) - require.NoError(t, err) - - if nbOps(bug3) != 19 { - t.Fatal("Unexpected number of operations") - } - - // B --> remote - _, err = Push(repoB, "origin") - require.NoError(t, err) - - // remote --> A - err = Pull(repoA, "origin") - require.NoError(t, err) - - bugs = allBugs(t, ReadAllLocal(repoA)) - - if len(bugs) != 1 { - t.Fatal("Unexpected number of bugs") - } - - bug4, err := ReadLocal(repoA, bug1.Id()) - require.NoError(t, err) - - if nbOps(bug4) != 19 { - t.Fatal("Unexpected number of operations") - } -} diff --git a/migration3/after/bug/bug_test.go b/migration3/after/bug/bug_test.go deleted file mode 100644 index 60b76be..0000000 --- a/migration3/after/bug/bug_test.go +++ /dev/null @@ -1,190 +0,0 @@ -package bug - -import ( - "fmt" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func TestBugId(t *testing.T) { - repo := 
repository.NewMockRepoForTest() - - bug1 := NewBug() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = rene.Commit(repo) - require.NoError(t, err) - - createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - - bug1.Append(createOp) - - err = bug1.Commit(repo) - - if err != nil { - t.Fatal(err) - } - - bug1.Id() -} - -func TestBugValidity(t *testing.T) { - repo := repository.NewMockRepoForTest() - - bug1 := NewBug() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = rene.Commit(repo) - require.NoError(t, err) - - createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - - if bug1.Validate() == nil { - t.Fatal("Empty bug should be invalid") - } - - bug1.Append(createOp) - - if bug1.Validate() != nil { - t.Fatal("Bug with just a CreateOp should be valid") - } - - err = bug1.Commit(repo) - if err != nil { - t.Fatal(err) - } - - bug1.Append(createOp) - - if bug1.Validate() == nil { - t.Fatal("Bug with multiple CreateOp should be invalid") - } - - err = bug1.Commit(repo) - if err == nil { - t.Fatal("Invalid bug should not commit") - } -} - -func TestBugCommitLoad(t *testing.T) { - repo := repository.NewMockRepoForTest() - - bug1 := NewBug() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = rene.Commit(repo) - require.NoError(t, err) - - createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1") - addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil) - - bug1.Append(createOp) - bug1.Append(setTitleOp) - - require.True(t, bug1.NeedCommit()) - - err = bug1.Commit(repo) - require.Nil(t, err) - require.False(t, bug1.NeedCommit()) - - bug2, err := ReadLocal(repo, bug1.Id()) - require.NoError(t, err) - equivalentBug(t, bug1, bug2) - - // 
add more op - - bug1.Append(addCommentOp) - - require.True(t, bug1.NeedCommit()) - - err = bug1.Commit(repo) - require.Nil(t, err) - require.False(t, bug1.NeedCommit()) - - bug3, err := ReadLocal(repo, bug1.Id()) - require.NoError(t, err) - equivalentBug(t, bug1, bug3) -} - -func equivalentBug(t *testing.T, expected, actual *Bug) { - require.Equal(t, len(expected.packs), len(actual.packs)) - - for i := range expected.packs { - for j := range expected.packs[i].Operations { - actual.packs[i].Operations[j].base().id = expected.packs[i].Operations[j].base().id - } - } - - require.Equal(t, expected, actual) -} - -func TestBugRemove(t *testing.T) { - repo := repository.CreateGoGitTestRepo(false) - remoteA := repository.CreateGoGitTestRepo(true) - remoteB := repository.CreateGoGitTestRepo(true) - defer repository.CleanupTestRepos(repo, remoteA, remoteB) - - err := repo.AddRemote("remoteA", "file://"+remoteA.GetPath()) - require.NoError(t, err) - - err = repo.AddRemote("remoteB", "file://"+remoteB.GetPath()) - require.NoError(t, err) - - // generate a bunch of bugs - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = rene.Commit(repo) - require.NoError(t, err) - - for i := 0; i < 100; i++ { - b := NewBug() - createOp := NewCreateOp(rene, time.Now().Unix(), "title", fmt.Sprintf("message%v", i), nil) - b.Append(createOp) - err = b.Commit(repo) - require.NoError(t, err) - } - - // and one more for testing - b := NewBug() - createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - b.Append(createOp) - err = b.Commit(repo) - require.NoError(t, err) - - _, err = Push(repo, "remoteA") - require.NoError(t, err) - - _, err = Push(repo, "remoteB") - require.NoError(t, err) - - _, err = Fetch(repo, "remoteA") - require.NoError(t, err) - - _, err = Fetch(repo, "remoteB") - require.NoError(t, err) - - err = RemoveBug(repo, b.Id()) - require.NoError(t, err) - - _, err = ReadLocal(repo, b.Id()) - 
require.Error(t, ErrBugNotExist, err) - - _, err = ReadRemote(repo, "remoteA", b.Id()) - require.Error(t, ErrBugNotExist, err) - - _, err = ReadRemote(repo, "remoteB", b.Id()) - require.Error(t, ErrBugNotExist, err) - - ids, err := ListLocalIds(repo) - require.NoError(t, err) - require.Len(t, ids, 100) -} diff --git a/migration3/after/bug/label_test.go b/migration3/after/bug/label_test.go deleted file mode 100644 index 49401c4..0000000 --- a/migration3/after/bug/label_test.go +++ /dev/null @@ -1,35 +0,0 @@ -package bug - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -func TestLabelRGBA(t *testing.T) { - rgba := Label("test1").Color() - expected := LabelColor{R: 0, G: 150, B: 136, A: 255} - - require.Equal(t, expected, rgba) -} - -func TestLabelRGBASimilar(t *testing.T) { - rgba := Label("test2").Color() - expected := LabelColor{R: 3, G: 169, B: 244, A: 255} - - require.Equal(t, expected, rgba) -} - -func TestLabelRGBAReverse(t *testing.T) { - rgba := Label("tset").Color() - expected := LabelColor{R: 63, G: 81, B: 181, A: 255} - - require.Equal(t, expected, rgba) -} - -func TestLabelRGBAEqual(t *testing.T) { - color1 := Label("test").Color() - color2 := Label("test").Color() - - require.Equal(t, color1, color2) -} diff --git a/migration3/after/bug/op_add_comment_test.go b/migration3/after/bug/op_add_comment_test.go deleted file mode 100644 index 60083a1..0000000 --- a/migration3/after/bug/op_add_comment_test.go +++ /dev/null @@ -1,39 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func TestAddCommentSerialize(t *testing.T) { - repo := repository.NewMockRepoClock() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - - unix := 
time.Now().Unix() - before := NewAddCommentOp(rene, unix, "message", nil) - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after AddCommentOperation - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - assert.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - assert.Equal(t, before, &after) -} diff --git a/migration3/after/bug/op_create_test.go b/migration3/after/bug/op_create_test.go deleted file mode 100644 index f9af5a6..0000000 --- a/migration3/after/bug/op_create_test.go +++ /dev/null @@ -1,82 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" - "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" -) - -func TestCreate(t *testing.T) { - snapshot := Snapshot{} - - repo := repository.NewMockRepoClock() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - - unix := time.Now().Unix() - - create := NewCreateOp(rene, unix, "title", "message", nil) - - create.Apply(&snapshot) - - id := create.Id() - require.NoError(t, id.Validate()) - - comment := Comment{ - id: id, - Author: rene, - Message: "message", - UnixTime: timestamp.Timestamp(create.UnixTime), - } - - expected := Snapshot{ - Title: "title", - Comments: []Comment{ - comment, - }, - Author: rene, - Participants: []identity.Interface{rene}, - Actors: []identity.Interface{rene}, - CreateTime: create.Time(), - Timeline: []TimelineItem{ - &CreateTimelineItem{ - CommentTimelineItem: NewCommentTimelineItem(id, comment), - }, - }, - } - - require.Equal(t, expected, snapshot) -} - -func TestCreateSerialize(t *testing.T) { - repo := repository.NewMockRepoClock() - - rene, err := 
identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewCreateOp(rene, unix, "title", "message", nil) - - data, err := json.Marshal(before) - require.NoError(t, err) - - var after CreateOperation - err = json.Unmarshal(data, &after) - require.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - require.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - require.Equal(t, before, &after) -} diff --git a/migration3/after/bug/op_edit_comment_test.go b/migration3/after/bug/op_edit_comment_test.go deleted file mode 100644 index 1416862..0000000 --- a/migration3/after/bug/op_edit_comment_test.go +++ /dev/null @@ -1,108 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func TestEdit(t *testing.T) { - snapshot := Snapshot{} - - repo := repository.NewMockRepoForTest() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - - create := NewCreateOp(rene, unix, "title", "create", nil) - create.Apply(&snapshot) - - id1 := create.Id() - require.NoError(t, id1.Validate()) - - comment1 := NewAddCommentOp(rene, unix, "comment 1", nil) - comment1.Apply(&snapshot) - - id2 := comment1.Id() - require.NoError(t, id2.Validate()) - - // add another unrelated op in between - setTitle := NewSetTitleOp(rene, unix, "edited title", "title") - setTitle.Apply(&snapshot) - - comment2 := NewAddCommentOp(rene, unix, "comment 2", nil) - comment2.Apply(&snapshot) - - id3 := comment2.Id() - require.NoError(t, id3.Validate()) - - edit := NewEditCommentOp(rene, unix, id1, "create edited", nil) - 
edit.Apply(&snapshot) - - require.Equal(t, len(snapshot.Timeline), 4) - require.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) - require.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 1) - require.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1) - require.Equal(t, snapshot.Comments[0].Message, "create edited") - require.Equal(t, snapshot.Comments[1].Message, "comment 1") - require.Equal(t, snapshot.Comments[2].Message, "comment 2") - - edit2 := NewEditCommentOp(rene, unix, id2, "comment 1 edited", nil) - edit2.Apply(&snapshot) - - require.Equal(t, len(snapshot.Timeline), 4) - require.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) - require.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2) - require.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1) - require.Equal(t, snapshot.Comments[0].Message, "create edited") - require.Equal(t, snapshot.Comments[1].Message, "comment 1 edited") - require.Equal(t, snapshot.Comments[2].Message, "comment 2") - - edit3 := NewEditCommentOp(rene, unix, id3, "comment 2 edited", nil) - edit3.Apply(&snapshot) - - require.Equal(t, len(snapshot.Timeline), 4) - require.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) - require.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2) - require.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 2) - require.Equal(t, snapshot.Comments[0].Message, "create edited") - require.Equal(t, snapshot.Comments[1].Message, "comment 1 edited") - require.Equal(t, snapshot.Comments[2].Message, "comment 2 edited") -} - -func TestEditCommentSerialize(t *testing.T) { - repo := repository.NewMockRepoForTest() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - before := 
NewEditCommentOp(rene, unix, "target", "message", nil) - - data, err := json.Marshal(before) - require.NoError(t, err) - - var after EditCommentOperation - err = json.Unmarshal(data, &after) - require.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - require.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - require.Equal(t, before, &after) -} diff --git a/migration3/after/bug/op_label_change_test.go b/migration3/after/bug/op_label_change_test.go deleted file mode 100644 index 01c6aa6..0000000 --- a/migration3/after/bug/op_label_change_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func TestLabelChangeSerialize(t *testing.T) { - repo := repository.NewMockRepoForTest() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}) - - data, err := json.Marshal(before) - require.NoError(t, err) - - var after LabelChangeOperation - err = json.Unmarshal(data, &after) - require.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - require.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - require.Equal(t, before, &after) -} diff --git a/migration3/after/bug/op_noop_test.go b/migration3/after/bug/op_noop_test.go deleted file mode 100644 index aa5e878..0000000 --- a/migration3/after/bug/op_noop_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - 
"github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" - - "github.com/stretchr/testify/assert" -) - -func TestNoopSerialize(t *testing.T) { - repo := repository.NewMockRepoClock() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewNoOpOp(rene, unix) - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after NoOpOperation - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - assert.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - assert.Equal(t, before, &after) -} diff --git a/migration3/after/bug/op_set_metadata_test.go b/migration3/after/bug/op_set_metadata_test.go deleted file mode 100644 index 353d0d3..0000000 --- a/migration3/after/bug/op_set_metadata_test.go +++ /dev/null @@ -1,127 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" - - "github.com/stretchr/testify/require" -) - -func TestSetMetadata(t *testing.T) { - snapshot := Snapshot{} - - repo := repository.NewMockRepoClock() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - - unix := time.Now().Unix() - - create := NewCreateOp(rene, unix, "title", "create", nil) - create.SetMetadata("key", "value") - create.Apply(&snapshot) - snapshot.Operations = append(snapshot.Operations, create) - - id1 := create.Id() - require.NoError(t, id1.Validate()) - - comment := NewAddCommentOp(rene, unix, "comment", nil) - comment.SetMetadata("key2", "value2") - comment.Apply(&snapshot) - snapshot.Operations = append(snapshot.Operations, comment) - - id2 := 
comment.Id() - require.NoError(t, id2.Validate()) - - op1 := NewSetMetadataOp(rene, unix, id1, map[string]string{ - "key": "override", - "key2": "value", - }) - - op1.Apply(&snapshot) - snapshot.Operations = append(snapshot.Operations, op1) - - createMetadata := snapshot.Operations[0].AllMetadata() - require.Equal(t, len(createMetadata), 2) - // original key is not overrided - require.Equal(t, createMetadata["key"], "value") - // new key is set - require.Equal(t, createMetadata["key2"], "value") - - commentMetadata := snapshot.Operations[1].AllMetadata() - require.Equal(t, len(commentMetadata), 1) - require.Equal(t, commentMetadata["key2"], "value2") - - op2 := NewSetMetadataOp(rene, unix, id2, map[string]string{ - "key2": "value", - "key3": "value3", - }) - - op2.Apply(&snapshot) - snapshot.Operations = append(snapshot.Operations, op2) - - createMetadata = snapshot.Operations[0].AllMetadata() - require.Equal(t, len(createMetadata), 2) - require.Equal(t, createMetadata["key"], "value") - require.Equal(t, createMetadata["key2"], "value") - - commentMetadata = snapshot.Operations[1].AllMetadata() - require.Equal(t, len(commentMetadata), 2) - // original key is not overrided - require.Equal(t, commentMetadata["key2"], "value2") - // new key is set - require.Equal(t, commentMetadata["key3"], "value3") - - op3 := NewSetMetadataOp(rene, unix, id1, map[string]string{ - "key": "override", - "key2": "override", - }) - - op3.Apply(&snapshot) - snapshot.Operations = append(snapshot.Operations, op3) - - createMetadata = snapshot.Operations[0].AllMetadata() - require.Equal(t, len(createMetadata), 2) - // original key is not overrided - require.Equal(t, createMetadata["key"], "value") - // previously set key is not overrided - require.Equal(t, createMetadata["key2"], "value") - - commentMetadata = snapshot.Operations[1].AllMetadata() - require.Equal(t, len(commentMetadata), 2) - require.Equal(t, commentMetadata["key2"], "value2") - require.Equal(t, commentMetadata["key3"], 
"value3") -} - -func TestSetMetadataSerialize(t *testing.T) { - repo := repository.NewMockRepoClock() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewSetMetadataOp(rene, unix, "message", map[string]string{ - "key1": "value1", - "key2": "value2", - }) - - data, err := json.Marshal(before) - require.NoError(t, err) - - var after SetMetadataOperation - err = json.Unmarshal(data, &after) - require.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - require.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - require.Equal(t, before, &after) -} diff --git a/migration3/after/bug/op_set_status_test.go b/migration3/after/bug/op_set_status_test.go deleted file mode 100644 index 56a8bd2..0000000 --- a/migration3/after/bug/op_set_status_test.go +++ /dev/null @@ -1,38 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func TestSetStatusSerialize(t *testing.T) { - repo := repository.NewMockRepoClock() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewSetStatusOp(rene, unix, ClosedStatus) - - data, err := json.Marshal(before) - require.NoError(t, err) - - var after SetStatusOperation - err = json.Unmarshal(data, &after) - require.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - require.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - require.Equal(t, before, &after) -} diff --git a/migration3/after/bug/op_set_title_test.go b/migration3/after/bug/op_set_title_test.go deleted file mode 100644 
index 26e05bd..0000000 --- a/migration3/after/bug/op_set_title_test.go +++ /dev/null @@ -1,38 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func TestSetTitleSerialize(t *testing.T) { - repo := repository.NewMockRepoClock() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewSetTitleOp(rene, unix, "title", "was") - - data, err := json.Marshal(before) - require.NoError(t, err) - - var after SetTitleOperation - err = json.Unmarshal(data, &after) - require.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - require.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - require.Equal(t, before, &after) -} diff --git a/migration3/after/bug/operation_iterator_test.go b/migration3/after/bug/operation_iterator_test.go deleted file mode 100644 index f932e1b..0000000 --- a/migration3/after/bug/operation_iterator_test.go +++ /dev/null @@ -1,79 +0,0 @@ -package bug - -import ( - "fmt" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func ExampleOperationIterator() { - b := NewBug() - - // add operations - - it := NewOperationIterator(b) - - for it.Next() { - // do something with each operations - _ = it.Value() - } -} - -func TestOpIterator(t *testing.T) { - repo := repository.NewMockRepoForTest() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - - createOp := NewCreateOp(rene, 
unix, "title", "message", nil) - addCommentOp := NewAddCommentOp(rene, unix, "message2", nil) - setStatusOp := NewSetStatusOp(rene, unix, ClosedStatus) - labelChangeOp := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}) - - var i int - genTitleOp := func() Operation { - i++ - return NewSetTitleOp(rene, unix, fmt.Sprintf("title%d", i), "") - } - - bug1 := NewBug() - - // first pack - bug1.Append(createOp) - bug1.Append(addCommentOp) - bug1.Append(setStatusOp) - bug1.Append(labelChangeOp) - err = bug1.Commit(repo) - require.NoError(t, err) - - // second pack - bug1.Append(genTitleOp()) - bug1.Append(genTitleOp()) - bug1.Append(genTitleOp()) - err = bug1.Commit(repo) - require.NoError(t, err) - - // staging - bug1.Append(genTitleOp()) - bug1.Append(genTitleOp()) - bug1.Append(genTitleOp()) - - it := NewOperationIterator(bug1) - - counter := 0 - for it.Next() { - _ = it.Value() - counter++ - } - - require.Equal(t, 10, counter) -} diff --git a/migration3/after/bug/operation_pack_test.go b/migration3/after/bug/operation_pack_test.go deleted file mode 100644 index 51470f3..0000000 --- a/migration3/after/bug/operation_pack_test.go +++ /dev/null @@ -1,78 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func TestOperationPackSerialize(t *testing.T) { - opp := &OperationPack{} - - repo := repository.NewMockRepoClock() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - - createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1") - addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil) - setStatusOp := NewSetStatusOp(rene, time.Now().Unix(), ClosedStatus) - labelChangeOp := 
NewLabelChangeOperation(rene, time.Now().Unix(), []Label{"added"}, []Label{"removed"}) - - opp.Append(createOp) - opp.Append(setTitleOp) - opp.Append(addCommentOp) - opp.Append(setStatusOp) - opp.Append(labelChangeOp) - - opMeta := NewSetTitleOp(rene, time.Now().Unix(), "title3", "title2") - opMeta.SetMetadata("key", "value") - opp.Append(opMeta) - - require.Equal(t, 1, len(opMeta.Metadata)) - - opFile := NewAddCommentOp(rene, time.Now().Unix(), "message", []repository.Hash{ - "abcdef", - "ghijkl", - }) - opp.Append(opFile) - - require.Equal(t, 2, len(opFile.Files)) - - data, err := json.Marshal(opp) - require.NoError(t, err) - - var opp2 *OperationPack - err = json.Unmarshal(data, &opp2) - require.NoError(t, err) - - ensureIds(opp) - ensureAuthors(t, opp, opp2) - - require.Equal(t, opp, opp2) -} - -func ensureIds(opp *OperationPack) { - for _, op := range opp.Operations { - op.Id() - } -} - -func ensureAuthors(t *testing.T, opp1 *OperationPack, opp2 *OperationPack) { - require.Equal(t, len(opp1.Operations), len(opp2.Operations)) - for i := 0; i < len(opp1.Operations); i++ { - op1 := opp1.Operations[i] - op2 := opp2.Operations[i] - - // ensure we have equivalent authors (IdentityStub vs Identity) then - // enforce equality - require.Equal(t, op1.base().Author.Id(), op2.base().Author.Id()) - op1.base().Author = op2.base().Author - } -} diff --git a/migration3/after/bug/operation_test.go b/migration3/after/bug/operation_test.go deleted file mode 100644 index 1d4aa65..0000000 --- a/migration3/after/bug/operation_test.go +++ /dev/null @@ -1,133 +0,0 @@ -package bug - -import ( - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func TestValidate(t *testing.T) { - repo := repository.NewMockRepoClock() - - makeIdentity := func(t *testing.T, name, email string) *identity.Identity { - i, err := 
identity.NewIdentity(repo, name, email) - require.NoError(t, err) - return i - } - - rene := makeIdentity(t, "René Descartes", "rene@descartes.fr") - - unix := time.Now().Unix() - - good := []Operation{ - NewCreateOp(rene, unix, "title", "message", nil), - NewSetTitleOp(rene, unix, "title2", "title1"), - NewAddCommentOp(rene, unix, "message2", nil), - NewSetStatusOp(rene, unix, ClosedStatus), - NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}), - } - - for _, op := range good { - if err := op.Validate(); err != nil { - t.Fatal(err) - } - } - - bad := []Operation{ - // opbase - NewSetStatusOp(makeIdentity(t, "", "rene@descartes.fr"), unix, ClosedStatus), - NewSetStatusOp(makeIdentity(t, "René Descartes\u001b", "rene@descartes.fr"), unix, ClosedStatus), - NewSetStatusOp(makeIdentity(t, "René Descartes", "rene@descartes.fr\u001b"), unix, ClosedStatus), - NewSetStatusOp(makeIdentity(t, "René \nDescartes", "rene@descartes.fr"), unix, ClosedStatus), - NewSetStatusOp(makeIdentity(t, "René Descartes", "rene@\ndescartes.fr"), unix, ClosedStatus), - &CreateOperation{OpBase: OpBase{ - Author: rene, - UnixTime: 0, - OperationType: CreateOp, - }, - Title: "title", - Message: "message", - }, - - NewCreateOp(rene, unix, "multi\nline", "message", nil), - NewCreateOp(rene, unix, "title", "message", []repository.Hash{repository.Hash("invalid")}), - NewCreateOp(rene, unix, "title\u001b", "message", nil), - NewCreateOp(rene, unix, "title", "message\u001b", nil), - NewSetTitleOp(rene, unix, "multi\nline", "title1"), - NewSetTitleOp(rene, unix, "title", "multi\nline"), - NewSetTitleOp(rene, unix, "title\u001b", "title2"), - NewSetTitleOp(rene, unix, "title", "title2\u001b"), - NewAddCommentOp(rene, unix, "message\u001b", nil), - NewAddCommentOp(rene, unix, "message", []repository.Hash{repository.Hash("invalid")}), - NewSetStatusOp(rene, unix, 1000), - NewSetStatusOp(rene, unix, 0), - NewLabelChangeOperation(rene, unix, []Label{}, []Label{}), - 
NewLabelChangeOperation(rene, unix, []Label{"multi\nline"}, []Label{}), - } - - for i, op := range bad { - if err := op.Validate(); err == nil { - t.Fatal("validation should have failed", i, op) - } - } -} - -func TestMetadata(t *testing.T) { - repo := repository.NewMockRepoClock() - - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - - op := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - - op.SetMetadata("key", "value") - - val, ok := op.GetMetadata("key") - require.True(t, ok) - require.Equal(t, val, "value") -} - -func TestID(t *testing.T) { - repo := repository.CreateGoGitTestRepo(false) - defer repository.CleanupTestRepos(repo) - - repos := []repository.ClockedRepo{ - repository.NewMockRepoForTest(), - repo, - } - - for _, repo := range repos { - rene, err := identity.NewIdentity(repo, "René Descartes", "rene@descartes.fr") - require.NoError(t, err) - err = rene.Commit(repo) - require.NoError(t, err) - - b, op, err := Create(rene, time.Now().Unix(), "title", "message") - require.NoError(t, err) - - id1 := op.Id() - require.NoError(t, id1.Validate()) - - err = b.Commit(repo) - require.NoError(t, err) - - op2 := b.FirstOp() - - id2 := op2.Id() - require.NoError(t, id2.Validate()) - require.Equal(t, id1, id2) - - b2, err := ReadLocal(repo, b.Id()) - require.NoError(t, err) - - op3 := b2.FirstOp() - - id3 := op3.Id() - require.NoError(t, id3.Validate()) - require.Equal(t, id1, id3) - } -} diff --git a/migration3/after/identity/identity_actions_test.go b/migration3/after/identity/identity_actions_test.go deleted file mode 100644 index 2923d48..0000000 --- a/migration3/after/identity/identity_actions_test.go +++ /dev/null @@ -1,158 +0,0 @@ -package identity - -import ( - "testing" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -func TestPushPull(t *testing.T) { - repoA, repoB, remote := repository.SetupReposAndRemote() 
- defer repository.CleanupTestRepos(repoA, repoB, remote) - - identity1, err := NewIdentity(repoA, "name1", "email1") - require.NoError(t, err) - err = identity1.Commit(repoA) - require.NoError(t, err) - - // A --> remote --> B - _, err = Push(repoA, "origin") - require.NoError(t, err) - - err = Pull(repoB, "origin") - require.NoError(t, err) - - identities := allIdentities(t, ReadAllLocal(repoB)) - - if len(identities) != 1 { - t.Fatal("Unexpected number of bugs") - } - - // B --> remote --> A - identity2, err := NewIdentity(repoB, "name2", "email2") - require.NoError(t, err) - err = identity2.Commit(repoB) - require.NoError(t, err) - - _, err = Push(repoB, "origin") - require.NoError(t, err) - - err = Pull(repoA, "origin") - require.NoError(t, err) - - identities = allIdentities(t, ReadAllLocal(repoA)) - - if len(identities) != 2 { - t.Fatal("Unexpected number of bugs") - } - - // Update both - - err = identity1.Mutate(repoA, func(orig *Mutator) { - orig.Name = "name1b" - orig.Email = "email1b" - }) - require.NoError(t, err) - err = identity1.Commit(repoA) - require.NoError(t, err) - - err = identity2.Mutate(repoB, func(orig *Mutator) { - orig.Name = "name2b" - orig.Email = "email2b" - }) - require.NoError(t, err) - err = identity2.Commit(repoB) - require.NoError(t, err) - - // A --> remote --> B - - _, err = Push(repoA, "origin") - require.NoError(t, err) - - err = Pull(repoB, "origin") - require.NoError(t, err) - - identities = allIdentities(t, ReadAllLocal(repoB)) - - if len(identities) != 2 { - t.Fatal("Unexpected number of bugs") - } - - // B --> remote --> A - - _, err = Push(repoB, "origin") - require.NoError(t, err) - - err = Pull(repoA, "origin") - require.NoError(t, err) - - identities = allIdentities(t, ReadAllLocal(repoA)) - - if len(identities) != 2 { - t.Fatal("Unexpected number of bugs") - } - - // Concurrent update - - err = identity1.Mutate(repoA, func(orig *Mutator) { - orig.Name = "name1c" - orig.Email = "email1c" - }) - require.NoError(t, err) 
- err = identity1.Commit(repoA) - require.NoError(t, err) - - identity1B, err := ReadLocal(repoB, identity1.Id()) - require.NoError(t, err) - - err = identity1B.Mutate(repoB, func(orig *Mutator) { - orig.Name = "name1concurrent" - orig.Email = "name1concurrent" - }) - require.NoError(t, err) - err = identity1B.Commit(repoB) - require.NoError(t, err) - - // A --> remote --> B - - _, err = Push(repoA, "origin") - require.NoError(t, err) - - // Pulling a non-fast-forward update should fail - err = Pull(repoB, "origin") - require.Error(t, err) - - identities = allIdentities(t, ReadAllLocal(repoB)) - - if len(identities) != 2 { - t.Fatal("Unexpected number of bugs") - } - - // B --> remote --> A - - // Pushing a non-fast-forward update should fail - _, err = Push(repoB, "origin") - require.Error(t, err) - - err = Pull(repoA, "origin") - require.NoError(t, err) - - identities = allIdentities(t, ReadAllLocal(repoA)) - - if len(identities) != 2 { - t.Fatal("Unexpected number of bugs") - } -} - -func allIdentities(t testing.TB, identities <-chan StreamedIdentity) []*Identity { - var result []*Identity - for streamed := range identities { - if streamed.Err != nil { - t.Fatal(streamed.Err) - } - result = append(result, streamed.Identity) - } - return result -} diff --git a/migration3/after/identity/identity_stub_test.go b/migration3/after/identity/identity_stub_test.go deleted file mode 100644 index b01a718..0000000 --- a/migration3/after/identity/identity_stub_test.go +++ /dev/null @@ -1,26 +0,0 @@ -package identity - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestIdentityStubSerialize(t *testing.T) { - before := &IdentityStub{ - id: "id1234", - } - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after IdentityStub - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the Id - before.Id() - - assert.Equal(t, before, &after) -} diff --git 
a/migration3/after/identity/identity_test.go b/migration3/after/identity/identity_test.go deleted file mode 100644 index fabafde..0000000 --- a/migration3/after/identity/identity_test.go +++ /dev/null @@ -1,248 +0,0 @@ -package identity - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" -) - -// Test the commit and load of an Identity with multiple versions -func TestIdentityCommitLoad(t *testing.T) { - repo := makeIdentityTestRepo(t) - - // single version - - identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com") - require.NoError(t, err) - - idBeforeCommit := identity.Id() - - err = identity.Commit(repo) - require.NoError(t, err) - - commitsAreSet(t, identity) - require.NotEmpty(t, identity.Id()) - require.Equal(t, idBeforeCommit, identity.Id()) - require.Equal(t, idBeforeCommit, identity.versions[0].Id()) - - loaded, err := ReadLocal(repo, identity.Id()) - require.NoError(t, err) - commitsAreSet(t, loaded) - require.Equal(t, identity, loaded) - - // multiple versions - - identity, err = NewIdentityFull(repo, "René Descartes", "rene.descartes@example.com", "", "", []*Key{{PubKey: "pubkeyA"}}) - require.NoError(t, err) - - idBeforeCommit = identity.Id() - - err = identity.Mutate(repo, func(orig *Mutator) { - orig.Keys = []*Key{{PubKey: "pubkeyB"}} - }) - require.NoError(t, err) - - err = identity.Mutate(repo, func(orig *Mutator) { - orig.Keys = []*Key{{PubKey: "pubkeyC"}} - }) - require.NoError(t, err) - - require.Equal(t, idBeforeCommit, identity.Id()) - - err = identity.Commit(repo) - require.NoError(t, err) - - commitsAreSet(t, identity) - require.NotEmpty(t, identity.Id()) - require.Equal(t, idBeforeCommit, identity.Id()) - require.Equal(t, idBeforeCommit, identity.versions[0].Id()) - - loaded, err = ReadLocal(repo, identity.Id()) - require.NoError(t, err) - commitsAreSet(t, loaded) - require.Equal(t, identity, loaded) - - 
// add more version - - err = identity.Mutate(repo, func(orig *Mutator) { - orig.Email = "rene@descartes.com" - orig.Keys = []*Key{{PubKey: "pubkeyD"}} - }) - require.NoError(t, err) - - err = identity.Mutate(repo, func(orig *Mutator) { - orig.Email = "rene@descartes.com" - orig.Keys = []*Key{{PubKey: "pubkeyD"}, {PubKey: "pubkeyE"}} - }) - require.NoError(t, err) - - err = identity.Commit(repo) - require.NoError(t, err) - - commitsAreSet(t, identity) - require.NotEmpty(t, identity.Id()) - require.Equal(t, idBeforeCommit, identity.Id()) - require.Equal(t, idBeforeCommit, identity.versions[0].Id()) - - loaded, err = ReadLocal(repo, identity.Id()) - require.NoError(t, err) - commitsAreSet(t, loaded) - require.Equal(t, identity, loaded) -} - -func TestIdentityMutate(t *testing.T) { - repo := makeIdentityTestRepo(t) - - identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com") - require.NoError(t, err) - - require.Len(t, identity.versions, 1) - - err = identity.Mutate(repo, func(orig *Mutator) { - orig.Email = "rene@descartes.fr" - orig.Name = "René" - orig.Login = "rene" - }) - require.NoError(t, err) - - require.Len(t, identity.versions, 2) - require.Equal(t, identity.Email(), "rene@descartes.fr") - require.Equal(t, identity.Name(), "René") - require.Equal(t, identity.Login(), "rene") -} - -func commitsAreSet(t *testing.T, identity *Identity) { - for _, version := range identity.versions { - require.NotEmpty(t, version.commitHash) - } -} - -// Test that the correct crypto keys are returned for a given lamport time -func TestIdentity_ValidKeysAtTime(t *testing.T) { - identity := Identity{ - versions: []*version{ - { - times: map[string]lamport.Time{"foo": 100}, - keys: []*Key{ - {PubKey: "pubkeyA"}, - }, - }, - { - times: map[string]lamport.Time{"foo": 200}, - keys: []*Key{ - {PubKey: "pubkeyB"}, - }, - }, - { - times: map[string]lamport.Time{"foo": 201}, - keys: []*Key{ - {PubKey: "pubkeyC"}, - }, - }, - { - times: 
map[string]lamport.Time{"foo": 201}, - keys: []*Key{ - {PubKey: "pubkeyD"}, - }, - }, - { - times: map[string]lamport.Time{"foo": 300}, - keys: []*Key{ - {PubKey: "pubkeyE"}, - }, - }, - }, - } - - require.Nil(t, identity.ValidKeysAtTime("foo", 10)) - require.Equal(t, identity.ValidKeysAtTime("foo", 100), []*Key{{PubKey: "pubkeyA"}}) - require.Equal(t, identity.ValidKeysAtTime("foo", 140), []*Key{{PubKey: "pubkeyA"}}) - require.Equal(t, identity.ValidKeysAtTime("foo", 200), []*Key{{PubKey: "pubkeyB"}}) - require.Equal(t, identity.ValidKeysAtTime("foo", 201), []*Key{{PubKey: "pubkeyD"}}) - require.Equal(t, identity.ValidKeysAtTime("foo", 202), []*Key{{PubKey: "pubkeyD"}}) - require.Equal(t, identity.ValidKeysAtTime("foo", 300), []*Key{{PubKey: "pubkeyE"}}) - require.Equal(t, identity.ValidKeysAtTime("foo", 3000), []*Key{{PubKey: "pubkeyE"}}) -} - -// Test the immutable or mutable metadata search -func TestMetadata(t *testing.T) { - repo := makeIdentityTestRepo(t) - - identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com") - require.NoError(t, err) - - identity.SetMetadata("key1", "value1") - assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") - assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value1") - - err = identity.Commit(repo) - require.NoError(t, err) - - assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") - assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value1") - - // try override - err = identity.Mutate(repo, func(orig *Mutator) { - orig.Email = "rene@descartes.fr" - }) - require.NoError(t, err) - - identity.SetMetadata("key1", "value2") - assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") - assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value2") - - err = identity.Commit(repo) - require.NoError(t, err) - - // reload - loaded, err := ReadLocal(repo, identity.Id()) - require.NoError(t, err) - - assertHasKeyValue(t, loaded.ImmutableMetadata(), 
"key1", "value1") - assertHasKeyValue(t, loaded.MutableMetadata(), "key1", "value2") - - // set metadata after commit - versionCount := len(identity.versions) - identity.SetMetadata("foo", "bar") - require.True(t, identity.NeedCommit()) - require.Len(t, identity.versions, versionCount+1) - - err = identity.Commit(repo) - require.NoError(t, err) - require.Len(t, identity.versions, versionCount+1) -} - -func assertHasKeyValue(t *testing.T, metadata map[string]string, key, value string) { - val, ok := metadata[key] - require.True(t, ok) - require.Equal(t, val, value) -} - -func TestJSON(t *testing.T) { - repo := makeIdentityTestRepo(t) - - identity, err := NewIdentity(repo, "René Descartes", "rene.descartes@example.com") - require.NoError(t, err) - - // commit to make sure we have an Id - err = identity.Commit(repo) - require.NoError(t, err) - require.NotEmpty(t, identity.Id()) - - // serialize - data, err := json.Marshal(identity) - require.NoError(t, err) - - // deserialize, got a IdentityStub with the same id - var i Interface - i, err = UnmarshalJSON(data) - require.NoError(t, err) - require.Equal(t, identity.Id(), i.Id()) - - // make sure we can load the identity properly - i, err = ReadLocal(repo, i.Id()) - require.NoError(t, err) -} diff --git a/migration3/after/identity/version_test.go b/migration3/after/identity/version_test.go deleted file mode 100644 index 2fa5b8c..0000000 --- a/migration3/after/identity/version_test.go +++ /dev/null @@ -1,84 +0,0 @@ -package identity - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/after/entity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" - "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" -) - -func makeIdentityTestRepo(t *testing.T) repository.ClockedRepo { - repo := repository.NewMockRepoForTest() - - clock1, err := 
repo.GetOrCreateClock("foo") - require.NoError(t, err) - err = clock1.Witness(42) // clock goes to 43 - require.NoError(t, err) - - clock2, err := repo.GetOrCreateClock("bar") - require.NoError(t, err) - err = clock2.Witness(34) // clock goes to 35 - require.NoError(t, err) - - return repo -} - -func TestVersionSerialize(t *testing.T) { - repo := makeIdentityTestRepo(t) - - keys := []*Key{ - { - Fingerprint: "fingerprint1", - PubKey: "pubkey1", - }, - { - Fingerprint: "fingerprint2", - PubKey: "pubkey2", - }, - } - - before, err := newVersion(repo, "name", "email", "login", "avatarUrl", keys) - require.NoError(t, err) - - before.SetMetadata("key1", "value1") - before.SetMetadata("key2", "value2") - - expected := &version{ - id: entity.UnsetId, - name: "name", - email: "email", - login: "login", - avatarURL: "avatarUrl", - unixTime: time.Now().Unix(), - times: map[string]lamport.Time{ - "foo": 43, - "bar": 35, - }, - keys: keys, - nonce: before.nonce, - metadata: map[string]string{ - "key1": "value1", - "key2": "value2", - }, - } - - require.Equal(t, expected, before) - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after version - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // make sure we now have an Id - expected.Id() - - assert.Equal(t, expected, &after) -} diff --git a/migration3/after/repository/config_mem_test.go b/migration3/after/repository/config_mem_test.go deleted file mode 100644 index d9c3385..0000000 --- a/migration3/after/repository/config_mem_test.go +++ /dev/null @@ -1,7 +0,0 @@ -package repository - -import "testing" - -func TestNewMemConfig(t *testing.T) { - testConfig(t, NewMemConfig()) -} diff --git a/migration3/after/repository/config_test.go b/migration3/after/repository/config_test.go deleted file mode 100644 index 2a76354..0000000 --- a/migration3/after/repository/config_test.go +++ /dev/null @@ -1,54 +0,0 @@ -package repository - -import ( - "testing" - "time" - - 
"github.com/stretchr/testify/require" -) - -func TestMergedConfig(t *testing.T) { - local := NewMemConfig() - global := NewMemConfig() - merged := mergeConfig(local, global) - - require.NoError(t, global.StoreBool("bool", true)) - require.NoError(t, global.StoreString("string", "foo")) - require.NoError(t, global.StoreTimestamp("timestamp", time.Unix(1234, 0))) - - val1, err := merged.ReadBool("bool") - require.NoError(t, err) - require.Equal(t, val1, true) - - val2, err := merged.ReadString("string") - require.NoError(t, err) - require.Equal(t, val2, "foo") - - val3, err := merged.ReadTimestamp("timestamp") - require.NoError(t, err) - require.Equal(t, val3, time.Unix(1234, 0)) - - require.NoError(t, local.StoreBool("bool", false)) - require.NoError(t, local.StoreString("string", "bar")) - require.NoError(t, local.StoreTimestamp("timestamp", time.Unix(5678, 0))) - - val1, err = merged.ReadBool("bool") - require.NoError(t, err) - require.Equal(t, val1, false) - - val2, err = merged.ReadString("string") - require.NoError(t, err) - require.Equal(t, val2, "bar") - - val3, err = merged.ReadTimestamp("timestamp") - require.NoError(t, err) - require.Equal(t, val3, time.Unix(5678, 0)) - - all, err := merged.ReadAll("") - require.NoError(t, err) - require.Equal(t, all, map[string]string{ - "bool": "false", - "string": "bar", - "timestamp": "5678", - }) -} diff --git a/migration3/after/repository/git_test.go b/migration3/after/repository/git_test.go deleted file mode 100644 index 1b36fd4..0000000 --- a/migration3/after/repository/git_test.go +++ /dev/null @@ -1,10 +0,0 @@ -// Package repository contains helper methods for working with the Git repo. 
-package repository - -import ( - "testing" -) - -func TestGitRepo(t *testing.T) { - RepoTest(t, CreateTestRepo, CleanupTestRepos) -} diff --git a/migration3/after/repository/gogit_test.go b/migration3/after/repository/gogit_test.go deleted file mode 100644 index fba990d..0000000 --- a/migration3/after/repository/gogit_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package repository - -import ( - "io/ioutil" - "os" - "path" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNewGoGitRepo(t *testing.T) { - // Plain - plainRoot, err := ioutil.TempDir("", "") - require.NoError(t, err) - defer os.RemoveAll(plainRoot) - - _, err = InitGoGitRepo(plainRoot) - require.NoError(t, err) - plainGitDir := path.Join(plainRoot, ".git") - - // Bare - bareRoot, err := ioutil.TempDir("", "") - require.NoError(t, err) - defer os.RemoveAll(bareRoot) - - _, err = InitBareGoGitRepo(bareRoot) - require.NoError(t, err) - bareGitDir := bareRoot - - tests := []struct { - inPath string - outPath string - err bool - }{ - // errors - {"/", "", true}, - // parent dir of a repo - {filepath.Dir(plainRoot), "", true}, - - // Plain repo - {plainRoot, plainGitDir, false}, - {plainGitDir, plainGitDir, false}, - {path.Join(plainGitDir, "objects"), plainGitDir, false}, - - // Bare repo - {bareRoot, bareGitDir, false}, - {bareGitDir, bareGitDir, false}, - {path.Join(bareGitDir, "objects"), bareGitDir, false}, - } - - for i, tc := range tests { - r, err := NewGoGitRepo(tc.inPath, nil) - - if tc.err { - require.Error(t, err, i) - } else { - require.NoError(t, err, i) - assert.Equal(t, filepath.ToSlash(tc.outPath), filepath.ToSlash(r.GetPath()), i) - } - } -} - -func TestGoGitRepo(t *testing.T) { - RepoTest(t, CreateGoGitTestRepo, CleanupTestRepos) -} diff --git a/migration3/after/repository/mock_repo_test.go b/migration3/after/repository/mock_repo_test.go deleted file mode 100644 index b56b94f..0000000 --- 
a/migration3/after/repository/mock_repo_test.go +++ /dev/null @@ -1,10 +0,0 @@ -package repository - -import "testing" - -func TestMockRepo(t *testing.T) { - creator := func(bare bool) TestedRepo { return NewMockRepoForTest() } - cleaner := func(repos ...Repo) {} - - RepoTest(t, creator, cleaner) -} diff --git a/migration3/after/repository/tree_entry_test.go b/migration3/after/repository/tree_entry_test.go deleted file mode 100644 index d57433f..0000000 --- a/migration3/after/repository/tree_entry_test.go +++ /dev/null @@ -1,31 +0,0 @@ -package repository - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestTreeEntryFormat(t *testing.T) { - entries := []TreeEntry{ - {Blob, Hash("a85730cf5287d40a1e32d3a671ba2296c73387cb"), "name"}, - {Tree, Hash("a85730cf5287d40a1e32d3a671ba2296c73387cb"), "name"}, - } - - for _, entry := range entries { - _ = entry.Format() - } -} - -func TestTreeEntryParse(t *testing.T) { - lines := []string{ - "100644 blob 1e5ffaffc67049635ba7b01f77143313503f1ca1 .gitignore", - "040000 tree 728421fea4168b874bc1a8aa409d6723ef445a4e bug", - } - - for _, line := range lines { - _, err := ParseTreeEntry(line) - assert.NoError(t, err) - } - -} diff --git a/migration3/after/util/lamport/mem_clock_test.go b/migration3/after/util/lamport/mem_clock_test.go deleted file mode 100644 index e01d2ec..0000000 --- a/migration3/after/util/lamport/mem_clock_test.go +++ /dev/null @@ -1,8 +0,0 @@ -package lamport - -import "testing" - -func TestMemClock(t *testing.T) { - c := NewMemClock() - testClock(t, c) -} diff --git a/migration3/after/util/lamport/persisted_clock_test.go b/migration3/after/util/lamport/persisted_clock_test.go deleted file mode 100644 index aacec3b..0000000 --- a/migration3/after/util/lamport/persisted_clock_test.go +++ /dev/null @@ -1,19 +0,0 @@ -package lamport - -import ( - "io/ioutil" - "path" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestPersistedClock(t *testing.T) { - dir, err := 
ioutil.TempDir("", "") - require.NoError(t, err) - - c, err := NewPersistedClock(path.Join(dir, "test-clock")) - require.NoError(t, err) - - testClock(t, c) -} diff --git a/migration3/before/bug/bug_actions_test.go b/migration3/before/bug/bug_actions_test.go deleted file mode 100644 index a6170d5..0000000 --- a/migration3/before/bug/bug_actions_test.go +++ /dev/null @@ -1,390 +0,0 @@ -package bug - -import ( - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" -) - -func TestPushPull(t *testing.T) { - repoA, repoB, remote := repository.SetupReposAndRemote() - defer repository.CleanupTestRepos(repoA, repoB, remote) - - reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := reneA.Commit(repoA) - require.NoError(t, err) - - bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") - require.NoError(t, err) - assert.True(t, bug1.NeedCommit()) - err = bug1.Commit(repoA) - require.NoError(t, err) - assert.False(t, bug1.NeedCommit()) - - // distribute the identity - _, err = identity.Push(repoA, "origin") - require.NoError(t, err) - err = identity.Pull(repoB, "origin") - require.NoError(t, err) - - // A --> remote --> B - _, err = Push(repoA, "origin") - require.NoError(t, err) - - err = Pull(repoB, "origin") - require.NoError(t, err) - - bugs := allBugs(t, ReadAllLocal(repoB)) - - if len(bugs) != 1 { - t.Fatal("Unexpected number of bugs") - } - - // B --> remote --> A - reneB, err := identity.ReadLocal(repoA, reneA.Id()) - require.NoError(t, err) - - bug2, _, err := Create(reneB, time.Now().Unix(), "bug2", "message") - require.NoError(t, err) - err = bug2.Commit(repoB) - require.NoError(t, err) - - _, err = Push(repoB, "origin") - require.NoError(t, err) - - err = Pull(repoA, "origin") - require.NoError(t, err) - - bugs = allBugs(t, 
ReadAllLocal(repoA)) - - if len(bugs) != 2 { - t.Fatal("Unexpected number of bugs") - } -} - -func allBugs(t testing.TB, bugs <-chan StreamedBug) []*Bug { - var result []*Bug - for streamed := range bugs { - if streamed.Err != nil { - t.Fatal(streamed.Err) - } - result = append(result, streamed.Bug) - } - return result -} - -func TestRebaseTheirs(t *testing.T) { - _RebaseTheirs(t) -} - -func BenchmarkRebaseTheirs(b *testing.B) { - for n := 0; n < b.N; n++ { - _RebaseTheirs(b) - } -} - -func _RebaseTheirs(t testing.TB) { - repoA, repoB, remote := repository.SetupReposAndRemote() - defer repository.CleanupTestRepos(repoA, repoB, remote) - - reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := reneA.Commit(repoA) - require.NoError(t, err) - - bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") - require.NoError(t, err) - assert.True(t, bug1.NeedCommit()) - err = bug1.Commit(repoA) - require.NoError(t, err) - assert.False(t, bug1.NeedCommit()) - - // distribute the identity - _, err = identity.Push(repoA, "origin") - require.NoError(t, err) - err = identity.Pull(repoB, "origin") - require.NoError(t, err) - - // A --> remote - - _, err = Push(repoA, "origin") - require.NoError(t, err) - - // remote --> B - err = Pull(repoB, "origin") - require.NoError(t, err) - - bug2, err := ReadLocal(repoB, bug1.Id()) - require.NoError(t, err) - assert.False(t, bug2.NeedCommit()) - - reneB, err := identity.ReadLocal(repoA, reneA.Id()) - require.NoError(t, err) - - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message2") - require.NoError(t, err) - assert.True(t, bug2.NeedCommit()) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message3") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message4") - require.NoError(t, err) - err = bug2.Commit(repoB) - require.NoError(t, err) - assert.False(t, bug2.NeedCommit()) - - // B --> remote - _, err = Push(repoB, "origin") - require.NoError(t, err) - - // 
remote --> A - err = Pull(repoA, "origin") - require.NoError(t, err) - - bugs := allBugs(t, ReadAllLocal(repoB)) - - if len(bugs) != 1 { - t.Fatal("Unexpected number of bugs") - } - - bug3, err := ReadLocal(repoA, bug1.Id()) - require.NoError(t, err) - - if nbOps(bug3) != 4 { - t.Fatal("Unexpected number of operations") - } -} - -func TestRebaseOurs(t *testing.T) { - _RebaseOurs(t) -} - -func BenchmarkRebaseOurs(b *testing.B) { - for n := 0; n < b.N; n++ { - _RebaseOurs(b) - } -} - -func _RebaseOurs(t testing.TB) { - repoA, repoB, remote := repository.SetupReposAndRemote() - defer repository.CleanupTestRepos(repoA, repoB, remote) - - reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := reneA.Commit(repoA) - require.NoError(t, err) - - bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - // distribute the identity - _, err = identity.Push(repoA, "origin") - require.NoError(t, err) - err = identity.Pull(repoB, "origin") - require.NoError(t, err) - - // A --> remote - _, err = Push(repoA, "origin") - require.NoError(t, err) - - // remote --> B - err = Pull(repoB, "origin") - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message2") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message3") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message4") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message5") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message6") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message7") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message8") - require.NoError(t, err) - _, 
err = AddComment(bug1, reneA, time.Now().Unix(), "message9") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message10") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - // remote --> A - err = Pull(repoA, "origin") - require.NoError(t, err) - - bugs := allBugs(t, ReadAllLocal(repoA)) - - if len(bugs) != 1 { - t.Fatal("Unexpected number of bugs") - } - - bug2, err := ReadLocal(repoA, bug1.Id()) - require.NoError(t, err) - - if nbOps(bug2) != 10 { - t.Fatal("Unexpected number of operations") - } -} - -func nbOps(b *Bug) int { - it := NewOperationIterator(b) - counter := 0 - for it.Next() { - counter++ - } - return counter -} - -func TestRebaseConflict(t *testing.T) { - _RebaseConflict(t) -} - -func BenchmarkRebaseConflict(b *testing.B) { - for n := 0; n < b.N; n++ { - _RebaseConflict(b) - } -} - -func _RebaseConflict(t testing.TB) { - repoA, repoB, remote := repository.SetupReposAndRemote() - defer repository.CleanupTestRepos(repoA, repoB, remote) - - reneA := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := reneA.Commit(repoA) - require.NoError(t, err) - - bug1, _, err := Create(reneA, time.Now().Unix(), "bug1", "message") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - // distribute the identity - _, err = identity.Push(repoA, "origin") - require.NoError(t, err) - err = identity.Pull(repoB, "origin") - require.NoError(t, err) - - // A --> remote - _, err = Push(repoA, "origin") - require.NoError(t, err) - - // remote --> B - err = Pull(repoB, "origin") - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message2") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message3") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message4") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, 
time.Now().Unix(), "message5") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message6") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message7") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message8") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message9") - require.NoError(t, err) - _, err = AddComment(bug1, reneA, time.Now().Unix(), "message10") - require.NoError(t, err) - err = bug1.Commit(repoA) - require.NoError(t, err) - - bug2, err := ReadLocal(repoB, bug1.Id()) - require.NoError(t, err) - - reneB, err := identity.ReadLocal(repoA, reneA.Id()) - require.NoError(t, err) - - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message11") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message12") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message13") - require.NoError(t, err) - err = bug2.Commit(repoB) - require.NoError(t, err) - - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message14") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message15") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message16") - require.NoError(t, err) - err = bug2.Commit(repoB) - require.NoError(t, err) - - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message17") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message18") - require.NoError(t, err) - _, err = AddComment(bug2, reneB, time.Now().Unix(), "message19") - require.NoError(t, err) - err = bug2.Commit(repoB) - require.NoError(t, err) - - // A --> remote - _, err = Push(repoA, "origin") - require.NoError(t, err) - - // remote --> B - err = Pull(repoB, "origin") - require.NoError(t, err) - - bugs := allBugs(t, ReadAllLocal(repoB)) - - if len(bugs) != 
1 { - t.Fatal("Unexpected number of bugs") - } - - bug3, err := ReadLocal(repoB, bug1.Id()) - require.NoError(t, err) - - if nbOps(bug3) != 19 { - t.Fatal("Unexpected number of operations") - } - - // B --> remote - _, err = Push(repoB, "origin") - require.NoError(t, err) - - // remote --> A - err = Pull(repoA, "origin") - require.NoError(t, err) - - bugs = allBugs(t, ReadAllLocal(repoA)) - - if len(bugs) != 1 { - t.Fatal("Unexpected number of bugs") - } - - bug4, err := ReadLocal(repoA, bug1.Id()) - require.NoError(t, err) - - if nbOps(bug4) != 19 { - t.Fatal("Unexpected number of operations") - } -} diff --git a/migration3/before/bug/bug_test.go b/migration3/before/bug/bug_test.go deleted file mode 100644 index dcc6bb8..0000000 --- a/migration3/before/bug/bug_test.go +++ /dev/null @@ -1,186 +0,0 @@ -package bug - -import ( - "fmt" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" -) - -func TestBugId(t *testing.T) { - mockRepo := repository.NewMockRepoForTest() - - bug1 := NewBug() - - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(mockRepo) - require.NoError(t, err) - - createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - - bug1.Append(createOp) - - err = bug1.Commit(mockRepo) - - if err != nil { - t.Fatal(err) - } - - bug1.Id() -} - -func TestBugValidity(t *testing.T) { - mockRepo := repository.NewMockRepoForTest() - - bug1 := NewBug() - - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(mockRepo) - require.NoError(t, err) - - createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - - if bug1.Validate() == nil { - t.Fatal("Empty bug should be invalid") - } - - bug1.Append(createOp) - - if bug1.Validate() != nil { - t.Fatal("Bug with just a CreateOp should be valid") - } - - err = 
bug1.Commit(mockRepo) - if err != nil { - t.Fatal(err) - } - - bug1.Append(createOp) - - if bug1.Validate() == nil { - t.Fatal("Bug with multiple CreateOp should be invalid") - } - - err = bug1.Commit(mockRepo) - if err == nil { - t.Fatal("Invalid bug should not commit") - } -} - -func TestBugCommitLoad(t *testing.T) { - repo := repository.NewMockRepoForTest() - - bug1 := NewBug() - - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1") - addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil) - - bug1.Append(createOp) - bug1.Append(setTitleOp) - - require.True(t, bug1.NeedCommit()) - - err = bug1.Commit(repo) - require.Nil(t, err) - require.False(t, bug1.NeedCommit()) - - bug2, err := ReadLocal(repo, bug1.Id()) - require.NoError(t, err) - equivalentBug(t, bug1, bug2) - - // add more op - - bug1.Append(addCommentOp) - - require.True(t, bug1.NeedCommit()) - - err = bug1.Commit(repo) - require.Nil(t, err) - require.False(t, bug1.NeedCommit()) - - bug3, err := ReadLocal(repo, bug1.Id()) - require.NoError(t, err) - equivalentBug(t, bug1, bug3) -} - -func equivalentBug(t *testing.T, expected, actual *Bug) { - require.Equal(t, len(expected.Packs), len(actual.Packs)) - - for i := range expected.Packs { - for j := range expected.Packs[i].Operations { - actual.Packs[i].Operations[j].base().id = expected.Packs[i].Operations[j].base().id - } - } - - require.Equal(t, expected, actual) -} - -func TestBugRemove(t *testing.T) { - repo := repository.CreateGoGitTestRepo(false) - remoteA := repository.CreateGoGitTestRepo(true) - remoteB := repository.CreateGoGitTestRepo(true) - defer repository.CleanupTestRepos(repo, remoteA, remoteB) - - err := repo.AddRemote("remoteA", "file://"+remoteA.GetPath()) - require.NoError(t, err) - - err = 
repo.AddRemote("remoteB", "file://"+remoteB.GetPath()) - require.NoError(t, err) - - // generate a bunch of bugs - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err = rene.Commit(repo) - require.NoError(t, err) - - for i := 0; i < 100; i++ { - b := NewBug() - createOp := NewCreateOp(rene, time.Now().Unix(), "title", fmt.Sprintf("message%v", i), nil) - b.Append(createOp) - err = b.Commit(repo) - require.NoError(t, err) - } - - // and one more for testing - b := NewBug() - createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - b.Append(createOp) - err = b.Commit(repo) - require.NoError(t, err) - - _, err = Push(repo, "remoteA") - require.NoError(t, err) - - _, err = Push(repo, "remoteB") - require.NoError(t, err) - - _, err = Fetch(repo, "remoteA") - require.NoError(t, err) - - _, err = Fetch(repo, "remoteB") - require.NoError(t, err) - - err = RemoveBug(repo, b.Id()) - require.NoError(t, err) - - _, err = ReadLocal(repo, b.Id()) - require.Error(t, ErrBugNotExist, err) - - _, err = ReadRemote(repo, "remoteA", b.Id()) - require.Error(t, ErrBugNotExist, err) - - _, err = ReadRemote(repo, "remoteB", b.Id()) - require.Error(t, ErrBugNotExist, err) - - ids, err := ListLocalIds(repo) - require.NoError(t, err) - require.Len(t, ids, 100) -} diff --git a/migration3/before/bug/label_test.go b/migration3/before/bug/label_test.go deleted file mode 100644 index 49401c4..0000000 --- a/migration3/before/bug/label_test.go +++ /dev/null @@ -1,35 +0,0 @@ -package bug - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -func TestLabelRGBA(t *testing.T) { - rgba := Label("test1").Color() - expected := LabelColor{R: 0, G: 150, B: 136, A: 255} - - require.Equal(t, expected, rgba) -} - -func TestLabelRGBASimilar(t *testing.T) { - rgba := Label("test2").Color() - expected := LabelColor{R: 3, G: 169, B: 244, A: 255} - - require.Equal(t, expected, rgba) -} - -func TestLabelRGBAReverse(t *testing.T) { - rgba := 
Label("tset").Color() - expected := LabelColor{R: 63, G: 81, B: 181, A: 255} - - require.Equal(t, expected, rgba) -} - -func TestLabelRGBAEqual(t *testing.T) { - color1 := Label("test").Color() - color2 := Label("test").Color() - - require.Equal(t, color1, color2) -} diff --git a/migration3/before/bug/op_add_comment_test.go b/migration3/before/bug/op_add_comment_test.go deleted file mode 100644 index 1743417..0000000 --- a/migration3/before/bug/op_add_comment_test.go +++ /dev/null @@ -1,39 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" -) - -func TestAddCommentSerialize(t *testing.T) { - repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewAddCommentOp(rene, unix, "message", nil) - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after AddCommentOperation - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - assert.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - assert.Equal(t, before, &after) -} diff --git a/migration3/before/bug/op_create_test.go b/migration3/before/bug/op_create_test.go deleted file mode 100644 index 7ba78d7..0000000 --- a/migration3/before/bug/op_create_test.go +++ /dev/null @@ -1,78 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" - "github.com/MichaelMure/git-bug-migration/migration3/before/util/timestamp" - 
"github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestCreate(t *testing.T) { - snapshot := Snapshot{} - - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - unix := time.Now().Unix() - - create := NewCreateOp(rene, unix, "title", "message", nil) - - create.Apply(&snapshot) - - id := create.Id() - assert.NoError(t, id.Validate()) - - comment := Comment{ - id: id, - Author: rene, - Message: "message", - UnixTime: timestamp.Timestamp(create.UnixTime), - } - - expected := Snapshot{ - Title: "title", - Comments: []Comment{ - comment, - }, - Author: rene, - Participants: []identity.Interface{rene}, - Actors: []identity.Interface{rene}, - CreateTime: create.Time(), - Timeline: []TimelineItem{ - &CreateTimelineItem{ - CommentTimelineItem: NewCommentTimelineItem(id, comment), - }, - }, - } - - assert.Equal(t, expected, snapshot) -} - -func TestCreateSerialize(t *testing.T) { - repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewCreateOp(rene, unix, "title", "message", nil) - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after CreateOperation - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - assert.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - assert.Equal(t, before, &after) -} diff --git a/migration3/before/bug/op_edit_comment_test.go b/migration3/before/bug/op_edit_comment_test.go deleted file mode 100644 index 754590f..0000000 --- a/migration3/before/bug/op_edit_comment_test.go +++ /dev/null @@ -1,105 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - 
"github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" -) - -func TestEdit(t *testing.T) { - snapshot := Snapshot{} - - repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - - create := NewCreateOp(rene, unix, "title", "create", nil) - create.Apply(&snapshot) - - id1 := create.Id() - require.NoError(t, id1.Validate()) - - comment1 := NewAddCommentOp(rene, unix, "comment 1", nil) - comment1.Apply(&snapshot) - - id2 := comment1.Id() - require.NoError(t, id2.Validate()) - - // add another unrelated op in between - setTitle := NewSetTitleOp(rene, unix, "edited title", "title") - setTitle.Apply(&snapshot) - - comment2 := NewAddCommentOp(rene, unix, "comment 2", nil) - comment2.Apply(&snapshot) - - id3 := comment2.Id() - require.NoError(t, id3.Validate()) - - edit := NewEditCommentOp(rene, unix, id1, "create edited", nil) - edit.Apply(&snapshot) - - assert.Equal(t, len(snapshot.Timeline), 4) - assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) - assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 1) - assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1) - assert.Equal(t, snapshot.Comments[0].Message, "create edited") - assert.Equal(t, snapshot.Comments[1].Message, "comment 1") - assert.Equal(t, snapshot.Comments[2].Message, "comment 2") - - edit2 := NewEditCommentOp(rene, unix, id2, "comment 1 edited", nil) - edit2.Apply(&snapshot) - - assert.Equal(t, len(snapshot.Timeline), 4) - assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) - assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2) - assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 1) - assert.Equal(t, snapshot.Comments[0].Message, "create 
edited") - assert.Equal(t, snapshot.Comments[1].Message, "comment 1 edited") - assert.Equal(t, snapshot.Comments[2].Message, "comment 2") - - edit3 := NewEditCommentOp(rene, unix, id3, "comment 2 edited", nil) - edit3.Apply(&snapshot) - - assert.Equal(t, len(snapshot.Timeline), 4) - assert.Equal(t, len(snapshot.Timeline[0].(*CreateTimelineItem).History), 2) - assert.Equal(t, len(snapshot.Timeline[1].(*AddCommentTimelineItem).History), 2) - assert.Equal(t, len(snapshot.Timeline[3].(*AddCommentTimelineItem).History), 2) - assert.Equal(t, snapshot.Comments[0].Message, "create edited") - assert.Equal(t, snapshot.Comments[1].Message, "comment 1 edited") - assert.Equal(t, snapshot.Comments[2].Message, "comment 2 edited") -} - -func TestEditCommentSerialize(t *testing.T) { - repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewEditCommentOp(rene, unix, "target", "message", nil) - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after EditCommentOperation - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - assert.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - assert.Equal(t, before, &after) -} diff --git a/migration3/before/bug/op_label_change_test.go b/migration3/before/bug/op_label_change_test.go deleted file mode 100644 index 0e4ef06..0000000 --- a/migration3/before/bug/op_label_change_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" - - "github.com/stretchr/testify/assert" -) - -func TestLabelChangeSerialize(t *testing.T) { - 
repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}) - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after LabelChangeOperation - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - assert.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - assert.Equal(t, before, &after) -} diff --git a/migration3/before/bug/op_noop_test.go b/migration3/before/bug/op_noop_test.go deleted file mode 100644 index 3cbdee4..0000000 --- a/migration3/before/bug/op_noop_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" - - "github.com/stretchr/testify/assert" -) - -func TestNoopSerialize(t *testing.T) { - repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewNoOpOp(rene, unix) - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after NoOpOperation - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - assert.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - assert.Equal(t, before, &after) -} diff --git a/migration3/before/bug/op_set_metadata_test.go b/migration3/before/bug/op_set_metadata_test.go deleted file mode 100644 index d099f9e..0000000 --- 
a/migration3/before/bug/op_set_metadata_test.go +++ /dev/null @@ -1,128 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestSetMetadata(t *testing.T) { - snapshot := Snapshot{} - - repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - - create := NewCreateOp(rene, unix, "title", "create", nil) - create.SetMetadata("key", "value") - create.Apply(&snapshot) - snapshot.Operations = append(snapshot.Operations, create) - - id1 := create.Id() - require.NoError(t, id1.Validate()) - - comment := NewAddCommentOp(rene, unix, "comment", nil) - comment.SetMetadata("key2", "value2") - comment.Apply(&snapshot) - snapshot.Operations = append(snapshot.Operations, comment) - - id2 := comment.Id() - require.NoError(t, id2.Validate()) - - op1 := NewSetMetadataOp(rene, unix, id1, map[string]string{ - "key": "override", - "key2": "value", - }) - - op1.Apply(&snapshot) - snapshot.Operations = append(snapshot.Operations, op1) - - createMetadata := snapshot.Operations[0].AllMetadata() - assert.Equal(t, len(createMetadata), 2) - // original key is not overrided - assert.Equal(t, createMetadata["key"], "value") - // new key is set - assert.Equal(t, createMetadata["key2"], "value") - - commentMetadata := snapshot.Operations[1].AllMetadata() - assert.Equal(t, len(commentMetadata), 1) - assert.Equal(t, commentMetadata["key2"], "value2") - - op2 := NewSetMetadataOp(rene, unix, id2, map[string]string{ - "key2": "value", - "key3": "value3", - }) - - op2.Apply(&snapshot) - snapshot.Operations = append(snapshot.Operations, op2) - - createMetadata = snapshot.Operations[0].AllMetadata() - 
assert.Equal(t, len(createMetadata), 2) - assert.Equal(t, createMetadata["key"], "value") - assert.Equal(t, createMetadata["key2"], "value") - - commentMetadata = snapshot.Operations[1].AllMetadata() - assert.Equal(t, len(commentMetadata), 2) - // original key is not overrided - assert.Equal(t, commentMetadata["key2"], "value2") - // new key is set - assert.Equal(t, commentMetadata["key3"], "value3") - - op3 := NewSetMetadataOp(rene, unix, id1, map[string]string{ - "key": "override", - "key2": "override", - }) - - op3.Apply(&snapshot) - snapshot.Operations = append(snapshot.Operations, op3) - - createMetadata = snapshot.Operations[0].AllMetadata() - assert.Equal(t, len(createMetadata), 2) - // original key is not overrided - assert.Equal(t, createMetadata["key"], "value") - // previously set key is not overrided - assert.Equal(t, createMetadata["key2"], "value") - - commentMetadata = snapshot.Operations[1].AllMetadata() - assert.Equal(t, len(commentMetadata), 2) - assert.Equal(t, commentMetadata["key2"], "value2") - assert.Equal(t, commentMetadata["key3"], "value3") -} - -func TestSetMetadataSerialize(t *testing.T) { - repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewSetMetadataOp(rene, unix, "message", map[string]string{ - "key1": "value1", - "key2": "value2", - }) - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after SetMetadataOperation - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - assert.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - assert.Equal(t, before, &after) -} diff --git a/migration3/before/bug/op_set_status_test.go b/migration3/before/bug/op_set_status_test.go deleted file mode 100644 index 18af44f..0000000 --- 
a/migration3/before/bug/op_set_status_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" - - "github.com/stretchr/testify/assert" -) - -func TestSetStatusSerialize(t *testing.T) { - repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewSetStatusOp(rene, unix, ClosedStatus) - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after SetStatusOperation - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - assert.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - assert.Equal(t, before, &after) -} diff --git a/migration3/before/bug/op_set_title_test.go b/migration3/before/bug/op_set_title_test.go deleted file mode 100644 index d3e1480..0000000 --- a/migration3/before/bug/op_set_title_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" - - "github.com/stretchr/testify/assert" -) - -func TestSetTitleSerialize(t *testing.T) { - repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - unix := time.Now().Unix() - before := NewSetTitleOp(rene, unix, "title", "was") - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after SetTitleOperation - err = json.Unmarshal(data, &after) - 
assert.NoError(t, err) - - // enforce creating the ID - before.Id() - - // Replace the identity stub with the real thing - assert.Equal(t, rene.Id(), after.base().Author.Id()) - after.Author = rene - - assert.Equal(t, before, &after) -} diff --git a/migration3/before/bug/operation_iterator_test.go b/migration3/before/bug/operation_iterator_test.go deleted file mode 100644 index 3661f16..0000000 --- a/migration3/before/bug/operation_iterator_test.go +++ /dev/null @@ -1,78 +0,0 @@ -package bug - -import ( - "fmt" - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" -) - -func ExampleOperationIterator() { - b := NewBug() - - // add operations - - it := NewOperationIterator(b) - - for it.Next() { - // do something with each operations - _ = it.Value() - } -} - -func TestOpIterator(t *testing.T) { - mockRepo := repository.NewMockRepoForTest() - - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(mockRepo) - require.NoError(t, err) - - unix := time.Now().Unix() - - createOp := NewCreateOp(rene, unix, "title", "message", nil) - addCommentOp := NewAddCommentOp(rene, unix, "message2", nil) - setStatusOp := NewSetStatusOp(rene, unix, ClosedStatus) - labelChangeOp := NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}) - - var i int - genTitleOp := func() Operation { - i++ - return NewSetTitleOp(rene, unix, fmt.Sprintf("title%d", i), "") - } - - bug1 := NewBug() - - // first pack - bug1.Append(createOp) - bug1.Append(addCommentOp) - bug1.Append(setStatusOp) - bug1.Append(labelChangeOp) - err = bug1.Commit(mockRepo) - require.NoError(t, err) - - // second pack - bug1.Append(genTitleOp()) - bug1.Append(genTitleOp()) - bug1.Append(genTitleOp()) - err = bug1.Commit(mockRepo) - require.NoError(t, err) - - // staging - bug1.Append(genTitleOp()) - 
bug1.Append(genTitleOp()) - bug1.Append(genTitleOp()) - - it := NewOperationIterator(bug1) - - counter := 0 - for it.Next() { - _ = it.Value() - counter++ - } - - require.Equal(t, 10, counter) -} diff --git a/migration3/before/bug/operation_pack_test.go b/migration3/before/bug/operation_pack_test.go deleted file mode 100644 index 23545c9..0000000 --- a/migration3/before/bug/operation_pack_test.go +++ /dev/null @@ -1,79 +0,0 @@ -package bug - -import ( - "encoding/json" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" -) - -func TestOperationPackSerialize(t *testing.T) { - opp := &OperationPack{} - - repo := repository.NewMockRepoForTest() - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - createOp := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - setTitleOp := NewSetTitleOp(rene, time.Now().Unix(), "title2", "title1") - addCommentOp := NewAddCommentOp(rene, time.Now().Unix(), "message2", nil) - setStatusOp := NewSetStatusOp(rene, time.Now().Unix(), ClosedStatus) - labelChangeOp := NewLabelChangeOperation(rene, time.Now().Unix(), []Label{"added"}, []Label{"removed"}) - - opp.Append(createOp) - opp.Append(setTitleOp) - opp.Append(addCommentOp) - opp.Append(setStatusOp) - opp.Append(labelChangeOp) - - opMeta := NewSetTitleOp(rene, time.Now().Unix(), "title3", "title2") - opMeta.SetMetadata("key", "value") - opp.Append(opMeta) - - assert.Equal(t, 1, len(opMeta.Metadata)) - - opFile := NewAddCommentOp(rene, time.Now().Unix(), "message", []repository.Hash{ - "abcdef", - "ghijkl", - }) - opp.Append(opFile) - - assert.Equal(t, 2, len(opFile.Files)) - - data, err := json.Marshal(opp) - assert.NoError(t, err) - - var opp2 *OperationPack - err = json.Unmarshal(data, &opp2) - 
assert.NoError(t, err) - - ensureIds(opp) - ensureAuthors(t, opp, opp2) - - assert.Equal(t, opp, opp2) -} - -func ensureIds(opp *OperationPack) { - for _, op := range opp.Operations { - op.Id() - } -} - -func ensureAuthors(t *testing.T, opp1 *OperationPack, opp2 *OperationPack) { - require.Equal(t, len(opp1.Operations), len(opp2.Operations)) - for i := 0; i < len(opp1.Operations); i++ { - op1 := opp1.Operations[i] - op2 := opp2.Operations[i] - - // ensure we have equivalent authors (IdentityStub vs Identity) then - // enforce equality - require.Equal(t, op1.base().Author.Id(), op2.base().Author.Id()) - op1.base().Author = op2.base().Author - } -} diff --git a/migration3/before/bug/operation_test.go b/migration3/before/bug/operation_test.go deleted file mode 100644 index cdf120d..0000000 --- a/migration3/before/bug/operation_test.go +++ /dev/null @@ -1,119 +0,0 @@ -package bug - -import ( - "testing" - "time" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/identity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" -) - -func TestValidate(t *testing.T) { - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - unix := time.Now().Unix() - - good := []Operation{ - NewCreateOp(rene, unix, "title", "message", nil), - NewSetTitleOp(rene, unix, "title2", "title1"), - NewAddCommentOp(rene, unix, "message2", nil), - NewSetStatusOp(rene, unix, ClosedStatus), - NewLabelChangeOperation(rene, unix, []Label{"added"}, []Label{"removed"}), - } - - for _, op := range good { - if err := op.Validate(); err != nil { - t.Fatal(err) - } - } - - bad := []Operation{ - // opbase - NewSetStatusOp(identity.NewIdentity("", "rene@descartes.fr"), unix, ClosedStatus), - NewSetStatusOp(identity.NewIdentity("René Descartes\u001b", "rene@descartes.fr"), unix, ClosedStatus), - NewSetStatusOp(identity.NewIdentity("René Descartes", "rene@descartes.fr\u001b"), unix, ClosedStatus), - 
NewSetStatusOp(identity.NewIdentity("René \nDescartes", "rene@descartes.fr"), unix, ClosedStatus), - NewSetStatusOp(identity.NewIdentity("René Descartes", "rene@\ndescartes.fr"), unix, ClosedStatus), - &CreateOperation{OpBase: OpBase{ - Author: rene, - UnixTime: 0, - OperationType: CreateOp, - }, - Title: "title", - Message: "message", - }, - - NewCreateOp(rene, unix, "multi\nline", "message", nil), - NewCreateOp(rene, unix, "title", "message", []repository.Hash{repository.Hash("invalid")}), - NewCreateOp(rene, unix, "title\u001b", "message", nil), - NewCreateOp(rene, unix, "title", "message\u001b", nil), - NewSetTitleOp(rene, unix, "multi\nline", "title1"), - NewSetTitleOp(rene, unix, "title", "multi\nline"), - NewSetTitleOp(rene, unix, "title\u001b", "title2"), - NewSetTitleOp(rene, unix, "title", "title2\u001b"), - NewAddCommentOp(rene, unix, "message\u001b", nil), - NewAddCommentOp(rene, unix, "message", []repository.Hash{repository.Hash("invalid")}), - NewSetStatusOp(rene, unix, 1000), - NewSetStatusOp(rene, unix, 0), - NewLabelChangeOperation(rene, unix, []Label{}, []Label{}), - NewLabelChangeOperation(rene, unix, []Label{"multi\nline"}, []Label{}), - } - - for i, op := range bad { - if err := op.Validate(); err == nil { - t.Fatal("validation should have failed", i, op) - } - } -} - -func TestMetadata(t *testing.T) { - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - op := NewCreateOp(rene, time.Now().Unix(), "title", "message", nil) - - op.SetMetadata("key", "value") - - val, ok := op.GetMetadata("key") - require.True(t, ok) - require.Equal(t, val, "value") -} - -func TestID(t *testing.T) { - repo := repository.CreateGoGitTestRepo(false) - defer repository.CleanupTestRepos(repo) - - repos := []repository.ClockedRepo{ - repository.NewMockRepoForTest(), - repo, - } - - for _, repo := range repos { - rene := identity.NewIdentity("René Descartes", "rene@descartes.fr") - err := rene.Commit(repo) - require.NoError(t, err) - - b, op, err := 
Create(rene, time.Now().Unix(), "title", "message") - require.NoError(t, err) - - id1 := op.Id() - require.NoError(t, id1.Validate()) - - err = b.Commit(repo) - require.NoError(t, err) - - op2 := b.FirstOp() - - id2 := op2.Id() - require.NoError(t, id2.Validate()) - require.Equal(t, id1, id2) - - b2, err := ReadLocal(repo, b.Id()) - require.NoError(t, err) - - op3 := b2.FirstOp() - - id3 := op3.Id() - require.NoError(t, id3.Validate()) - require.Equal(t, id1, id3) - } -} diff --git a/migration3/before/identity/identity_actions_test.go b/migration3/before/identity/identity_actions_test.go deleted file mode 100644 index c17e248..0000000 --- a/migration3/before/identity/identity_actions_test.go +++ /dev/null @@ -1,152 +0,0 @@ -package identity - -import ( - "testing" - - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" -) - -func TestPushPull(t *testing.T) { - repoA, repoB, remote := repository.SetupReposAndRemote() - defer repository.CleanupTestRepos(repoA, repoB, remote) - - identity1 := NewIdentity("name1", "email1") - err := identity1.Commit(repoA) - require.NoError(t, err) - - // A --> remote --> B - _, err = Push(repoA, "origin") - require.NoError(t, err) - - err = Pull(repoB, "origin") - require.NoError(t, err) - - identities := allIdentities(t, ReadAllLocal(repoB)) - - if len(identities) != 1 { - t.Fatal("Unexpected number of bugs") - } - - // B --> remote --> A - identity2 := NewIdentity("name2", "email2") - err = identity2.Commit(repoB) - require.NoError(t, err) - - _, err = Push(repoB, "origin") - require.NoError(t, err) - - err = Pull(repoA, "origin") - require.NoError(t, err) - - identities = allIdentities(t, ReadAllLocal(repoA)) - - if len(identities) != 2 { - t.Fatal("Unexpected number of bugs") - } - - // Update both - - identity1.addVersionForTest(&Version{ - name: "name1b", - email: "email1b", - }) - err = identity1.Commit(repoA) - require.NoError(t, err) - - 
identity2.addVersionForTest(&Version{ - name: "name2b", - email: "email2b", - }) - err = identity2.Commit(repoB) - require.NoError(t, err) - - // A --> remote --> B - - _, err = Push(repoA, "origin") - require.NoError(t, err) - - err = Pull(repoB, "origin") - require.NoError(t, err) - - identities = allIdentities(t, ReadAllLocal(repoB)) - - if len(identities) != 2 { - t.Fatal("Unexpected number of bugs") - } - - // B --> remote --> A - - _, err = Push(repoB, "origin") - require.NoError(t, err) - - err = Pull(repoA, "origin") - require.NoError(t, err) - - identities = allIdentities(t, ReadAllLocal(repoA)) - - if len(identities) != 2 { - t.Fatal("Unexpected number of bugs") - } - - // Concurrent update - - identity1.addVersionForTest(&Version{ - name: "name1c", - email: "email1c", - }) - err = identity1.Commit(repoA) - require.NoError(t, err) - - identity1B, err := ReadLocal(repoB, identity1.Id()) - require.NoError(t, err) - - identity1B.addVersionForTest(&Version{ - name: "name1concurrent", - email: "email1concurrent", - }) - err = identity1B.Commit(repoB) - require.NoError(t, err) - - // A --> remote --> B - - _, err = Push(repoA, "origin") - require.NoError(t, err) - - // Pulling a non-fast-forward update should fail - err = Pull(repoB, "origin") - require.Error(t, err) - - identities = allIdentities(t, ReadAllLocal(repoB)) - - if len(identities) != 2 { - t.Fatal("Unexpected number of bugs") - } - - // B --> remote --> A - - // Pushing a non-fast-forward update should fail - _, err = Push(repoB, "origin") - require.Error(t, err) - - err = Pull(repoA, "origin") - require.NoError(t, err) - - identities = allIdentities(t, ReadAllLocal(repoA)) - - if len(identities) != 2 { - t.Fatal("Unexpected number of bugs") - } -} - -func allIdentities(t testing.TB, identities <-chan StreamedIdentity) []*Identity { - var result []*Identity - for streamed := range identities { - if streamed.Err != nil { - t.Fatal(streamed.Err) - } - result = append(result, streamed.Identity) - } - 
return result -} diff --git a/migration3/before/identity/identity_stub_test.go b/migration3/before/identity/identity_stub_test.go deleted file mode 100644 index b01a718..0000000 --- a/migration3/before/identity/identity_stub_test.go +++ /dev/null @@ -1,26 +0,0 @@ -package identity - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestIdentityStubSerialize(t *testing.T) { - before := &IdentityStub{ - id: "id1234", - } - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after IdentityStub - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - // enforce creating the Id - before.Id() - - assert.Equal(t, before, &after) -} diff --git a/migration3/before/identity/identity_test.go b/migration3/before/identity/identity_test.go deleted file mode 100644 index ff41862..0000000 --- a/migration3/before/identity/identity_test.go +++ /dev/null @@ -1,316 +0,0 @@ -package identity - -import ( - "encoding/json" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/MichaelMure/git-bug-migration/migration3/before/entity" - "github.com/MichaelMure/git-bug-migration/migration3/before/repository" -) - -// Test the commit and load of an Identity with multiple versions -func TestIdentityCommitLoad(t *testing.T) { - mockRepo := repository.NewMockRepoForTest() - - // single version - - identity := &Identity{ - id: entity.UnsetId, - versions: []*Version{ - { - name: "René Descartes", - email: "rene.descartes@example.com", - }, - }, - } - - err := identity.Commit(mockRepo) - - assert.Nil(t, err) - assert.NotEmpty(t, identity.id) - - loaded, err := ReadLocal(mockRepo, identity.id) - assert.Nil(t, err) - commitsAreSet(t, loaded) - assert.Equal(t, identity, loaded) - - // multiple version - - identity = &Identity{ - id: entity.UnsetId, - versions: []*Version{ - { - time: 100, - name: "René Descartes", - email: "rene.descartes@example.com", - keys: 
[]*Key{ - {PubKey: "pubkeyA"}, - }, - }, - { - time: 200, - name: "René Descartes", - email: "rene.descartes@example.com", - keys: []*Key{ - {PubKey: "pubkeyB"}, - }, - }, - { - time: 201, - name: "René Descartes", - email: "rene.descartes@example.com", - keys: []*Key{ - {PubKey: "pubkeyC"}, - }, - }, - }, - } - - err = identity.Commit(mockRepo) - - assert.Nil(t, err) - assert.NotEmpty(t, identity.id) - - loaded, err = ReadLocal(mockRepo, identity.id) - assert.Nil(t, err) - commitsAreSet(t, loaded) - assert.Equal(t, identity, loaded) - - // add more version - - identity.addVersionForTest(&Version{ - time: 201, - name: "René Descartes", - email: "rene.descartes@example.com", - keys: []*Key{ - {PubKey: "pubkeyD"}, - }, - }) - - identity.addVersionForTest(&Version{ - time: 300, - name: "René Descartes", - email: "rene.descartes@example.com", - keys: []*Key{ - {PubKey: "pubkeyE"}, - }, - }) - - err = identity.Commit(mockRepo) - - assert.Nil(t, err) - assert.NotEmpty(t, identity.id) - - loaded, err = ReadLocal(mockRepo, identity.id) - assert.Nil(t, err) - commitsAreSet(t, loaded) - assert.Equal(t, identity, loaded) -} - -func TestIdentityMutate(t *testing.T) { - identity := NewIdentity("René Descartes", "rene.descartes@example.com") - - assert.Len(t, identity.versions, 1) - - identity.Mutate(func(orig Mutator) Mutator { - orig.Email = "rene@descartes.fr" - orig.Name = "René" - orig.Login = "rene" - return orig - }) - - assert.Len(t, identity.versions, 2) - assert.Equal(t, identity.Email(), "rene@descartes.fr") - assert.Equal(t, identity.Name(), "René") - assert.Equal(t, identity.Login(), "rene") -} - -func commitsAreSet(t *testing.T, identity *Identity) { - for _, version := range identity.versions { - assert.NotEmpty(t, version.commitHash) - } -} - -// Test that the correct crypto keys are returned for a given lamport time -func TestIdentity_ValidKeysAtTime(t *testing.T) { - identity := Identity{ - id: entity.UnsetId, - versions: []*Version{ - { - time: 100, - name: 
"René Descartes", - email: "rene.descartes@example.com", - keys: []*Key{ - {PubKey: "pubkeyA"}, - }, - }, - { - time: 200, - name: "René Descartes", - email: "rene.descartes@example.com", - keys: []*Key{ - {PubKey: "pubkeyB"}, - }, - }, - { - time: 201, - name: "René Descartes", - email: "rene.descartes@example.com", - keys: []*Key{ - {PubKey: "pubkeyC"}, - }, - }, - { - time: 201, - name: "René Descartes", - email: "rene.descartes@example.com", - keys: []*Key{ - {PubKey: "pubkeyD"}, - }, - }, - { - time: 300, - name: "René Descartes", - email: "rene.descartes@example.com", - keys: []*Key{ - {PubKey: "pubkeyE"}, - }, - }, - }, - } - - assert.Nil(t, identity.ValidKeysAtTime(10)) - assert.Equal(t, identity.ValidKeysAtTime(100), []*Key{{PubKey: "pubkeyA"}}) - assert.Equal(t, identity.ValidKeysAtTime(140), []*Key{{PubKey: "pubkeyA"}}) - assert.Equal(t, identity.ValidKeysAtTime(200), []*Key{{PubKey: "pubkeyB"}}) - assert.Equal(t, identity.ValidKeysAtTime(201), []*Key{{PubKey: "pubkeyD"}}) - assert.Equal(t, identity.ValidKeysAtTime(202), []*Key{{PubKey: "pubkeyD"}}) - assert.Equal(t, identity.ValidKeysAtTime(300), []*Key{{PubKey: "pubkeyE"}}) - assert.Equal(t, identity.ValidKeysAtTime(3000), []*Key{{PubKey: "pubkeyE"}}) -} - -// Test the immutable or mutable metadata search -func TestMetadata(t *testing.T) { - mockRepo := repository.NewMockRepoForTest() - - identity := NewIdentity("René Descartes", "rene.descartes@example.com") - - identity.SetMetadata("key1", "value1") - assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") - assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value1") - - err := identity.Commit(mockRepo) - assert.NoError(t, err) - - assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") - assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value1") - - // try override - identity.addVersionForTest(&Version{ - name: "René Descartes", - email: "rene.descartes@example.com", - }) - - identity.SetMetadata("key1", 
"value2") - assertHasKeyValue(t, identity.ImmutableMetadata(), "key1", "value1") - assertHasKeyValue(t, identity.MutableMetadata(), "key1", "value2") - - err = identity.Commit(mockRepo) - assert.NoError(t, err) - - // reload - loaded, err := ReadLocal(mockRepo, identity.id) - assert.Nil(t, err) - - assertHasKeyValue(t, loaded.ImmutableMetadata(), "key1", "value1") - assertHasKeyValue(t, loaded.MutableMetadata(), "key1", "value2") -} - -func assertHasKeyValue(t *testing.T, metadata map[string]string, key, value string) { - val, ok := metadata[key] - assert.True(t, ok) - assert.Equal(t, val, value) -} - -func TestJSON(t *testing.T) { - mockRepo := repository.NewMockRepoForTest() - - identity := &Identity{ - id: entity.UnsetId, - versions: []*Version{ - { - name: "René Descartes", - email: "rene.descartes@example.com", - }, - }, - } - - // commit to make sure we have an Id - err := identity.Commit(mockRepo) - assert.Nil(t, err) - assert.NotEmpty(t, identity.id) - - // serialize - data, err := json.Marshal(identity) - assert.NoError(t, err) - - // deserialize, got a IdentityStub with the same id - var i Interface - i, err = UnmarshalJSON(data) - assert.NoError(t, err) - assert.Equal(t, identity.id, i.Id()) - - // make sure we can load the identity properly - i, err = ReadLocal(mockRepo, i.Id()) - assert.NoError(t, err) -} - -func TestIdentityRemove(t *testing.T) { - repo := repository.CreateGoGitTestRepo(false) - remoteA := repository.CreateGoGitTestRepo(true) - remoteB := repository.CreateGoGitTestRepo(true) - defer repository.CleanupTestRepos(repo, remoteA, remoteB) - - print(filepath.Join("file://", remoteA.GetPath())) - err := repo.AddRemote("remoteA", filepath.Join("file://", remoteA.GetPath())) - require.NoError(t, err) - - err = repo.AddRemote("remoteB", "file://"+remoteB.GetPath()) - require.NoError(t, err) - - // generate an identity for testing - rene := NewIdentity("René Descartes", "rene@descartes.fr") - err = rene.Commit(repo) - require.NoError(t, err) - - 
_, err = Push(repo, "remoteA") - require.NoError(t, err) - - _, err = Push(repo, "remoteB") - require.NoError(t, err) - - _, err = Fetch(repo, "remoteA") - require.NoError(t, err) - - _, err = Fetch(repo, "remoteB") - require.NoError(t, err) - - err = RemoveIdentity(repo, rene.Id()) - require.NoError(t, err) - - _, err = ReadLocal(repo, rene.Id()) - require.Error(t, ErrIdentityNotExist, err) - - _, err = ReadRemote(repo, "remoteA", string(rene.Id())) - require.Error(t, ErrIdentityNotExist, err) - - _, err = ReadRemote(repo, "remoteB", string(rene.Id())) - require.Error(t, ErrIdentityNotExist, err) - - ids := ReadAllLocal(repo) - require.Len(t, ids, 0) -} diff --git a/migration3/before/identity/version_test.go b/migration3/before/identity/version_test.go deleted file mode 100644 index 25848eb..0000000 --- a/migration3/before/identity/version_test.go +++ /dev/null @@ -1,41 +0,0 @@ -package identity - -import ( - "encoding/json" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestVersionSerialize(t *testing.T) { - before := &Version{ - name: "name", - email: "email", - avatarURL: "avatarUrl", - keys: []*Key{ - { - Fingerprint: "fingerprint1", - PubKey: "pubkey1", - }, - { - Fingerprint: "fingerprint2", - PubKey: "pubkey2", - }, - }, - nonce: makeNonce(20), - metadata: map[string]string{ - "key1": "value1", - "key2": "value2", - }, - time: 3, - } - - data, err := json.Marshal(before) - assert.NoError(t, err) - - var after Version - err = json.Unmarshal(data, &after) - assert.NoError(t, err) - - assert.Equal(t, before, &after) -} diff --git a/migration3/before/repository/config_mem_test.go b/migration3/before/repository/config_mem_test.go deleted file mode 100644 index d9c3385..0000000 --- a/migration3/before/repository/config_mem_test.go +++ /dev/null @@ -1,7 +0,0 @@ -package repository - -import "testing" - -func TestNewMemConfig(t *testing.T) { - testConfig(t, NewMemConfig()) -} diff --git a/migration3/before/repository/config_test.go 
b/migration3/before/repository/config_test.go deleted file mode 100644 index 2a76354..0000000 --- a/migration3/before/repository/config_test.go +++ /dev/null @@ -1,54 +0,0 @@ -package repository - -import ( - "testing" - "time" - - "github.com/stretchr/testify/require" -) - -func TestMergedConfig(t *testing.T) { - local := NewMemConfig() - global := NewMemConfig() - merged := mergeConfig(local, global) - - require.NoError(t, global.StoreBool("bool", true)) - require.NoError(t, global.StoreString("string", "foo")) - require.NoError(t, global.StoreTimestamp("timestamp", time.Unix(1234, 0))) - - val1, err := merged.ReadBool("bool") - require.NoError(t, err) - require.Equal(t, val1, true) - - val2, err := merged.ReadString("string") - require.NoError(t, err) - require.Equal(t, val2, "foo") - - val3, err := merged.ReadTimestamp("timestamp") - require.NoError(t, err) - require.Equal(t, val3, time.Unix(1234, 0)) - - require.NoError(t, local.StoreBool("bool", false)) - require.NoError(t, local.StoreString("string", "bar")) - require.NoError(t, local.StoreTimestamp("timestamp", time.Unix(5678, 0))) - - val1, err = merged.ReadBool("bool") - require.NoError(t, err) - require.Equal(t, val1, false) - - val2, err = merged.ReadString("string") - require.NoError(t, err) - require.Equal(t, val2, "bar") - - val3, err = merged.ReadTimestamp("timestamp") - require.NoError(t, err) - require.Equal(t, val3, time.Unix(5678, 0)) - - all, err := merged.ReadAll("") - require.NoError(t, err) - require.Equal(t, all, map[string]string{ - "bool": "false", - "string": "bar", - "timestamp": "5678", - }) -} diff --git a/migration3/before/repository/git_test.go b/migration3/before/repository/git_test.go deleted file mode 100644 index 1b36fd4..0000000 --- a/migration3/before/repository/git_test.go +++ /dev/null @@ -1,10 +0,0 @@ -// Package repository contains helper methods for working with the Git repo. 
-package repository - -import ( - "testing" -) - -func TestGitRepo(t *testing.T) { - RepoTest(t, CreateTestRepo, CleanupTestRepos) -} diff --git a/migration3/before/repository/gogit_test.go b/migration3/before/repository/gogit_test.go deleted file mode 100644 index fba990d..0000000 --- a/migration3/before/repository/gogit_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package repository - -import ( - "io/ioutil" - "os" - "path" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestNewGoGitRepo(t *testing.T) { - // Plain - plainRoot, err := ioutil.TempDir("", "") - require.NoError(t, err) - defer os.RemoveAll(plainRoot) - - _, err = InitGoGitRepo(plainRoot) - require.NoError(t, err) - plainGitDir := path.Join(plainRoot, ".git") - - // Bare - bareRoot, err := ioutil.TempDir("", "") - require.NoError(t, err) - defer os.RemoveAll(bareRoot) - - _, err = InitBareGoGitRepo(bareRoot) - require.NoError(t, err) - bareGitDir := bareRoot - - tests := []struct { - inPath string - outPath string - err bool - }{ - // errors - {"/", "", true}, - // parent dir of a repo - {filepath.Dir(plainRoot), "", true}, - - // Plain repo - {plainRoot, plainGitDir, false}, - {plainGitDir, plainGitDir, false}, - {path.Join(plainGitDir, "objects"), plainGitDir, false}, - - // Bare repo - {bareRoot, bareGitDir, false}, - {bareGitDir, bareGitDir, false}, - {path.Join(bareGitDir, "objects"), bareGitDir, false}, - } - - for i, tc := range tests { - r, err := NewGoGitRepo(tc.inPath, nil) - - if tc.err { - require.Error(t, err, i) - } else { - require.NoError(t, err, i) - assert.Equal(t, filepath.ToSlash(tc.outPath), filepath.ToSlash(r.GetPath()), i) - } - } -} - -func TestGoGitRepo(t *testing.T) { - RepoTest(t, CreateGoGitTestRepo, CleanupTestRepos) -} diff --git a/migration3/before/repository/mock_repo_test.go b/migration3/before/repository/mock_repo_test.go deleted file mode 100644 index b56b94f..0000000 --- 
a/migration3/before/repository/mock_repo_test.go +++ /dev/null @@ -1,10 +0,0 @@ -package repository - -import "testing" - -func TestMockRepo(t *testing.T) { - creator := func(bare bool) TestedRepo { return NewMockRepoForTest() } - cleaner := func(repos ...Repo) {} - - RepoTest(t, creator, cleaner) -} diff --git a/migration3/before/repository/tree_entry_test.go b/migration3/before/repository/tree_entry_test.go deleted file mode 100644 index d57433f..0000000 --- a/migration3/before/repository/tree_entry_test.go +++ /dev/null @@ -1,31 +0,0 @@ -package repository - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestTreeEntryFormat(t *testing.T) { - entries := []TreeEntry{ - {Blob, Hash("a85730cf5287d40a1e32d3a671ba2296c73387cb"), "name"}, - {Tree, Hash("a85730cf5287d40a1e32d3a671ba2296c73387cb"), "name"}, - } - - for _, entry := range entries { - _ = entry.Format() - } -} - -func TestTreeEntryParse(t *testing.T) { - lines := []string{ - "100644 blob 1e5ffaffc67049635ba7b01f77143313503f1ca1 .gitignore", - "040000 tree 728421fea4168b874bc1a8aa409d6723ef445a4e bug", - } - - for _, line := range lines { - _, err := ParseTreeEntry(line) - assert.NoError(t, err) - } - -} diff --git a/migration3/before/util/lamport/mem_clock_test.go b/migration3/before/util/lamport/mem_clock_test.go deleted file mode 100644 index e01d2ec..0000000 --- a/migration3/before/util/lamport/mem_clock_test.go +++ /dev/null @@ -1,8 +0,0 @@ -package lamport - -import "testing" - -func TestMemClock(t *testing.T) { - c := NewMemClock() - testClock(t, c) -} diff --git a/migration3/before/util/lamport/persisted_clock_test.go b/migration3/before/util/lamport/persisted_clock_test.go deleted file mode 100644 index aacec3b..0000000 --- a/migration3/before/util/lamport/persisted_clock_test.go +++ /dev/null @@ -1,19 +0,0 @@ -package lamport - -import ( - "io/ioutil" - "path" - "testing" - - "github.com/stretchr/testify/require" -) - -func TestPersistedClock(t *testing.T) { - dir, err := 
ioutil.TempDir("", "") - require.NoError(t, err) - - c, err := NewPersistedClock(path.Join(dir, "test-clock")) - require.NoError(t, err) - - testClock(t, c) -} From 42f443cf45968b523d6e52fee4f15fe4edd7bdca Mon Sep 17 00:00:00 2001 From: vince Date: Thu, 26 Nov 2020 20:44:27 +0800 Subject: [PATCH 4/9] update READMEs --- migration1/README.md | 1 + migration3/README.md | 10 +++++++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/migration1/README.md b/migration1/README.md index 99ede25..3ecec52 100644 --- a/migration1/README.md +++ b/migration1/README.md @@ -4,5 +4,6 @@ git-bug v0.7.0 Changes: - Added functionality to delete bugs - Added functionality to set the author of a bug +- Exposed formatVersion of OperationPack Usage: This version is used to migrate the legacyAuthor from older versions to the newer versions of git-bug. \ No newline at end of file diff --git a/migration3/README.md b/migration3/README.md index 1bc6c03..dfe7274 100644 --- a/migration3/README.md +++ b/migration3/README.md @@ -1,3 +1,7 @@ -Usage: -- Recreate all the bugs -- Recreate all the identities \ No newline at end of file +### Vendor Information +git-bug 11f3991e2be17a5e0740c429f48961bfe468c23a + +Changes: +- Exposed formatVersion of OperationPack + +Usage: This version is used to update the bug and identity ids to meet the new standards \ No newline at end of file From 60e5d2b2725ef9274f5981ea9c12d5233222521e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20Mur=C3=A9?= Date: Thu, 26 Nov 2020 14:02:28 +0100 Subject: [PATCH 5/9] migration3: minor cleanups --- migration3/migration3.go | 11 +++++------ migration3/migration3_test.go | 4 ++-- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/migration3/migration3.go b/migration3/migration3.go index 969b202..3619d92 100644 --- a/migration3/migration3.go +++ b/migration3/migration3.go @@ -17,16 +17,16 @@ import ( type Migration3 struct{} func (m *Migration3) Description() string { - return "Migrate bridge credentials 
from the global git config to a keyring" + return "Make bug and identities independent from the storage by making the ID generation self-contained" } func (m *Migration3) Run(repoPath string) error { - oldRepo, err := beforerepo.NewGitRepo(repoPath, nil) + oldRepo, err := beforerepo.NewGoGitRepo(repoPath, nil) if err != nil { return err } - newRepo, err := afterrepo.NewGitRepo(repoPath, nil) + newRepo, err := afterrepo.NewGoGitRepo(repoPath, nil) if err != nil { return err } @@ -42,7 +42,7 @@ func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.C for streamedIdentity := range identities { oldIdentity := streamedIdentity.Identity - fmt.Printf("identity %s:\n", oldIdentity.Id().Human()) + fmt.Printf("identity %s: ", oldIdentity.Id().Human()) newIdentity, err := afteridentity.NewIdentityFull( newRepo, oldIdentity.Name(), @@ -64,12 +64,11 @@ func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.C for streamedBug := range bugs { oldBug := streamedBug.Bug - fmt.Printf("bug %s:\n", oldBug.Id().Human()) + fmt.Printf("bug %s: ", oldBug.Id().Human()) newBug, err := migrateBug(oldBug, migratedIdentities) if err != nil { return err } - fmt.Println(newBug) if err := newBug.Commit(newRepo); err != nil { return err } diff --git a/migration3/migration3_test.go b/migration3/migration3_test.go index 84bc0da..77c1b27 100644 --- a/migration3/migration3_test.go +++ b/migration3/migration3_test.go @@ -36,9 +36,9 @@ func TestMigrate23(t *testing.T) { err = os.Chdir(dir) require.Nil(t, err, "got error when opening temporary repository folder") - oldRepo, err := beforerepo.InitGitRepo(dir) + oldRepo, err := beforerepo.InitGoGitRepo(dir) require.Nil(t, err, "got error when initializing old repository") - newRepo, err := afterrepo.InitGitRepo(dir) + newRepo, err := afterrepo.NewGoGitRepo(dir, nil) require.Nil(t, err, "got error when initializing new repository") oldVinc := beforeidentity.NewIdentityFull( From 
ead720aa4763eee3c958c843aa06159668ee330b Mon Sep 17 00:00:00 2001 From: vince Date: Thu, 26 Nov 2020 20:50:09 +0800 Subject: [PATCH 6/9] fix bugs --- migration3/README.md | 2 ++ migration3/before/bug/operation_pack.go | 6 ++++-- migration3/before/identity/version.go | 4 +++- migration3/migration3.go | 20 ++++++++++++++++++++ 4 files changed, 29 insertions(+), 3 deletions(-) diff --git a/migration3/README.md b/migration3/README.md index dfe7274..88d7550 100644 --- a/migration3/README.md +++ b/migration3/README.md @@ -3,5 +3,7 @@ git-bug 11f3991e2be17a5e0740c429f48961bfe468c23a Changes: - Exposed formatVersion of OperationPack +- Removed error wrapping for bug and identity decoding +- Added custom error message for invalid formatVersion Usage: This version is used to update the bug and identity ids to meet the new standards \ No newline at end of file diff --git a/migration3/before/bug/operation_pack.go b/migration3/before/bug/operation_pack.go index 7ec85fd..d5454d6 100644 --- a/migration3/before/bug/operation_pack.go +++ b/migration3/before/bug/operation_pack.go @@ -13,6 +13,8 @@ import ( // 2: no more legacy identities const formatVersion = 2 +var ErrInvalidFormatVersion = fmt.Errorf("invalid format version") + // OperationPack represent an ordered set of operation to apply // to a Bug. These operations are stored in a single Git commit. 
// @@ -50,10 +52,10 @@ func (opp *OperationPack) UnmarshalJSON(data []byte) error { opp.FormatVersion = aux.Version if aux.Version < formatVersion { - return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade") + return ErrInvalidFormatVersion } if aux.Version > formatVersion { - return fmt.Errorf("your version of git-bug is too old for this repository (version %v), please upgrade to the latest version", aux.Version) + return ErrInvalidFormatVersion } for _, raw := range aux.Operations { diff --git a/migration3/before/identity/version.go b/migration3/before/identity/version.go index 550999e..5644eae 100644 --- a/migration3/before/identity/version.go +++ b/migration3/before/identity/version.go @@ -16,6 +16,8 @@ import ( // 1: original format const formatVersion = 1 +var ErrInvalidFormatVersion = fmt.Errorf("invalid format version") + // Version is a complete set of information about an Identity at a point in time. type Version struct { // The lamport time at which this version become effective @@ -103,7 +105,7 @@ func (v *Version) UnmarshalJSON(data []byte) error { } if aux.FormatVersion != formatVersion { - return fmt.Errorf("unknown format version %v", aux.FormatVersion) + return ErrInvalidFormatVersion } v.time = aux.Time diff --git a/migration3/migration3.go b/migration3/migration3.go index 3619d92..fc58948 100644 --- a/migration3/migration3.go +++ b/migration3/migration3.go @@ -1,6 +1,7 @@ package migration3 import ( + "errors" "fmt" afterbug "github.com/MichaelMure/git-bug-migration/migration3/after/bug" @@ -41,6 +42,14 @@ func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.C migratedIdentities := map[beforeentity.Id]*afteridentity.Identity{} for streamedIdentity := range identities { + if streamedIdentity.Err != nil { + if errors.Is(streamedIdentity.Err, beforeidentity.ErrInvalidFormatVersion) { + fmt.Print("skipping bug, already updated\n") + continue + } else { + return 
streamedIdentity.Err + } + } oldIdentity := streamedIdentity.Identity fmt.Printf("identity %s: ", oldIdentity.Id().Human()) newIdentity, err := afteridentity.NewIdentityFull( @@ -63,11 +72,22 @@ func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.C } for streamedBug := range bugs { + if streamedBug.Err != nil { + if errors.Is(streamedBug.Err, beforebug.ErrInvalidFormatVersion) { + fmt.Print("skipping bug, already updated\n") + continue + } else { + return streamedBug.Err + } + } oldBug := streamedBug.Bug fmt.Printf("bug %s: ", oldBug.Id().Human()) newBug, err := migrateBug(oldBug, migratedIdentities) if err != nil { return err + } else if newBug == nil { + fmt.Print("skipping bug, already updated\n") + return nil } if err := newBug.Commit(newRepo); err != nil { return err From e0d2170bfbf535cab0f9402c1dafbeb7da3d668a Mon Sep 17 00:00:00 2001 From: vince Date: Tue, 15 Dec 2020 21:38:00 +0800 Subject: [PATCH 7/9] temp changes --- migration1/after/identity/version.go | 4 +++- migration1/migration1.go | 17 ++++++++++++----- migration3/migration3.go | 18 ++++++------------ 3 files changed, 21 insertions(+), 18 deletions(-) diff --git a/migration1/after/identity/version.go b/migration1/after/identity/version.go index 96d8aff..8cadfe3 100644 --- a/migration1/after/identity/version.go +++ b/migration1/after/identity/version.go @@ -15,6 +15,8 @@ import ( const formatVersion = 1 +var ErrIncorrectIdentityFormatVersion = fmt.Errorf("unknown format version") + // Version is a complete set of information about an Identity at a point in time. 
type Version struct { // The lamport time at which this version become effective @@ -102,7 +104,7 @@ func (v *Version) UnmarshalJSON(data []byte) error { } if aux.FormatVersion != formatVersion { - return fmt.Errorf("unknown format version %v", aux.FormatVersion) + return ErrIncorrectIdentityFormatVersion } v.time = aux.Time diff --git a/migration1/migration1.go b/migration1/migration1.go index 7e9ac59..3c0b413 100644 --- a/migration1/migration1.go +++ b/migration1/migration1.go @@ -3,6 +3,8 @@ package migration1 import ( "fmt" + "github.com/pkg/errors" + afterbug "github.com/MichaelMure/git-bug-migration/migration1/after/bug" afteridentity "github.com/MichaelMure/git-bug-migration/migration1/after/identity" afterrepo "github.com/MichaelMure/git-bug-migration/migration1/after/repository" @@ -28,7 +30,7 @@ func (m *Migration1) Run(repoPath string) error { func (m *Migration1) migrate(repo afterrepo.ClockedRepo) error { err := m.readIdentities(repo) if err != nil { - fmt.Printf("Error while applying migration") + fmt.Printf("Error while applying migration\n") // stop the migration return nil } @@ -37,7 +39,7 @@ func (m *Migration1) migrate(repo afterrepo.ClockedRepo) error { for streamedBug := range afterbug.ReadAllLocal(repo) { if streamedBug.Err != nil { if streamedBug.Err != afterbug.ErrInvalidFormatVersion { - fmt.Printf("Got error when reading bug: %q\n", streamedBug.Err) + fmt.Printf("got error when reading bug, assuming data is already migrated: %q\n", streamedBug.Err) } else { fmt.Printf("skipping bug, already updated\n") } @@ -77,9 +79,14 @@ func (m *Migration1) migrate(repo afterrepo.ClockedRepo) error { func (m *Migration1) readIdentities(repo afterrepo.ClockedRepo) error { for streamedIdentity := range afteridentity.ReadAllLocal(repo) { - if streamedIdentity.Err != nil { - fmt.Printf("Got error when reading identity: %q", streamedIdentity.Err) - return streamedIdentity.Err + if err := streamedIdentity.Err; err != nil { + if errors.Is(err, 
afteridentity.ErrIncorrectIdentityFormatVersion) { + fmt.Print("skipping identity, already updated\n") + continue + } else { + fmt.Printf("Got error when reading identity: %q", streamedIdentity.Err) + return streamedIdentity.Err + } } m.allIdentities = append(m.allIdentities, streamedIdentity.Identity) } diff --git a/migration3/migration3.go b/migration3/migration3.go index fc58948..14e5125 100644 --- a/migration3/migration3.go +++ b/migration3/migration3.go @@ -47,7 +47,7 @@ func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.C fmt.Print("skipping bug, already updated\n") continue } else { - return streamedIdentity.Err + fmt.Printf("got error, assuming identity already migrated: %q", streamedIdentity.Err) } } oldIdentity := streamedIdentity.Identity @@ -73,21 +73,19 @@ func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.C for streamedBug := range bugs { if streamedBug.Err != nil { - if errors.Is(streamedBug.Err, beforebug.ErrInvalidFormatVersion) { - fmt.Print("skipping bug, already updated\n") - continue + if streamedBug.Err != beforebug.ErrInvalidFormatVersion { + fmt.Printf("got error when reading bug, assuming data is already migrated: %q\n", streamedBug.Err) } else { - return streamedBug.Err + fmt.Printf("skipping bug, already updated\n") } + continue } + oldBug := streamedBug.Bug fmt.Printf("bug %s: ", oldBug.Id().Human()) newBug, err := migrateBug(oldBug, migratedIdentities) if err != nil { return err - } else if newBug == nil { - fmt.Print("skipping bug, already updated\n") - return nil } if err := newBug.Commit(newRepo); err != nil { return err @@ -108,10 +106,6 @@ func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.C } func migrateBug(oldBug *beforebug.Bug, migratedIdentities map[beforeentity.Id]*afteridentity.Identity) (*afterbug.Bug, error) { - if oldBug.Packs[0].FormatVersion != 2 { - return nil, nil - } - // Making a new bug newBug := afterbug.NewBug() From 
4c67dac86dff72c584636b4af177f5ecef2f4f88 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20Mur=C3=A9?= Date: Sun, 4 Apr 2021 14:02:16 +0200 Subject: [PATCH 8/9] migration3: adapt for the dag-entity migration --- go.mod | 4 + go.sum | 85 +++ migration3/README.md | 2 - migration3/after/bug/bug.go | 634 +++--------------- migration3/after/bug/bug_actions.go | 116 +--- migration3/after/bug/clocks.go | 40 -- migration3/after/bug/err.go | 17 + migration3/after/bug/git_tree.go | 84 --- migration3/after/bug/identity.go | 27 - migration3/after/bug/interface.go | 8 +- migration3/after/bug/op_add_comment.go | 24 +- migration3/after/bug/op_create.go | 63 +- migration3/after/bug/op_edit_comment.go | 17 +- migration3/after/bug/op_label_change.go | 17 +- migration3/after/bug/op_noop.go | 13 +- migration3/after/bug/op_set_metadata.go | 26 +- migration3/after/bug/op_set_status.go | 17 +- migration3/after/bug/op_set_title.go | 29 +- migration3/after/bug/operation.go | 194 ++++-- migration3/after/bug/operation_iterator.go | 72 -- migration3/after/bug/operation_pack.go | 186 ----- migration3/after/bug/snapshot.go | 5 + migration3/after/bug/sorting.go | 8 +- migration3/after/bug/with_snapshot.go | 8 +- migration3/after/entity/dag/clock.go | 38 ++ migration3/after/entity/dag/entity.go | 439 ++++++++++++ migration3/after/entity/dag/entity_actions.go | 260 +++++++ migration3/after/entity/dag/operation.go | 48 ++ migration3/after/entity/dag/operation_pack.go | 358 ++++++++++ migration3/after/entity/doc.go | 8 - migration3/after/entity/err.go | 29 + migration3/after/entity/id.go | 6 +- migration3/after/entity/id_interleaved.go | 68 ++ migration3/after/entity/merge.go | 53 +- migration3/after/entity/refs.go | 20 + migration3/after/identity/identity.go | 86 ++- migration3/after/identity/identity_actions.go | 17 +- migration3/after/identity/identity_stub.go | 5 + migration3/after/identity/interface.go | 4 + migration3/after/identity/key.go | 218 +++++- migration3/after/identity/resolver.go | 
35 + migration3/after/identity/version.go | 9 +- migration3/after/repository/common.go | 67 ++ migration3/after/repository/config_mem.go | 19 +- migration3/after/repository/config_testing.go | 39 ++ migration3/after/repository/git.go | 461 ------------- migration3/after/repository/git_cli.go | 56 -- migration3/after/repository/git_config.go | 221 ------ migration3/after/repository/git_testing.go | 74 -- migration3/after/repository/gogit.go | 371 +++++++--- migration3/after/repository/gogit_config.go | 8 +- migration3/after/repository/gogit_testing.go | 14 +- migration3/after/repository/keyring.go | 16 +- migration3/after/repository/mock_repo.go | 311 ++++++--- migration3/after/repository/repo.go | 85 ++- migration3/after/repository/repo_testing.go | 99 ++- migration3/after/repository/tree_entry.go | 10 + .../after/util/lamport/clock_testing.go | 4 +- migration3/after/util/lamport/mem_clock.go | 12 +- .../after/util/lamport/persisted_clock.go | 29 +- migration3/migration3.go | 23 +- migration3/migration3_test.go | 11 +- 62 files changed, 2921 insertions(+), 2406 deletions(-) delete mode 100644 migration3/after/bug/clocks.go create mode 100644 migration3/after/bug/err.go delete mode 100644 migration3/after/bug/git_tree.go delete mode 100644 migration3/after/bug/identity.go delete mode 100644 migration3/after/bug/operation_iterator.go delete mode 100644 migration3/after/bug/operation_pack.go create mode 100644 migration3/after/entity/dag/clock.go create mode 100644 migration3/after/entity/dag/entity.go create mode 100644 migration3/after/entity/dag/entity_actions.go create mode 100644 migration3/after/entity/dag/operation.go create mode 100644 migration3/after/entity/dag/operation_pack.go delete mode 100644 migration3/after/entity/doc.go create mode 100644 migration3/after/entity/id_interleaved.go create mode 100644 migration3/after/entity/refs.go create mode 100644 migration3/after/repository/common.go delete mode 100644 migration3/after/repository/git.go delete mode 
100644 migration3/after/repository/git_cli.go delete mode 100644 migration3/after/repository/git_config.go delete mode 100644 migration3/after/repository/git_testing.go diff --git a/go.mod b/go.mod index 5342135..f4225e4 100644 --- a/go.mod +++ b/go.mod @@ -5,12 +5,16 @@ go 1.14 require ( github.com/99designs/keyring v1.1.5 github.com/blang/semver v3.5.1+incompatible + github.com/blevesearch/bleve v1.0.14 github.com/dustin/go-humanize v1.0.0 github.com/fatih/color v1.9.0 + github.com/go-git/go-billy/v5 v5.0.0 github.com/go-git/go-git/v5 v5.2.0 github.com/mattn/go-isatty v0.0.12 // indirect github.com/pkg/errors v0.9.1 github.com/spf13/cobra v1.0.0 github.com/stretchr/testify v1.6.1 + golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073 + golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57 golang.org/x/text v0.3.3 ) diff --git a/go.sum b/go.sum index 67dbd66..e997212 100644 --- a/go.sum +++ b/go.sum @@ -5,6 +5,8 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03 github.com/MichaelMure/go-git/v5 v5.1.1-0.20200827115354-b40ca794fe33 h1:QFzkZPUMm0HRZ0dZ+GgDKHPUrgUrH3CbcyuzQlhBeww= github.com/MichaelMure/go-git/v5 v5.1.1-0.20200827115354-b40ca794fe33/go.mod h1:kh02eMX+wdqqxgNMEyq8YgwlIOsDOa9homkUq1PoTMs= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/RoaringBitmap/roaring v0.4.23 h1:gpyfd12QohbqhFO4NVDUdoPOCXsyahYRQhINmlHxKeo= +github.com/RoaringBitmap/roaring v0.4.23/go.mod h1:D0gp8kJQgE1A4LQ5wFLggQEyvDi06Mq5mKs52e1TwOo= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs= github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -18,15 +20,46 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24 github.com/beorn7/perks 
v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/blevesearch/bleve v1.0.14 h1:Q8r+fHTt35jtGXJUM0ULwM3Tzg+MRfyai4ZkWDy2xO4= +github.com/blevesearch/bleve v1.0.14/go.mod h1:e/LJTr+E7EaoVdkQZTfoz7dt4KoDNvDbLb8MSKuNTLQ= +github.com/blevesearch/blevex v1.0.0/go.mod h1:2rNVqoG2BZI8t1/P1awgTKnGlx5MP9ZbtEciQaNhswc= +github.com/blevesearch/cld2 v0.0.0-20200327141045-8b5f551d37f5/go.mod h1:PN0QNTLs9+j1bKy3d/GB/59wsNBFC4sWLWG3k69lWbc= +github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo= +github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M= +github.com/blevesearch/mmap-go v1.0.2 h1:JtMHb+FgQCTTYIhtMvimw15dJwu1Y5lrZDMOFXVWPk0= +github.com/blevesearch/mmap-go v1.0.2/go.mod h1:ol2qBqYaOUsGdm7aRMRrYGgPvnwLe6Y+7LMvAB5IbSA= +github.com/blevesearch/segment v0.9.0 h1:5lG7yBCx98or7gK2cHMKPukPZ/31Kag7nONpoBt22Ac= +github.com/blevesearch/segment v0.9.0/go.mod h1:9PfHYUdQCgHktBgvtUOF4x+pc4/l8rdH0u5spnW85UQ= +github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s= +github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs= +github.com/blevesearch/zap/v11 v11.0.14 h1:IrDAvtlzDylh6H2QCmS0OGcN9Hpf6mISJlfKjcwJs7k= +github.com/blevesearch/zap/v11 v11.0.14/go.mod h1:MUEZh6VHGXv1PKx3WnCbdP404LGG2IZVa/L66pyFwnY= +github.com/blevesearch/zap/v12 v12.0.14 h1:2o9iRtl1xaRjsJ1xcqTyLX414qPAwykHNV7wNVmbp3w= +github.com/blevesearch/zap/v12 v12.0.14/go.mod h1:rOnuZOiMKPQj18AEKEHJxuI14236tTQ1ZJz4PAnWlUg= +github.com/blevesearch/zap/v13 v13.0.6 h1:r+VNSVImi9cBhTNNR+Kfl5uiGy8kIbb0JMz/h8r6+O4= +github.com/blevesearch/zap/v13 v13.0.6/go.mod h1:L89gsjdRKGyGrRN6nCpIScCvvkyxvmeDCwZRcjjPCrw= +github.com/blevesearch/zap/v14 v14.0.5 
h1:NdcT+81Nvmp2zL+NhwSvGSLh7xNgGL8QRVZ67njR0NU= +github.com/blevesearch/zap/v14 v14.0.5/go.mod h1:bWe8S7tRrSBTIaZ6cLRbgNH4TUDaC9LZSpRGs85AsGY= +github.com/blevesearch/zap/v15 v15.0.3 h1:Ylj8Oe+mo0P25tr9iLPp33lN6d4qcztGjaIsP51UxaY= +github.com/blevesearch/zap/v15 v15.0.3/go.mod h1:iuwQrImsh1WjWJ0Ue2kBqY83a0rFtJTqfa9fp1rbVVU= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/couchbase/ghistogram v0.1.0/go.mod h1:s1Jhy76zqfEecpNWJfWUiKZookAFaiGOEoyzgHt9i7k= +github.com/couchbase/moss v0.1.0/go.mod h1:9MaHIaRuy9pvLPUJxB8sh8OrLfyDczECVL37grCIubs= +github.com/couchbase/vellum v1.0.2 h1:BrbP0NKiyDdndMPec8Jjhy0U47CZ0Lgx3xUC2r9rZqw= +github.com/couchbase/vellum v1.0.2/go.mod h1:FcwrEivFpNi24R3jLOs3n+fs5RnuQnQqCLBJ1uAg1W4= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cznic/b v0.0.0-20181122101859-a26611c4d92d/go.mod h1:URriBxXwVq5ijiJ12C7iIZqlA69nTlI+LgI6/pwftG8= +github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548/go.mod h1:e6NPNENfs9mPDVNRekM7lKScauxd5kXTr1Mfyig6TDM= +github.com/cznic/strutil 
v0.0.0-20181122101858-275e90344537/go.mod h1:AHHPPPXTw0h6pVabbcbyGRK1DckRn7r/STdZEeIDzZc= github.com/danieljoos/wincred v1.0.2 h1:zf4bhty2iLuwgjgpraD2E9UbvO+fe54XXGJbOwe23fU= github.com/danieljoos/wincred v1.0.2/go.mod h1:SnuYRW9lp1oJrZX/dXJqr0cPK5gYXqx3EJbmjhLdK9U= github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= @@ -41,6 +74,9 @@ github.com/dvsekhvalnov/jose2go v0.0.0-20180829124132-7f401d37b68a h1:mq+R6XEM6l github.com/dvsekhvalnov/jose2go v0.0.0-20180829124132-7f401d37b68a/go.mod h1:7BvyPhdbLxMXIYTFPLsyJRFMsKmOZnQmzh6Gb+uquuM= github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg= github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= +github.com/facebookgo/ensure v0.0.0-20200202191622-63f1cf65ac4c/go.mod h1:Yg+htXGokKKdzcwhuNDwVvN+uBxDGXJ7G/VN1d8fa64= +github.com/facebookgo/stack v0.0.0-20160209184415-751773369052/go.mod h1:UbMTZqLaRiH3MsBH8va0n7s1pQYcu3uTb8G4tygF4Zg= +github.com/facebookgo/subset v0.0.0-20200203212716-c811ad88dec4/go.mod h1:5tD+neXqOorC30/tWg0LCSkrqj/AR6gu8yY8/fpw1q0= github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s= github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ= @@ -49,6 +85,9 @@ github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMo github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gliderlabs/ssh v0.2.2 h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= +github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2 h1:Ujru1hufTHVb++eG6OuNDKMxZnGIvF6o/u8q/8h2+I4= +github.com/glycerine/go-unsnap-stream v0.0.0-20181221182339-f9677308dec2/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE= 
+github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= github.com/go-git/go-billy/v5 v5.0.0 h1:7NQHvd9FVid8VL4qVUMm8XifBK+2xCoZ2lSk0agRrHM= @@ -71,10 +110,16 @@ github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfb github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1 h1:YF8+flBXS5eO826T4nzqPrxfhQThhXl0YzfuUPu4SBg= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4= +github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/gopherjs/gopherjs v0.0.0-20190910122728-9d188e94fb99/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= @@ -82,6 +127,8 @@ 
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8ZofjG1Y75iExal34USq5p+wiN1tpie8IrU= github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/ikawaha/kagome.ipadic v1.1.2/go.mod h1:DPSBbU0czaJhAb/5uKQZHMc9MTVRpDugJfX+HddPHHg= github.com/imdario/mergo v0.3.9 h1:UauaLniWCFHWd+Jp9oCEkTBj8VO/9DKg3PV3VCNMDIg= github.com/imdario/mergo v0.3.9/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= @@ -89,7 +136,9 @@ github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANyt github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jmhodges/levigo v1.0.0/go.mod h1:Q6Qx+uH3RAqyK4rFQroq9RL7mdkABMcfhEI+nNuzMJQ= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY= github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= @@ -97,6 +146,7 @@ github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d h1:Z+RDyXzjKE0 
github.com/keybase/go-keychain v0.0.0-20190712205309-48d3d31d256d/go.mod h1:JJNrCn9otv/2QP4D7SMJBgaleKpOf66PnW6F5WGNRIc= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kljensen/snowball v0.6.0/go.mod h1:27N7E8fVU5H68RlUmnWwZCfxgt4POBJfENGMvNRhldw= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= @@ -118,13 +168,20 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5 github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg= +github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw= github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs= github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo 
v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/philhofer/fwd v1.0.0 h1:UbZqGr5Y38ApvM/V/jEljVxwocdweyH+vmYvRPBnbqQ= +github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -140,7 +197,10 @@ github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8 github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= +github.com/rcrowley/go-metrics v0.0.0-20190826022208-cac0b30c2563/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= +github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= @@ -150,12 +210,16 @@ github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4k github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod 
h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/cobra v1.0.0 h1:6m/oheQuQ13N9ks4hubMG6BnvwOeaJrqSPLahSnczz8= github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= +github.com/steveyen/gtreap v0.1.0 h1:CjhzTa274PyJLJuMZwIzCO1PfC00oRa8d1Kc78bFXJM= +github.com/steveyen/gtreap v0.1.0/go.mod h1:kl/5J7XbrOmlIbYIXdRHDDE5QxHqpk0cmkT7Z4dM9/Y= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48= @@ -165,17 +229,28 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ= +github.com/tebeka/snowball v0.4.2/go.mod h1:4IfL14h1lvwZcp1sfXuuc7/7yCsvVffTWxWxCLfFpYg= +github.com/tecbot/gorocksdb v0.0.0-20191217155057-f0fad39f321c/go.mod h1:ahpPrc7HpcfEWDQRZEmnXMzHY03mLDYMCxeDzy46i+8= 
+github.com/tinylib/msgp v1.1.0 h1:9fQd+ICuRIu/ue4vxJZu6/LzxN0HwMds2nq/0cFvxHU= +github.com/tinylib/msgp v1.1.0/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/willf/bitset v1.1.10 h1:NotGKqX0KwQ72NUzqrjZq5ipPNDQex9lo3WpaS8L2sc= +github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4= github.com/xanzy/ssh-agent v0.2.1 h1:TCbipTQL2JiiCprBWx9frJ2eJlCYT00NmctrHxVAr70= github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.5 h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0= +go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= 
golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -184,6 +259,7 @@ golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= @@ -200,18 +276,25 @@ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4 h1:YUO/7uOKsKeq9UokNS62b8FY golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181221143128-b4a75ba826a6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190712062909-fae7ac547cb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So= golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57 h1:F5Gozwx4I1xtr/sr/8CFbb57iKi3297KFs0QDbGN60A= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= @@ -234,7 +317,9 @@ gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= gopkg.in/check.v1 
v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= diff --git a/migration3/README.md b/migration3/README.md index 88d7550..d6fd2c2 100644 --- a/migration3/README.md +++ b/migration3/README.md @@ -5,5 +5,3 @@ Changes: - Exposed formatVersion of OperationPack - Removed error wrapping for bug and identity decoding - Added custom error message for invalid formatVersion - -Usage: This version is used to update the bug and identity ids to meet the new standards \ No newline at end of file diff --git a/migration3/after/bug/bug.go b/migration3/after/bug/bug.go index 7b41d8c..8dcb88d 100644 --- a/migration3/after/bug/bug.go +++ b/migration3/after/bug/bug.go @@ -2,230 +2,62 @@ package bug import ( - "encoding/json" "fmt" - "strings" - - "github.com/pkg/errors" "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/entity/dag" "github.com/MichaelMure/git-bug-migration/migration3/after/identity" "github.com/MichaelMure/git-bug-migration/migration3/after/repository" - "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" ) -const bugsRefPattern = "refs/bugs/" -const bugsRemoteRefPattern = "refs/remotes/%s/bugs/" - -const opsEntryName = "ops" -const mediaEntryName = "media" - -const createClockEntryPrefix = "create-clock-" -const createClockEntryPattern = "create-clock-%d" -const editClockEntryPrefix = "edit-clock-" 
-const editClockEntryPattern = "edit-clock-%d" - -const creationClockName = "bug-create" -const editClockName = "bug-edit" - -var ErrBugNotExist = errors.New("bug doesn't exist") +var _ Interface = &Bug{} +var _ entity.Interface = &Bug{} -func NewErrMultipleMatchBug(matching []entity.Id) *entity.ErrMultipleMatch { - return entity.NewErrMultipleMatch("bug", matching) -} +// 1: original format +// 2: no more legacy identities +// 3: Ids are generated from the create operation serialized data instead of from the first git commit +// 4: with DAG entity framework +const formatVersion = 4 -func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch { - return entity.NewErrMultipleMatch("operation", matching) +var def = dag.Definition{ + Typename: "bug", + Namespace: "bugs", + OperationUnmarshaler: operationUnmarshaller, + FormatVersion: formatVersion, } -var _ Interface = &Bug{} -var _ entity.Interface = &Bug{} +var ClockLoader = dag.ClockLoader(def) // Bug hold the data of a bug thread, organized in a way close to // how it will be persisted inside Git. This is the data structure // used to merge two different version of the same Bug. type Bug struct { - - // A Lamport clock is a logical clock that allow to order event - // inside a distributed system. 
- // It must be the first field in this struct due to https://github.com/golang/go/issues/599 - createTime lamport.Time - editTime lamport.Time - - // Id used as unique identifier - id entity.Id - - lastCommit repository.Hash - - // all the committed operations - packs []OperationPack - - // a temporary pack of operations used for convenience to pile up new operations - // before a commit - staging OperationPack + *dag.Entity } // NewBug create a new Bug func NewBug() *Bug { - // No id yet - // No logical clock yet - return &Bug{id: entity.UnsetId} -} - -// ReadLocal will read a local bug from its hash -func ReadLocal(repo repository.ClockedRepo, id entity.Id) (*Bug, error) { - ref := bugsRefPattern + id.String() - return read(repo, identity.NewSimpleResolver(repo), ref) -} - -// ReadLocalWithResolver will read a local bug from its hash -func ReadLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, id entity.Id) (*Bug, error) { - ref := bugsRefPattern + id.String() - return read(repo, identityResolver, ref) -} - -// ReadRemote will read a remote bug from its hash -func ReadRemote(repo repository.ClockedRepo, remote string, id entity.Id) (*Bug, error) { - ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String() - return read(repo, identity.NewSimpleResolver(repo), ref) -} - -// ReadRemoteWithResolver will read a remote bug from its hash -func ReadRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string, id entity.Id) (*Bug, error) { - ref := fmt.Sprintf(bugsRemoteRefPattern, remote) + id.String() - return read(repo, identityResolver, ref) -} - -// read will read and parse a Bug from git -func read(repo repository.ClockedRepo, identityResolver identity.Resolver, ref string) (*Bug, error) { - id := refToId(ref) - - if err := id.Validate(); err != nil { - return nil, errors.Wrap(err, "invalid ref ") - } - - hashes, err := repo.ListCommits(ref) - if err != nil { - return nil, ErrBugNotExist - } 
- if len(hashes) == 0 { - return nil, fmt.Errorf("empty bug") - } - - bug := Bug{ - id: id, - } - - // Load each OperationPack - for _, hash := range hashes { - tree, err := readTree(repo, hash) - if err != nil { - return nil, err - } - - // Due to rebase, edit Lamport time are not necessarily ordered - if tree.editTime > bug.editTime { - bug.editTime = tree.editTime - } - - // Update the clocks - err = repo.Witness(creationClockName, bug.createTime) - if err != nil { - return nil, errors.Wrap(err, "failed to update create lamport clock") - } - err = repo.Witness(editClockName, bug.editTime) - if err != nil { - return nil, errors.Wrap(err, "failed to update edit lamport clock") - } - - data, err := repo.ReadData(tree.opsEntry.Hash) - if err != nil { - return nil, errors.Wrap(err, "failed to read git blob data") - } - - opp := &OperationPack{} - err = json.Unmarshal(data, &opp) - if err != nil { - return nil, errors.Wrap(err, "failed to decode OperationPack json") - } - - // tag the pack with the commit hash - opp.commitHash = hash - bug.lastCommit = hash - - // if it's the first OperationPack read - if len(bug.packs) == 0 { - bug.createTime = tree.createTime - } - - bug.packs = append(bug.packs, *opp) - } - - // Bug Id is the Id of the first operation - if len(bug.packs[0].Operations) == 0 { - return nil, fmt.Errorf("first OperationPack is empty") - } - if bug.id != bug.packs[0].Operations[0].Id() { - return nil, fmt.Errorf("bug ID doesn't match the first operation ID") + return &Bug{ + Entity: dag.New(def), } +} - // Make sure that the identities are properly loaded - err = bug.EnsureIdentities(identityResolver) +// Read will read a bug from a repository +func Read(repo repository.ClockedRepo, id entity.Id) (*Bug, error) { + e, err := dag.Read(def, repo, identity.NewSimpleResolver(repo), id) if err != nil { return nil, err } - - return &bug, nil + return &Bug{Entity: e}, nil } -// RemoveBug will remove a local bug from its entity.Id -func RemoveBug(repo 
repository.ClockedRepo, id entity.Id) error { - var fullMatches []string - - refs, err := repo.ListRefs(bugsRefPattern + id.String()) +// ReadWithResolver will read a bug from its Id, with a custom identity.Resolver +func ReadWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, id entity.Id) (*Bug, error) { + e, err := dag.Read(def, repo, identityResolver, id) if err != nil { - return err - } - if len(refs) > 1 { - return NewErrMultipleMatchBug(refsToIds(refs)) - } - if len(refs) == 1 { - // we have the bug locally - fullMatches = append(fullMatches, refs[0]) - } - - remotes, err := repo.GetRemotes() - if err != nil { - return err - } - - for remote := range remotes { - remotePrefix := fmt.Sprintf(bugsRemoteRefPattern+id.String(), remote) - remoteRefs, err := repo.ListRefs(remotePrefix) - if err != nil { - return err - } - if len(remoteRefs) > 1 { - return NewErrMultipleMatchBug(refsToIds(refs)) - } - if len(remoteRefs) == 1 { - // found the bug in a remote - fullMatches = append(fullMatches, remoteRefs[0]) - } - } - - if len(fullMatches) == 0 { - return ErrBugNotExist - } - - for _, ref := range fullMatches { - err = repo.RemoveRef(ref) - if err != nil { - return err - } + return nil, err } - - return nil + return &Bug{Entity: e}, nil } type StreamedBug struct { @@ -233,50 +65,33 @@ type StreamedBug struct { Err error } -// ReadAllLocal read and parse all local bugs -func ReadAllLocal(repo repository.ClockedRepo) <-chan StreamedBug { - return readAll(repo, identity.NewSimpleResolver(repo), bugsRefPattern) +// ReadAll read and parse all local bugs +func ReadAll(repo repository.ClockedRepo) <-chan StreamedBug { + return readAll(repo, identity.NewSimpleResolver(repo)) } -// ReadAllLocalWithResolver read and parse all local bugs -func ReadAllLocalWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug { - return readAll(repo, identityResolver, bugsRefPattern) -} - -// ReadAllRemote read and parse all 
remote bugs for a given remote -func ReadAllRemote(repo repository.ClockedRepo, remote string) <-chan StreamedBug { - refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote) - return readAll(repo, identity.NewSimpleResolver(repo), refPrefix) -} - -// ReadAllRemoteWithResolver read and parse all remote bugs for a given remote -func ReadAllRemoteWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver, remote string) <-chan StreamedBug { - refPrefix := fmt.Sprintf(bugsRemoteRefPattern, remote) - return readAll(repo, identityResolver, refPrefix) +// ReadAllWithResolver read and parse all local bugs +func ReadAllWithResolver(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug { + return readAll(repo, identityResolver) } // Read and parse all available bug with a given ref prefix -func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver, refPrefix string) <-chan StreamedBug { +func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver) <-chan StreamedBug { out := make(chan StreamedBug) go func() { defer close(out) - refs, err := repo.ListRefs(refPrefix) - if err != nil { - out <- StreamedBug{Err: err} - return - } - - for _, ref := range refs { - b, err := read(repo, identityResolver, ref) - - if err != nil { - out <- StreamedBug{Err: err} - return + for streamedEntity := range dag.ReadAll(def, repo, identityResolver) { + if streamedEntity.Err != nil { + out <- StreamedBug{ + Err: streamedEntity.Err, + } + } else { + out <- StreamedBug{ + Bug: &Bug{Entity: streamedEntity.Entity}, + } } - - out <- StreamedBug{Bug: b} } }() @@ -285,377 +100,78 @@ func readAll(repo repository.ClockedRepo, identityResolver identity.Resolver, re // ListLocalIds list all the available local bug ids func ListLocalIds(repo repository.Repo) ([]entity.Id, error) { - refs, err := repo.ListRefs(bugsRefPattern) - if err != nil { - return nil, err - } - - return refsToIds(refs), nil -} - -func refsToIds(refs 
[]string) []entity.Id { - ids := make([]entity.Id, len(refs)) - - for i, ref := range refs { - ids[i] = refToId(ref) - } - - return ids -} - -func refToId(ref string) entity.Id { - split := strings.Split(ref, "/") - return entity.Id(split[len(split)-1]) + return dag.ListLocalIds(def, repo) } // Validate check if the Bug data is valid func (bug *Bug) Validate() error { - // non-empty - if len(bug.packs) == 0 && bug.staging.IsEmpty() { - return fmt.Errorf("bug has no operations") - } - - // check if each pack and operations are valid - for _, pack := range bug.packs { - if err := pack.Validate(); err != nil { - return err - } - } - - // check if staging is valid if needed - if !bug.staging.IsEmpty() { - if err := bug.staging.Validate(); err != nil { - return errors.Wrap(err, "staging") - } + if err := bug.Entity.Validate(); err != nil { + return err } // The very first Op should be a CreateOp firstOp := bug.FirstOp() - if firstOp == nil || firstOp.base().OperationType != CreateOp { + if firstOp == nil || firstOp.Type() != CreateOp { return fmt.Errorf("first operation should be a Create op") } - // The bug Id should be the id of the first operation - if bug.FirstOp().Id() != bug.id { - fmt.Println("bug", bug.id.String()) - fmt.Println("op", bug.FirstOp().Id().String()) - return fmt.Errorf("bug id should be the first commit hash") - } - // Check that there is no more CreateOp op - // Check that there is no colliding operation's ID - it := NewOperationIterator(bug) - createCount := 0 - ids := make(map[entity.Id]struct{}) - for it.Next() { - if it.Value().base().OperationType == CreateOp { - createCount++ + for i, op := range bug.Operations() { + if i == 0 { + continue } - if _, ok := ids[it.Value().Id()]; ok { - return fmt.Errorf("id collision: %s", it.Value().Id()) + if op.Type() == CreateOp { + return fmt.Errorf("only one Create op allowed") } - ids[it.Value().Id()] = struct{}{} - } - - if createCount != 1 { - return fmt.Errorf("only one Create op allowed") } return 
nil } -// Append an operation into the staging area, to be committed later +// Append add a new Operation to the Bug func (bug *Bug) Append(op Operation) { - if len(bug.packs) == 0 && len(bug.staging.Operations) == 0 { - if op.base().OperationType != CreateOp { - panic("first operation should be a Create") - } - bug.id = op.Id() - } - bug.staging.Append(op) -} - -// Commit write the staging area in Git and move the operations to the packs -func (bug *Bug) Commit(repo repository.ClockedRepo) error { - if !bug.NeedCommit() { - return fmt.Errorf("can't commit a bug with no pending operation") - } - - if err := bug.Validate(); err != nil { - return errors.Wrap(err, "can't commit a bug with invalid data") - } - - // update clocks - var err error - bug.editTime, err = repo.Increment(editClockName) - if err != nil { - return err - } - if bug.lastCommit == "" { - bug.createTime, err = repo.Increment(creationClockName) - if err != nil { - return err - } - } - - // Write the Ops as a Git blob containing the serialized array - hash, err := bug.staging.Write(repo) - if err != nil { - return err - } - - // Make a Git tree referencing this blob - tree := []repository.TreeEntry{ - // the last pack of ops - {ObjectType: repository.Blob, Hash: hash, Name: opsEntryName}, - } - - // Store the logical clocks as well - // --> edit clock for each OperationPack/commits - // --> create clock only for the first OperationPack/commits - // - // To avoid having one blob for each clock value, clocks are serialized - // directly into the entry name - emptyBlobHash, err := repo.StoreData([]byte{}) - if err != nil { - return err - } - tree = append(tree, repository.TreeEntry{ - ObjectType: repository.Blob, - Hash: emptyBlobHash, - Name: fmt.Sprintf(editClockEntryPattern, bug.editTime), - }) - if bug.lastCommit == "" { - tree = append(tree, repository.TreeEntry{ - ObjectType: repository.Blob, - Hash: emptyBlobHash, - Name: fmt.Sprintf(createClockEntryPattern, bug.createTime), - }) - } - - // 
Reference, if any, all the files required by the ops - // Git will check that they actually exist in the storage and will make sure - // to push/pull them as needed. - mediaTree := makeMediaTree(bug.staging) - if len(mediaTree) > 0 { - mediaTreeHash, err := repo.StoreTree(mediaTree) - if err != nil { - return err - } - tree = append(tree, repository.TreeEntry{ - ObjectType: repository.Tree, - Hash: mediaTreeHash, - Name: mediaEntryName, - }) - } - - // Store the tree - hash, err = repo.StoreTree(tree) - if err != nil { - return err - } - - // Write a Git commit referencing the tree, with the previous commit as parent - if bug.lastCommit != "" { - hash, err = repo.StoreCommitWithParent(hash, bug.lastCommit) - } else { - hash, err = repo.StoreCommit(hash) - } - if err != nil { - return err - } - - bug.lastCommit = hash - bug.staging.commitHash = hash - bug.packs = append(bug.packs, bug.staging) - bug.staging = OperationPack{} - - // if it was the first commit, use the Id of the first op (create) - if bug.id == "" || bug.id == entity.UnsetId { - bug.id = bug.packs[0].Operations[0].Id() - } - - // Create or update the Git reference for this bug - // When pushing later, the remote will ensure that this ref update - // is fast-forward, that is no data has been overwritten - ref := fmt.Sprintf("%s%s", bugsRefPattern, bug.id) - return repo.UpdateRef(ref, hash) + bug.Entity.Append(op) } -func (bug *Bug) CommitAsNeeded(repo repository.ClockedRepo) error { - if !bug.NeedCommit() { - return nil +// Operations return the ordered operations +func (bug *Bug) Operations() []Operation { + source := bug.Entity.Operations() + result := make([]Operation, len(source)) + for i, op := range source { + result[i] = op.(Operation) } - return bug.Commit(repo) -} - -func (bug *Bug) NeedCommit() bool { - return !bug.staging.IsEmpty() + return result } -// Merge a different version of the same bug by rebasing operations of this bug -// that are not present in the other on top of the chain of 
operations of the -// other version. -func (bug *Bug) Merge(repo repository.Repo, other Interface) (bool, error) { - var otherBug = bugFromInterface(other) - - // Note: a faster merge should be possible without actually reading and parsing - // all operations pack of our side. - // Reading the other side is still necessary to validate remote data, at least - // for new operations - - if bug.id != otherBug.id { - return false, errors.New("merging unrelated bugs is not supported") - } - - if len(otherBug.staging.Operations) > 0 { - return false, errors.New("merging a bug with a non-empty staging is not supported") - } - - if bug.lastCommit == "" || otherBug.lastCommit == "" { - return false, errors.New("can't merge a bug that has never been stored") - } - - ancestor, err := repo.FindCommonAncestor(bug.lastCommit, otherBug.lastCommit) - if err != nil { - return false, errors.Wrap(err, "can't find common ancestor") - } - - ancestorIndex := 0 - newPacks := make([]OperationPack, 0, len(bug.packs)) - - // Find the root of the rebase - for i, pack := range bug.packs { - newPacks = append(newPacks, pack) - - if pack.commitHash == ancestor { - ancestorIndex = i - break - } - } - - if len(otherBug.packs) == ancestorIndex+1 { - // Nothing to rebase, return early - return false, nil - } - - // get other bug's extra packs - for i := ancestorIndex + 1; i < len(otherBug.packs); i++ { - // clone is probably not necessary - newPack := otherBug.packs[i].Clone() - - newPacks = append(newPacks, newPack) - bug.lastCommit = newPack.commitHash - } - - // rebase our extra packs - for i := ancestorIndex + 1; i < len(bug.packs); i++ { - pack := bug.packs[i] - - // get the referenced git tree - treeHash, err := repo.GetTreeHash(pack.commitHash) - - if err != nil { - return false, err - } - - // create a new commit with the correct ancestor - hash, err := repo.StoreCommitWithParent(treeHash, bug.lastCommit) - - if err != nil { - return false, err - } - - // replace the pack - newPack := 
pack.Clone() - newPack.commitHash = hash - newPacks = append(newPacks, newPack) - - // update the bug - bug.lastCommit = hash - } - - bug.packs = newPacks - - // Update the git ref - err = repo.UpdateRef(bugsRefPattern+bug.id.String(), bug.lastCommit) - if err != nil { - return false, err +// Compile a bug in a easily usable snapshot +func (bug *Bug) Compile() Snapshot { + snap := Snapshot{ + id: bug.Id(), + Status: OpenStatus, } - return true, nil -} - -// Id return the Bug identifier -func (bug *Bug) Id() entity.Id { - if bug.id == "" || bug.id == entity.UnsetId { - // simply panic as it would be a coding error - // (using an id of a bug without operation yet) - panic("no id yet") + for _, op := range bug.Operations() { + op.Apply(&snap) + snap.Operations = append(snap.Operations, op) } - return bug.id -} - -// CreateLamportTime return the Lamport time of creation -func (bug *Bug) CreateLamportTime() lamport.Time { - return bug.createTime -} -// EditLamportTime return the Lamport time of the last edit -func (bug *Bug) EditLamportTime() lamport.Time { - return bug.editTime + return snap } // Lookup for the very first operation of the bug. // For a valid Bug, this operation should be a CreateOp func (bug *Bug) FirstOp() Operation { - for _, pack := range bug.packs { - for _, op := range pack.Operations { - return op - } + if fo := bug.Entity.FirstOp(); fo != nil { + return fo.(Operation) } - - if !bug.staging.IsEmpty() { - return bug.staging.Operations[0] - } - return nil } // Lookup for the very last operation of the bug. 
// For a valid Bug, should never be nil func (bug *Bug) LastOp() Operation { - if !bug.staging.IsEmpty() { - return bug.staging.Operations[len(bug.staging.Operations)-1] - } - - if len(bug.packs) == 0 { - return nil - } - - lastPack := bug.packs[len(bug.packs)-1] - - if len(lastPack.Operations) == 0 { - return nil + if lo := bug.Entity.LastOp(); lo != nil { + return lo.(Operation) } - - return lastPack.Operations[len(lastPack.Operations)-1] -} - -// Compile a bug in a easily usable snapshot -func (bug *Bug) Compile() Snapshot { - snap := Snapshot{ - id: bug.id, - Status: OpenStatus, - } - - it := NewOperationIterator(bug) - - for it.Next() { - op := it.Value() - op.Apply(&snap) - snap.Operations = append(snap.Operations, op) - } - - return snap + return nil } diff --git a/migration3/after/bug/bug_actions.go b/migration3/after/bug/bug_actions.go index dff0995..16e8c44 100644 --- a/migration3/after/bug/bug_actions.go +++ b/migration3/after/bug/bug_actions.go @@ -1,42 +1,34 @@ package bug import ( - "fmt" - "strings" + "github.com/pkg/errors" "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/entity/dag" "github.com/MichaelMure/git-bug-migration/migration3/after/identity" "github.com/MichaelMure/git-bug-migration/migration3/after/repository" - "github.com/pkg/errors" ) // Fetch retrieve updates from a remote // This does not change the local bugs state func Fetch(repo repository.Repo, remote string) (string, error) { - // "refs/bugs/*:refs/remotes/>/bugs/*" - remoteRefSpec := fmt.Sprintf(bugsRemoteRefPattern, remote) - fetchRefSpec := fmt.Sprintf("%s*:%s*", bugsRefPattern, remoteRefSpec) - - return repo.FetchRefs(remote, fetchRefSpec) + return dag.Fetch(def, repo, remote) } // Push update a remote with the local changes func Push(repo repository.Repo, remote string) (string, error) { - // "refs/bugs/*:refs/bugs/*" - refspec := fmt.Sprintf("%s*:%s*", bugsRefPattern, bugsRefPattern) - - return 
repo.PushRefs(remote, refspec) + return dag.Push(def, repo, remote) } // Pull will do a Fetch + MergeAll // This function will return an error if a merge fail -func Pull(repo repository.ClockedRepo, remote string) error { +func Pull(repo repository.ClockedRepo, remote string, author identity.Interface) error { _, err := Fetch(repo, remote) if err != nil { return err } - for merge := range MergeAll(repo, remote) { + for merge := range MergeAll(repo, remote, author) { if merge.Err != nil { return merge.Err } @@ -48,96 +40,38 @@ func Pull(repo repository.ClockedRepo, remote string) error { return nil } -// MergeAll will merge all the available remote bug: -// -// - If the remote has new commit, the local bug is updated to match the same history -// (fast-forward update) -// - if the local bug has new commits but the remote don't, nothing is changed -// - if both local and remote bug have new commits (that is, we have a concurrent edition), -// new local commits are rewritten at the head of the remote history (that is, a rebase) -func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeResult { - out := make(chan entity.MergeResult) - +// MergeAll will merge all the available remote bug +// Note: an author is necessary for the case where a merge commit is created, as this commit will +// have an author and may be signed if a signing key is available. +func MergeAll(repo repository.ClockedRepo, remote string, author identity.Interface) <-chan entity.MergeResult { // no caching for the merge, we load everything from git even if that means multiple // copy of the same entity in memory. The cache layer will intercept the results to // invalidate entities if necessary. 
identityResolver := identity.NewSimpleResolver(repo) + out := make(chan entity.MergeResult) + go func() { defer close(out) - remoteRefSpec := fmt.Sprintf(bugsRemoteRefPattern, remote) - remoteRefs, err := repo.ListRefs(remoteRefSpec) + results := dag.MergeAll(def, repo, identityResolver, remote, author) - if err != nil { - out <- entity.MergeResult{Err: err} - return - } - - for _, remoteRef := range remoteRefs { - refSplit := strings.Split(remoteRef, "/") - id := entity.Id(refSplit[len(refSplit)-1]) - - if err := id.Validate(); err != nil { - out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "invalid ref").Error()) - continue - } - - remoteBug, err := read(repo, identityResolver, remoteRef) - - if err != nil { - out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote bug is not readable").Error()) - continue - } - - // Check for error in remote data - if err := remoteBug.Validate(); err != nil { - out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "remote bug is invalid").Error()) - continue - } - - localRef := bugsRefPattern + remoteBug.Id().String() - localExist, err := repo.RefExist(localRef) - - if err != nil { - out <- entity.NewMergeError(err, id) - continue - } - - // the bug is not local yet, simply create the reference - if !localExist { - err := repo.CopyRef(remoteRef, localRef) - - if err != nil { - out <- entity.NewMergeError(err, id) - return + // wrap the dag.Entity into a complete Bug + for result := range results { + result := result + if result.Entity != nil { + result.Entity = &Bug{ + Entity: result.Entity.(*dag.Entity), } - - out <- entity.NewMergeStatus(entity.MergeStatusNew, id, remoteBug) - continue - } - - localBug, err := read(repo, identityResolver, localRef) - - if err != nil { - out <- entity.NewMergeError(errors.Wrap(err, "local bug is not readable"), id) - return - } - - updated, err := localBug.Merge(repo, remoteBug) - - if err != nil { - out <- entity.NewMergeInvalidStatus(id, errors.Wrap(err, "merge 
failed").Error()) - return - } - - if updated { - out <- entity.NewMergeStatus(entity.MergeStatusUpdated, id, localBug) - } else { - out <- entity.NewMergeStatus(entity.MergeStatusNothing, id, localBug) } + out <- result } }() return out } + +// RemoveBug will remove a local bug from its entity.Id +func RemoveBug(repo repository.ClockedRepo, id entity.Id) error { + return dag.Remove(def, repo, id) +} diff --git a/migration3/after/bug/clocks.go b/migration3/after/bug/clocks.go deleted file mode 100644 index d63fe4b..0000000 --- a/migration3/after/bug/clocks.go +++ /dev/null @@ -1,40 +0,0 @@ -package bug - -import ( - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -// ClockLoader is the repository.ClockLoader for the Bug entity -var ClockLoader = repository.ClockLoader{ - Clocks: []string{creationClockName, editClockName}, - Witnesser: func(repo repository.ClockedRepo) error { - // We don't care about the actual identity so an IdentityStub will do - resolver := identity.NewStubResolver() - for b := range ReadAllLocalWithResolver(repo, resolver) { - if b.Err != nil { - return b.Err - } - - createClock, err := repo.GetOrCreateClock(creationClockName) - if err != nil { - return err - } - err = createClock.Witness(b.Bug.createTime) - if err != nil { - return err - } - - editClock, err := repo.GetOrCreateClock(editClockName) - if err != nil { - return err - } - err = editClock.Witness(b.Bug.editTime) - if err != nil { - return err - } - } - - return nil - }, -} diff --git a/migration3/after/bug/err.go b/migration3/after/bug/err.go new file mode 100644 index 0000000..58cd0de --- /dev/null +++ b/migration3/after/bug/err.go @@ -0,0 +1,17 @@ +package bug + +import ( + "errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" +) + +var ErrBugNotExist = errors.New("bug doesn't exist") + +func NewErrMultipleMatchBug(matching []entity.Id) 
*entity.ErrMultipleMatch { + return entity.NewErrMultipleMatch("bug", matching) +} + +func NewErrMultipleMatchOp(matching []entity.Id) *entity.ErrMultipleMatch { + return entity.NewErrMultipleMatch("operation", matching) +} diff --git a/migration3/after/bug/git_tree.go b/migration3/after/bug/git_tree.go deleted file mode 100644 index a9abeec..0000000 --- a/migration3/after/bug/git_tree.go +++ /dev/null @@ -1,84 +0,0 @@ -package bug - -import ( - "fmt" - "strings" - - "github.com/pkg/errors" - - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" - "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" -) - -type gitTree struct { - opsEntry repository.TreeEntry - createTime lamport.Time - editTime lamport.Time -} - -func readTree(repo repository.RepoData, hash repository.Hash) (*gitTree, error) { - tree := &gitTree{} - - entries, err := repo.ReadTree(hash) - if err != nil { - return nil, errors.Wrap(err, "can't list git tree entries") - } - - opsFound := false - - for _, entry := range entries { - if entry.Name == opsEntryName { - tree.opsEntry = entry - opsFound = true - continue - } - if strings.HasPrefix(entry.Name, createClockEntryPrefix) { - n, err := fmt.Sscanf(entry.Name, createClockEntryPattern, &tree.createTime) - if err != nil { - return nil, errors.Wrap(err, "can't read create lamport time") - } - if n != 1 { - return nil, fmt.Errorf("could not parse create time lamport value") - } - } - if strings.HasPrefix(entry.Name, editClockEntryPrefix) { - n, err := fmt.Sscanf(entry.Name, editClockEntryPattern, &tree.editTime) - if err != nil { - return nil, errors.Wrap(err, "can't read edit lamport time") - } - if n != 1 { - return nil, fmt.Errorf("could not parse edit time lamport value") - } - } - } - - if !opsFound { - return nil, errors.New("invalid tree, missing the ops entry") - } - - return tree, nil -} - -func makeMediaTree(pack OperationPack) []repository.TreeEntry { - var tree []repository.TreeEntry - counter := 0 - 
added := make(map[repository.Hash]interface{}) - - for _, ops := range pack.Operations { - for _, file := range ops.GetFiles() { - if _, has := added[file]; !has { - tree = append(tree, repository.TreeEntry{ - ObjectType: repository.Blob, - Hash: file, - // The name is not important here, we only need to - // reference the blob. - Name: fmt.Sprintf("file%d", counter), - }) - counter++ - added[file] = struct{}{} - } - } - } - - return tree -} diff --git a/migration3/after/bug/identity.go b/migration3/after/bug/identity.go deleted file mode 100644 index c7a7927..0000000 --- a/migration3/after/bug/identity.go +++ /dev/null @@ -1,27 +0,0 @@ -package bug - -import ( - "github.com/MichaelMure/git-bug-migration/migration3/after/identity" -) - -// EnsureIdentities walk the graph of operations and make sure that all Identity -// are properly loaded. That is, it replace all the IdentityStub with the full -// Identity, loaded through a Resolver. -func (bug *Bug) EnsureIdentities(resolver identity.Resolver) error { - it := NewOperationIterator(bug) - - for it.Next() { - op := it.Value() - base := op.base() - - if stub, ok := base.Author.(*identity.IdentityStub); ok { - i, err := resolver.ResolveIdentity(stub.Id()) - if err != nil { - return err - } - - base.Author = i - } - } - return nil -} diff --git a/migration3/after/bug/interface.go b/migration3/after/bug/interface.go index 98a0560..b040f30 100644 --- a/migration3/after/bug/interface.go +++ b/migration3/after/bug/interface.go @@ -16,17 +16,15 @@ type Interface interface { // Append an operation into the staging area, to be committed later Append(op Operation) + // Operations return the ordered operations + Operations() []Operation + // Indicate that the in-memory state changed and need to be commit in the repository NeedCommit() bool // Commit write the staging area in Git and move the operations to the packs Commit(repo repository.ClockedRepo) error - // Merge a different version of the same bug by rebasing operations of 
this bug - // that are not present in the other on top of the chain of operations of the - // other version. - Merge(repo repository.Repo, other Interface) (bool, error) - // Lookup for the very first operation of the bug. // For a valid Bug, this operation should be a CreateOp FirstOp() Operation diff --git a/migration3/after/bug/op_add_comment.go b/migration3/after/bug/op_add_comment.go index f0e19f8..baea748 100644 --- a/migration3/after/bug/op_add_comment.go +++ b/migration3/after/bug/op_add_comment.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/entity/dag" "github.com/MichaelMure/git-bug-migration/migration3/after/identity" "github.com/MichaelMure/git-bug-migration/migration3/after/repository" "github.com/MichaelMure/git-bug-migration/migration3/after/util/text" @@ -12,6 +13,7 @@ import ( ) var _ Operation = &AddCommentOperation{} +var _ dag.OperationWithFiles = &AddCommentOperation{} // AddCommentOperation will add a new comment in the bug type AddCommentOperation struct { @@ -21,25 +23,19 @@ type AddCommentOperation struct { Files []repository.Hash `json:"files"` } -// Sign-post method for gqlgen -func (op *AddCommentOperation) IsOperation() {} - -func (op *AddCommentOperation) base() *OpBase { - return &op.OpBase -} - func (op *AddCommentOperation) Id() entity.Id { - return idOperation(op) + return idOperation(op, &op.OpBase) } func (op *AddCommentOperation) Apply(snapshot *Snapshot) { - snapshot.addActor(op.Author) - snapshot.addParticipant(op.Author) + snapshot.addActor(op.Author_) + snapshot.addParticipant(op.Author_) + commentId := entity.CombineIds(snapshot.Id(), op.Id()) comment := Comment{ - id: op.Id(), + id: commentId, Message: op.Message, - Author: op.Author, + Author: op.Author_, Files: op.Files, UnixTime: timestamp.Timestamp(op.UnixTime), } @@ -47,7 +43,7 @@ func (op *AddCommentOperation) Apply(snapshot *Snapshot) { 
snapshot.Comments = append(snapshot.Comments, comment) item := &AddCommentTimelineItem{ - CommentTimelineItem: NewCommentTimelineItem(op.Id(), comment), + CommentTimelineItem: NewCommentTimelineItem(commentId, comment), } snapshot.Timeline = append(snapshot.Timeline, item) @@ -58,7 +54,7 @@ func (op *AddCommentOperation) GetFiles() []repository.Hash { } func (op *AddCommentOperation) Validate() error { - if err := opBaseValidate(op, AddCommentOp); err != nil { + if err := op.OpBase.Validate(op, AddCommentOp); err != nil { return err } diff --git a/migration3/after/bug/op_create.go b/migration3/after/bug/op_create.go index 967fbba..ce4f3f8 100644 --- a/migration3/after/bug/op_create.go +++ b/migration3/after/bug/op_create.go @@ -1,12 +1,12 @@ package bug import ( - "crypto/rand" "encoding/json" "fmt" "strings" "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/entity/dag" "github.com/MichaelMure/git-bug-migration/migration3/after/identity" "github.com/MichaelMure/git-bug-migration/migration3/after/repository" "github.com/MichaelMure/git-bug-migration/migration3/after/util/text" @@ -14,50 +14,63 @@ import ( ) var _ Operation = &CreateOperation{} +var _ dag.OperationWithFiles = &CreateOperation{} // CreateOperation define the initial creation of a bug type CreateOperation struct { OpBase - // mandatory random bytes to ensure a better randomness of the data of the first - // operation of a bug, used to later generate the ID - // len(Nonce) should be > 20 and < 64 bytes - Nonce []byte `json:"nonce"` Title string `json:"title"` Message string `json:"message"` Files []repository.Hash `json:"files"` } -// Sign-post method for gqlgen -func (op *CreateOperation) IsOperation() {} - -func (op *CreateOperation) base() *OpBase { - return &op.OpBase +func (op *CreateOperation) Id() entity.Id { + return idOperation(op, &op.OpBase) } -func (op *CreateOperation) Id() entity.Id { - return idOperation(op) +// 
OVERRIDE +func (op *CreateOperation) SetMetadata(key string, value string) { + // sanity check: we make sure we are not in the following scenario: + // - the bug is created with a first operation + // - Id() is used + // - metadata are added, which will change the Id + // - Id() is used again + + if op.id != entity.UnsetId { + panic("usage of Id() after changing the first operation") + } + + op.OpBase.SetMetadata(key, value) } func (op *CreateOperation) Apply(snapshot *Snapshot) { - snapshot.addActor(op.Author) - snapshot.addParticipant(op.Author) + // sanity check: will fail when adding a second Create + if snapshot.id != "" && snapshot.id != entity.UnsetId && snapshot.id != op.Id() { + panic("adding a second Create operation") + } + + snapshot.id = op.Id() + + snapshot.addActor(op.Author_) + snapshot.addParticipant(op.Author_) snapshot.Title = op.Title + commentId := entity.CombineIds(snapshot.Id(), op.Id()) comment := Comment{ - id: op.Id(), + id: commentId, Message: op.Message, - Author: op.Author, + Author: op.Author_, UnixTime: timestamp.Timestamp(op.UnixTime), } snapshot.Comments = []Comment{comment} - snapshot.Author = op.Author + snapshot.Author = op.Author_ snapshot.CreateTime = op.Time() snapshot.Timeline = []TimelineItem{ &CreateTimelineItem{ - CommentTimelineItem: NewCommentTimelineItem(op.Id(), comment), + CommentTimelineItem: NewCommentTimelineItem(commentId, comment), }, } } @@ -67,7 +80,7 @@ func (op *CreateOperation) GetFiles() []repository.Hash { } func (op *CreateOperation) Validate() error { - if err := opBaseValidate(op, CreateOp); err != nil { + if err := op.OpBase.Validate(op, CreateOp); err != nil { return err } @@ -95,7 +108,7 @@ func (op *CreateOperation) Validate() error { return nil } -// UnmarshalJSON is a two step JSON unmarshaling +// UnmarshalJSON is a two step JSON unmarshalling // This workaround is necessary to avoid the inner OpBase.MarshalJSON // overriding the outer op's MarshalJSON func (op *CreateOperation) 
UnmarshalJSON(data []byte) error { @@ -131,19 +144,9 @@ func (op *CreateOperation) UnmarshalJSON(data []byte) error { // Sign post method for gqlgen func (op *CreateOperation) IsAuthored() {} -func makeNonce(len int) []byte { - result := make([]byte, len) - _, err := rand.Read(result) - if err != nil { - panic(err) - } - return result -} - func NewCreateOp(author identity.Interface, unixTime int64, title, message string, files []repository.Hash) *CreateOperation { return &CreateOperation{ OpBase: newOpBase(CreateOp, author, unixTime), - Nonce: makeNonce(20), Title: title, Message: message, Files: files, diff --git a/migration3/after/bug/op_edit_comment.go b/migration3/after/bug/op_edit_comment.go index eeb5103..cb7cea6 100644 --- a/migration3/after/bug/op_edit_comment.go +++ b/migration3/after/bug/op_edit_comment.go @@ -7,6 +7,7 @@ import ( "github.com/pkg/errors" "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/entity/dag" "github.com/MichaelMure/git-bug-migration/migration3/after/identity" "github.com/MichaelMure/git-bug-migration/migration3/after/repository" "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" @@ -15,6 +16,7 @@ import ( ) var _ Operation = &EditCommentOperation{} +var _ dag.OperationWithFiles = &EditCommentOperation{} // EditCommentOperation will change a comment in the bug type EditCommentOperation struct { @@ -24,22 +26,15 @@ type EditCommentOperation struct { Files []repository.Hash `json:"files"` } -// Sign-post method for gqlgen -func (op *EditCommentOperation) IsOperation() {} - -func (op *EditCommentOperation) base() *OpBase { - return &op.OpBase -} - func (op *EditCommentOperation) Id() entity.Id { - return idOperation(op) + return idOperation(op, &op.OpBase) } func (op *EditCommentOperation) Apply(snapshot *Snapshot) { // Todo: currently any message can be edited, even by a different author // crypto signature are needed. 
- snapshot.addActor(op.Author) + snapshot.addActor(op.Author_) var target TimelineItem @@ -85,7 +80,7 @@ func (op *EditCommentOperation) GetFiles() []repository.Hash { } func (op *EditCommentOperation) Validate() error { - if err := opBaseValidate(op, EditCommentOp); err != nil { + if err := op.OpBase.Validate(op, EditCommentOp); err != nil { return err } @@ -100,7 +95,7 @@ func (op *EditCommentOperation) Validate() error { return nil } -// UnmarshalJSON is a two step JSON unmarshaling +// UnmarshalJSON is a two step JSON unmarshalling // This workaround is necessary to avoid the inner OpBase.MarshalJSON // overriding the outer op's MarshalJSON func (op *EditCommentOperation) UnmarshalJSON(data []byte) error { diff --git a/migration3/after/bug/op_label_change.go b/migration3/after/bug/op_label_change.go index 2c908ad..78535e6 100644 --- a/migration3/after/bug/op_label_change.go +++ b/migration3/after/bug/op_label_change.go @@ -21,20 +21,13 @@ type LabelChangeOperation struct { Removed []Label `json:"removed"` } -// Sign-post method for gqlgen -func (op *LabelChangeOperation) IsOperation() {} - -func (op *LabelChangeOperation) base() *OpBase { - return &op.OpBase -} - func (op *LabelChangeOperation) Id() entity.Id { - return idOperation(op) + return idOperation(op, &op.OpBase) } // Apply apply the operation func (op *LabelChangeOperation) Apply(snapshot *Snapshot) { - snapshot.addActor(op.Author) + snapshot.addActor(op.Author_) // Add in the set AddLoop: @@ -66,7 +59,7 @@ AddLoop: item := &LabelChangeTimelineItem{ id: op.Id(), - Author: op.Author, + Author: op.Author_, UnixTime: timestamp.Timestamp(op.UnixTime), Added: op.Added, Removed: op.Removed, @@ -76,7 +69,7 @@ AddLoop: } func (op *LabelChangeOperation) Validate() error { - if err := opBaseValidate(op, LabelChangeOp); err != nil { + if err := op.OpBase.Validate(op, LabelChangeOp); err != nil { return err } @@ -99,7 +92,7 @@ func (op *LabelChangeOperation) Validate() error { return nil } -// UnmarshalJSON is a 
two step JSON unmarshaling +// UnmarshalJSON is a two step JSON unmarshalling // This workaround is necessary to avoid the inner OpBase.MarshalJSON // overriding the outer op's MarshalJSON func (op *LabelChangeOperation) UnmarshalJSON(data []byte) error { diff --git a/migration3/after/bug/op_noop.go b/migration3/after/bug/op_noop.go index 570405c..9def880 100644 --- a/migration3/after/bug/op_noop.go +++ b/migration3/after/bug/op_noop.go @@ -16,15 +16,8 @@ type NoOpOperation struct { OpBase } -// Sign-post method for gqlgen -func (op *NoOpOperation) IsOperation() {} - -func (op *NoOpOperation) base() *OpBase { - return &op.OpBase -} - func (op *NoOpOperation) Id() entity.Id { - return idOperation(op) + return idOperation(op, &op.OpBase) } func (op *NoOpOperation) Apply(snapshot *Snapshot) { @@ -32,10 +25,10 @@ func (op *NoOpOperation) Apply(snapshot *Snapshot) { } func (op *NoOpOperation) Validate() error { - return opBaseValidate(op, NoOpOp) + return op.OpBase.Validate(op, NoOpOp) } -// UnmarshalJSON is a two step JSON unmarshaling +// UnmarshalJSON is a two step JSON unmarshalling // This workaround is necessary to avoid the inner OpBase.MarshalJSON // overriding the outer op's MarshalJSON func (op *NoOpOperation) UnmarshalJSON(data []byte) error { diff --git a/migration3/after/bug/op_set_metadata.go b/migration3/after/bug/op_set_metadata.go index 23b720d..deb2e0e 100644 --- a/migration3/after/bug/op_set_metadata.go +++ b/migration3/after/bug/op_set_metadata.go @@ -17,41 +17,25 @@ type SetMetadataOperation struct { NewMetadata map[string]string `json:"new_metadata"` } -// Sign-post method for gqlgen -func (op *SetMetadataOperation) IsOperation() {} - -func (op *SetMetadataOperation) base() *OpBase { - return &op.OpBase -} - func (op *SetMetadataOperation) Id() entity.Id { - return idOperation(op) + return idOperation(op, &op.OpBase) } func (op *SetMetadataOperation) Apply(snapshot *Snapshot) { for _, target := range snapshot.Operations { if target.Id() == 
op.Target { - base := target.base() - - if base.extraMetadata == nil { - base.extraMetadata = make(map[string]string) - } - // Apply the metadata in an immutable way: if a metadata already // exist, it's not possible to override it. - for key, val := range op.NewMetadata { - if _, exist := base.extraMetadata[key]; !exist { - base.extraMetadata[key] = val - } + for key, value := range op.NewMetadata { + target.setExtraMetadataImmutable(key, value) } - return } } } func (op *SetMetadataOperation) Validate() error { - if err := opBaseValidate(op, SetMetadataOp); err != nil { + if err := op.OpBase.Validate(op, SetMetadataOp); err != nil { return err } @@ -62,7 +46,7 @@ func (op *SetMetadataOperation) Validate() error { return nil } -// UnmarshalJSON is a two step JSON unmarshaling +// UnmarshalJSON is a two step JSON unmarshalling // This workaround is necessary to avoid the inner OpBase.MarshalJSON // overriding the outer op's MarshalJSON func (op *SetMetadataOperation) UnmarshalJSON(data []byte) error { diff --git a/migration3/after/bug/op_set_status.go b/migration3/after/bug/op_set_status.go index 3d17caf..837d2f1 100644 --- a/migration3/after/bug/op_set_status.go +++ b/migration3/after/bug/op_set_status.go @@ -18,24 +18,17 @@ type SetStatusOperation struct { Status Status `json:"status"` } -// Sign-post method for gqlgen -func (op *SetStatusOperation) IsOperation() {} - -func (op *SetStatusOperation) base() *OpBase { - return &op.OpBase -} - func (op *SetStatusOperation) Id() entity.Id { - return idOperation(op) + return idOperation(op, &op.OpBase) } func (op *SetStatusOperation) Apply(snapshot *Snapshot) { snapshot.Status = op.Status - snapshot.addActor(op.Author) + snapshot.addActor(op.Author_) item := &SetStatusTimelineItem{ id: op.Id(), - Author: op.Author, + Author: op.Author_, UnixTime: timestamp.Timestamp(op.UnixTime), Status: op.Status, } @@ -44,7 +37,7 @@ func (op *SetStatusOperation) Apply(snapshot *Snapshot) { } func (op *SetStatusOperation) Validate() 
error { - if err := opBaseValidate(op, SetStatusOp); err != nil { + if err := op.OpBase.Validate(op, SetStatusOp); err != nil { return err } @@ -55,7 +48,7 @@ func (op *SetStatusOperation) Validate() error { return nil } -// UnmarshalJSON is a two step JSON unmarshaling +// UnmarshalJSON is a two step JSON unmarshalling // This workaround is necessary to avoid the inner OpBase.MarshalJSON // overriding the outer op's MarshalJSON func (op *SetStatusOperation) UnmarshalJSON(data []byte) error { diff --git a/migration3/after/bug/op_set_title.go b/migration3/after/bug/op_set_title.go index 35c6a59..e83d97e 100644 --- a/migration3/after/bug/op_set_title.go +++ b/migration3/after/bug/op_set_title.go @@ -21,24 +21,17 @@ type SetTitleOperation struct { Was string `json:"was"` } -// Sign-post method for gqlgen -func (op *SetTitleOperation) IsOperation() {} - -func (op *SetTitleOperation) base() *OpBase { - return &op.OpBase -} - func (op *SetTitleOperation) Id() entity.Id { - return idOperation(op) + return idOperation(op, &op.OpBase) } func (op *SetTitleOperation) Apply(snapshot *Snapshot) { snapshot.Title = op.Title - snapshot.addActor(op.Author) + snapshot.addActor(op.Author_) item := &SetTitleTimelineItem{ id: op.Id(), - Author: op.Author, + Author: op.Author_, UnixTime: timestamp.Timestamp(op.UnixTime), Title: op.Title, Was: op.Was, @@ -48,7 +41,7 @@ func (op *SetTitleOperation) Apply(snapshot *Snapshot) { } func (op *SetTitleOperation) Validate() error { - if err := opBaseValidate(op, SetTitleOp); err != nil { + if err := op.OpBase.Validate(op, SetTitleOp); err != nil { return err } @@ -75,7 +68,7 @@ func (op *SetTitleOperation) Validate() error { return nil } -// UnmarshalJSON is a two step JSON unmarshaling +// UnmarshalJSON is a two step JSON unmarshalling // This workaround is necessary to avoid the inner OpBase.MarshalJSON // overriding the outer op's MarshalJSON func (op *SetTitleOperation) UnmarshalJSON(data []byte) error { @@ -132,19 +125,17 @@ func (s 
*SetTitleTimelineItem) IsAuthored() {} // Convenience function to apply the operation func SetTitle(b Interface, author identity.Interface, unixTime int64, title string) (*SetTitleOperation, error) { - it := NewOperationIterator(b) - - var lastTitleOp Operation - for it.Next() { - op := it.Value() - if op.base().OperationType == SetTitleOp { + var lastTitleOp *SetTitleOperation + for _, op := range b.Operations() { + switch op := op.(type) { + case *SetTitleOperation: lastTitleOp = op } } var was string if lastTitleOp != nil { - was = lastTitleOp.(*SetTitleOperation).Title + was = lastTitleOp.Title } else { was = b.FirstOp().(*CreateOperation).Title } diff --git a/migration3/after/bug/operation.go b/migration3/after/bug/operation.go index d7e0ad4..51db297 100644 --- a/migration3/after/bug/operation.go +++ b/migration3/after/bug/operation.go @@ -1,6 +1,7 @@ package bug import ( + "crypto/rand" "encoding/json" "fmt" "time" @@ -8,8 +9,8 @@ import ( "github.com/pkg/errors" "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/entity/dag" "github.com/MichaelMure/git-bug-migration/migration3/after/identity" - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" ) // OperationType is an operation type identifier @@ -29,34 +30,27 @@ const ( // Operation define the interface to fulfill for an edit operation of a Bug type Operation interface { - // base return the OpBase of the Operation, for package internal use - base() *OpBase - // Id return the identifier of the operation, to be used for back references - Id() entity.Id + dag.Operation + + // Type return the type of the operation + Type() OperationType + // Time return the time when the operation was added Time() time.Time - // GetFiles return the files needed by this operation - GetFiles() []repository.Hash // Apply the operation to a Snapshot to create the final state Apply(snapshot *Snapshot) - // Validate check if the 
operation is valid (ex: a title is a single line) - Validate() error + // SetMetadata store arbitrary metadata about the operation SetMetadata(key string, value string) // GetMetadata retrieve arbitrary metadata about the operation GetMetadata(key string) (string, bool) // AllMetadata return all metadata for this operation AllMetadata() map[string]string - // GetAuthor return the author identity - GetAuthor() identity.Interface - // sign-post method for gqlgen - IsOperation() + setExtraMetadataImmutable(key string, value string) } -func idOperation(op Operation) entity.Id { - base := op.base() - +func idOperation(op Operation, base *OpBase) entity.Id { if base.id == "" { // something went really wrong panic("op's id not set") @@ -77,13 +71,80 @@ func idOperation(op Operation) entity.Id { return base.id } +func operationUnmarshaller(author identity.Interface, raw json.RawMessage) (dag.Operation, error) { + var t struct { + OperationType OperationType `json:"type"` + } + + if err := json.Unmarshal(raw, &t); err != nil { + return nil, err + } + + var op Operation + + switch t.OperationType { + case AddCommentOp: + op = &AddCommentOperation{} + case CreateOp: + op = &CreateOperation{} + case EditCommentOp: + op = &EditCommentOperation{} + case LabelChangeOp: + op = &LabelChangeOperation{} + case NoOpOp: + op = &NoOpOperation{} + case SetMetadataOp: + op = &SetMetadataOperation{} + case SetStatusOp: + op = &SetStatusOperation{} + case SetTitleOp: + op = &SetTitleOperation{} + default: + panic(fmt.Sprintf("unknown operation type %v", t.OperationType)) + } + + err := json.Unmarshal(raw, &op) + if err != nil { + return nil, err + } + + switch op := op.(type) { + case *AddCommentOperation: + op.Author_ = author + case *CreateOperation: + op.Author_ = author + case *EditCommentOperation: + op.Author_ = author + case *LabelChangeOperation: + op.Author_ = author + case *NoOpOperation: + op.Author_ = author + case *SetMetadataOperation: + op.Author_ = author + case 
*SetStatusOperation: + op.Author_ = author + case *SetTitleOperation: + op.Author_ = author + default: + panic(fmt.Sprintf("unknown operation type %T", op)) + } + + return op, nil +} + // OpBase implement the common code for all operations type OpBase struct { OperationType OperationType `json:"type"` - Author identity.Interface `json:"author"` + Author_ identity.Interface `json:"author"` // TODO: part of the data model upgrade, this should eventually be a timestamp + lamport UnixTime int64 `json:"timestamp"` Metadata map[string]string `json:"metadata,omitempty"` + + // mandatory random bytes to ensure a better randomness of the data used to later generate the ID + // len(Nonce) should be > 20 and < 64 bytes + // It has no functional purpose and should be ignored. + Nonce []byte `json:"nonce"` + // Not serialized. Store the op's id in memory. id entity.Id // Not serialized. Store the extra metadata in memory, @@ -95,21 +156,32 @@ type OpBase struct { func newOpBase(opType OperationType, author identity.Interface, unixTime int64) OpBase { return OpBase{ OperationType: opType, - Author: author, + Author_: author, UnixTime: unixTime, + Nonce: makeNonce(20), id: entity.UnsetId, } } -func (op *OpBase) UnmarshalJSON(data []byte) error { +func makeNonce(len int) []byte { + result := make([]byte, len) + _, err := rand.Read(result) + if err != nil { + panic(err) + } + return result +} + +func (base *OpBase) UnmarshalJSON(data []byte) error { // Compute the Id when loading the op from disk. 
- op.id = entity.DeriveId(data) + base.id = entity.DeriveId(data) aux := struct { OperationType OperationType `json:"type"` Author json.RawMessage `json:"author"` UnixTime int64 `json:"timestamp"` Metadata map[string]string `json:"metadata,omitempty"` + Nonce []byte `json:"nonce"` }{} if err := json.Unmarshal(data, &aux); err != nil { @@ -122,92 +194,110 @@ func (op *OpBase) UnmarshalJSON(data []byte) error { return err } - op.OperationType = aux.OperationType - op.Author = author - op.UnixTime = aux.UnixTime - op.Metadata = aux.Metadata + base.OperationType = aux.OperationType + base.Author_ = author + base.UnixTime = aux.UnixTime + base.Metadata = aux.Metadata + base.Nonce = aux.Nonce return nil } -// Time return the time when the operation was added -func (op *OpBase) Time() time.Time { - return time.Unix(op.UnixTime, 0) +func (base *OpBase) Type() OperationType { + return base.OperationType } -// GetFiles return the files needed by this operation -func (op *OpBase) GetFiles() []repository.Hash { - return nil +// Time return the time when the operation was added +func (base *OpBase) Time() time.Time { + return time.Unix(base.UnixTime, 0) } // Validate check the OpBase for errors -func opBaseValidate(op Operation, opType OperationType) error { - if op.base().OperationType != opType { - return fmt.Errorf("incorrect operation type (expected: %v, actual: %v)", opType, op.base().OperationType) +func (base *OpBase) Validate(op Operation, opType OperationType) error { + if base.OperationType != opType { + return fmt.Errorf("incorrect operation type (expected: %v, actual: %v)", opType, base.OperationType) } if op.Time().Unix() == 0 { return fmt.Errorf("time not set") } - if op.base().Author == nil { + if base.Author_ == nil { return fmt.Errorf("author not set") } - if err := op.base().Author.Validate(); err != nil { + if err := op.Author().Validate(); err != nil { return errors.Wrap(err, "author") } - for _, hash := range op.GetFiles() { - if !hash.IsValid() { - return 
fmt.Errorf("file with invalid hash %v", hash) + if op, ok := op.(dag.OperationWithFiles); ok { + for _, hash := range op.GetFiles() { + if !hash.IsValid() { + return fmt.Errorf("file with invalid hash %v", hash) + } } } + if len(base.Nonce) > 64 { + return fmt.Errorf("nonce is too big") + } + if len(base.Nonce) < 20 { + return fmt.Errorf("nonce is too small") + } + return nil } // SetMetadata store arbitrary metadata about the operation -func (op *OpBase) SetMetadata(key string, value string) { - if op.Metadata == nil { - op.Metadata = make(map[string]string) +func (base *OpBase) SetMetadata(key string, value string) { + if base.Metadata == nil { + base.Metadata = make(map[string]string) } - op.Metadata[key] = value - op.id = entity.UnsetId + base.Metadata[key] = value + base.id = entity.UnsetId } // GetMetadata retrieve arbitrary metadata about the operation -func (op *OpBase) GetMetadata(key string) (string, bool) { - val, ok := op.Metadata[key] +func (base *OpBase) GetMetadata(key string) (string, bool) { + val, ok := base.Metadata[key] if ok { return val, true } // extraMetadata can't replace the original operations value if any - val, ok = op.extraMetadata[key] + val, ok = base.extraMetadata[key] return val, ok } // AllMetadata return all metadata for this operation -func (op *OpBase) AllMetadata() map[string]string { +func (base *OpBase) AllMetadata() map[string]string { result := make(map[string]string) - for key, val := range op.extraMetadata { + for key, val := range base.extraMetadata { result[key] = val } // Original metadata take precedence - for key, val := range op.Metadata { + for key, val := range base.Metadata { result[key] = val } return result } -// GetAuthor return author identity -func (op *OpBase) GetAuthor() identity.Interface { - return op.Author +func (base *OpBase) setExtraMetadataImmutable(key string, value string) { + if base.extraMetadata == nil { + base.extraMetadata = make(map[string]string) + } + if _, exist := 
base.extraMetadata[key]; !exist { + base.extraMetadata[key] = value + } +} + +// Author return author identity +func (base *OpBase) Author() identity.Interface { + return base.Author_ } diff --git a/migration3/after/bug/operation_iterator.go b/migration3/after/bug/operation_iterator.go deleted file mode 100644 index f42b177..0000000 --- a/migration3/after/bug/operation_iterator.go +++ /dev/null @@ -1,72 +0,0 @@ -package bug - -type OperationIterator struct { - bug *Bug - packIndex int - opIndex int -} - -func NewOperationIterator(bug Interface) *OperationIterator { - return &OperationIterator{ - bug: bugFromInterface(bug), - packIndex: 0, - opIndex: -1, - } -} - -func (it *OperationIterator) Next() bool { - // Special case of the staging area - if it.packIndex == len(it.bug.packs) { - pack := it.bug.staging - it.opIndex++ - return it.opIndex < len(pack.Operations) - } - - if it.packIndex >= len(it.bug.packs) { - return false - } - - pack := it.bug.packs[it.packIndex] - - it.opIndex++ - - if it.opIndex < len(pack.Operations) { - return true - } - - // Note: this iterator doesn't handle the empty pack case - it.opIndex = 0 - it.packIndex++ - - // Special case of the non-empty staging area - if it.packIndex == len(it.bug.packs) && len(it.bug.staging.Operations) > 0 { - return true - } - - return it.packIndex < len(it.bug.packs) -} - -func (it *OperationIterator) Value() Operation { - // Special case of the staging area - if it.packIndex == len(it.bug.packs) { - pack := it.bug.staging - - if it.opIndex >= len(pack.Operations) { - panic("Iterator is not valid anymore") - } - - return pack.Operations[it.opIndex] - } - - if it.packIndex >= len(it.bug.packs) { - panic("Iterator is not valid anymore") - } - - pack := it.bug.packs[it.packIndex] - - if it.opIndex >= len(pack.Operations) { - panic("Iterator is not valid anymore") - } - - return pack.Operations[it.opIndex] -} diff --git a/migration3/after/bug/operation_pack.go b/migration3/after/bug/operation_pack.go deleted 
file mode 100644 index cf9a5d4..0000000 --- a/migration3/after/bug/operation_pack.go +++ /dev/null @@ -1,186 +0,0 @@ -package bug - -import ( - "encoding/json" - "fmt" - - "github.com/pkg/errors" - - "github.com/MichaelMure/git-bug-migration/migration3/after/repository" -) - -// 1: original format -// 2: no more legacy identities -// 3: Ids are generated from the create operation serialized data instead of from the first git commit -const formatVersion = 3 - -// OperationPack represent an ordered set of operation to apply -// to a Bug. These operations are stored in a single Git commit. -// -// These commits will be linked together in a linear chain of commits -// inside Git to form the complete ordered chain of operation to -// apply to get the final state of the Bug -type OperationPack struct { - Operations []Operation - - // Private field so not serialized - commitHash repository.Hash -} - -func (opp *OperationPack) MarshalJSON() ([]byte, error) { - return json.Marshal(struct { - Version uint `json:"version"` - Operations []Operation `json:"ops"` - }{ - Version: formatVersion, - Operations: opp.Operations, - }) -} - -func (opp *OperationPack) UnmarshalJSON(data []byte) error { - aux := struct { - Version uint `json:"version"` - Operations []json.RawMessage `json:"ops"` - }{} - - if err := json.Unmarshal(data, &aux); err != nil { - return err - } - - if aux.Version < formatVersion { - return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade") - } - if aux.Version > formatVersion { - return fmt.Errorf("your version of git-bug is too old for this repository (bug format %v), please upgrade to the latest version", aux.Version) - } - - for _, raw := range aux.Operations { - var t struct { - OperationType OperationType `json:"type"` - } - - if err := json.Unmarshal(raw, &t); err != nil { - return err - } - - // delegate to specialized unmarshal function - op, err := opp.unmarshalOp(raw, t.OperationType) - if 
err != nil { - return err - } - - opp.Operations = append(opp.Operations, op) - } - - return nil -} - -func (opp *OperationPack) unmarshalOp(raw []byte, _type OperationType) (Operation, error) { - switch _type { - case AddCommentOp: - op := &AddCommentOperation{} - err := json.Unmarshal(raw, &op) - return op, err - case CreateOp: - op := &CreateOperation{} - err := json.Unmarshal(raw, &op) - return op, err - case EditCommentOp: - op := &EditCommentOperation{} - err := json.Unmarshal(raw, &op) - return op, err - case LabelChangeOp: - op := &LabelChangeOperation{} - err := json.Unmarshal(raw, &op) - return op, err - case NoOpOp: - op := &NoOpOperation{} - err := json.Unmarshal(raw, &op) - return op, err - case SetMetadataOp: - op := &SetMetadataOperation{} - err := json.Unmarshal(raw, &op) - return op, err - case SetStatusOp: - op := &SetStatusOperation{} - err := json.Unmarshal(raw, &op) - return op, err - case SetTitleOp: - op := &SetTitleOperation{} - err := json.Unmarshal(raw, &op) - return op, err - default: - return nil, fmt.Errorf("unknown operation type %v", _type) - } -} - -// Append a new operation to the pack -func (opp *OperationPack) Append(op Operation) { - opp.Operations = append(opp.Operations, op) -} - -// IsEmpty tell if the OperationPack is empty -func (opp *OperationPack) IsEmpty() bool { - return len(opp.Operations) == 0 -} - -// IsValid tell if the OperationPack is considered valid -func (opp *OperationPack) Validate() error { - if opp.IsEmpty() { - return fmt.Errorf("empty") - } - - for _, op := range opp.Operations { - if err := op.Validate(); err != nil { - return errors.Wrap(err, "op") - } - } - - return nil -} - -// Write will serialize and store the OperationPack as a git blob and return -// its hash -func (opp *OperationPack) Write(repo repository.ClockedRepo) (repository.Hash, error) { - // make sure we don't write invalid data - err := opp.Validate() - if err != nil { - return "", errors.Wrap(err, "validation error") - } - - // First, 
make sure that all the identities are properly Commit as well - // TODO: this might be downgraded to "make sure it exist in git" but then, what make - // sure no data is lost on identities ? - for _, op := range opp.Operations { - if op.base().Author.NeedCommit() { - return "", fmt.Errorf("identity need commmit") - } - } - - data, err := json.Marshal(opp) - if err != nil { - return "", err - } - - hash, err := repo.StoreData(data) - if err != nil { - return "", err - } - - return hash, nil -} - -// Make a deep copy -func (opp *OperationPack) Clone() OperationPack { - - clone := OperationPack{ - Operations: make([]Operation, len(opp.Operations)), - commitHash: opp.commitHash, - } - - for i, op := range opp.Operations { - clone.Operations[i] = op - } - - return clone -} diff --git a/migration3/after/bug/snapshot.go b/migration3/after/bug/snapshot.go index 16f4362..c514f01 100644 --- a/migration3/after/bug/snapshot.go +++ b/migration3/after/bug/snapshot.go @@ -28,6 +28,11 @@ type Snapshot struct { // Return the Bug identifier func (snap *Snapshot) Id() entity.Id { + if snap.id == "" { + // simply panic as it would be a coding error + // (using an id of a bug not stored yet) + panic("no id yet") + } return snap.id } diff --git a/migration3/after/bug/sorting.go b/migration3/after/bug/sorting.go index d1c370d..2e64b92 100644 --- a/migration3/after/bug/sorting.go +++ b/migration3/after/bug/sorting.go @@ -7,11 +7,11 @@ func (b BugsByCreationTime) Len() int { } func (b BugsByCreationTime) Less(i, j int) bool { - if b[i].createTime < b[j].createTime { + if b[i].CreateLamportTime() < b[j].CreateLamportTime() { return true } - if b[i].createTime > b[j].createTime { + if b[i].CreateLamportTime() > b[j].CreateLamportTime() { return false } @@ -35,11 +35,11 @@ func (b BugsByEditTime) Len() int { } func (b BugsByEditTime) Less(i, j int) bool { - if b[i].editTime < b[j].editTime { + if b[i].EditLamportTime() < b[j].EditLamportTime() { return true } - if b[i].editTime > 
b[j].editTime { + if b[i].EditLamportTime() > b[j].EditLamportTime() { return false } diff --git a/migration3/after/bug/with_snapshot.go b/migration3/after/bug/with_snapshot.go index 17eb089..19f1991 100644 --- a/migration3/after/bug/with_snapshot.go +++ b/migration3/after/bug/with_snapshot.go @@ -47,12 +47,6 @@ func (b *WithSnapshot) Commit(repo repository.ClockedRepo) error { return nil } - b.snap.id = b.Bug.id + b.snap.id = b.Bug.Id() return nil } - -// Merge intercept Bug.Merge() and clear the snapshot -func (b *WithSnapshot) Merge(repo repository.Repo, other Interface) (bool, error) { - b.snap = nil - return b.Bug.Merge(repo, other) -} diff --git a/migration3/after/entity/dag/clock.go b/migration3/after/entity/dag/clock.go new file mode 100644 index 0000000..18f0051 --- /dev/null +++ b/migration3/after/entity/dag/clock.go @@ -0,0 +1,38 @@ +package dag + +import ( + "fmt" + + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +// ClockLoader is the repository.ClockLoader for Entity +func ClockLoader(defs ...Definition) repository.ClockLoader { + clocks := make([]string, 0, len(defs)*2) + for _, def := range defs { + clocks = append(clocks, fmt.Sprintf(creationClockPattern, def.Namespace)) + clocks = append(clocks, fmt.Sprintf(editClockPattern, def.Namespace)) + } + + return repository.ClockLoader{ + Clocks: clocks, + Witnesser: func(repo repository.ClockedRepo) error { + // we need to actually load the identities because of the commit signature check when reading, + // which require the full identities with crypto keys + resolver := identity.NewCachedResolver(identity.NewSimpleResolver(repo)) + + for _, def := range defs { + // we actually just need to read all entities, + // as that will create and update the clocks + // TODO: concurrent loading to be faster? 
+ for b := range ReadAll(def, repo, resolver) { + if b.Err != nil { + return b.Err + } + } + } + return nil + }, + } +} diff --git a/migration3/after/entity/dag/entity.go b/migration3/after/entity/dag/entity.go new file mode 100644 index 0000000..b9edb3d --- /dev/null +++ b/migration3/after/entity/dag/entity.go @@ -0,0 +1,439 @@ +// Package dag contains the base common code to define an entity stored +// in a chain of git objects, supporting actions like Push, Pull and Merge. +package dag + +import ( + "encoding/json" + "fmt" + "sort" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +const refsPattern = "refs/%s/%s" +const creationClockPattern = "%s-create" +const editClockPattern = "%s-edit" + +// Definition hold the details defining one specialization of an Entity. +type Definition struct { + // the name of the entity (bug, pull-request, ...) + Typename string + // the Namespace in git (bugs, prs, ...) + Namespace string + // a function decoding a JSON message into an Operation + OperationUnmarshaler func(author identity.Interface, raw json.RawMessage) (Operation, error) + // the expected format version number, that can be used for data migration/upgrade + FormatVersion uint +} + +// Entity is a data structure stored in a chain of git objects, supporting actions like Push, Pull and Merge. +type Entity struct { + // A Lamport clock is a logical clock that allow to order event + // inside a distributed system. 
+ // It must be the first field in this struct due to https://github.com/golang/go/issues/36606 + createTime lamport.Time + editTime lamport.Time + + Definition + + // operations that are already stored in the repository + ops []Operation + // operations not yet stored in the repository + staging []Operation + + lastCommit repository.Hash +} + +// New create an empty Entity +func New(definition Definition) *Entity { + return &Entity{ + Definition: definition, + } +} + +// Read will read and decode a stored local Entity from a repository +func Read(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, id entity.Id) (*Entity, error) { + if err := id.Validate(); err != nil { + return nil, errors.Wrap(err, "invalid id") + } + + ref := fmt.Sprintf("refs/%s/%s", def.Namespace, id.String()) + + return read(def, repo, resolver, ref) +} + +// readRemote will read and decode a stored remote Entity from a repository +func readRemote(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, remote string, id entity.Id) (*Entity, error) { + if err := id.Validate(); err != nil { + return nil, errors.Wrap(err, "invalid id") + } + + ref := fmt.Sprintf("refs/remotes/%s/%s/%s", def.Namespace, remote, id.String()) + + return read(def, repo, resolver, ref) +} + +// read fetch from git and decode an Entity at an arbitrary git reference. 
+func read(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, ref string) (*Entity, error) { + rootHash, err := repo.ResolveRef(ref) + if err != nil { + return nil, err + } + + // Perform a breadth-first search to get a topological order of the DAG where we discover the + // parents commit and go back in time up to the chronological root + + queue := make([]repository.Hash, 0, 32) + visited := make(map[repository.Hash]struct{}) + BFSOrder := make([]repository.Commit, 0, 32) + + queue = append(queue, rootHash) + visited[rootHash] = struct{}{} + + for len(queue) > 0 { + // pop + hash := queue[0] + queue = queue[1:] + + commit, err := repo.ReadCommit(hash) + if err != nil { + return nil, err + } + + BFSOrder = append(BFSOrder, commit) + + for _, parent := range commit.Parents { + if _, ok := visited[parent]; !ok { + queue = append(queue, parent) + // mark as visited + visited[parent] = struct{}{} + } + } + } + + // Now, we can reverse this topological order and read the commits in an order where + // we are sure to have read all the chronological ancestors when we read a commit. + + // Next step is to: + // 1) read the operationPacks + // 2) make sure that the clocks causality respect the DAG topology. + + oppMap := make(map[repository.Hash]*operationPack) + var opsCount int + + for i := len(BFSOrder) - 1; i >= 0; i-- { + commit := BFSOrder[i] + isFirstCommit := i == len(BFSOrder)-1 + isMerge := len(commit.Parents) > 1 + + // Verify DAG structure: single chronological root, so only the root + // can have no parents. Said otherwise, the DAG need to have exactly + // one leaf. 
+ if !isFirstCommit && len(commit.Parents) == 0 { + return nil, fmt.Errorf("multiple leafs in the entity DAG") + } + + opp, err := readOperationPack(def, repo, resolver, commit) + if err != nil { + return nil, err + } + + err = opp.Validate() + if err != nil { + return nil, err + } + + if isMerge && len(opp.Operations) > 0 { + return nil, fmt.Errorf("merge commit cannot have operations") + } + + // Check that the create lamport clock is set (not checked in Validate() as it's optional) + if isFirstCommit && opp.CreateTime <= 0 { + return nil, fmt.Errorf("creation lamport time not set") + } + + // make sure that the lamport clocks causality match the DAG topology + for _, parentHash := range commit.Parents { + parentPack, ok := oppMap[parentHash] + if !ok { + panic("DFS failed") + } + + if parentPack.EditTime >= opp.EditTime { + return nil, fmt.Errorf("lamport clock ordering doesn't match the DAG") + } + + // to avoid an attack where clocks are pushed toward the uint64 rollover, make sure + // that the clocks don't jump too far in the future + // we ignore merge commits here to allow merging after a loooong time without breaking anything, + // as long as there is one valid chain of small hops, it's fine. 
+ if !isMerge && opp.EditTime-parentPack.EditTime > 1_000_000 { + return nil, fmt.Errorf("lamport clock jumping too far in the future, likely an attack") + } + } + + oppMap[commit.Hash] = opp + opsCount += len(opp.Operations) + } + + // The clocks are fine, we witness them + for _, opp := range oppMap { + err = repo.Witness(fmt.Sprintf(creationClockPattern, def.Namespace), opp.CreateTime) + if err != nil { + return nil, err + } + err = repo.Witness(fmt.Sprintf(editClockPattern, def.Namespace), opp.EditTime) + if err != nil { + return nil, err + } + } + + // Now that we know that the topological order and clocks are fine, we order the operationPacks + // based on the logical clocks, entirely ignoring the DAG topology + + oppSlice := make([]*operationPack, 0, len(oppMap)) + for _, pack := range oppMap { + oppSlice = append(oppSlice, pack) + } + sort.Slice(oppSlice, func(i, j int) bool { + // Primary ordering with the EditTime. + if oppSlice[i].EditTime != oppSlice[j].EditTime { + return oppSlice[i].EditTime < oppSlice[j].EditTime + } + // We have equal EditTime, which means we have concurrent edition over different machines and we + // can't tell which one came first. So, what now? We still need a total ordering and the most stable possible. + // As a secondary ordering, we can order based on a hash of the serialized Operations in the + // operationPack. It doesn't carry much meaning but it's unbiased and hard to abuse. + // This is a lexicographic ordering on the stringified ID. 
+ return oppSlice[i].Id() < oppSlice[j].Id() + }) + + // Now that we ordered the operationPacks, we have the order of the Operations + + ops := make([]Operation, 0, opsCount) + var createTime lamport.Time + var editTime lamport.Time + for _, pack := range oppSlice { + for _, operation := range pack.Operations { + ops = append(ops, operation) + } + if pack.CreateTime > createTime { + createTime = pack.CreateTime + } + if pack.EditTime > editTime { + editTime = pack.EditTime + } + } + + return &Entity{ + Definition: def, + ops: ops, + lastCommit: rootHash, + createTime: createTime, + editTime: editTime, + }, nil +} + +type StreamedEntity struct { + Entity *Entity + Err error +} + +// ReadAll read and parse all local Entity +func ReadAll(def Definition, repo repository.ClockedRepo, resolver identity.Resolver) <-chan StreamedEntity { + out := make(chan StreamedEntity) + + go func() { + defer close(out) + + refPrefix := fmt.Sprintf("refs/%s/", def.Namespace) + + refs, err := repo.ListRefs(refPrefix) + if err != nil { + out <- StreamedEntity{Err: err} + return + } + + for _, ref := range refs { + e, err := read(def, repo, resolver, ref) + + if err != nil { + out <- StreamedEntity{Err: err} + return + } + + out <- StreamedEntity{Entity: e} + } + }() + + return out +} + +// Id return the Entity identifier +func (e *Entity) Id() entity.Id { + // id is the id of the first operation + return e.FirstOp().Id() +} + +// Validate check if the Entity data is valid +func (e *Entity) Validate() error { + // non-empty + if len(e.ops) == 0 && len(e.staging) == 0 { + return fmt.Errorf("entity has no operations") + } + + // check if each operations are valid + for _, op := range e.ops { + if err := op.Validate(); err != nil { + return err + } + } + + // check if staging is valid if needed + for _, op := range e.staging { + if err := op.Validate(); err != nil { + return err + } + } + + // Check that there is no colliding operation's ID + ids := make(map[entity.Id]struct{}) + for _, op := 
range e.Operations() { + if _, ok := ids[op.Id()]; ok { + return fmt.Errorf("id collision: %s", op.Id()) + } + ids[op.Id()] = struct{}{} + } + + return nil +} + +// Operations return the ordered operations +func (e *Entity) Operations() []Operation { + return append(e.ops, e.staging...) +} + +// FirstOp lookup for the very first operation of the Entity +func (e *Entity) FirstOp() Operation { + for _, op := range e.ops { + return op + } + for _, op := range e.staging { + return op + } + return nil +} + +// LastOp lookup for the very last operation of the Entity +func (e *Entity) LastOp() Operation { + if len(e.staging) > 0 { + return e.staging[len(e.staging)-1] + } + if len(e.ops) > 0 { + return e.ops[len(e.ops)-1] + } + return nil +} + +// Append add a new Operation to the Entity +func (e *Entity) Append(op Operation) { + e.staging = append(e.staging, op) +} + +// NeedCommit indicate if the in-memory state changed and need to be commit in the repository +func (e *Entity) NeedCommit() bool { + return len(e.staging) > 0 +} + +// CommitAsNeeded execute a Commit only if necessary. This function is useful to avoid getting an error if the Entity +// is already in sync with the repository. 
+func (e *Entity) CommitAsNeeded(repo repository.ClockedRepo) error { + if e.NeedCommit() { + return e.Commit(repo) + } + return nil +} + +// Commit write the appended operations in the repository +func (e *Entity) Commit(repo repository.ClockedRepo) error { + if !e.NeedCommit() { + return fmt.Errorf("can't commit an entity with no pending operation") + } + + err := e.Validate() + if err != nil { + return errors.Wrapf(err, "can't commit a %s with invalid data", e.Definition.Typename) + } + + for len(e.staging) > 0 { + var author identity.Interface + var toCommit []Operation + + // Split into chunks with the same author + for len(e.staging) > 0 { + op := e.staging[0] + if author != nil && op.Author().Id() != author.Id() { + break + } + author = e.staging[0].Author() + toCommit = append(toCommit, op) + e.staging = e.staging[1:] + } + + e.editTime, err = repo.Increment(fmt.Sprintf(editClockPattern, e.Namespace)) + if err != nil { + return err + } + + opp := &operationPack{ + Author: author, + Operations: toCommit, + EditTime: e.editTime, + } + + if e.lastCommit == "" { + e.createTime, err = repo.Increment(fmt.Sprintf(creationClockPattern, e.Namespace)) + if err != nil { + return err + } + opp.CreateTime = e.createTime + } + + var parentCommit []repository.Hash + if e.lastCommit != "" { + parentCommit = []repository.Hash{e.lastCommit} + } + + commitHash, err := opp.Write(e.Definition, repo, parentCommit...) + if err != nil { + return err + } + + e.lastCommit = commitHash + e.ops = append(e.ops, toCommit...) + } + + // not strictly necessary but make equality testing easier in tests + e.staging = nil + + // Create or update the Git reference for this entity + // When pushing later, the remote will ensure that this ref update + // is fast-forward, that is no data has been overwritten. 
+ ref := fmt.Sprintf(refsPattern, e.Namespace, e.Id().String()) + return repo.UpdateRef(ref, e.lastCommit) +} + +// CreateLamportTime return the Lamport time of creation +func (e *Entity) CreateLamportTime() lamport.Time { + return e.createTime +} + +// EditLamportTime return the Lamport time of the last edition +func (e *Entity) EditLamportTime() lamport.Time { + return e.editTime +} diff --git a/migration3/after/entity/dag/entity_actions.go b/migration3/after/entity/dag/entity_actions.go new file mode 100644 index 0000000..9777212 --- /dev/null +++ b/migration3/after/entity/dag/entity_actions.go @@ -0,0 +1,260 @@ +package dag + +import ( + "fmt" + + "github.com/pkg/errors" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +// ListLocalIds list all the available local Entity's Id +func ListLocalIds(def Definition, repo repository.RepoData) ([]entity.Id, error) { + refs, err := repo.ListRefs(fmt.Sprintf("refs/%s/", def.Namespace)) + if err != nil { + return nil, err + } + return entity.RefsToIds(refs), nil +} + +// Fetch retrieve updates from a remote +// This does not change the local entity state +func Fetch(def Definition, repo repository.Repo, remote string) (string, error) { + return repo.FetchRefs(remote, def.Namespace) +} + +// Push update a remote with the local changes +func Push(def Definition, repo repository.Repo, remote string) (string, error) { + return repo.PushRefs(remote, def.Namespace) +} + +// Pull will do a Fetch + MergeAll +// Contrary to MergeAll, this function will return an error if a merge fail. 
+func Pull(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, remote string, author identity.Interface) error { + _, err := Fetch(def, repo, remote) + if err != nil { + return err + } + + for merge := range MergeAll(def, repo, resolver, remote, author) { + if merge.Err != nil { + return merge.Err + } + if merge.Status == entity.MergeStatusInvalid { + return errors.Errorf("merge failure: %s", merge.Reason) + } + } + + return nil +} + +// MergeAll will merge all the available remote Entity: +// +// Multiple scenario exist: +// 1. if the remote Entity doesn't exist locally, it's created +// --> emit entity.MergeStatusNew +// 2. if the remote and local Entity have the same state, nothing is changed +// --> emit entity.MergeStatusNothing +// 3. if the local Entity has new commits but the remote don't, nothing is changed +// --> emit entity.MergeStatusNothing +// 4. if the remote has new commit, the local bug is updated to match the same history +// (fast-forward update) +// --> emit entity.MergeStatusUpdated +// 5. if both local and remote Entity have new commits (that is, we have a concurrent edition), +// a merge commit with an empty operationPack is created to join both branch and form a DAG. +// --> emit entity.MergeStatusUpdated +// +// Note: an author is necessary for the case where a merge commit is created, as this commit will +// have an author and may be signed if a signing key is available. 
+func MergeAll(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, remote string, author identity.Interface) <-chan entity.MergeResult { + out := make(chan entity.MergeResult) + + go func() { + defer close(out) + + remoteRefSpec := fmt.Sprintf("refs/remotes/%s/%s/", remote, def.Namespace) + remoteRefs, err := repo.ListRefs(remoteRefSpec) + if err != nil { + out <- entity.MergeResult{Err: err} + return + } + + for _, remoteRef := range remoteRefs { + out <- merge(def, repo, resolver, remoteRef, author) + } + }() + + return out +} + +// merge perform a merge to make sure a local Entity is up to date. +// See MergeAll for more details. +func merge(def Definition, repo repository.ClockedRepo, resolver identity.Resolver, remoteRef string, author identity.Interface) entity.MergeResult { + id := entity.RefToId(remoteRef) + + if err := id.Validate(); err != nil { + return entity.NewMergeInvalidStatus(id, errors.Wrap(err, "invalid ref").Error()) + } + + remoteEntity, err := read(def, repo, resolver, remoteRef) + if err != nil { + return entity.NewMergeInvalidStatus(id, + errors.Wrapf(err, "remote %s is not readable", def.Typename).Error()) + } + + // Check for error in remote data + if err := remoteEntity.Validate(); err != nil { + return entity.NewMergeInvalidStatus(id, + errors.Wrapf(err, "remote %s data is invalid", def.Typename).Error()) + } + + localRef := fmt.Sprintf("refs/%s/%s", def.Namespace, id.String()) + + // SCENARIO 1 + // if the remote Entity doesn't exist locally, it's created + + localExist, err := repo.RefExist(localRef) + if err != nil { + return entity.NewMergeError(err, id) + } + + if !localExist { + // the bug is not local yet, simply create the reference + err := repo.CopyRef(remoteRef, localRef) + if err != nil { + return entity.NewMergeError(err, id) + } + + return entity.NewMergeNewStatus(id, remoteEntity) + } + + localCommit, err := repo.ResolveRef(localRef) + if err != nil { + return entity.NewMergeError(err, id) + } + + 
remoteCommit, err := repo.ResolveRef(remoteRef) + if err != nil { + return entity.NewMergeError(err, id) + } + + // SCENARIO 2 + // if the remote and local Entity have the same state, nothing is changed + + if localCommit == remoteCommit { + // nothing to merge + return entity.NewMergeNothingStatus(id) + } + + // SCENARIO 3 + // if the local Entity has new commits but the remote don't, nothing is changed + + localCommits, err := repo.ListCommits(localRef) + if err != nil { + return entity.NewMergeError(err, id) + } + + for _, hash := range localCommits { + if hash == remoteCommit { + return entity.NewMergeNothingStatus(id) + } + } + + // SCENARIO 4 + // if the remote has new commit, the local bug is updated to match the same history + // (fast-forward update) + + remoteCommits, err := repo.ListCommits(remoteRef) + if err != nil { + return entity.NewMergeError(err, id) + } + + // fast-forward is possible if otherRef include ref + fastForwardPossible := false + for _, hash := range remoteCommits { + if hash == localCommit { + fastForwardPossible = true + break + } + } + + if fastForwardPossible { + err = repo.UpdateRef(localRef, remoteCommit) + if err != nil { + return entity.NewMergeError(err, id) + } + return entity.NewMergeUpdatedStatus(id, remoteEntity) + } + + // SCENARIO 5 + // if both local and remote Entity have new commits (that is, we have a concurrent edition), + // a merge commit with an empty operationPack is created to join both branch and form a DAG. + + // fast-forward is not possible, we need to create a merge commit + // For simplicity when reading and to have clocks that record this change, we store + // an empty operationPack. + // First step is to collect those clocks. 
+ + localEntity, err := read(def, repo, resolver, localRef) + if err != nil { + return entity.NewMergeError(err, id) + } + + editTime, err := repo.Increment(fmt.Sprintf(editClockPattern, def.Namespace)) + if err != nil { + return entity.NewMergeError(err, id) + } + + opp := &operationPack{ + Author: author, + Operations: nil, + CreateTime: 0, + EditTime: editTime, + } + + commitHash, err := opp.Write(def, repo, localCommit, remoteCommit) + if err != nil { + return entity.NewMergeError(err, id) + } + + // finally update the ref + err = repo.UpdateRef(localRef, commitHash) + if err != nil { + return entity.NewMergeError(err, id) + } + + // Note: we don't need to update localEntity state (lastCommit, operations...) as we + // discard it entirely anyway. + + return entity.NewMergeUpdatedStatus(id, localEntity) +} + +// Remove delete an Entity. +// Remove is idempotent. +func Remove(def Definition, repo repository.ClockedRepo, id entity.Id) error { + var matches []string + + ref := fmt.Sprintf("refs/%s/%s", def.Namespace, id.String()) + matches = append(matches, ref) + + remotes, err := repo.GetRemotes() + if err != nil { + return err + } + + for remote := range remotes { + ref = fmt.Sprintf("refs/remotes/%s/%s/%s", remote, def.Namespace, id.String()) + matches = append(matches, ref) + } + + for _, ref = range matches { + err = repo.RemoveRef(ref) + if err != nil { + return err + } + } + + return nil +} diff --git a/migration3/after/entity/dag/operation.go b/migration3/after/entity/dag/operation.go new file mode 100644 index 0000000..c8852ca --- /dev/null +++ b/migration3/after/entity/dag/operation.go @@ -0,0 +1,48 @@ +package dag + +import ( + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +// Operation is a piece of data defining a change to reflect on the state of an Entity. 
+// What this Operation or Entity's state looks like is not of the resort of this package as it only deals with the +// data structure and storage. +type Operation interface { + // Id return the Operation identifier + // + // Some care need to be taken to define a correct Id derivation and enough entropy in the data used to avoid + // collisions. Notably: + // - the Id of the first Operation will be used as the Id of the Entity. Collision need to be avoided across entities + // of the same type (example: no collision within the "bug" namespace). + // - collisions can also happen within the set of Operations of an Entity. Simple Operation might not have enough + // entropy to yield unique Ids (example: two "close" operation within the same second, same author). + // If this is a concern, it is recommended to include a piece of random data in the operation's data, to guarantee + // a minimal amount of entropy and avoid collision. + // + // Author's note: I tried to find a clever way around that inelegance (stuffing random useless data into the stored + // structure is not exactly elegant) but I failed to find a proper way. Essentially, anything that would reuse some + // other data (parent operation's Id, lamport clock) or the graph structure (depth) impose that the Id would only + // make sense in the context of the graph and yield some deep coupling between Entity and Operation. This in turn + // make the whole thing even less elegant. + // + // A common way to derive an Id will be to use the entity.DeriveId() function on the serialized operation data. + Id() entity.Id + // Validate check if the Operation data is valid + Validate() error + // Author returns the author of this operation + Author() identity.Interface +} + +// OperationWithFiles is an extended Operation that has files dependency, stored in git. 
+type OperationWithFiles interface { + Operation + + // GetFiles return the files needed by this operation + // This implies that the Operation maintain and store internally the references to those files. This is how + // this information is read later, when loading from storage. + // For example, an operation that has a text value referencing some files would maintain a mapping (text ref --> + // hash). + GetFiles() []repository.Hash +} diff --git a/migration3/after/entity/dag/operation_pack.go b/migration3/after/entity/dag/operation_pack.go new file mode 100644 index 0000000..dafded2 --- /dev/null +++ b/migration3/after/entity/dag/operation_pack.go @@ -0,0 +1,358 @@ +package dag + +import ( + "encoding/json" + "fmt" + "strconv" + "strings" + + "github.com/pkg/errors" + "golang.org/x/crypto/openpgp" + + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/identity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" + "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" +) + +const opsEntryName = "ops" +const extraEntryName = "extra" +const versionEntryPrefix = "version-" +const createClockEntryPrefix = "create-clock-" +const editClockEntryPrefix = "edit-clock-" + +// operationPack is a wrapper structure to store multiple operations in a single git blob. +// Additionally, it holds and store the metadata for those operations. +type operationPack struct { + // An identifier, taken from a hash of the serialized Operations. + id entity.Id + + // The author of the Operations. Must be the same author for all the Operations. + Author identity.Interface + // The list of Operation stored in the operationPack + Operations []Operation + // Encode the entity's logical time of creation across all entities of the same type. 
+ // Only exist on the root operationPack + CreateTime lamport.Time + // Encode the entity's logical time of last edition across all entities of the same type. + // Exist on all operationPack + EditTime lamport.Time +} + +func (opp *operationPack) Id() entity.Id { + if opp.id == "" || opp.id == entity.UnsetId { + // This means we are trying to get the opp's Id *before* it has been stored. + // As the Id is computed based on the actual bytes written on the disk, we are going to predict + // those and then get the Id. This is safe as it will be the exact same code writing on disk later. + + data, err := json.Marshal(opp) + if err != nil { + panic(err) + } + opp.id = entity.DeriveId(data) + } + + return opp.id +} + +func (opp *operationPack) MarshalJSON() ([]byte, error) { + return json.Marshal(struct { + Author identity.Interface `json:"author"` + Operations []Operation `json:"ops"` + }{ + Author: opp.Author, + Operations: opp.Operations, + }) +} + +func (opp *operationPack) Validate() error { + if opp.Author == nil { + return fmt.Errorf("missing author") + } + for _, op := range opp.Operations { + if op.Author().Id() != opp.Author.Id() { + return fmt.Errorf("operation has different author than the operationPack's") + } + } + if opp.EditTime == 0 { + return fmt.Errorf("lamport edit time is zero") + } + return nil +} + +// Write write the OperationPack in git, with zero, one or more parent commits. +// If the repository has a keypair able to sign (that is, with a private key), the resulting commit is signed with that key. +// Return the hash of the created commit. +func (opp *operationPack) Write(def Definition, repo repository.Repo, parentCommit ...repository.Hash) (repository.Hash, error) { + if err := opp.Validate(); err != nil { + return "", err + } + + // For different reason, we store the clocks and format version directly in the git tree. + // Version has to be accessible before any attempt to decode to return early with a unique error. 
+ // Clocks could possibly be stored in the git blob but it's nice to separate data and metadata, and + // we are storing something directly in the tree already so why not. + // + // To have a valid Tree, we point the "fake" entries to always the same value, the empty blob. + emptyBlobHash, err := repo.StoreData([]byte{}) + if err != nil { + return "", err + } + + // Write the Ops as a Git blob containing the serialized array of operations + data, err := json.Marshal(opp) + if err != nil { + return "", err + } + + // compute the Id while we have the serialized data + opp.id = entity.DeriveId(data) + + hash, err := repo.StoreData(data) + if err != nil { + return "", err + } + + // Make a Git tree referencing this blob and encoding the other values: + // - format version + // - clocks + // - extra data + tree := []repository.TreeEntry{ + {ObjectType: repository.Blob, Hash: emptyBlobHash, + Name: fmt.Sprintf(versionEntryPrefix+"%d", def.FormatVersion)}, + {ObjectType: repository.Blob, Hash: hash, + Name: opsEntryName}, + {ObjectType: repository.Blob, Hash: emptyBlobHash, + Name: fmt.Sprintf(editClockEntryPrefix+"%d", opp.EditTime)}, + } + if opp.CreateTime > 0 { + tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Blob, + Hash: emptyBlobHash, + Name: fmt.Sprintf(createClockEntryPrefix+"%d", opp.CreateTime), + }) + } + if extraTree := opp.makeExtraTree(); len(extraTree) > 0 { + extraTreeHash, err := repo.StoreTree(extraTree) + if err != nil { + return "", err + } + tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Tree, + Hash: extraTreeHash, + Name: extraEntryName, + }) + } + + // Store the tree + treeHash, err := repo.StoreTree(tree) + if err != nil { + return "", err + } + + // Write a Git commit referencing the tree, with the previous commit as parent + // If we have keys, sign. 
+ var commitHash repository.Hash + + // Sign the commit if we have a key + signingKey, err := opp.Author.SigningKey(repo) + if err != nil { + return "", err + } + + if signingKey != nil { + commitHash, err = repo.StoreSignedCommit(treeHash, signingKey.PGPEntity(), parentCommit...) + } else { + commitHash, err = repo.StoreCommit(treeHash, parentCommit...) + } + + if err != nil { + return "", err + } + + return commitHash, nil +} + +func (opp *operationPack) makeExtraTree() []repository.TreeEntry { + var tree []repository.TreeEntry + counter := 0 + added := make(map[repository.Hash]interface{}) + + for _, ops := range opp.Operations { + ops, ok := ops.(OperationWithFiles) + if !ok { + continue + } + + for _, file := range ops.GetFiles() { + if _, has := added[file]; !has { + tree = append(tree, repository.TreeEntry{ + ObjectType: repository.Blob, + Hash: file, + // The name is not important here, we only need to + // reference the blob. + Name: fmt.Sprintf("file%d", counter), + }) + counter++ + added[file] = struct{}{} + } + } + } + + return tree +} + +// readOperationPack read the operationPack encoded in git at the given Tree hash. +// +// Validity of the Lamport clocks is left for the caller to decide. 
+func readOperationPack(def Definition, repo repository.RepoData, resolver identity.Resolver, commit repository.Commit) (*operationPack, error) { + entries, err := repo.ReadTree(commit.TreeHash) + if err != nil { + return nil, err + } + + // check the format version first, fail early instead of trying to read something + var version uint + for _, entry := range entries { + if strings.HasPrefix(entry.Name, versionEntryPrefix) { + v, err := strconv.ParseUint(strings.TrimPrefix(entry.Name, versionEntryPrefix), 10, 64) + if err != nil { + return nil, errors.Wrap(err, "can't read format version") + } + if v > 1<<12 { + return nil, fmt.Errorf("format version too big") + } + version = uint(v) + break + } + } + if version == 0 { + return nil, entity.NewErrUnknownFormat(def.FormatVersion) + } + if version != def.FormatVersion { + return nil, entity.NewErrInvalidFormat(version, def.FormatVersion) + } + + var id entity.Id + var author identity.Interface + var ops []Operation + var createTime lamport.Time + var editTime lamport.Time + + for _, entry := range entries { + switch { + case entry.Name == opsEntryName: + data, err := repo.ReadData(entry.Hash) + if err != nil { + return nil, errors.Wrap(err, "failed to read git blob data") + } + ops, author, err = unmarshallPack(def, resolver, data) + if err != nil { + return nil, err + } + id = entity.DeriveId(data) + + case strings.HasPrefix(entry.Name, createClockEntryPrefix): + v, err := strconv.ParseUint(strings.TrimPrefix(entry.Name, createClockEntryPrefix), 10, 64) + if err != nil { + return nil, errors.Wrap(err, "can't read creation lamport time") + } + createTime = lamport.Time(v) + + case strings.HasPrefix(entry.Name, editClockEntryPrefix): + v, err := strconv.ParseUint(strings.TrimPrefix(entry.Name, editClockEntryPrefix), 10, 64) + if err != nil { + return nil, errors.Wrap(err, "can't read edit lamport time") + } + editTime = lamport.Time(v) + } + } + + // Verify signature if we expect one + keys := 
author.ValidKeysAtTime(fmt.Sprintf(editClockPattern, def.Namespace), editTime) + if len(keys) > 0 { + keyring := PGPKeyring(keys) + _, err = openpgp.CheckDetachedSignature(keyring, commit.SignedData, commit.Signature) + if err != nil { + return nil, fmt.Errorf("signature failure: %v", err) + } + } + + return &operationPack{ + id: id, + Author: author, + Operations: ops, + CreateTime: createTime, + EditTime: editTime, + }, nil +} + +// unmarshallPack delegate the unmarshalling of the Operation's JSON to the decoding +// function provided by the concrete entity. This gives access to the concrete type of each +// Operation. +func unmarshallPack(def Definition, resolver identity.Resolver, data []byte) ([]Operation, identity.Interface, error) { + aux := struct { + Author identity.IdentityStub `json:"author"` + Operations []json.RawMessage `json:"ops"` + }{} + + if err := json.Unmarshal(data, &aux); err != nil { + return nil, nil, err + } + + if aux.Author.Id() == "" || aux.Author.Id() == entity.UnsetId { + return nil, nil, fmt.Errorf("missing author") + } + + author, err := resolver.ResolveIdentity(aux.Author.Id()) + if err != nil { + return nil, nil, err + } + + ops := make([]Operation, 0, len(aux.Operations)) + + for _, raw := range aux.Operations { + // delegate to specialized unmarshal function + op, err := def.OperationUnmarshaler(author, raw) + if err != nil { + return nil, nil, err + } + ops = append(ops, op) + } + + return ops, author, nil +} + +var _ openpgp.KeyRing = &PGPKeyring{} + +// PGPKeyring implement a openpgp.KeyRing from an slice of Key +type PGPKeyring []*identity.Key + +func (pk PGPKeyring) KeysById(id uint64) []openpgp.Key { + var result []openpgp.Key + for _, key := range pk { + if key.Public().KeyId == id { + result = append(result, openpgp.Key{ + PublicKey: key.Public(), + PrivateKey: key.Private(), + }) + } + } + return result +} + +func (pk PGPKeyring) KeysByIdUsage(id uint64, requiredUsage byte) []openpgp.Key { + // the only usage we care 
about is the ability to sign, which all keys should already be capable of + return pk.KeysById(id) +} + +func (pk PGPKeyring) DecryptionKeys() []openpgp.Key { + result := make([]openpgp.Key, len(pk)) + for i, key := range pk { + result[i] = openpgp.Key{ + PublicKey: key.Public(), + PrivateKey: key.Private(), + } + } + return result +} diff --git a/migration3/after/entity/doc.go b/migration3/after/entity/doc.go deleted file mode 100644 index 4682d54..0000000 --- a/migration3/after/entity/doc.go +++ /dev/null @@ -1,8 +0,0 @@ -// Package entity contains the base common code to define an entity stored -// in a chain of git objects, supporting actions like Push, Pull and Merge. -package entity - -// TODO: Bug and Identity are very similar, right ? I expect that this package -// will eventually hold the common code to define an entity and the related -// helpers, errors and so on. When this work is done, it will become easier -// to add new entities, for example to support pull requests. diff --git a/migration3/after/entity/err.go b/migration3/after/entity/err.go index 7d6c662..9b0587e 100644 --- a/migration3/after/entity/err.go +++ b/migration3/after/entity/err.go @@ -30,3 +30,32 @@ func IsErrMultipleMatch(err error) bool { _, ok := err.(*ErrMultipleMatch) return ok } + +type ErrInvalidFormat struct { + version uint + expected uint +} + +func NewErrInvalidFormat(version uint, expected uint) *ErrInvalidFormat { + return &ErrInvalidFormat{ + version: version, + expected: expected, + } +} + +func NewErrUnknownFormat(expected uint) *ErrInvalidFormat { + return &ErrInvalidFormat{ + version: 0, + expected: expected, + } +} + +func (e ErrInvalidFormat) Error() string { + if e.version == 0 { + return fmt.Sprintf("unreadable data, you likely have an outdated repository format, please use https://github.com/MichaelMure/git-bug-migration/migration3/after-migration to upgrade to format version %v", e.expected) + } + if e.version < e.expected { + return fmt.Sprintf("outdated 
repository format %v, please use https://github.com/MichaelMure/git-bug-migration/migration3/after-migration to upgrade to format version %v", e.version, e.expected) + } + return fmt.Sprintf("your version of git-bug is too old for this repository (format version %v, expected %v), please upgrade to the latest version", e.version, e.expected) +} diff --git a/migration3/after/entity/id.go b/migration3/after/entity/id.go index 9e72401..a25f330 100644 --- a/migration3/after/entity/id.go +++ b/migration3/after/entity/id.go @@ -18,7 +18,7 @@ const UnsetId = Id("unset") // Id is an identifier for an entity or part of an entity type Id string -// DeriveId generate an Id from some data, taken from a root part of the entity. +// DeriveId generate an Id from the serialization of the object or part of the object. func DeriveId(data []byte) Id { // My understanding is that sha256 is enough to prevent collision (git use that, so ...?) // If you read this code, I'd be happy to be schooled. @@ -65,9 +65,9 @@ func (i Id) MarshalGQL(w io.Writer) { // IsValid tell if the Id is valid func (i Id) Validate() error { - // Special case to + // Special case to detect outdated repo if len(i) == 40 { - return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade") + return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration/migration3/after-migration to upgrade") } if len(i) != idLength { return fmt.Errorf("invalid length") diff --git a/migration3/after/entity/id_interleaved.go b/migration3/after/entity/id_interleaved.go new file mode 100644 index 0000000..5423afe --- /dev/null +++ b/migration3/after/entity/id_interleaved.go @@ -0,0 +1,68 @@ +package entity + +import ( + "strings" +) + +// CombineIds compute a merged Id holding information from both the primary Id +// and the secondary Id. 
+// +// This allow to later find efficiently a secondary element because we can access +// the primary one directly instead of searching for a primary that has a +// secondary matching the Id. +// +// An example usage is Comment in a Bug. The interleaved Id will hold part of the +// Bug Id and part of the Comment Id. +// +// To allow the use of an arbitrary length prefix of this Id, Ids from primary +// and secondary are interleaved with this irregular pattern to give the +// best chance to find the secondary even with a 7 character prefix. +// +// Format is: PSPSPSPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPPSPPPP +// +// A complete interleaved Id hold 50 characters for the primary and 14 for the +// secondary, which give a key space of 36^50 for the primary (~6 * 10^77) and +// 36^14 for the secondary (~6 * 10^21). This asymmetry assume a reasonable number +// of secondary within a primary Entity, while still allowing for a vast key space +// for the primary (that is, a globally merged database) with a low risk of collision. +// +// Here is the breakdown of several common prefix length: +// +// 5: 3P, 2S +// 7: 4P, 3S +// 10: 6P, 4S +// 16: 11P, 5S +func CombineIds(primary Id, secondary Id) Id { + var id strings.Builder + + for i := 0; i < idLength; i++ { + switch { + default: + id.WriteByte(primary[0]) + primary = primary[1:] + case i == 1, i == 3, i == 5, i == 9, i >= 10 && i%5 == 4: + id.WriteByte(secondary[0]) + secondary = secondary[1:] + } + } + + return Id(id.String()) +} + +// SeparateIds extract primary and secondary prefix from an arbitrary length prefix +// of an Id created with CombineIds. 
+func SeparateIds(prefix string) (primaryPrefix string, secondaryPrefix string) { + var primary strings.Builder + var secondary strings.Builder + + for i, r := range prefix { + switch { + default: + primary.WriteRune(r) + case i == 1, i == 3, i == 5, i == 9, i >= 10 && i%5 == 4: + secondary.WriteRune(r) + } + } + + return primary.String(), secondary.String() +} diff --git a/migration3/after/entity/merge.go b/migration3/after/entity/merge.go index 3ce8eda..0661b7f 100644 --- a/migration3/after/entity/merge.go +++ b/migration3/after/entity/merge.go @@ -8,14 +8,15 @@ import ( type MergeStatus int const ( - _ MergeStatus = iota - MergeStatusNew - MergeStatusInvalid - MergeStatusUpdated - MergeStatusNothing - MergeStatusError + _ MergeStatus = iota + MergeStatusNew // a new Entity was created locally + MergeStatusInvalid // the remote data is invalid + MergeStatusUpdated // a local Entity has been updated + MergeStatusNothing // no changes were made to a local Entity (already up to date) + MergeStatusError // a terminal error happened ) +// MergeResult hold the result of a merge operation on an Entity. 
type MergeResult struct { // Err is set when a terminal error occur in the process Err error @@ -23,10 +24,10 @@ type MergeResult struct { Id Id Status MergeStatus - // Only set for invalid status + // Only set for Invalid status Reason string - // Not set for invalid status + // Only set for New or Updated status Entity Interface } @@ -41,34 +42,50 @@ func (mr MergeResult) String() string { case MergeStatusNothing: return "nothing to do" case MergeStatusError: - return fmt.Sprintf("merge error on %s: %s", mr.Id, mr.Err.Error()) + if mr.Id != "" { + return fmt.Sprintf("merge error on %s: %s", mr.Id, mr.Err.Error()) + } + return fmt.Sprintf("merge error: %s", mr.Err.Error()) default: panic("unknown merge status") } } -func NewMergeError(err error, id Id) MergeResult { +func NewMergeNewStatus(id Id, entity Interface) MergeResult { return MergeResult{ - Err: err, Id: id, - Status: MergeStatusError, + Status: MergeStatusNew, + Entity: entity, } } -func NewMergeStatus(status MergeStatus, id Id, entity Interface) MergeResult { +func NewMergeInvalidStatus(id Id, reason string) MergeResult { return MergeResult{ Id: id, - Status: status, + Status: MergeStatusInvalid, + Reason: reason, + } +} - // Entity is not set for an invalid merge result +func NewMergeUpdatedStatus(id Id, entity Interface) MergeResult { + return MergeResult{ + Id: id, + Status: MergeStatusUpdated, Entity: entity, } } -func NewMergeInvalidStatus(id Id, reason string) MergeResult { +func NewMergeNothingStatus(id Id) MergeResult { return MergeResult{ Id: id, - Status: MergeStatusInvalid, - Reason: reason, + Status: MergeStatusNothing, + } +} + +func NewMergeError(err error, id Id) MergeResult { + return MergeResult{ + Id: id, + Status: MergeStatusError, + Err: err, } } diff --git a/migration3/after/entity/refs.go b/migration3/after/entity/refs.go new file mode 100644 index 0000000..070d4db --- /dev/null +++ b/migration3/after/entity/refs.go @@ -0,0 +1,20 @@ +package entity + +import "strings" + +// 
RefsToIds parse a slice of git references and return the corresponding Entity's Id. +func RefsToIds(refs []string) []Id { + ids := make([]Id, len(refs)) + + for i, ref := range refs { + ids[i] = RefToId(ref) + } + + return ids +} + +// RefsToIds parse a git reference and return the corresponding Entity's Id. +func RefToId(ref string) Id { + split := strings.Split(ref, "/") + return Id(split[len(split)-1]) +} diff --git a/migration3/after/identity/identity.go b/migration3/after/identity/identity.go index 40485c6..3972b8a 100644 --- a/migration3/after/identity/identity.go +++ b/migration3/after/identity/identity.go @@ -5,7 +5,6 @@ import ( "encoding/json" "fmt" "reflect" - "strings" "github.com/pkg/errors" @@ -26,6 +25,10 @@ var ErrNoIdentitySet = errors.New("No identity is set.\n" + "\"git bug user create\"") var ErrMultipleIdentitiesSet = errors.New("multiple user identities set") +func NewErrMultipleMatchIdentity(matching []entity.Id) *entity.ErrMultipleMatch { + return entity.NewErrMultipleMatch("identity", matching) +} + var _ Interface = &Identity{} var _ entity.Interface = &Identity{} @@ -98,8 +101,7 @@ func ReadRemote(repo repository.Repo, remote string, id string) (*Identity, erro // read will load and parse an identity from git func read(repo repository.Repo, ref string) (*Identity, error) { - refSplit := strings.Split(ref, "/") - id := entity.Id(refSplit[len(refSplit)-1]) + id := entity.RefToId(ref) if err := id.Validate(); err != nil { return nil, errors.Wrap(err, "invalid ref") @@ -153,6 +155,66 @@ func read(repo repository.Repo, ref string) (*Identity, error) { return i, nil } +// ListLocalIds list all the available local identity ids +func ListLocalIds(repo repository.Repo) ([]entity.Id, error) { + refs, err := repo.ListRefs(identityRefPattern) + if err != nil { + return nil, err + } + + return entity.RefsToIds(refs), nil +} + +// RemoveIdentity will remove a local identity from its entity.Id +func RemoveIdentity(repo repository.ClockedRepo, id 
entity.Id) error { + var fullMatches []string + + refs, err := repo.ListRefs(identityRefPattern + id.String()) + if err != nil { + return err + } + if len(refs) > 1 { + return NewErrMultipleMatchIdentity(entity.RefsToIds(refs)) + } + if len(refs) == 1 { + // we have the identity locally + fullMatches = append(fullMatches, refs[0]) + } + + remotes, err := repo.GetRemotes() + if err != nil { + return err + } + + for remote := range remotes { + remotePrefix := fmt.Sprintf(identityRemoteRefPattern+id.String(), remote) + remoteRefs, err := repo.ListRefs(remotePrefix) + if err != nil { + return err + } + if len(remoteRefs) > 1 { + return NewErrMultipleMatchIdentity(entity.RefsToIds(refs)) + } + if len(remoteRefs) == 1 { + // found the identity in a remote + fullMatches = append(fullMatches, remoteRefs[0]) + } + } + + if len(fullMatches) == 0 { + return ErrIdentityNotExist + } + + for _, ref := range fullMatches { + err = repo.RemoveRef(ref) + if err != nil { + return err + } + } + + return nil +} + type StreamedIdentity struct { Identity *Identity Err error @@ -282,7 +344,7 @@ func (i *Identity) Commit(repo repository.ClockedRepo) error { var commitHash repository.Hash if lastCommit != "" { - commitHash, err = repo.StoreCommitWithParent(treeHash, lastCommit) + commitHash, err = repo.StoreCommit(treeHash, lastCommit) } else { commitHash, err = repo.StoreCommit(treeHash) } @@ -456,6 +518,22 @@ func (i *Identity) Keys() []*Key { return i.lastVersion().keys } +// SigningKey return the key that should be used to sign new messages. If no key is available, return nil. 
+func (i *Identity) SigningKey(repo repository.RepoKeyring) (*Key, error) { + keys := i.Keys() + for _, key := range keys { + err := key.ensurePrivateKey(repo) + if err == errNoPrivateKey { + continue + } + if err != nil { + return nil, err + } + return key, nil + } + return nil, nil +} + // ValidKeysAtTime return the set of keys valid at a given lamport time func (i *Identity) ValidKeysAtTime(clockName string, time lamport.Time) []*Key { var result []*Key diff --git a/migration3/after/identity/identity_actions.go b/migration3/after/identity/identity_actions.go index d5c1d7f..56f04ba 100644 --- a/migration3/after/identity/identity_actions.go +++ b/migration3/after/identity/identity_actions.go @@ -13,19 +13,12 @@ import ( // Fetch retrieve updates from a remote // This does not change the local identities state func Fetch(repo repository.Repo, remote string) (string, error) { - // "refs/identities/*:refs/remotes//identities/*" - remoteRefSpec := fmt.Sprintf(identityRemoteRefPattern, remote) - fetchRefSpec := fmt.Sprintf("%s*:%s*", identityRefPattern, remoteRefSpec) - - return repo.FetchRefs(remote, fetchRefSpec) + return repo.FetchRefs(remote, "identities") } // Push update a remote with the local changes func Push(repo repository.Repo, remote string) (string, error) { - // "refs/identities/*:refs/identities/*" - refspec := fmt.Sprintf("%s*:%s*", identityRefPattern, identityRefPattern) - - return repo.PushRefs(remote, refspec) + return repo.PushRefs(remote, "identities") } // Pull will do a Fetch + MergeAll @@ -102,7 +95,7 @@ func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeRes return } - out <- entity.NewMergeStatus(entity.MergeStatusNew, id, remoteIdentity) + out <- entity.NewMergeNewStatus(id, remoteIdentity) continue } @@ -121,9 +114,9 @@ func MergeAll(repo repository.ClockedRepo, remote string) <-chan entity.MergeRes } if updated { - out <- entity.NewMergeStatus(entity.MergeStatusUpdated, id, localIdentity) + out <- 
entity.NewMergeUpdatedStatus(id, localIdentity) } else { - out <- entity.NewMergeStatus(entity.MergeStatusNothing, id, localIdentity) + out <- entity.NewMergeNothingStatus(id) } } }() diff --git a/migration3/after/identity/identity_stub.go b/migration3/after/identity/identity_stub.go index 0eaf449..db476dc 100644 --- a/migration3/after/identity/identity_stub.go +++ b/migration3/after/identity/identity_stub.go @@ -4,6 +4,7 @@ import ( "encoding/json" "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" ) @@ -71,6 +72,10 @@ func (IdentityStub) Keys() []*Key { panic("identities needs to be properly loaded with identity.ReadLocal()") } +func (i *IdentityStub) SigningKey(repo repository.RepoKeyring) (*Key, error) { + panic("identities needs to be properly loaded with identity.ReadLocal()") +} + func (IdentityStub) ValidKeysAtTime(_ string, _ lamport.Time) []*Key { panic("identities needs to be properly loaded with identity.ReadLocal()") } diff --git a/migration3/after/identity/interface.go b/migration3/after/identity/interface.go index 1ef07be..8441c2c 100644 --- a/migration3/after/identity/interface.go +++ b/migration3/after/identity/interface.go @@ -2,6 +2,7 @@ package identity import ( "github.com/MichaelMure/git-bug-migration/migration3/after/entity" + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" "github.com/MichaelMure/git-bug-migration/migration3/after/util/timestamp" ) @@ -36,6 +37,9 @@ type Interface interface { // Can be empty. Keys() []*Key + // SigningKey return the key that should be used to sign new messages. If no key is available, return nil. 
+ SigningKey(repo repository.RepoKeyring) (*Key, error) + // ValidKeysAtTime return the set of keys valid at a given lamport time for a given clock of another entity // Can be empty. ValidKeysAtTime(clockName string, time lamport.Time) []*Key diff --git a/migration3/after/identity/key.go b/migration3/after/identity/key.go index cc94839..c4a9db9 100644 --- a/migration3/after/identity/key.go +++ b/migration3/after/identity/key.go @@ -1,18 +1,224 @@ package identity +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "strings" + "time" + + "github.com/pkg/errors" + "golang.org/x/crypto/openpgp" + "golang.org/x/crypto/openpgp/armor" + "golang.org/x/crypto/openpgp/packet" + + "github.com/MichaelMure/git-bug-migration/migration3/after/repository" +) + +var errNoPrivateKey = fmt.Errorf("no private key") + type Key struct { - // The GPG fingerprint of the key - Fingerprint string `json:"fingerprint"` - PubKey string `json:"pub_key"` + public *packet.PublicKey + private *packet.PrivateKey +} + +// GenerateKey generate a keypair (public+private) +// The type and configuration of the key is determined by the default value in go's OpenPGP. +func GenerateKey() *Key { + entity, err := openpgp.NewEntity("", "", "", &packet.Config{ + // The armored format doesn't include the creation time, which makes the round-trip data not being fully equal. + // We don't care about the creation time so we can set it to the zero value. + Time: func() time.Time { + return time.Time{} + }, + }) + if err != nil { + panic(err) + } + return &Key{ + public: entity.PrimaryKey, + private: entity.PrivateKey, + } +} + +// generatePublicKey generate only a public key (only useful for testing) +// See GenerateKey for the details. 
+func generatePublicKey() *Key { + k := GenerateKey() + k.private = nil + return k +} + +func (k *Key) Public() *packet.PublicKey { + return k.public +} + +func (k *Key) Private() *packet.PrivateKey { + return k.private } func (k *Key) Validate() error { - // Todo + if k.public == nil { + return fmt.Errorf("nil public key") + } + if !k.public.CanSign() { + return fmt.Errorf("public key can't sign") + } + + if k.private != nil { + if !k.private.CanSign() { + return fmt.Errorf("private key can't sign") + } + } return nil } func (k *Key) Clone() *Key { - clone := *k - return &clone + clone := &Key{} + + pub := *k.public + clone.public = &pub + + if k.private != nil { + priv := *k.private + clone.private = &priv + } + + return clone +} + +func (k *Key) MarshalJSON() ([]byte, error) { + // Serialize only the public key, in the armored format. + var buf bytes.Buffer + w, err := armor.Encode(&buf, openpgp.PublicKeyType, nil) + if err != nil { + return nil, err + } + + err = k.public.Serialize(w) + if err != nil { + return nil, err + } + err = w.Close() + if err != nil { + return nil, err + } + return json.Marshal(buf.String()) +} + +func (k *Key) UnmarshalJSON(data []byte) error { + // De-serialize only the public key, in the armored format. + var armored string + err := json.Unmarshal(data, &armored) + if err != nil { + return err + } + + block, err := armor.Decode(strings.NewReader(armored)) + if err == io.EOF { + return fmt.Errorf("no armored data found") + } + if err != nil { + return err + } + + if block.Type != openpgp.PublicKeyType { + return fmt.Errorf("invalid key type") + } + + p, err := packet.Read(block.Body) + if err != nil { + return errors.Wrap(err, "failed to read public key packet") + } + + public, ok := p.(*packet.PublicKey) + if !ok { + return errors.New("got no packet.publicKey") + } + + // The armored format doesn't include the creation time, which makes the round-trip data not being fully equal. 
+ // We don't care about the creation time so we can set it to the zero value. + public.CreationTime = time.Time{} + + k.public = public + return nil +} + +func (k *Key) loadPrivate(repo repository.RepoKeyring) error { + item, err := repo.Keyring().Get(k.public.KeyIdString()) + if err == repository.ErrKeyringKeyNotFound { + return errNoPrivateKey + } + if err != nil { + return err + } + + block, err := armor.Decode(bytes.NewReader(item.Data)) + if err == io.EOF { + return fmt.Errorf("no armored data found") + } + if err != nil { + return err + } + + if block.Type != openpgp.PrivateKeyType { + return fmt.Errorf("invalid key type") + } + + p, err := packet.Read(block.Body) + if err != nil { + return errors.Wrap(err, "failed to read private key packet") + } + + private, ok := p.(*packet.PrivateKey) + if !ok { + return errors.New("got no packet.privateKey") + } + + // The armored format doesn't include the creation time, which makes the round-trip data not being fully equal. + // We don't care about the creation time so we can set it to the zero value. + private.CreationTime = time.Time{} + + k.private = private + return nil +} + +// ensurePrivateKey attempt to load the corresponding private key if it is not loaded already. 
+// If no private key is found, returns errNoPrivateKey +func (k *Key) ensurePrivateKey(repo repository.RepoKeyring) error { + if k.private != nil { + return nil + } + + return k.loadPrivate(repo) +} + +func (k *Key) storePrivate(repo repository.RepoKeyring) error { + var buf bytes.Buffer + w, err := armor.Encode(&buf, openpgp.PrivateKeyType, nil) + if err != nil { + return err + } + err = k.private.Serialize(w) + if err != nil { + return err + } + err = w.Close() + if err != nil { + return err + } + + return repo.Keyring().Set(repository.Item{ + Key: k.public.KeyIdString(), + Data: buf.Bytes(), + }) +} + +func (k *Key) PGPEntity() *openpgp.Entity { + return &openpgp.Entity{ + PrimaryKey: k.public, + PrivateKey: k.private, + } } diff --git a/migration3/after/identity/resolver.go b/migration3/after/identity/resolver.go index e3ec1a2..89db6c1 100644 --- a/migration3/after/identity/resolver.go +++ b/migration3/after/identity/resolver.go @@ -1,6 +1,8 @@ package identity import ( + "sync" + "github.com/MichaelMure/git-bug-migration/migration3/after/entity" "github.com/MichaelMure/git-bug-migration/migration3/after/repository" ) @@ -34,3 +36,36 @@ func NewStubResolver() *StubResolver { func (s *StubResolver) ResolveIdentity(id entity.Id) (Interface, error) { return &IdentityStub{id: id}, nil } + +// CachedResolver is a resolver ensuring that loading is done only once through another Resolver. 
+type CachedResolver struct { + mu sync.RWMutex + resolver Resolver + identities map[entity.Id]Interface +} + +func NewCachedResolver(resolver Resolver) *CachedResolver { + return &CachedResolver{ + resolver: resolver, + identities: make(map[entity.Id]Interface), + } +} + +func (c *CachedResolver) ResolveIdentity(id entity.Id) (Interface, error) { + c.mu.RLock() + if i, ok := c.identities[id]; ok { + c.mu.RUnlock() + return i, nil + } + c.mu.RUnlock() + + c.mu.Lock() + defer c.mu.Unlock() + + i, err := c.resolver.ResolveIdentity(id) + if err != nil { + return nil, err + } + c.identities[id] = i + return i, nil +} diff --git a/migration3/after/identity/version.go b/migration3/after/identity/version.go index cf5c0f8..75850e8 100644 --- a/migration3/after/identity/version.go +++ b/migration3/after/identity/version.go @@ -37,7 +37,7 @@ type version struct { keys []*Key // mandatory random bytes to ensure a better randomness of the data of the first - // version of a bug, used to later generate the ID + // version of an identity, used to later generate the ID // len(Nonce) should be > 20 and < 64 bytes // It has no functional purpose and should be ignored. // TODO: optional after first version? 
@@ -159,11 +159,8 @@ func (v *version) UnmarshalJSON(data []byte) error {
 		return err
 	}
 
-	if aux.FormatVersion < formatVersion {
-		return fmt.Errorf("outdated repository format, please use https://github.com/MichaelMure/git-bug-migration to upgrade")
-	}
-	if aux.FormatVersion > formatVersion {
-		return fmt.Errorf("your version of git-bug is too old for this repository (identity format %v), please upgrade to the latest version", aux.FormatVersion)
+	if aux.FormatVersion != formatVersion {
+		return entity.NewErrInvalidFormat(aux.FormatVersion, formatVersion)
 	}
 
 	v.id = entity.DeriveId(data)
diff --git a/migration3/after/repository/common.go b/migration3/after/repository/common.go
new file mode 100644
index 0000000..4cefbd9
--- /dev/null
+++ b/migration3/after/repository/common.go
@@ -0,0 +1,67 @@
+package repository
+
+import (
+	"io"
+
+	"golang.org/x/crypto/openpgp"
+	"golang.org/x/crypto/openpgp/armor"
+	"golang.org/x/crypto/openpgp/errors"
+)
+
+// nonNativeListCommits is an implementation for ListCommits, for the case where
+// the underlying git implementation doesn't support it natively.
+func nonNativeListCommits(repo RepoData, ref string) ([]Hash, error) { + var result []Hash + + stack := make([]Hash, 0, 32) + visited := make(map[Hash]struct{}) + + hash, err := repo.ResolveRef(ref) + if err != nil { + return nil, err + } + + stack = append(stack, hash) + + for len(stack) > 0 { + // pop + hash := stack[len(stack)-1] + stack = stack[:len(stack)-1] + + if _, ok := visited[hash]; ok { + continue + } + + // mark as visited + visited[hash] = struct{}{} + result = append(result, hash) + + commit, err := repo.ReadCommit(hash) + if err != nil { + return nil, err + } + + for _, parent := range commit.Parents { + stack = append(stack, parent) + } + } + + // reverse + for i, j := 0, len(result)-1; i < j; i, j = i+1, j-1 { + result[i], result[j] = result[j], result[i] + } + + return result, nil +} + +// deArmorSignature convert an armored (text serialized) signature into raw binary +func deArmorSignature(armoredSig io.Reader) (io.Reader, error) { + block, err := armor.Decode(armoredSig) + if err != nil { + return nil, err + } + if block.Type != openpgp.SignatureType { + return nil, errors.InvalidArgumentError("expected '" + openpgp.SignatureType + "', got: " + block.Type) + } + return block.Body, nil +} diff --git a/migration3/after/repository/config_mem.go b/migration3/after/repository/config_mem.go index 9725e8d..019bc11 100644 --- a/migration3/after/repository/config_mem.go +++ b/migration3/after/repository/config_mem.go @@ -20,6 +20,7 @@ func NewMemConfig() *MemConfig { } func (mc *MemConfig) StoreString(key, value string) error { + key = normalizeKey(key) mc.config[key] = value return nil } @@ -33,6 +34,7 @@ func (mc *MemConfig) StoreTimestamp(key string, value time.Time) error { } func (mc *MemConfig) ReadAll(keyPrefix string) (map[string]string, error) { + keyPrefix = normalizeKey(keyPrefix) result := make(map[string]string) for key, val := range mc.config { if strings.HasPrefix(key, keyPrefix) { @@ -44,6 +46,7 @@ func (mc *MemConfig) ReadAll(keyPrefix 
string) (map[string]string, error) { func (mc *MemConfig) ReadString(key string) (string, error) { // unlike git, the mock can only store one value for the same key + key = normalizeKey(key) val, ok := mc.config[key] if !ok { return "", ErrNoConfigEntry @@ -54,9 +57,9 @@ func (mc *MemConfig) ReadString(key string) (string, error) { func (mc *MemConfig) ReadBool(key string) (bool, error) { // unlike git, the mock can only store one value for the same key - val, ok := mc.config[key] - if !ok { - return false, ErrNoConfigEntry + val, err := mc.ReadString(key) + if err != nil { + return false, err } return strconv.ParseBool(val) @@ -78,6 +81,7 @@ func (mc *MemConfig) ReadTimestamp(key string) (time.Time, error) { // RmConfigs remove all key/value pair matching the key prefix func (mc *MemConfig) RemoveAll(keyPrefix string) error { + keyPrefix = normalizeKey(keyPrefix) found := false for key := range mc.config { if strings.HasPrefix(key, keyPrefix) { @@ -92,3 +96,12 @@ func (mc *MemConfig) RemoveAll(keyPrefix string) error { return nil } + +func normalizeKey(key string) string { + // this feels so wrong, but that's apparently how git behave. + // only section and final segment are case insensitive, subsection in between are not. 
+ s := strings.Split(key, ".") + s[0] = strings.ToLower(s[0]) + s[len(s)-1] = strings.ToLower(s[len(s)-1]) + return strings.Join(s, ".") +} diff --git a/migration3/after/repository/config_testing.go b/migration3/after/repository/config_testing.go index 445f872..f8a2762 100644 --- a/migration3/after/repository/config_testing.go +++ b/migration3/after/repository/config_testing.go @@ -113,4 +113,43 @@ func testConfig(t *testing.T, config Config) { "section.subsection.subsection.opt1": "foo5", "section.subsection.subsection.opt2": "foo6", }, all) + + // missing section + case insensitive + val, err = config.ReadString("section2.opt1") + require.Error(t, err) + + val, err = config.ReadString("section.opt1") + require.NoError(t, err) + require.Equal(t, "foo", val) + + val, err = config.ReadString("SECTION.OPT1") + require.NoError(t, err) + require.Equal(t, "foo", val) + + _, err = config.ReadString("SECTION2.OPT3") + require.Error(t, err) + + // missing subsection + case insensitive + val, err = config.ReadString("section.subsection.opt1") + require.NoError(t, err) + require.Equal(t, "foo3", val) + + // for some weird reason, subsection ARE case sensitive + _, err = config.ReadString("SECTION.SUBSECTION.OPT1") + require.Error(t, err) + + _, err = config.ReadString("SECTION.SUBSECTION1.OPT1") + require.Error(t, err) + + // missing sub-subsection + case insensitive + val, err = config.ReadString("section.subsection.subsection.opt1") + require.NoError(t, err) + require.Equal(t, "foo5", val) + + // for some weird reason, subsection ARE case sensitive + _, err = config.ReadString("SECTION.SUBSECTION.SUBSECTION.OPT1") + require.Error(t, err) + + _, err = config.ReadString("SECTION.SUBSECTION.SUBSECTION1.OPT1") + require.Error(t, err) } diff --git a/migration3/after/repository/git.go b/migration3/after/repository/git.go deleted file mode 100644 index 6540fb5..0000000 --- a/migration3/after/repository/git.go +++ /dev/null @@ -1,461 +0,0 @@ -// Package repository contains helper 
methods for working with the Git repo. -package repository - -import ( - "bytes" - "fmt" - "io/ioutil" - "os" - "path" - "strings" - "sync" - - "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" -) - -const ( - clockPath = "git-bug/clocks" -) - -var _ ClockedRepo = &GitRepo{} -var _ TestedRepo = &GitRepo{} - -// GitRepo represents an instance of a (local) git repository. -type GitRepo struct { - gitCli - path string - - clocksMutex sync.Mutex - clocks map[string]lamport.Clock - - keyring Keyring -} - -// NewGitRepo determines if the given working directory is inside of a git repository, -// and returns the corresponding GitRepo instance if it is. -func NewGitRepo(path string, clockLoaders []ClockLoader) (*GitRepo, error) { - k, err := defaultKeyring() - if err != nil { - return nil, err - } - - repo := &GitRepo{ - gitCli: gitCli{path: path}, - path: path, - clocks: make(map[string]lamport.Clock), - keyring: k, - } - - // Check the repo and retrieve the root path - stdout, err := repo.runGitCommand("rev-parse", "--absolute-git-dir") - - // Now dir is fetched with "git rev-parse --git-dir". May be it can - // still return nothing in some cases. Then empty stdout check is - // kept. 
- if err != nil || stdout == "" { - return nil, ErrNotARepo - } - - // Fix the path to be sure we are at the root - repo.path = stdout - repo.gitCli.path = stdout - - for _, loader := range clockLoaders { - allExist := true - for _, name := range loader.Clocks { - if _, err := repo.getClock(name); err != nil { - allExist = false - } - } - - if !allExist { - err = loader.Witnesser(repo) - if err != nil { - return nil, err - } - } - } - - return repo, nil -} - -// InitGitRepo create a new empty git repo at the given path -func InitGitRepo(path string) (*GitRepo, error) { - repo := &GitRepo{ - gitCli: gitCli{path: path}, - path: path + "/.git", - clocks: make(map[string]lamport.Clock), - } - - _, err := repo.runGitCommand("init", path) - if err != nil { - return nil, err - } - - return repo, nil -} - -// InitBareGitRepo create a new --bare empty git repo at the given path -func InitBareGitRepo(path string) (*GitRepo, error) { - repo := &GitRepo{ - gitCli: gitCli{path: path}, - path: path, - clocks: make(map[string]lamport.Clock), - } - - _, err := repo.runGitCommand("init", "--bare", path) - if err != nil { - return nil, err - } - - return repo, nil -} - -// LocalConfig give access to the repository scoped configuration -func (repo *GitRepo) LocalConfig() Config { - return newGitConfig(repo.gitCli, false) -} - -// GlobalConfig give access to the global scoped configuration -func (repo *GitRepo) GlobalConfig() Config { - return newGitConfig(repo.gitCli, true) -} - -// AnyConfig give access to a merged local/global configuration -func (repo *GitRepo) AnyConfig() ConfigRead { - return mergeConfig(repo.LocalConfig(), repo.GlobalConfig()) -} - -// Keyring give access to a user-wide storage for secrets -func (repo *GitRepo) Keyring() Keyring { - return repo.keyring -} - -// GetPath returns the path to the repo. 
-func (repo *GitRepo) GetPath() string { - return repo.path -} - -// GetUserName returns the name the the user has used to configure git -func (repo *GitRepo) GetUserName() (string, error) { - return repo.runGitCommand("config", "user.name") -} - -// GetUserEmail returns the email address that the user has used to configure git. -func (repo *GitRepo) GetUserEmail() (string, error) { - return repo.runGitCommand("config", "user.email") -} - -// GetCoreEditor returns the name of the editor that the user has used to configure git. -func (repo *GitRepo) GetCoreEditor() (string, error) { - return repo.runGitCommand("var", "GIT_EDITOR") -} - -// GetRemotes returns the configured remotes repositories. -func (repo *GitRepo) GetRemotes() (map[string]string, error) { - stdout, err := repo.runGitCommand("remote", "--verbose") - if err != nil { - return nil, err - } - - lines := strings.Split(stdout, "\n") - remotes := make(map[string]string, len(lines)) - - for _, line := range lines { - if strings.TrimSpace(line) == "" { - continue - } - elements := strings.Fields(line) - if len(elements) != 3 { - return nil, fmt.Errorf("git remote: unexpected output format: %s", line) - } - - remotes[elements[0]] = elements[1] - } - - return remotes, nil -} - -// FetchRefs fetch git refs from a remote -func (repo *GitRepo) FetchRefs(remote, refSpec string) (string, error) { - stdout, err := repo.runGitCommand("fetch", remote, refSpec) - - if err != nil { - return stdout, fmt.Errorf("failed to fetch from the remote '%s': %v", remote, err) - } - - return stdout, err -} - -// PushRefs push git refs to a remote -func (repo *GitRepo) PushRefs(remote string, refSpec string) (string, error) { - stdout, stderr, err := repo.runGitCommandRaw(nil, "push", remote, refSpec) - - if err != nil { - return stdout + stderr, fmt.Errorf("failed to push to the remote '%s': %v", remote, stderr) - } - return stdout + stderr, nil -} - -// StoreData will store arbitrary data and return the corresponding hash -func 
(repo *GitRepo) StoreData(data []byte) (Hash, error) { - var stdin = bytes.NewReader(data) - - stdout, err := repo.runGitCommandWithStdin(stdin, "hash-object", "--stdin", "-w") - - return Hash(stdout), err -} - -// ReadData will attempt to read arbitrary data from the given hash -func (repo *GitRepo) ReadData(hash Hash) ([]byte, error) { - var stdout bytes.Buffer - var stderr bytes.Buffer - - err := repo.runGitCommandWithIO(nil, &stdout, &stderr, "cat-file", "-p", string(hash)) - - if err != nil { - return []byte{}, err - } - - return stdout.Bytes(), nil -} - -// StoreTree will store a mapping key-->Hash as a Git tree -func (repo *GitRepo) StoreTree(entries []TreeEntry) (Hash, error) { - buffer := prepareTreeEntries(entries) - - stdout, err := repo.runGitCommandWithStdin(&buffer, "mktree") - - if err != nil { - return "", err - } - - return Hash(stdout), nil -} - -// StoreCommit will store a Git commit with the given Git tree -func (repo *GitRepo) StoreCommit(treeHash Hash) (Hash, error) { - stdout, err := repo.runGitCommand("commit-tree", string(treeHash)) - - if err != nil { - return "", err - } - - return Hash(stdout), nil -} - -// StoreCommitWithParent will store a Git commit with the given Git tree -func (repo *GitRepo) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) { - stdout, err := repo.runGitCommand("commit-tree", string(treeHash), - "-p", string(parent)) - - if err != nil { - return "", err - } - - return Hash(stdout), nil -} - -// UpdateRef will create or update a Git reference -func (repo *GitRepo) UpdateRef(ref string, hash Hash) error { - _, err := repo.runGitCommand("update-ref", ref, string(hash)) - - return err -} - -// RemoveRef will remove a Git reference -func (repo *GitRepo) RemoveRef(ref string) error { - _, err := repo.runGitCommand("update-ref", "-d", ref) - - return err -} - -// ListRefs will return a list of Git ref matching the given refspec -func (repo *GitRepo) ListRefs(refPrefix string) ([]string, error) { - stdout, 
err := repo.runGitCommand("for-each-ref", "--format=%(refname)", refPrefix) - - if err != nil { - return nil, err - } - - split := strings.Split(stdout, "\n") - - if len(split) == 1 && split[0] == "" { - return []string{}, nil - } - - return split, nil -} - -// RefExist will check if a reference exist in Git -func (repo *GitRepo) RefExist(ref string) (bool, error) { - stdout, err := repo.runGitCommand("for-each-ref", ref) - - if err != nil { - return false, err - } - - return stdout != "", nil -} - -// CopyRef will create a new reference with the same value as another one -func (repo *GitRepo) CopyRef(source string, dest string) error { - _, err := repo.runGitCommand("update-ref", dest, source) - - return err -} - -// ListCommits will return the list of commit hashes of a ref, in chronological order -func (repo *GitRepo) ListCommits(ref string) ([]Hash, error) { - stdout, err := repo.runGitCommand("rev-list", "--first-parent", "--reverse", ref) - - if err != nil { - return nil, err - } - - split := strings.Split(stdout, "\n") - - casted := make([]Hash, len(split)) - for i, line := range split { - casted[i] = Hash(line) - } - - return casted, nil - -} - -// ReadTree will return the list of entries in a Git tree -func (repo *GitRepo) ReadTree(hash Hash) ([]TreeEntry, error) { - stdout, err := repo.runGitCommand("ls-tree", string(hash)) - - if err != nil { - return nil, err - } - - return readTreeEntries(stdout) -} - -// FindCommonAncestor will return the last common ancestor of two chain of commit -func (repo *GitRepo) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) { - stdout, err := repo.runGitCommand("merge-base", string(hash1), string(hash2)) - - if err != nil { - return "", err - } - - return Hash(stdout), nil -} - -// GetTreeHash return the git tree hash referenced in a commit -func (repo *GitRepo) GetTreeHash(commit Hash) (Hash, error) { - stdout, err := repo.runGitCommand("rev-parse", string(commit)+"^{tree}") - - if err != nil { - return "", err - } 
- - return Hash(stdout), nil -} - -func (repo *GitRepo) AllClocks() (map[string]lamport.Clock, error) { - repo.clocksMutex.Lock() - defer repo.clocksMutex.Unlock() - - result := make(map[string]lamport.Clock) - - files, err := ioutil.ReadDir(path.Join(repo.path, clockPath)) - if os.IsNotExist(err) { - return nil, nil - } - if err != nil { - return nil, err - } - - for _, file := range files { - name := file.Name() - if c, ok := repo.clocks[name]; ok { - result[name] = c - } else { - c, err := lamport.LoadPersistedClock(path.Join(repo.path, clockPath, name)) - if err != nil { - return nil, err - } - repo.clocks[name] = c - result[name] = c - } - } - - return result, nil -} - -// GetOrCreateClock return a Lamport clock stored in the Repo. -// If the clock doesn't exist, it's created. -func (repo *GitRepo) GetOrCreateClock(name string) (lamport.Clock, error) { - c, err := repo.getClock(name) - if err == nil { - return c, nil - } - if err != ErrClockNotExist { - return nil, err - } - - repo.clocksMutex.Lock() - defer repo.clocksMutex.Unlock() - - p := path.Join(repo.path, clockPath, name) - - c, err = lamport.NewPersistedClock(p) - if err != nil { - return nil, err - } - - repo.clocks[name] = c - return c, nil -} - -func (repo *GitRepo) getClock(name string) (lamport.Clock, error) { - repo.clocksMutex.Lock() - defer repo.clocksMutex.Unlock() - - if c, ok := repo.clocks[name]; ok { - return c, nil - } - - p := path.Join(repo.path, clockPath, name) - - c, err := lamport.LoadPersistedClock(p) - if err == nil { - repo.clocks[name] = c - return c, nil - } - if err == lamport.ErrClockNotExist { - return nil, ErrClockNotExist - } - return nil, err -} - -// Increment is equivalent to c = GetOrCreateClock(name) + c.Increment() -func (repo *GitRepo) Increment(name string) (lamport.Time, error) { - c, err := repo.GetOrCreateClock(name) - if err != nil { - return lamport.Time(0), err - } - return c.Increment() -} - -// Witness is equivalent to c = GetOrCreateClock(name) + 
c.Witness(time) -func (repo *GitRepo) Witness(name string, time lamport.Time) error { - c, err := repo.GetOrCreateClock(name) - if err != nil { - return err - } - return c.Witness(time) -} - -// AddRemote add a new remote to the repository -// Not in the interface because it's only used for testing -func (repo *GitRepo) AddRemote(name string, url string) error { - _, err := repo.runGitCommand("remote", "add", name, url) - - return err -} diff --git a/migration3/after/repository/git_cli.go b/migration3/after/repository/git_cli.go deleted file mode 100644 index 085b1cd..0000000 --- a/migration3/after/repository/git_cli.go +++ /dev/null @@ -1,56 +0,0 @@ -package repository - -import ( - "bytes" - "fmt" - "io" - "os/exec" - "strings" -) - -// gitCli is a helper to launch CLI git commands -type gitCli struct { - path string -} - -// Run the given git command with the given I/O reader/writers, returning an error if it fails. -func (cli gitCli) runGitCommandWithIO(stdin io.Reader, stdout, stderr io.Writer, args ...string) error { - // make sure that the working directory for the command - // always exist, in particular when running "git init". - path := strings.TrimSuffix(cli.path, ".git") - - // fmt.Printf("[%s] Running git %s\n", path, strings.Join(args, " ")) - - cmd := exec.Command("git", args...) - cmd.Dir = path - cmd.Stdin = stdin - cmd.Stdout = stdout - cmd.Stderr = stderr - - return cmd.Run() -} - -// Run the given git command and return its stdout, or an error if the command fails. -func (cli gitCli) runGitCommandRaw(stdin io.Reader, args ...string) (string, string, error) { - var stdout bytes.Buffer - var stderr bytes.Buffer - err := cli.runGitCommandWithIO(stdin, &stdout, &stderr, args...) - return strings.TrimSpace(stdout.String()), strings.TrimSpace(stderr.String()), err -} - -// Run the given git command and return its stdout, or an error if the command fails. 
-func (cli gitCli) runGitCommandWithStdin(stdin io.Reader, args ...string) (string, error) { - stdout, stderr, err := cli.runGitCommandRaw(stdin, args...) - if err != nil { - if stderr == "" { - stderr = "Error running git command: " + strings.Join(args, " ") - } - err = fmt.Errorf(stderr) - } - return stdout, err -} - -// Run the given git command and return its stdout, or an error if the command fails. -func (cli gitCli) runGitCommand(args ...string) (string, error) { - return cli.runGitCommandWithStdin(nil, args...) -} diff --git a/migration3/after/repository/git_config.go b/migration3/after/repository/git_config.go deleted file mode 100644 index b46cc69..0000000 --- a/migration3/after/repository/git_config.go +++ /dev/null @@ -1,221 +0,0 @@ -package repository - -import ( - "fmt" - "regexp" - "strconv" - "strings" - "time" - - "github.com/blang/semver" - "github.com/pkg/errors" -) - -var _ Config = &gitConfig{} - -type gitConfig struct { - cli gitCli - localityFlag string -} - -func newGitConfig(cli gitCli, global bool) *gitConfig { - localityFlag := "--local" - if global { - localityFlag = "--global" - } - return &gitConfig{ - cli: cli, - localityFlag: localityFlag, - } -} - -// StoreString store a single key/value pair in the config of the repo -func (gc *gitConfig) StoreString(key string, value string) error { - _, err := gc.cli.runGitCommand("config", gc.localityFlag, "--replace-all", key, value) - return err -} - -func (gc *gitConfig) StoreBool(key string, value bool) error { - return gc.StoreString(key, strconv.FormatBool(value)) -} - -func (gc *gitConfig) StoreTimestamp(key string, value time.Time) error { - return gc.StoreString(key, strconv.Itoa(int(value.Unix()))) -} - -// ReadAll read all key/value pair matching the key prefix -func (gc *gitConfig) ReadAll(keyPrefix string) (map[string]string, error) { - stdout, err := gc.cli.runGitCommand("config", gc.localityFlag, "--includes", "--get-regexp", keyPrefix) - - // / \ - // / ! 
\ - // ------- - // - // There can be a legitimate error here, but I see no portable way to - // distinguish them from the git error that say "no matching value exist" - if err != nil { - return nil, nil - } - - lines := strings.Split(stdout, "\n") - - result := make(map[string]string, len(lines)) - - for _, line := range lines { - if strings.TrimSpace(line) == "" { - continue - } - - parts := strings.SplitN(line, " ", 2) - result[parts[0]] = parts[1] - } - - return result, nil -} - -func (gc *gitConfig) ReadString(key string) (string, error) { - stdout, err := gc.cli.runGitCommand("config", gc.localityFlag, "--includes", "--get-all", key) - - // / \ - // / ! \ - // ------- - // - // There can be a legitimate error here, but I see no portable way to - // distinguish them from the git error that say "no matching value exist" - if err != nil { - return "", ErrNoConfigEntry - } - - lines := strings.Split(stdout, "\n") - - if len(lines) == 0 { - return "", ErrNoConfigEntry - } - if len(lines) > 1 { - return "", ErrMultipleConfigEntry - } - - return lines[0], nil -} - -func (gc *gitConfig) ReadBool(key string) (bool, error) { - val, err := gc.ReadString(key) - if err != nil { - return false, err - } - - return strconv.ParseBool(val) -} - -func (gc *gitConfig) ReadTimestamp(key string) (time.Time, error) { - value, err := gc.ReadString(key) - if err != nil { - return time.Time{}, err - } - return ParseTimestamp(value) -} - -func (gc *gitConfig) rmSection(keyPrefix string) error { - _, err := gc.cli.runGitCommand("config", gc.localityFlag, "--remove-section", keyPrefix) - return err -} - -func (gc *gitConfig) unsetAll(keyPrefix string) error { - _, err := gc.cli.runGitCommand("config", gc.localityFlag, "--unset-all", keyPrefix) - return err -} - -// return keyPrefix section -// example: sectionFromKey(a.b.c.d) return a.b.c -func sectionFromKey(keyPrefix string) string { - s := strings.Split(keyPrefix, ".") - if len(s) == 1 { - return keyPrefix - } - - return 
strings.Join(s[:len(s)-1], ".") -} - -// rmConfigs with git version lesser than 2.18 -func (gc *gitConfig) rmConfigsGitVersionLT218(keyPrefix string) error { - // try to remove key/value pair by key - err := gc.unsetAll(keyPrefix) - if err != nil { - return gc.rmSection(keyPrefix) - } - - m, err := gc.ReadAll(sectionFromKey(keyPrefix)) - if err != nil { - return err - } - - // if section doesn't have any left key/value remove the section - if len(m) == 0 { - return gc.rmSection(sectionFromKey(keyPrefix)) - } - - return nil -} - -// RmConfigs remove all key/value pair matching the key prefix -func (gc *gitConfig) RemoveAll(keyPrefix string) error { - // starting from git 2.18.0 sections are automatically deleted when the last existing - // key/value is removed. Before 2.18.0 we should remove the section - // see https://github.com/git/git/blob/master/Documentation/RelNotes/2.18.0.txt#L379 - lt218, err := gc.gitVersionLT218() - if err != nil { - return errors.Wrap(err, "getting git version") - } - - if lt218 { - return gc.rmConfigsGitVersionLT218(keyPrefix) - } - - err = gc.unsetAll(keyPrefix) - if err != nil { - return gc.rmSection(keyPrefix) - } - - return nil -} - -func (gc *gitConfig) gitVersion() (*semver.Version, error) { - versionOut, err := gc.cli.runGitCommand("version") - if err != nil { - return nil, err - } - return parseGitVersion(versionOut) -} - -func parseGitVersion(versionOut string) (*semver.Version, error) { - // extract the version and truncate potential bad parts - // ex: 2.23.0.rc1 instead of 2.23.0-rc1 - r := regexp.MustCompile(`(\d+\.){1,2}\d+`) - - extracted := r.FindString(versionOut) - if extracted == "" { - return nil, fmt.Errorf("unreadable git version %s", versionOut) - } - - version, err := semver.Make(extracted) - if err != nil { - return nil, err - } - - return &version, nil -} - -func (gc *gitConfig) gitVersionLT218() (bool, error) { - version, err := gc.gitVersion() - if err != nil { - return false, err - } - - version218string := 
"2.18.0" - gitVersion218, err := semver.Make(version218string) - if err != nil { - return false, err - } - - return version.LT(gitVersion218), nil -} diff --git a/migration3/after/repository/git_testing.go b/migration3/after/repository/git_testing.go deleted file mode 100644 index 874cc86..0000000 --- a/migration3/after/repository/git_testing.go +++ /dev/null @@ -1,74 +0,0 @@ -package repository - -import ( - "io/ioutil" - "log" - - "github.com/99designs/keyring" -) - -// This is intended for testing only - -func CreateTestRepo(bare bool) TestedRepo { - dir, err := ioutil.TempDir("", "") - if err != nil { - log.Fatal(err) - } - - var creator func(string) (*GitRepo, error) - - if bare { - creator = InitBareGitRepo - } else { - creator = InitGitRepo - } - - repo, err := creator(dir) - if err != nil { - log.Fatal(err) - } - - config := repo.LocalConfig() - if err := config.StoreString("user.name", "testuser"); err != nil { - log.Fatal("failed to set user.name for test repository: ", err) - } - if err := config.StoreString("user.email", "testuser@example.com"); err != nil { - log.Fatal("failed to set user.email for test repository: ", err) - } - - // make sure we use a mock keyring for testing to not interact with the global system - return &replaceKeyring{ - TestedRepo: repo, - keyring: keyring.NewArrayKeyring(nil), - } -} - -func SetupReposAndRemote() (repoA, repoB, remote TestedRepo) { - repoA = CreateGoGitTestRepo(false) - repoB = CreateGoGitTestRepo(false) - remote = CreateGoGitTestRepo(true) - - remoteAddr := "file://" + remote.GetPath() - - err := repoA.AddRemote("origin", remoteAddr) - if err != nil { - log.Fatal(err) - } - - err = repoB.AddRemote("origin", remoteAddr) - if err != nil { - log.Fatal(err) - } - - return repoA, repoB, remote -} - -// replaceKeyring allow to replace the Keyring of the underlying repo -type replaceKeyring struct { - TestedRepo - keyring Keyring -} - -func (rk replaceKeyring) Keyring() Keyring { - return rk.keyring -} diff --git 
a/migration3/after/repository/gogit.go b/migration3/after/repository/gogit.go index b533409..1f5965b 100644 --- a/migration3/after/repository/gogit.go +++ b/migration3/after/repository/gogit.go @@ -5,24 +5,30 @@ import ( "fmt" "io/ioutil" "os" - "os/exec" - stdpath "path" "path/filepath" "sort" "strings" "sync" "time" + "github.com/blevesearch/bleve" + "github.com/go-git/go-billy/v5" + "github.com/go-git/go-billy/v5/osfs" gogit "github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5/config" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/filemode" "github.com/go-git/go-git/v5/plumbing/object" + "golang.org/x/crypto/openpgp" + "golang.org/x/sys/execabs" "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" ) +const clockPath = "clocks" + var _ ClockedRepo = &GoGitRepo{} +var _ TestedRepo = &GoGitRepo{} type GoGitRepo struct { r *gogit.Repository @@ -31,10 +37,15 @@ type GoGitRepo struct { clocksMutex sync.Mutex clocks map[string]lamport.Clock - keyring Keyring + indexesMutex sync.Mutex + indexes map[string]bleve.Index + + keyring Keyring + localStorage billy.Filesystem } -func NewGoGitRepo(path string, clockLoaders []ClockLoader) (*GoGitRepo, error) { +// OpenGoGitRepo open an already existing repo at the given path +func OpenGoGitRepo(path string, clockLoaders []ClockLoader) (*GoGitRepo, error) { path, err := detectGitPath(path) if err != nil { return nil, err @@ -51,10 +62,12 @@ func NewGoGitRepo(path string, clockLoaders []ClockLoader) (*GoGitRepo, error) { } repo := &GoGitRepo{ - r: r, - path: path, - clocks: make(map[string]lamport.Clock), - keyring: k, + r: r, + path: path, + clocks: make(map[string]lamport.Clock), + indexes: make(map[string]bleve.Index), + keyring: k, + localStorage: osfs.New(filepath.Join(path, "git-bug")), } for _, loader := range clockLoaders { @@ -76,6 +89,50 @@ func NewGoGitRepo(path string, clockLoaders []ClockLoader) (*GoGitRepo, error) { return repo, nil } +// InitGoGitRepo create a 
new empty git repo at the given path +func InitGoGitRepo(path string) (*GoGitRepo, error) { + r, err := gogit.PlainInit(path, false) + if err != nil { + return nil, err + } + + k, err := defaultKeyring() + if err != nil { + return nil, err + } + + return &GoGitRepo{ + r: r, + path: filepath.Join(path, ".git"), + clocks: make(map[string]lamport.Clock), + indexes: make(map[string]bleve.Index), + keyring: k, + localStorage: osfs.New(filepath.Join(path, ".git", "git-bug")), + }, nil +} + +// InitBareGoGitRepo create a new --bare empty git repo at the given path +func InitBareGoGitRepo(path string) (*GoGitRepo, error) { + r, err := gogit.PlainInit(path, true) + if err != nil { + return nil, err + } + + k, err := defaultKeyring() + if err != nil { + return nil, err + } + + return &GoGitRepo{ + r: r, + path: path, + clocks: make(map[string]lamport.Clock), + indexes: make(map[string]bleve.Index), + keyring: k, + localStorage: osfs.New(filepath.Join(path, "git-bug")), + }, nil +} + func detectGitPath(path string) (string, error) { // normalize the path path, err := filepath.Abs(path) @@ -84,12 +141,12 @@ func detectGitPath(path string) (string, error) { } for { - fi, err := os.Stat(stdpath.Join(path, ".git")) + fi, err := os.Stat(filepath.Join(path, ".git")) if err == nil { if !fi.IsDir() { return "", fmt.Errorf(".git exist but is not a directory") } - return stdpath.Join(path, ".git"), nil + return filepath.Join(path, ".git"), nil } if !os.IsNotExist(err) { // unknown error @@ -117,7 +174,7 @@ func isGitDir(path string) (bool, error) { markers := []string{"HEAD", "objects", "refs"} for _, marker := range markers { - _, err := os.Stat(stdpath.Join(path, marker)) + _, err := os.Stat(filepath.Join(path, marker)) if err == nil { continue } @@ -132,44 +189,15 @@ func isGitDir(path string) (bool, error) { return true, nil } -// InitGoGitRepo create a new empty git repo at the given path -func InitGoGitRepo(path string) (*GoGitRepo, error) { - r, err := gogit.PlainInit(path, 
false) - if err != nil { - return nil, err - } - - k, err := defaultKeyring() - if err != nil { - return nil, err - } - - return &GoGitRepo{ - r: r, - path: path + "/.git", - clocks: make(map[string]lamport.Clock), - keyring: k, - }, nil -} - -// InitBareGoGitRepo create a new --bare empty git repo at the given path -func InitBareGoGitRepo(path string) (*GoGitRepo, error) { - r, err := gogit.PlainInit(path, true) - if err != nil { - return nil, err - } - - k, err := defaultKeyring() - if err != nil { - return nil, err +func (repo *GoGitRepo) Close() error { + var firstErr error + for _, index := range repo.indexes { + err := index.Close() + if err != nil && firstErr == nil { + firstErr = err + } } - - return &GoGitRepo{ - r: r, - path: path, - clocks: make(map[string]lamport.Clock), - keyring: k, - }, nil + return firstErr } // LocalConfig give access to the repository scoped configuration @@ -179,10 +207,7 @@ func (repo *GoGitRepo) LocalConfig() Config { // GlobalConfig give access to the global scoped configuration func (repo *GoGitRepo) GlobalConfig() Config { - // TODO: replace that with go-git native implementation once it's supported - // see: https://github.com/go-git/go-git - // see: https://github.com/src-d/go-git/issues/760 - return newGoGitGlobalConfig(repo.r) + return newGoGitGlobalConfig() } // AnyConfig give access to a merged local/global configuration @@ -195,11 +220,6 @@ func (repo *GoGitRepo) Keyring() Keyring { return repo.keyring } -// GetPath returns the path to the repo. 
-func (repo *GoGitRepo) GetPath() string { - return repo.path -} - // GetUserName returns the name the the user has used to configure git func (repo *GoGitRepo) GetUserName() (string, error) { return repo.AnyConfig().ReadString("user.name") @@ -244,7 +264,7 @@ func (repo *GoGitRepo) GetCoreEditor() (string, error) { } for _, cmd := range priorities { - if _, err = exec.LookPath(cmd); err == nil { + if _, err = execabs.LookPath(cmd); err == nil { return cmd, nil } @@ -270,13 +290,80 @@ func (repo *GoGitRepo) GetRemotes() (map[string]string, error) { return result, nil } -// FetchRefs fetch git refs from a remote -func (repo *GoGitRepo) FetchRefs(remote string, refSpec string) (string, error) { +// LocalStorage return a billy.Filesystem giving access to $RepoPath/.git/git-bug +func (repo *GoGitRepo) LocalStorage() billy.Filesystem { + return repo.localStorage +} + +// GetBleveIndex return a bleve.Index that can be used to index documents +func (repo *GoGitRepo) GetBleveIndex(name string) (bleve.Index, error) { + repo.indexesMutex.Lock() + defer repo.indexesMutex.Unlock() + + if index, ok := repo.indexes[name]; ok { + return index, nil + } + + path := filepath.Join(repo.path, "git-bug", "indexes", name) + + index, err := bleve.Open(path) + if err == nil { + repo.indexes[name] = index + return index, nil + } + + err = os.MkdirAll(path, os.ModePerm) + if err != nil { + return nil, err + } + + mapping := bleve.NewIndexMapping() + mapping.DefaultAnalyzer = "en" + + index, err = bleve.New(path, mapping) + if err != nil { + return nil, err + } + + repo.indexes[name] = index + + return index, nil +} + +// ClearBleveIndex will wipe the given index +func (repo *GoGitRepo) ClearBleveIndex(name string) error { + repo.indexesMutex.Lock() + defer repo.indexesMutex.Unlock() + + path := filepath.Join(repo.path, "git-bug", "indexes", name) + + err := os.RemoveAll(path) + if err != nil { + return err + } + + if index, ok := repo.indexes[name]; ok { + err = index.Close() + if err != 
nil { + return err + } + delete(repo.indexes, name) + } + + return nil +} + +// FetchRefs fetch git refs matching a directory prefix to a remote +// Ex: prefix="foo" will fetch any remote refs matching "refs/foo/*" locally. +// The equivalent git refspec would be "refs/foo/*:refs/remotes//foo/*" +func (repo *GoGitRepo) FetchRefs(remote string, prefix string) (string, error) { + refspec := fmt.Sprintf("refs/%s/*:refs/remotes/%s/%s/*", prefix, remote, prefix) + buf := bytes.NewBuffer(nil) err := repo.r.Fetch(&gogit.FetchOptions{ RemoteName: remote, - RefSpecs: []config.RefSpec{config.RefSpec(refSpec)}, + RefSpecs: []config.RefSpec{config.RefSpec(refspec)}, Progress: buf, }) if err == gogit.NoErrAlreadyUpToDate { @@ -289,13 +376,41 @@ func (repo *GoGitRepo) FetchRefs(remote string, refSpec string) (string, error) return buf.String(), nil } -// PushRefs push git refs to a remote -func (repo *GoGitRepo) PushRefs(remote string, refSpec string) (string, error) { +// PushRefs push git refs matching a directory prefix to a remote +// Ex: prefix="foo" will push any local refs matching "refs/foo/*" to the remote. +// The equivalent git refspec would be "refs/foo/*:refs/foo/*" +// +// Additionally, PushRefs will update the local references in refs/remotes//foo to match +// the remote state. +func (repo *GoGitRepo) PushRefs(remote string, prefix string) (string, error) { + refspec := fmt.Sprintf("refs/%s/*:refs/%s/*", prefix, prefix) + + remo, err := repo.r.Remote(remote) + if err != nil { + return "", err + } + + // to make sure that the push also create the corresponding refs/remotes//... references, + // we need to have a default fetch refspec configured on the remote, to make our refs "track" the remote ones. + // This does not change the config on disk, only on memory. 
+ hasCustomFetch := false + fetchRefspec := fmt.Sprintf("refs/%s/*:refs/remotes/%s/%s/*", prefix, remote, prefix) + for _, r := range remo.Config().Fetch { + if string(r) == fetchRefspec { + hasCustomFetch = true + break + } + } + + if !hasCustomFetch { + remo.Config().Fetch = append(remo.Config().Fetch, config.RefSpec(fetchRefspec)) + } + buf := bytes.NewBuffer(nil) - err := repo.r.Push(&gogit.PushOptions{ + err = remo.Push(&gogit.PushOptions{ RemoteName: remote, - RefSpecs: []config.RefSpec{config.RefSpec(refSpec)}, + RefSpecs: []config.RefSpec{config.RefSpec(refspec)}, Progress: buf, }) if err == gogit.NoErrAlreadyUpToDate { @@ -439,12 +554,13 @@ func (repo *GoGitRepo) ReadTree(hash Hash) ([]TreeEntry, error) { } // StoreCommit will store a Git commit with the given Git tree -func (repo *GoGitRepo) StoreCommit(treeHash Hash) (Hash, error) { - return repo.StoreCommitWithParent(treeHash, "") +func (repo *GoGitRepo) StoreCommit(treeHash Hash, parents ...Hash) (Hash, error) { + return repo.StoreSignedCommit(treeHash, nil, parents...) } -// StoreCommit will store a Git commit with the given Git tree -func (repo *GoGitRepo) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) { +// StoreCommit will store a Git commit with the given Git tree. If signKey is not nil, the commit +// will be signed accordingly. 
+func (repo *GoGitRepo) StoreSignedCommit(treeHash Hash, signKey *openpgp.Entity, parents ...Hash) (Hash, error) { cfg, err := repo.r.Config() if err != nil { return "", err @@ -465,8 +581,28 @@ func (repo *GoGitRepo) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, TreeHash: plumbing.NewHash(treeHash.String()), } - if parent != "" { - commit.ParentHashes = []plumbing.Hash{plumbing.NewHash(parent.String())} + for _, parent := range parents { + commit.ParentHashes = append(commit.ParentHashes, plumbing.NewHash(parent.String())) + } + + // Compute the signature if needed + if signKey != nil { + // first get the serialized commit + encoded := &plumbing.MemoryObject{} + if err := commit.Encode(encoded); err != nil { + return "", err + } + r, err := encoded.Reader() + if err != nil { + return "", err + } + + // sign the data + var sig bytes.Buffer + if err := openpgp.ArmoredDetachSign(&sig, signKey, r, nil); err != nil { + return "", err + } + commit.PGPSignature = sig.String() } obj := repo.r.Storer.NewEncodedObject() @@ -513,6 +649,14 @@ func (repo *GoGitRepo) FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, err return Hash(commits[0].Hash.String()), nil } +func (repo *GoGitRepo) ResolveRef(ref string) (Hash, error) { + r, err := repo.r.Reference(plumbing.ReferenceName(ref), false) + if err != nil { + return "", err + } + return Hash(r.Hash().String()), nil +} + // UpdateRef will create or update a Git reference func (repo *GoGitRepo) UpdateRef(ref string, hash Hash) error { return repo.r.Storer.SetReference(plumbing.NewHashReference(plumbing.ReferenceName(ref), plumbing.NewHash(hash.String()))) @@ -567,34 +711,48 @@ func (repo *GoGitRepo) CopyRef(source string, dest string) error { // ListCommits will return the list of tree hashes of a ref, in chronological order func (repo *GoGitRepo) ListCommits(ref string) ([]Hash, error) { - r, err := repo.r.Reference(plumbing.ReferenceName(ref), false) + return nonNativeListCommits(repo, ref) +} + +func (repo 
*GoGitRepo) ReadCommit(hash Hash) (Commit, error) { + commit, err := repo.r.CommitObject(plumbing.NewHash(hash.String())) if err != nil { - return nil, err + return Commit{}, err } - commit, err := repo.r.CommitObject(r.Hash()) - if err != nil { - return nil, err + parents := make([]Hash, len(commit.ParentHashes)) + for i, parentHash := range commit.ParentHashes { + parents[i] = Hash(parentHash.String()) } - hashes := []Hash{Hash(commit.Hash.String())} - for { - commit, err = commit.Parent(0) - if err == object.ErrParentNotFound { - break - } + result := Commit{ + Hash: hash, + Parents: parents, + TreeHash: Hash(commit.TreeHash.String()), + } + + if commit.PGPSignature != "" { + // I can't find a way to just remove the signature when reading the encoded commit so we need to + // re-encode the commit without signature. + + encoded := &plumbing.MemoryObject{} + err := commit.EncodeWithoutSignature(encoded) if err != nil { - return nil, err + return Commit{}, err } - if commit.NumParents() > 1 { - return nil, fmt.Errorf("multiple parents") + result.SignedData, err = encoded.Reader() + if err != nil { + return Commit{}, err } - hashes = append([]Hash{Hash(commit.Hash.String())}, hashes...) 
+ result.Signature, err = deArmorSignature(strings.NewReader(commit.PGPSignature)) + if err != nil { + return Commit{}, err + } } - return hashes, nil + return result, nil } func (repo *GoGitRepo) AllClocks() (map[string]lamport.Clock, error) { @@ -603,7 +761,7 @@ func (repo *GoGitRepo) AllClocks() (map[string]lamport.Clock, error) { result := make(map[string]lamport.Clock) - files, err := ioutil.ReadDir(stdpath.Join(repo.path, clockPath)) + files, err := ioutil.ReadDir(filepath.Join(repo.path, "git-bug", clockPath)) if os.IsNotExist(err) { return nil, nil } @@ -616,7 +774,7 @@ func (repo *GoGitRepo) AllClocks() (map[string]lamport.Clock, error) { if c, ok := repo.clocks[name]; ok { result[name] = c } else { - c, err := lamport.LoadPersistedClock(stdpath.Join(repo.path, clockPath, name)) + c, err := lamport.LoadPersistedClock(repo.LocalStorage(), filepath.Join(clockPath, name)) if err != nil { return nil, err } @@ -631,6 +789,9 @@ func (repo *GoGitRepo) AllClocks() (map[string]lamport.Clock, error) { // GetOrCreateClock return a Lamport clock stored in the Repo. // If the clock doesn't exist, it's created. 
func (repo *GoGitRepo) GetOrCreateClock(name string) (lamport.Clock, error) { + repo.clocksMutex.Lock() + defer repo.clocksMutex.Unlock() + c, err := repo.getClock(name) if err == nil { return c, nil @@ -639,12 +800,7 @@ func (repo *GoGitRepo) GetOrCreateClock(name string) (lamport.Clock, error) { return nil, err } - repo.clocksMutex.Lock() - defer repo.clocksMutex.Unlock() - - p := stdpath.Join(repo.path, clockPath, name) - - c, err = lamport.NewPersistedClock(p) + c, err = lamport.NewPersistedClock(repo.LocalStorage(), filepath.Join(clockPath, name)) if err != nil { return nil, err } @@ -654,16 +810,11 @@ func (repo *GoGitRepo) GetOrCreateClock(name string) (lamport.Clock, error) { } func (repo *GoGitRepo) getClock(name string) (lamport.Clock, error) { - repo.clocksMutex.Lock() - defer repo.clocksMutex.Unlock() - if c, ok := repo.clocks[name]; ok { return c, nil } - p := stdpath.Join(repo.path, clockPath, name) - - c, err := lamport.LoadPersistedClock(p) + c, err := lamport.LoadPersistedClock(repo.LocalStorage(), filepath.Join(clockPath, name)) if err == nil { repo.clocks[name] = c return c, nil @@ -702,3 +853,21 @@ func (repo *GoGitRepo) AddRemote(name string, url string) error { return err } + +// GetLocalRemote return the URL to use to add this repo as a local remote +func (repo *GoGitRepo) GetLocalRemote() string { + return repo.path +} + +// EraseFromDisk delete this repository entirely from the disk +func (repo *GoGitRepo) EraseFromDisk() error { + err := repo.Close() + if err != nil { + return err + } + + path := filepath.Clean(strings.TrimSuffix(repo.path, string(filepath.Separator)+".git")) + + // fmt.Println("Cleaning repo:", path) + return os.RemoveAll(path) +} diff --git a/migration3/after/repository/gogit_config.go b/migration3/after/repository/gogit_config.go index 2f9a4cc..891e3ff 100644 --- a/migration3/after/repository/gogit_config.go +++ b/migration3/after/repository/gogit_config.go @@ -24,7 +24,11 @@ func newGoGitLocalConfig(repo 
*gogit.Repository) *goGitConfig { } } -func newGoGitGlobalConfig(repo *gogit.Repository) *goGitConfig { +func newGoGitGlobalConfig() *goGitConfig { + // TODO: replace that with go-git native implementation once it's supported + // see: https://github.com/go-git/go-git + // see: https://github.com/src-d/go-git/issues/760 + return &goGitConfig{ ConfigRead: &goGitConfigReader{getConfig: func() (*config.Config, error) { return config.LoadConfig(config.GlobalScope) @@ -130,7 +134,7 @@ func (cr *goGitConfigReader) ReadString(key string) (string, error) { } return section.Option(optionName), nil default: - subsectionName := strings.Join(split[1:len(split)-2], ".") + subsectionName := strings.Join(split[1:len(split)-1], ".") optionName := split[len(split)-1] if !section.HasSubsection(subsectionName) { return "", ErrNoConfigEntry diff --git a/migration3/after/repository/gogit_testing.go b/migration3/after/repository/gogit_testing.go index f20ff6b..cad776b 100644 --- a/migration3/after/repository/gogit_testing.go +++ b/migration3/after/repository/gogit_testing.go @@ -3,6 +3,8 @@ package repository import ( "io/ioutil" "log" + + "github.com/99designs/keyring" ) // This is intended for testing only @@ -34,7 +36,11 @@ func CreateGoGitTestRepo(bare bool) TestedRepo { log.Fatal("failed to set user.email for test repository: ", err) } - return repo + // make sure we use a mock keyring for testing to not interact with the global system + return &replaceKeyring{ + TestedRepo: repo, + keyring: keyring.NewArrayKeyring(nil), + } } func SetupGoGitReposAndRemote() (repoA, repoB, remote TestedRepo) { @@ -42,14 +48,12 @@ func SetupGoGitReposAndRemote() (repoA, repoB, remote TestedRepo) { repoB = CreateGoGitTestRepo(false) remote = CreateGoGitTestRepo(true) - remoteAddr := "file://" + remote.GetPath() - - err := repoA.AddRemote("origin", remoteAddr) + err := repoA.AddRemote("origin", remote.GetLocalRemote()) if err != nil { log.Fatal(err) } - err = repoB.AddRemote("origin", remoteAddr) + 
err = repoB.AddRemote("origin", remote.GetLocalRemote()) if err != nil { log.Fatal(err) } diff --git a/migration3/after/repository/keyring.go b/migration3/after/repository/keyring.go index f690b0b..6cba303 100644 --- a/migration3/after/repository/keyring.go +++ b/migration3/after/repository/keyring.go @@ -2,7 +2,7 @@ package repository import ( "os" - "path" + "path/filepath" "github.com/99designs/keyring" ) @@ -15,7 +15,7 @@ var ErrKeyringKeyNotFound = keyring.ErrKeyNotFound type Keyring interface { // Returns an Item matching the key or ErrKeyringKeyNotFound Get(key string) (Item, error) - // Stores an Item on the keyring + // Stores an Item on the keyring. Set is idempotent. Set(item Item) error // Removes the item with matching key Remove(key string) error @@ -38,7 +38,7 @@ func defaultKeyring() (Keyring, error) { ServiceName: "git-bug", // Fallback encrypted file - FileDir: path.Join(ucd, "git-bug", "keyring"), + FileDir: filepath.Join(ucd, "git-bug", "keyring"), // As we write the file in the user's config directory, this file should already be protected by the OS against // other user's access. We actually don't terribly need to protect it further and a password prompt across all // UI's would be a pain. 
Therefore we use here a constant password so the file will be unreadable by generic file @@ -48,3 +48,13 @@ func defaultKeyring() (Keyring, error) { }, }) } + +// replaceKeyring allow to replace the Keyring of the underlying repo +type replaceKeyring struct { + TestedRepo + keyring Keyring +} + +func (rk replaceKeyring) Keyring() Keyring { + return rk.keyring +} diff --git a/migration3/after/repository/mock_repo.go b/migration3/after/repository/mock_repo.go index 6d0d388..eab526f 100644 --- a/migration3/after/repository/mock_repo.go +++ b/migration3/after/repository/mock_repo.go @@ -1,34 +1,48 @@ package repository import ( + "bytes" "crypto/sha1" "fmt" "strings" + "sync" "github.com/99designs/keyring" + "github.com/blevesearch/bleve" + "github.com/go-git/go-billy/v5" + "github.com/go-git/go-billy/v5/memfs" + "golang.org/x/crypto/openpgp" "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" ) -var _ ClockedRepo = &mockRepoForTest{} -var _ TestedRepo = &mockRepoForTest{} +var _ ClockedRepo = &mockRepo{} +var _ TestedRepo = &mockRepo{} -// mockRepoForTest defines an instance of Repo that can be used for testing. -type mockRepoForTest struct { +// mockRepo defines an instance of Repo that can be used for testing. 
+type mockRepo struct { *mockRepoConfig *mockRepoKeyring *mockRepoCommon + *mockRepoStorage + *mockRepoBleve *mockRepoData *mockRepoClock + *mockRepoTest } -func NewMockRepoForTest() *mockRepoForTest { - return &mockRepoForTest{ +func (m *mockRepo) Close() error { return nil } + +func NewMockRepo() *mockRepo { + return &mockRepo{ mockRepoConfig: NewMockRepoConfig(), mockRepoKeyring: NewMockRepoKeyring(), mockRepoCommon: NewMockRepoCommon(), + mockRepoStorage: NewMockRepoStorage(), + mockRepoBleve: newMockRepoBleve(), mockRepoData: NewMockRepoData(), mockRepoClock: NewMockRepoClock(), + mockRepoTest: NewMockRepoTest(), } } @@ -86,11 +100,6 @@ func NewMockRepoCommon() *mockRepoCommon { return &mockRepoCommon{} } -// GetPath returns the path to the repo. -func (r *mockRepoCommon) GetPath() string { - return "~/mockRepo/" -} - func (r *mockRepoCommon) GetUserName() (string, error) { return "René Descartes", nil } @@ -108,15 +117,72 @@ func (r *mockRepoCommon) GetCoreEditor() (string, error) { // GetRemotes returns the configured remotes repositories. 
func (r *mockRepoCommon) GetRemotes() (map[string]string, error) { return map[string]string{ - "origin": "git://github.com/MichaelMure/git-bug", + "origin": "git://github.com/MichaelMure/git-bug-migration/migration3/after", }, nil } +var _ RepoStorage = &mockRepoStorage{} + +type mockRepoStorage struct { + localFs billy.Filesystem +} + +func NewMockRepoStorage() *mockRepoStorage { + return &mockRepoStorage{localFs: memfs.New()} +} + +func (m *mockRepoStorage) LocalStorage() billy.Filesystem { + return m.localFs +} + +var _ RepoBleve = &mockRepoBleve{} + +type mockRepoBleve struct { + indexesMutex sync.Mutex + indexes map[string]bleve.Index +} + +func newMockRepoBleve() *mockRepoBleve { + return &mockRepoBleve{ + indexes: make(map[string]bleve.Index), + } +} + +func (m *mockRepoBleve) GetBleveIndex(name string) (bleve.Index, error) { + m.indexesMutex.Lock() + defer m.indexesMutex.Unlock() + + if index, ok := m.indexes[name]; ok { + return index, nil + } + + mapping := bleve.NewIndexMapping() + mapping.DefaultAnalyzer = "en" + + index, err := bleve.NewMemOnly(mapping) + if err != nil { + return nil, err + } + + m.indexes[name] = index + + return index, nil +} + +func (m *mockRepoBleve) ClearBleveIndex(name string) error { + m.indexesMutex.Lock() + defer m.indexesMutex.Unlock() + + delete(m.indexes, name) + return nil +} + var _ RepoData = &mockRepoData{} type commit struct { treeHash Hash - parent Hash + parents []Hash + sig string } type mockRepoData struct { @@ -135,13 +201,13 @@ func NewMockRepoData() *mockRepoData { } } -// PushRefs push git refs to a remote -func (r *mockRepoData) PushRefs(remote string, refSpec string) (string, error) { - return "", nil +func (r *mockRepoData) FetchRefs(remote string, prefix string) (string, error) { + panic("implement me") } -func (r *mockRepoData) FetchRefs(remote string, refSpec string) (string, error) { - return "", nil +// PushRefs push git refs to a remote +func (r *mockRepoData) PushRefs(remote string, prefix string) 
(string, error) { + panic("implement me") } func (r *mockRepoData) StoreData(data []byte) (Hash, error) { @@ -153,7 +219,6 @@ func (r *mockRepoData) StoreData(data []byte) (Hash, error) { func (r *mockRepoData) ReadData(hash Hash) ([]byte, error) { data, ok := r.blobs[hash] - if !ok { return nil, fmt.Errorf("unknown hash") } @@ -170,48 +235,103 @@ func (r *mockRepoData) StoreTree(entries []TreeEntry) (Hash, error) { return hash, nil } -func (r *mockRepoData) StoreCommit(treeHash Hash) (Hash, error) { - rawHash := sha1.Sum([]byte(treeHash)) - hash := Hash(fmt.Sprintf("%x", rawHash)) - r.commits[hash] = commit{ - treeHash: treeHash, +func (r *mockRepoData) ReadTree(hash Hash) ([]TreeEntry, error) { + var data string + + data, ok := r.trees[hash] + + if !ok { + // Git will understand a commit hash to reach a tree + commit, ok := r.commits[hash] + + if !ok { + return nil, fmt.Errorf("unknown hash") + } + + data, ok = r.trees[commit.treeHash] + + if !ok { + return nil, fmt.Errorf("unknown hash") + } } - return hash, nil + + return readTreeEntries(data) } -func (r *mockRepoData) StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) { - rawHash := sha1.Sum([]byte(treeHash + parent)) +func (r *mockRepoData) StoreCommit(treeHash Hash, parents ...Hash) (Hash, error) { + return r.StoreSignedCommit(treeHash, nil, parents...) +} + +func (r *mockRepoData) StoreSignedCommit(treeHash Hash, signKey *openpgp.Entity, parents ...Hash) (Hash, error) { + hasher := sha1.New() + hasher.Write([]byte(treeHash)) + for _, parent := range parents { + hasher.Write([]byte(parent)) + } + rawHash := hasher.Sum(nil) hash := Hash(fmt.Sprintf("%x", rawHash)) - r.commits[hash] = commit{ + c := commit{ treeHash: treeHash, - parent: parent, + parents: parents, } + if signKey != nil { + // unlike go-git, we only sign the tree hash for simplicity instead of all the fields (parents ...) 
+ var sig bytes.Buffer + if err := openpgp.DetachSign(&sig, signKey, strings.NewReader(string(treeHash)), nil); err != nil { + return "", err + } + c.sig = sig.String() + } + r.commits[hash] = c return hash, nil } -func (r *mockRepoData) UpdateRef(ref string, hash Hash) error { - r.refs[ref] = hash - return nil -} +func (r *mockRepoData) ReadCommit(hash Hash) (Commit, error) { + c, ok := r.commits[hash] + if !ok { + return Commit{}, fmt.Errorf("unknown commit") + } -func (r *mockRepoData) RemoveRef(ref string) error { - delete(r.refs, ref) - return nil -} + result := Commit{ + Hash: hash, + Parents: c.parents, + TreeHash: c.treeHash, + } -func (r *mockRepoData) RefExist(ref string) (bool, error) { - _, exist := r.refs[ref] - return exist, nil + if c.sig != "" { + // Note: this is actually incorrect as the signed data should be the full commit (+comment, +date ...) + // but only the tree hash work for our purpose here. + result.SignedData = strings.NewReader(string(c.treeHash)) + result.Signature = strings.NewReader(c.sig) + } + + return result, nil } -func (r *mockRepoData) CopyRef(source string, dest string) error { - hash, exist := r.refs[source] +func (r *mockRepoData) GetTreeHash(commit Hash) (Hash, error) { + c, ok := r.commits[commit] + if !ok { + return "", fmt.Errorf("unknown commit") + } - if !exist { - return fmt.Errorf("Unknown ref") + return c.treeHash, nil +} + +func (r *mockRepoData) ResolveRef(ref string) (Hash, error) { + h, ok := r.refs[ref] + if !ok { + return "", fmt.Errorf("unknown ref") } + return h, nil +} - r.refs[dest] = hash +func (r *mockRepoData) UpdateRef(ref string, hash Hash) error { + r.refs[ref] = hash + return nil +} + +func (r *mockRepoData) RemoveRef(ref string) error { + delete(r.refs, ref) return nil } @@ -227,46 +347,20 @@ func (r *mockRepoData) ListRefs(refPrefix string) ([]string, error) { return keys, nil } -func (r *mockRepoData) ListCommits(ref string) ([]Hash, error) { - var hashes []Hash - - hash := r.refs[ref] - - for { 
- commit, ok := r.commits[hash] - - if !ok { - break - } - - hashes = append([]Hash{hash}, hashes...) - hash = commit.parent - } - - return hashes, nil +func (r *mockRepoData) RefExist(ref string) (bool, error) { + _, exist := r.refs[ref] + return exist, nil } -func (r *mockRepoData) ReadTree(hash Hash) ([]TreeEntry, error) { - var data string - - data, ok := r.trees[hash] - - if !ok { - // Git will understand a commit hash to reach a tree - commit, ok := r.commits[hash] - - if !ok { - return nil, fmt.Errorf("unknown hash") - } - - data, ok = r.trees[commit.treeHash] +func (r *mockRepoData) CopyRef(source string, dest string) error { + hash, exist := r.refs[source] - if !ok { - return nil, fmt.Errorf("unknown hash") - } + if !exist { + return fmt.Errorf("Unknown ref") } - return readTreeEntries(data) + r.refs[dest] = hash + return nil } func (r *mockRepoData) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) { @@ -277,8 +371,11 @@ func (r *mockRepoData) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) if !ok { return "", fmt.Errorf("unknown commit %v", hash1) } - ancestor1 = append(ancestor1, c.parent) - hash1 = c.parent + if len(c.parents) == 0 { + break + } + ancestor1 = append(ancestor1, c.parents[0]) + hash1 = c.parents[0] } for { @@ -293,44 +390,39 @@ func (r *mockRepoData) FindCommonAncestor(hash1 Hash, hash2 Hash) (Hash, error) return "", fmt.Errorf("unknown commit %v", hash1) } - if c.parent == "" { + if c.parents[0] == "" { return "", fmt.Errorf("no ancestor found") } - hash2 = c.parent - } -} - -func (r *mockRepoData) GetTreeHash(commit Hash) (Hash, error) { - c, ok := r.commits[commit] - if !ok { - return "", fmt.Errorf("unknown commit") + hash2 = c.parents[0] } - - return c.treeHash, nil } -func (r *mockRepoData) AddRemote(name string, url string) error { - panic("implement me") +func (r *mockRepoData) ListCommits(ref string) ([]Hash, error) { + return nonNativeListCommits(r, ref) } var _ RepoClock = &mockRepoClock{} type mockRepoClock 
struct { + mu sync.Mutex clocks map[string]lamport.Clock } -func (r *mockRepoClock) AllClocks() (map[string]lamport.Clock, error) { - return r.clocks, nil -} - func NewMockRepoClock() *mockRepoClock { return &mockRepoClock{ clocks: make(map[string]lamport.Clock), } } +func (r *mockRepoClock) AllClocks() (map[string]lamport.Clock, error) { + return r.clocks, nil +} + func (r *mockRepoClock) GetOrCreateClock(name string) (lamport.Clock, error) { + r.mu.Lock() + defer r.mu.Unlock() + if c, ok := r.clocks[name]; ok { return c, nil } @@ -355,3 +447,24 @@ func (r *mockRepoClock) Witness(name string, time lamport.Time) error { } return c.Witness(time) } + +var _ repoTest = &mockRepoTest{} + +type mockRepoTest struct{} + +func NewMockRepoTest() *mockRepoTest { + return &mockRepoTest{} +} + +func (r *mockRepoTest) AddRemote(name string, url string) error { + panic("implement me") +} + +func (r mockRepoTest) GetLocalRemote() string { + panic("implement me") +} + +func (r mockRepoTest) EraseFromDisk() error { + // nothing to do + return nil +} diff --git a/migration3/after/repository/repo.go b/migration3/after/repository/repo.go index 4d66f21..30ce18d 100644 --- a/migration3/after/repository/repo.go +++ b/migration3/after/repository/repo.go @@ -3,12 +3,17 @@ package repository import ( "errors" + "io" + + "github.com/blevesearch/bleve" + "github.com/go-git/go-billy/v5" + "golang.org/x/crypto/openpgp" "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" ) var ( - // ErrNotARepo is the error returned when the git repo root wan't be found + // ErrNotARepo is the error returned when the git repo root can't be found ErrNotARepo = errors.New("not a git repository") // ErrClockNotExist is the error returned when a clock can't be found ErrClockNotExist = errors.New("clock doesn't exist") @@ -19,7 +24,16 @@ type Repo interface { RepoConfig RepoKeyring RepoCommon + RepoStorage + RepoBleve RepoData + + Close() error +} + +type RepoCommonStorage interface { + 
RepoCommon + RepoStorage } // ClockedRepo is a Repo that also has Lamport clocks @@ -48,9 +62,6 @@ type RepoKeyring interface { // RepoCommon represent the common function the we want all the repo to implement type RepoCommon interface { - // GetPath returns the path to the repo. - GetPath() string - // GetUserName returns the name the the user has used to configure git GetUserName() (string, error) @@ -64,13 +75,43 @@ type RepoCommon interface { GetRemotes() (map[string]string, error) } +// RepoStorage give access to the filesystem +type RepoStorage interface { + // LocalStorage return a billy.Filesystem giving access to $RepoPath/.git/git-bug + LocalStorage() billy.Filesystem +} + +// RepoBleve give access to Bleve to implement full-text search indexes. +type RepoBleve interface { + // GetBleveIndex return a bleve.Index that can be used to index documents + GetBleveIndex(name string) (bleve.Index, error) + + // ClearBleveIndex will wipe the given index + ClearBleveIndex(name string) error +} + +type Commit struct { + Hash Hash + Parents []Hash // hashes of the parents, if any + TreeHash Hash // hash of the git Tree + SignedData io.Reader // if signed, reader for the signed data (likely, the serialized commit) + Signature io.Reader // if signed, reader for the (non-armored) signature +} + // RepoData give access to the git data storage type RepoData interface { - // FetchRefs fetch git refs from a remote - FetchRefs(remote string, refSpec string) (string, error) - - // PushRefs push git refs to a remote - PushRefs(remote string, refSpec string) (string, error) + // FetchRefs fetch git refs matching a directory prefix to a remote + // Ex: prefix="foo" will fetch any remote refs matching "refs/foo/*" locally. 
+ // The equivalent git refspec would be "refs/foo/*:refs/remotes//foo/*" + FetchRefs(remote string, prefix string) (string, error) + + // PushRefs push git refs matching a directory prefix to a remote + // Ex: prefix="foo" will push any local refs matching "refs/foo/*" to the remote. + // The equivalent git refspec would be "refs/foo/*:refs/foo/*" + // + // Additionally, PushRefs will update the local references in refs/remotes//foo to match + // the remote state. + PushRefs(remote string, prefix string) (string, error) // StoreData will store arbitrary data and return the corresponding hash StoreData(data []byte) (Hash, error) @@ -86,21 +127,27 @@ type RepoData interface { ReadTree(hash Hash) ([]TreeEntry, error) // StoreCommit will store a Git commit with the given Git tree - StoreCommit(treeHash Hash) (Hash, error) + StoreCommit(treeHash Hash, parents ...Hash) (Hash, error) - // StoreCommit will store a Git commit with the given Git tree - StoreCommitWithParent(treeHash Hash, parent Hash) (Hash, error) + // StoreCommit will store a Git commit with the given Git tree. If signKey is not nil, the commit + // will be signed accordingly. + StoreSignedCommit(treeHash Hash, signKey *openpgp.Entity, parents ...Hash) (Hash, error) + + // ReadCommit read a Git commit and returns some of its characteristic + ReadCommit(hash Hash) (Commit, error) // GetTreeHash return the git tree hash referenced in a commit + // Deprecated GetTreeHash(commit Hash) (Hash, error) - // FindCommonAncestor will return the last common ancestor of two chain of commit - FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, error) + // ResolveRef returns the hash of the target commit of the given ref + ResolveRef(ref string) (Hash, error) // UpdateRef will create or update a Git reference UpdateRef(ref string, hash Hash) error // RemoveRef will remove a Git reference + // RemoveRef is idempotent. 
RemoveRef(ref string) error // ListRefs will return a list of Git ref matching the given refspec @@ -112,6 +159,10 @@ type RepoData interface { // CopyRef will create a new reference with the same value as another one CopyRef(source string, dest string) error + // FindCommonAncestor will return the last common ancestor of two chain of commit + // Deprecated + FindCommonAncestor(commit1 Hash, commit2 Hash) (Hash, error) + // ListCommits will return the list of tree hashes of a ref, in chronological order ListCommits(ref string) ([]Hash, error) } @@ -154,4 +205,10 @@ type TestedRepo interface { type repoTest interface { // AddRemote add a new remote to the repository AddRemote(name string, url string) error + + // GetLocalRemote return the URL to use to add this repo as a local remote + GetLocalRemote() string + + // EraseFromDisk delete this repository entirely from the disk + EraseFromDisk() error } diff --git a/migration3/after/repository/repo_testing.go b/migration3/after/repository/repo_testing.go index a4604ef..2541457 100644 --- a/migration3/after/repository/repo_testing.go +++ b/migration3/after/repository/repo_testing.go @@ -3,37 +3,27 @@ package repository import ( "log" "math/rand" - "os" - "strings" "testing" "github.com/stretchr/testify/require" + "golang.org/x/crypto/openpgp" "github.com/MichaelMure/git-bug-migration/migration3/after/util/lamport" ) +// TODO: add tests for RepoBleve +// TODO: add tests for RepoStorage + func CleanupTestRepos(repos ...Repo) { var firstErr error for _, repo := range repos { - path := repo.GetPath() - if strings.HasSuffix(path, "/.git") { - // for a normal repository (not --bare), we want to remove everything - // including the parent directory where files are checked out - path = strings.TrimSuffix(path, "/.git") - - // Testing non-bare repo should also check path is - // only .git (i.e. ./.git), but doing so, we should - // try to remove the current directory and hav some - // trouble. 
In the present case, this case should not - // occur. - // TODO consider warning or error when path == ".git" - } - // fmt.Println("Cleaning repo:", path) - err := os.RemoveAll(path) - if err != nil { - log.Println(err) - if firstErr == nil { - firstErr = err + if repo, ok := repo.(TestedRepo); ok { + err := repo.EraseFromDisk() + if err != nil { + log.Println(err) + if firstErr == nil { + firstErr = err + } } } } @@ -58,6 +48,7 @@ func RepoTest(t *testing.T, creator RepoCreator, cleaner RepoCleaner) { t.Run("Data", func(t *testing.T) { RepoDataTest(t, repo) + RepoDataSignatureTest(t, repo) }) t.Run("Config", func(t *testing.T) { @@ -149,7 +140,8 @@ func RepoDataTest(t *testing.T, repo RepoData) { require.NoError(t, err) require.Equal(t, treeHash1, treeHash1Read) - commit2, err := repo.StoreCommitWithParent(treeHash2, commit1) + // commit with a parent + commit2, err := repo.StoreCommit(treeHash2, commit1) require.NoError(t, err) require.True(t, commit2.IsValid()) @@ -162,6 +154,11 @@ func RepoDataTest(t *testing.T, repo RepoData) { require.NoError(t, err) require.Equal(t, tree1read, tree1) + c2, err := repo.ReadCommit(commit2) + require.NoError(t, err) + c2expected := Commit{Hash: commit2, Parents: []Hash{commit1}, TreeHash: treeHash2} + require.Equal(t, c2expected, c2) + // Ref exist1, err := repo.RefExist("refs/bugs/ref1") @@ -175,6 +172,10 @@ func RepoDataTest(t *testing.T, repo RepoData) { require.NoError(t, err) require.True(t, exist1) + h, err := repo.ResolveRef("refs/bugs/ref1") + require.NoError(t, err) + require.Equal(t, commit2, h) + ls, err := repo.ListRefs("refs/bugs") require.NoError(t, err) require.ElementsMatch(t, []string{"refs/bugs/ref1"}, ls) @@ -192,7 +193,7 @@ func RepoDataTest(t *testing.T, repo RepoData) { // Graph - commit3, err := repo.StoreCommitWithParent(treeHash1, commit1) + commit3, err := repo.StoreCommit(treeHash1, commit1) require.NoError(t, err) ancestorHash, err := repo.FindCommonAncestor(commit2, commit3) @@ -201,6 +202,58 @@ 
func RepoDataTest(t *testing.T, repo RepoData) { err = repo.RemoveRef("refs/bugs/ref1") require.NoError(t, err) + + // RemoveRef is idempotent + err = repo.RemoveRef("refs/bugs/ref1") + require.NoError(t, err) +} + +func RepoDataSignatureTest(t *testing.T, repo RepoData) { + data := randomData() + + blobHash, err := repo.StoreData(data) + require.NoError(t, err) + + treeHash, err := repo.StoreTree([]TreeEntry{ + { + ObjectType: Blob, + Hash: blobHash, + Name: "blob", + }, + }) + require.NoError(t, err) + + pgpEntity1, err := openpgp.NewEntity("", "", "", nil) + require.NoError(t, err) + keyring1 := openpgp.EntityList{pgpEntity1} + + pgpEntity2, err := openpgp.NewEntity("", "", "", nil) + require.NoError(t, err) + keyring2 := openpgp.EntityList{pgpEntity2} + + commitHash1, err := repo.StoreSignedCommit(treeHash, pgpEntity1) + require.NoError(t, err) + + commit1, err := repo.ReadCommit(commitHash1) + require.NoError(t, err) + + _, err = openpgp.CheckDetachedSignature(keyring1, commit1.SignedData, commit1.Signature) + require.NoError(t, err) + + _, err = openpgp.CheckDetachedSignature(keyring2, commit1.SignedData, commit1.Signature) + require.Error(t, err) + + commitHash2, err := repo.StoreSignedCommit(treeHash, pgpEntity1, commitHash1) + require.NoError(t, err) + + commit2, err := repo.ReadCommit(commitHash2) + require.NoError(t, err) + + _, err = openpgp.CheckDetachedSignature(keyring1, commit2.SignedData, commit2.Signature) + require.NoError(t, err) + + _, err = openpgp.CheckDetachedSignature(keyring2, commit2.SignedData, commit2.Signature) + require.Error(t, err) } // helper to test a RepoClock diff --git a/migration3/after/repository/tree_entry.go b/migration3/after/repository/tree_entry.go index 6c5ec1a..9d70814 100644 --- a/migration3/after/repository/tree_entry.go +++ b/migration3/after/repository/tree_entry.go @@ -100,3 +100,13 @@ func readTreeEntries(s string) ([]TreeEntry, error) { return casted, nil } + +// SearchTreeEntry search a TreeEntry by name from 
an array +func SearchTreeEntry(entries []TreeEntry, name string) (TreeEntry, bool) { + for _, entry := range entries { + if entry.Name == name { + return entry, true + } + } + return TreeEntry{}, false +} diff --git a/migration3/after/util/lamport/clock_testing.go b/migration3/after/util/lamport/clock_testing.go index 4bf6d2b..de66c5c 100644 --- a/migration3/after/util/lamport/clock_testing.go +++ b/migration3/after/util/lamport/clock_testing.go @@ -14,11 +14,11 @@ func testClock(t *testing.T, c Clock) { assert.Equal(t, Time(2), val) assert.Equal(t, Time(2), c.Time()) - err = c.Witness(41) + err = c.Witness(42) assert.NoError(t, err) assert.Equal(t, Time(42), c.Time()) - err = c.Witness(41) + err = c.Witness(42) assert.NoError(t, err) assert.Equal(t, Time(42), c.Time()) diff --git a/migration3/after/util/lamport/mem_clock.go b/migration3/after/util/lamport/mem_clock.go index f113b50..d824d83 100644 --- a/migration3/after/util/lamport/mem_clock.go +++ b/migration3/after/util/lamport/mem_clock.go @@ -25,6 +25,14 @@ */ +// Note: this code originally originated from Hashicorp's Serf but has been changed since to fit git-bug's needs. + +// Note: this Lamport clock implementation is different than the algorithms you can find, notably Wikipedia or the +// original Serf implementation. The reason lies in what constitutes an event in this distributed system. +// Commonly, events happen when messages are sent or received, whereas in git-bug events happen when some data is +// written, but *not* when read. This is why Witness sets the time to the max seen value instead of max seen value +1. +// See https://cs.stackexchange.com/a/133730/129795 + package lamport import ( @@ -72,12 +80,12 @@ WITNESS: // If the other value is old, we do not need to do anything cur := atomic.LoadUint64(&mc.counter) other := uint64(v) - if other < cur { + if other <= cur { return nil } // Ensure that our local clock is at least one ahead.
- if !atomic.CompareAndSwapUint64(&mc.counter, cur, other+1) { + if !atomic.CompareAndSwapUint64(&mc.counter, cur, other) { // CAS: CompareAndSwap // The CAS failed, so we just retry. Eventually our CAS should // succeed or a future witness will pass us by and our witness diff --git a/migration3/after/util/lamport/persisted_clock.go b/migration3/after/util/lamport/persisted_clock.go index e70b01e..b9246f7 100644 --- a/migration3/after/util/lamport/persisted_clock.go +++ b/migration3/after/util/lamport/persisted_clock.go @@ -5,30 +5,28 @@ import ( "fmt" "io/ioutil" "os" - "path/filepath" + + "github.com/go-git/go-billy/v5" + "github.com/go-git/go-billy/v5/util" ) var ErrClockNotExist = errors.New("clock doesn't exist") type PersistedClock struct { *MemClock + root billy.Filesystem filePath string } // NewPersistedClock create a new persisted Lamport clock -func NewPersistedClock(filePath string) (*PersistedClock, error) { +func NewPersistedClock(root billy.Filesystem, filePath string) (*PersistedClock, error) { clock := &PersistedClock{ MemClock: NewMemClock(), + root: root, filePath: filePath, } - dir := filepath.Dir(filePath) - err := os.MkdirAll(dir, 0777) - if err != nil { - return nil, err - } - - err = clock.Write() + err := clock.Write() if err != nil { return nil, err } @@ -37,8 +35,9 @@ func NewPersistedClock(filePath string) (*PersistedClock, error) { } // LoadPersistedClock load a persisted Lamport clock from a file -func LoadPersistedClock(filePath string) (*PersistedClock, error) { +func LoadPersistedClock(root billy.Filesystem, filePath string) (*PersistedClock, error) { clock := &PersistedClock{ + root: root, filePath: filePath, } @@ -71,13 +70,19 @@ func (pc *PersistedClock) Witness(time Time) error { } func (pc *PersistedClock) read() error { - content, err := ioutil.ReadFile(pc.filePath) + f, err := pc.root.Open(pc.filePath) if os.IsNotExist(err) { return ErrClockNotExist } if err != nil { return err } + defer f.Close() + + content, err := 
ioutil.ReadAll(f) + if err != nil { + return err + } var value uint64 n, err := fmt.Sscanf(string(content), "%d", &value) @@ -96,5 +101,5 @@ func (pc *PersistedClock) read() error { func (pc *PersistedClock) Write() error { data := []byte(fmt.Sprintf("%d", pc.counter)) - return ioutil.WriteFile(pc.filePath, data, 0644) + return util.WriteFile(pc.root, pc.filePath, data, 0644) } diff --git a/migration3/migration3.go b/migration3/migration3.go index 14e5125..ecae320 100644 --- a/migration3/migration3.go +++ b/migration3/migration3.go @@ -18,7 +18,8 @@ import ( type Migration3 struct{} func (m *Migration3) Description() string { - return "Make bug and identities independent from the storage by making the ID generation self-contained" + return "Make bug and identities independent from the storage by making the ID generation self-contained. " + + "Also, migrate to the new full DAG entity data model." } func (m *Migration3) Run(repoPath string) error { @@ -27,7 +28,7 @@ func (m *Migration3) Run(repoPath string) error { return err } - newRepo, err := afterrepo.NewGoGitRepo(repoPath, nil) + newRepo, err := afterrepo.OpenGoGitRepo(repoPath, nil) if err != nil { return err } @@ -36,12 +37,14 @@ func (m *Migration3) Run(repoPath string) error { } func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.ClockedRepo) error { - identities := beforeidentity.ReadAllLocal(oldRepo) - bugs := beforebug.ReadAllLocal(oldRepo) + userId, err := beforeidentity.GetUserIdentityId(oldRepo) + if err != nil && err != beforeidentity.ErrNoIdentitySet { + return err + } migratedIdentities := map[beforeentity.Id]*afteridentity.Identity{} - for streamedIdentity := range identities { + for streamedIdentity := range beforeidentity.ReadAllLocal(oldRepo) { if streamedIdentity.Err != nil { if errors.Is(streamedIdentity.Err, beforeidentity.ErrInvalidFormatVersion) { fmt.Print("skipping bug, already updated\n") @@ -71,7 +74,7 @@ func (m *Migration3) migrate(oldRepo 
beforerepo.ClockedRepo, newRepo afterrepo.C fmt.Printf("migrated to %s\n", newIdentity.Id().Human()) } - for streamedBug := range bugs { + for streamedBug := range beforebug.ReadAllLocal(oldRepo) { if streamedBug.Err != nil { if streamedBug.Err != beforebug.ErrInvalidFormatVersion { fmt.Printf("got error when reading bug, assuming data is already migrated: %q\n", streamedBug.Err) @@ -102,6 +105,14 @@ func (m *Migration3) migrate(oldRepo beforerepo.ClockedRepo, newRepo afterrepo.C } } + if userId != beforeentity.UnsetId { + newUserId := migratedIdentities[userId] + err = afteridentity.SetUserIdentity(newRepo, newUserId) + if err != nil { + return err + } + } + return nil } diff --git a/migration3/migration3_test.go b/migration3/migration3_test.go index 77c1b27..0c8bf2d 100644 --- a/migration3/migration3_test.go +++ b/migration3/migration3_test.go @@ -38,7 +38,7 @@ func TestMigrate23(t *testing.T) { oldRepo, err := beforerepo.InitGoGitRepo(dir) require.Nil(t, err, "got error when initializing old repository") - newRepo, err := afterrepo.NewGoGitRepo(dir, nil) + newRepo, err := afterrepo.OpenGoGitRepo(dir, nil) require.Nil(t, err, "got error when initializing new repository") oldVinc := beforeidentity.NewIdentityFull( @@ -62,20 +62,17 @@ func TestMigrate23(t *testing.T) { err = m.migrate(oldRepo, newRepo) require.Nil(t, err, "got error when migrating repository") - bugs1 := afterbug.ReadAllLocal(newRepo) + bugs1 := afterbug.ReadAll(newRepo) bug1 := (<-bugs1).Bug - operations := afterbug.NewOperationIterator(bug1) - require.Equal(t, true, operations.Next(), "unable to get first operation") - - operation := operations.Value() + operation := bug1.Operations()[0] createOperation, ok := operation.(*afterbug.CreateOperation) require.True(t, ok) require.Equal(t, title, createOperation.Title) require.Equal(t, unix, createOperation.UnixTime) require.Equal(t, message, createOperation.Message) - author := operation.GetAuthor() + author := operation.Author() require.Equal(t, 
oldVinc.Name(), author.Name()) require.Equal(t, oldVinc.Login(), author.Login()) require.Equal(t, oldVinc.Email(), author.Email()) From 3561cc9be24af12c7e153774a6717521ce487d17 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20Mur=C3=A9?= Date: Sun, 4 Apr 2021 14:03:44 +0200 Subject: [PATCH 9/9] root: data loss disclaimer and prompt for explicit agreement --- root.go | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/root.go b/root.go index 583883d..8536c70 100644 --- a/root.go +++ b/root.go @@ -13,8 +13,13 @@ import ( const rootCommandName = "git-bug-migration" +type rootOpts struct { + forReal bool +} + func NewRootCommand() *cobra.Command { env := newEnv() + opts := rootOpts{} migrations := []Migration{ &migration1.Migration1{}, @@ -36,19 +41,30 @@ To migrate a repository, go to the corresponding repository and run "git-bug-mig PreRunE: findRepo(env), RunE: func(_ *cobra.Command, _ []string) error { - return runRootCmd(env, migrations) + return runRootCmd(env, opts, migrations) }, SilenceUsage: true, DisableAutoGenTag: true, } + flags := cmd.Flags() + flags.BoolVar(&opts.forReal, "for-real", false, "Indicate that you really want to run this tool and possibly ruin your data.") + cmd.AddCommand(newVersionCommand()) return cmd } -func runRootCmd(env *Env, migrations []Migration) error { +func runRootCmd(env *Env, opts rootOpts, migrations []Migration) error { + if !opts.forReal { + env.err.Println("DISCLAIMER: This tool exists for your convenience to migrate your data and allows git-bug's authors" + + " to break things and make it better. However, this migration tool is quite crude and experimental. DO NOT TRUST IT BLINDLY.\n\n" + + "Please make a backup of your .git folder before running it.\n\n" + + "When done, run this tool again with the --for-real flag.") + os.Exit(1) + } + for i, migration := range migrations { if i > 0 { env.out.Println()