Refactor repo filtering and processing logic and GitLab Integration tests (#551)

This commit is contained in:
gabrie30 2025-08-24 11:34:33 -07:00 committed by GitHub
parent 03f798736d
commit be75c52bc9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
20 changed files with 4024 additions and 1588 deletions

4
.gitignore vendored
View File

@ -5,3 +5,7 @@ debug
coverage.out
dist/
# GitLab integration test Go binaries (platform-specific, built locally)
scripts/local-gitlab/seeder/gitlab-seeder
scripts/local-gitlab/test-runner/gitlab-test-runner

View File

@ -10,10 +10,8 @@ import (
"net/url"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"sync"
"time"
"github.com/gabrie30/ghorg/colorlog"
@ -439,74 +437,6 @@ func readGhorgIgnore() ([]string, error) {
return lines, scanner.Err()
}
func filterByRegexMatch(repos []scm.Repo) []scm.Repo {
filteredRepos := []scm.Repo{}
regex := fmt.Sprint(os.Getenv("GHORG_MATCH_REGEX"))
for i, r := range repos {
re := regexp.MustCompile(regex)
match := re.FindString(r.Name)
if match != "" {
filteredRepos = append(filteredRepos, repos[i])
}
}
return filteredRepos
}
func filterByExcludeRegexMatch(repos []scm.Repo) []scm.Repo {
filteredRepos := []scm.Repo{}
regex := fmt.Sprint(os.Getenv("GHORG_EXCLUDE_MATCH_REGEX"))
for i, r := range repos {
exclude := false
re := regexp.MustCompile(regex)
match := re.FindString(r.Name)
if match != "" {
exclude = true
}
if !exclude {
filteredRepos = append(filteredRepos, repos[i])
}
}
return filteredRepos
}
func filterByMatchPrefix(repos []scm.Repo) []scm.Repo {
filteredRepos := []scm.Repo{}
for i, r := range repos {
pfs := strings.Split(os.Getenv("GHORG_MATCH_PREFIX"), ",")
for _, p := range pfs {
if strings.HasPrefix(strings.ToLower(r.Name), strings.ToLower(p)) {
filteredRepos = append(filteredRepos, repos[i])
}
}
}
return filteredRepos
}
func filterByExcludeMatchPrefix(repos []scm.Repo) []scm.Repo {
filteredRepos := []scm.Repo{}
for i, r := range repos {
var exclude bool
pfs := strings.Split(os.Getenv("GHORG_EXCLUDE_MATCH_PREFIX"), ",")
for _, p := range pfs {
if strings.HasPrefix(strings.ToLower(r.Name), strings.ToLower(p)) {
exclude = true
}
}
if !exclude {
filteredRepos = append(filteredRepos, repos[i])
}
}
return filteredRepos
}
func hasRepoNameCollisions(repos []scm.Repo) (map[string]bool, bool) {
repoNameWithCollisions := make(map[string]bool)
@ -625,30 +555,9 @@ func getRelativePathRepositories(root string) ([]string, error) {
// CloneAllRepos clones all repos
func CloneAllRepos(git git.Gitter, cloneTargets []scm.Repo) {
// Filter repos that have attributes that don't need specific scm api calls
if os.Getenv("GHORG_MATCH_REGEX") != "" {
colorlog.PrintInfo("Filtering repos down by including regex matches...")
cloneTargets = filterByRegexMatch(cloneTargets)
}
if os.Getenv("GHORG_EXCLUDE_MATCH_REGEX") != "" {
colorlog.PrintInfo("Filtering repos down by excluding regex matches...")
cloneTargets = filterByExcludeRegexMatch(cloneTargets)
}
if os.Getenv("GHORG_MATCH_PREFIX") != "" {
colorlog.PrintInfo("Filtering repos down by including prefix matches...")
cloneTargets = filterByMatchPrefix(cloneTargets)
}
if os.Getenv("GHORG_EXCLUDE_MATCH_PREFIX") != "" {
colorlog.PrintInfo("Filtering repos down by excluding prefix matches...")
cloneTargets = filterByExcludeMatchPrefix(cloneTargets)
}
if os.Getenv("GHORG_TARGET_REPOS_PATH") != "" {
colorlog.PrintInfo("Filtering repos down by target repos path...")
cloneTargets = filterByTargetReposPath(cloneTargets)
}
cloneTargets = filterByGhorgignore(cloneTargets)
// Initialize filter and apply all filtering
filter := NewRepositoryFilter()
cloneTargets = filter.ApplyAllFilters(cloneTargets)
totalResourcesToClone, reposToCloneCount, snippetToCloneCount, wikisToCloneCount := getCloneableInventory(cloneTargets)
@ -682,11 +591,8 @@ func CloneAllRepos(git git.Gitter, cloneTargets []scm.Repo) {
limit := limiter.NewConcurrencyLimiter(l)
var cloneCount, pulledCount, updateRemoteCount, newCommits int
// maps in go are not safe for concurrent use
var mutex = &sync.RWMutex{}
var untouchedReposToPrune []string
// Initialize repository processor
processor := NewRepositoryProcessor(git)
for i := range cloneTargets {
repo := cloneTargets[i]
@ -707,307 +613,16 @@ func CloneAllRepos(git git.Gitter, cloneTargets []scm.Repo) {
repoSlug = repo.Path
}
mutex.Lock()
var inHash bool
if repo.IsGitLabSnippet && !repo.IsGitLabRootLevelSnippet {
inHash = repoNameWithCollisions[repo.GitLabSnippetInfo.NameOfRepo]
} else {
inHash = repoNameWithCollisions[repo.Name]
}
mutex.Unlock()
// Only GitLab repos can have collisions due to groups and subgroups
// If there are collisions and this is a repo with a naming collision change name to avoid collisions
if hasCollisions && inHash {
repoSlug = trimCollisionFilename(strings.Replace(repo.Path, string(os.PathSeparator), "_", -1))
if repo.IsWiki {
if !strings.HasSuffix(repoSlug, ".wiki") {
repoSlug = repoSlug + ".wiki"
}
}
if repo.IsGitLabSnippet && !repo.IsGitLabRootLevelSnippet {
if !strings.HasSuffix(repoSlug, ".snippets") {
repoSlug = repoSlug + ".snippets"
}
}
mutex.Lock()
slugCollision := repoNameWithCollisions[repoSlug]
mutex.Unlock()
// If a collision has another collision with trimmed name append a number
if ok := slugCollision; ok {
repoSlug = fmt.Sprintf("_%v_%v", strconv.Itoa(i), repoSlug)
} else {
mutex.Lock()
repoNameWithCollisions[repoSlug] = true
mutex.Unlock()
}
}
if repo.IsWiki {
if !strings.HasSuffix(repoSlug, ".wiki") {
repoSlug = repoSlug + ".wiki"
}
}
if repo.IsGitLabSnippet && !repo.IsGitLabRootLevelSnippet {
if !strings.HasSuffix(repoSlug, ".snippets") {
repoSlug = repoSlug + ".snippets"
}
}
repo.HostPath = filepath.Join(outputDirAbsolutePath, repoSlug)
if repo.IsGitLabRootLevelSnippet {
repo.HostPath = filepath.Join(outputDirAbsolutePath, "_ghorg_root_level_snippets", repo.GitLabSnippetInfo.Title+"-"+repo.GitLabSnippetInfo.ID)
} else if repo.IsGitLabSnippet {
repo.HostPath = filepath.Join(outputDirAbsolutePath, repoSlug, repo.GitLabSnippetInfo.Title+"-"+repo.GitLabSnippetInfo.ID)
}
repoWillBePulled := repoExistsLocally(repo)
// Repos are considered untouched if
// 1. No new branches or zero branches
// 2. No new commits
// 3. No modified changes
if os.Getenv("GHORG_PRUNE_UNTOUCHED") == "true" && repoWillBePulled {
git.FetchCloneBranch(repo)
branches, err := git.Branch(repo)
if err != nil {
colorlog.PrintError(fmt.Sprintf("Failed to list local branches for repository %s: %v", repo.Name, err))
return
}
// Delete if it has no branches
if branches == "" {
untouchedReposToPrune = append(untouchedReposToPrune, repo.HostPath)
return
}
if len(strings.Split(strings.TrimSpace(branches), "\n")) > 1 {
return
}
status, err := git.ShortStatus(repo)
if err != nil {
colorlog.PrintError(fmt.Sprintf("Failed to get short status for repository %s: %v", repo.Name, err))
return
}
if status != "" {
return
}
// Check for new commits on the branch that exist locally but not on the remote
commits, err := git.RevListCompare(repo, "HEAD", "@{u}")
if err != nil {
colorlog.PrintError(fmt.Sprintf("Failed to get commit differences for repository %s. The repository may be empty or does not have a .git directory. Error: %v", repo.Name, err))
return
}
if commits != "" {
return
}
untouchedReposToPrune = append(untouchedReposToPrune, repo.HostPath)
}
// Don't clone any new repos when prune untouched is active
if os.Getenv("GHORG_PRUNE_UNTOUCHED") == "true" {
return
}
action := "cloning"
if repoWillBePulled {
// prevents git from asking for user for credentials, needs to be unset so creds aren't stored
err := git.SetOriginWithCredentials(repo)
if err != nil {
e := fmt.Sprintf("Problem setting remote with credentials on: %s Error: %v", repo.Name, err)
cloneErrors = append(cloneErrors, e)
return
}
if os.Getenv("GHORG_BACKUP") == "true" {
err := git.UpdateRemote(repo)
action = "updating remote"
// Theres no way to tell if a github repo has a wiki to clone
if err != nil && repo.IsWiki {
e := fmt.Sprintf("Wiki may be enabled but there was no content to clone on: %s Error: %v", repo.URL, err)
cloneInfos = append(cloneInfos, e)
return
}
if err != nil {
e := fmt.Sprintf("Could not update remotes: %s Error: %v", repo.URL, err)
cloneErrors = append(cloneErrors, e)
return
}
updateRemoteCount++
} else if os.Getenv("GHORG_NO_CLEAN") == "true" {
action = "fetching"
err := git.FetchAll(repo)
// Theres no way to tell if a github repo has a wiki to clone
if err != nil && repo.IsWiki {
e := fmt.Sprintf("Wiki may be enabled but there was no content to clone on: %s Error: %v", repo.URL, err)
cloneInfos = append(cloneInfos, e)
return
}
if err != nil {
e := fmt.Sprintf("Could not fetch remotes: %s Error: %v", repo.URL, err)
cloneErrors = append(cloneErrors, e)
return
}
} else {
if os.Getenv("GHORG_FETCH_ALL") == "true" {
err = git.FetchAll(repo)
if err != nil {
e := fmt.Sprintf("Could not fetch remotes: %s Error: %v", repo.URL, err)
cloneErrors = append(cloneErrors, e)
return
}
}
err := git.Checkout(repo)
if err != nil {
git.FetchCloneBranch(repo)
// Retry checkout
errRetry := git.Checkout(repo)
if errRetry != nil {
hasRemoteHeads, errHasRemoteHeads := git.HasRemoteHeads(repo)
if errHasRemoteHeads != nil {
e := fmt.Sprintf("Could not checkout %s, branch may not exist or may not have any contents/commits, no changes made on: %s Errors: %v %v", repo.CloneBranch, repo.URL, errRetry, errHasRemoteHeads)
cloneErrors = append(cloneErrors, e)
return
}
if hasRemoteHeads {
// weird, should not happen, return original checkout error
e := fmt.Sprintf("Could not checkout %s, branch may not exist or may not have any contents/commits, no changes made on: %s Error: %v", repo.CloneBranch, repo.URL, errRetry)
cloneErrors = append(cloneErrors, e)
return
} else {
// this is _just_ an empty repository
e := fmt.Sprintf("Could not checkout %s due to repository being empty, no changes made on: %s", repo.CloneBranch, repo.URL)
cloneInfos = append(cloneInfos, e)
return
}
}
}
count, _ := git.RepoCommitCount(repo)
if err != nil {
e := fmt.Sprintf("Problem trying to get pre pull commit count for on repo: %s", repo.URL)
cloneInfos = append(cloneInfos, e)
}
repo.Commits.CountPrePull = count
err = git.Clean(repo)
if err != nil {
e := fmt.Sprintf("Problem running git clean: %s Error: %v", repo.URL, err)
cloneErrors = append(cloneErrors, e)
return
}
err = git.Reset(repo)
if err != nil {
e := fmt.Sprintf("Problem resetting branch: %s for: %s Error: %v", repo.CloneBranch, repo.URL, err)
cloneErrors = append(cloneErrors, e)
return
}
err = git.Pull(repo)
if err != nil {
e := fmt.Sprintf("Problem trying to pull branch: %v for: %s Error: %v", repo.CloneBranch, repo.URL, err)
cloneErrors = append(cloneErrors, e)
return
}
count, err = git.RepoCommitCount(repo)
if err != nil {
e := fmt.Sprintf("Problem trying to get post pull commit count for on repo: %s", repo.URL)
cloneInfos = append(cloneInfos, e)
}
repo.Commits.CountPostPull = count
repo.Commits.CountDiff = (repo.Commits.CountPostPull - repo.Commits.CountPrePull)
newCommits = (newCommits + repo.Commits.CountDiff)
action = "pulling"
pulledCount++
}
err = git.SetOrigin(repo)
if err != nil {
e := fmt.Sprintf("Problem resetting remote: %s Error: %v", repo.Name, err)
cloneErrors = append(cloneErrors, e)
return
}
} else {
// if https clone and github/gitlab add personal access token to url
err = git.Clone(repo)
// Theres no way to tell if a github repo has a wiki to clone
if err != nil && repo.IsWiki {
e := fmt.Sprintf("Wiki may be enabled but there was no content to clone: %s Error: %v", repo.URL, err)
cloneInfos = append(cloneInfos, e)
return
}
if err != nil {
e := fmt.Sprintf("Problem trying to clone: %s Error: %v", repo.URL, err)
cloneErrors = append(cloneErrors, e)
return
}
if os.Getenv("GHORG_BRANCH") != "" {
err := git.Checkout(repo)
if err != nil {
e := fmt.Sprintf("Could not checkout out %s, branch may not exist or may not have any contents/commits, no changes to: %s Error: %v", repo.CloneBranch, repo.URL, err)
cloneInfos = append(cloneInfos, e)
return
}
}
cloneCount++
// TODO: make configs around remote name
// we clone with api-key in clone url
err = git.SetOrigin(repo)
// if repo has wiki, but content does not exist this is going to error
if err != nil {
e := fmt.Sprintf("Problem trying to set remote: %s Error: %v", repo.URL, err)
cloneErrors = append(cloneErrors, e)
return
}
if os.Getenv("GHORG_FETCH_ALL") == "true" {
err = git.FetchAll(repo)
if err != nil {
e := fmt.Sprintf("Could not fetch remotes: %s Error: %v", repo.URL, err)
cloneErrors = append(cloneErrors, e)
return
}
}
}
if repoWillBePulled && repo.Commits.CountDiff > 0 {
colorlog.PrintSuccess(fmt.Sprintf("Success %s %s, branch: %s, new commits: %d", action, repo.URL, repo.CloneBranch, repo.Commits.CountDiff))
} else {
colorlog.PrintSuccess(fmt.Sprintf("Success %s %s, branch: %s", action, repo.URL, repo.CloneBranch))
}
processor.ProcessRepository(&repo, repoNameWithCollisions, hasCollisions, repoSlug, i)
})
}
limit.WaitAndClose()
// Get statistics and untouched repos from processor
stats := processor.GetStats()
untouchedReposToPrune := processor.GetUntouchedRepos()
var untouchedPrunes int
if os.Getenv("GHORG_PRUNE_UNTOUCHED") == "true" && len(untouchedReposToPrune) > 0 {
@ -1030,8 +645,12 @@ func CloneAllRepos(git git.Gitter, cloneTargets []scm.Repo) {
}
}
// Update global error/info arrays for backward compatibility
cloneInfos = stats.CloneInfos
cloneErrors = stats.CloneErrors
printRemainingMessages()
printCloneStatsMessage(cloneCount, pulledCount, updateRemoteCount, newCommits, untouchedPrunes)
printCloneStatsMessage(stats.CloneCount, stats.PulledCount, stats.UpdateRemoteCount, stats.NewCommits, untouchedPrunes)
if hasCollisions {
fmt.Println("")
@ -1048,8 +667,8 @@ func CloneAllRepos(git git.Gitter, cloneTargets []scm.Repo) {
}
var pruneCount int
cloneInfosCount := len(cloneInfos)
cloneErrorsCount := len(cloneErrors)
cloneInfosCount := len(stats.CloneInfos)
cloneErrorsCount := len(stats.CloneErrors)
allReposToCloneCount := len(cloneTargets)
// Now, clean up local repos that don't exist in remote, if prune flag is set
if os.Getenv("GHORG_PRUNE") == "true" {
@ -1067,7 +686,7 @@ func CloneAllRepos(git git.Gitter, cloneTargets []scm.Repo) {
// This needs to be called after printFinishedWithDirSize()
if os.Getenv("GHORG_STATS_ENABLED") == "true" {
date := time.Now().Format("2006-01-02 15:04:05")
writeGhorgStats(date, allReposToCloneCount, cloneCount, pulledCount, cloneInfosCount, cloneErrorsCount, updateRemoteCount, newCommits, pruneCount, hasCollisions)
writeGhorgStats(date, allReposToCloneCount, stats.CloneCount, stats.PulledCount, cloneInfosCount, cloneErrorsCount, stats.UpdateRemoteCount, stats.NewCommits, pruneCount, hasCollisions)
}
if os.Getenv("GHORG_DONT_EXIT_UNDER_TEST") != "true" {

257
cmd/repository_filter.go Normal file
View File

@ -0,0 +1,257 @@
package cmd
import (
"fmt"
"os"
"path/filepath"
"regexp"
"strings"
"github.com/gabrie30/ghorg/colorlog"
"github.com/gabrie30/ghorg/scm"
)
// RepositoryFilter handles filtering of repositories based on various criteria
// (include/exclude regex, include/exclude prefix, target-repos file, and
// ghorgignore). It is stateless: all configuration is read from GHORG_*
// environment variables at call time.
type RepositoryFilter struct{}

// NewRepositoryFilter creates a new repository filter.
// The zero value is equally usable; this constructor exists for call-site symmetry.
func NewRepositoryFilter() *RepositoryFilter {
	return &RepositoryFilter{}
}
// ApplyAllFilters applies all configured filters to the repository list
func (rf *RepositoryFilter) ApplyAllFilters(cloneTargets []scm.Repo) []scm.Repo {
// Apply regex match filter
if os.Getenv("GHORG_MATCH_REGEX") != "" {
colorlog.PrintInfo("Filtering repos down by including regex matches...")
cloneTargets = rf.FilterByRegexMatch(cloneTargets)
}
// Apply exclude regex match filter
if os.Getenv("GHORG_EXCLUDE_MATCH_REGEX") != "" {
colorlog.PrintInfo("Filtering repos down by excluding regex matches...")
cloneTargets = rf.FilterByExcludeRegexMatch(cloneTargets)
}
// Apply prefix match filter
if os.Getenv("GHORG_MATCH_PREFIX") != "" {
colorlog.PrintInfo("Filtering repos down by including prefix matches...")
cloneTargets = rf.FilterByMatchPrefix(cloneTargets)
}
// Apply exclude prefix match filter
if os.Getenv("GHORG_EXCLUDE_MATCH_PREFIX") != "" {
colorlog.PrintInfo("Filtering repos down by excluding prefix matches...")
cloneTargets = rf.FilterByExcludeMatchPrefix(cloneTargets)
}
// Apply target repos path filter
if os.Getenv("GHORG_TARGET_REPOS_PATH") != "" {
colorlog.PrintInfo("Filtering repos down by target repos path...")
cloneTargets = rf.FilterByTargetReposPath(cloneTargets)
}
// Apply ghorgignore filter
cloneTargets = rf.FilterByGhorgignore(cloneTargets)
return cloneTargets
}
// FilterByRegexMatch filters repositories that match the regex pattern
func (rf *RepositoryFilter) FilterByRegexMatch(repos []scm.Repo) []scm.Repo {
regex := os.Getenv("GHORG_MATCH_REGEX")
if regex == "" {
return repos
}
filteredRepos := []scm.Repo{}
re := regexp.MustCompile(regex)
for _, repo := range repos {
if re.FindString(repo.Name) != "" {
filteredRepos = append(filteredRepos, repo)
}
}
return filteredRepos
}
// FilterByExcludeRegexMatch filters out repositories that match the regex pattern
func (rf *RepositoryFilter) FilterByExcludeRegexMatch(repos []scm.Repo) []scm.Repo {
regex := os.Getenv("GHORG_EXCLUDE_MATCH_REGEX")
if regex == "" {
return repos
}
filteredRepos := []scm.Repo{}
re := regexp.MustCompile(regex)
for _, repo := range repos {
if re.FindString(repo.Name) == "" {
filteredRepos = append(filteredRepos, repo)
}
}
return filteredRepos
}
// FilterByMatchPrefix filters repositories that start with the specified prefix(es)
func (rf *RepositoryFilter) FilterByMatchPrefix(repos []scm.Repo) []scm.Repo {
prefixes := os.Getenv("GHORG_MATCH_PREFIX")
if prefixes == "" {
return repos
}
filteredRepos := []scm.Repo{}
prefixList := strings.Split(prefixes, ",")
for _, repo := range repos {
for _, prefix := range prefixList {
if strings.HasPrefix(strings.ToLower(repo.Name), strings.ToLower(prefix)) {
filteredRepos = append(filteredRepos, repo)
break
}
}
}
return filteredRepos
}
// FilterByExcludeMatchPrefix filters out repositories that start with the specified prefix(es)
func (rf *RepositoryFilter) FilterByExcludeMatchPrefix(repos []scm.Repo) []scm.Repo {
prefixes := os.Getenv("GHORG_EXCLUDE_MATCH_PREFIX")
if prefixes == "" {
return repos
}
filteredRepos := []scm.Repo{}
prefixList := strings.Split(prefixes, ",")
for _, repo := range repos {
exclude := false
for _, prefix := range prefixList {
if strings.HasPrefix(strings.ToLower(repo.Name), strings.ToLower(prefix)) {
exclude = true
break
}
}
if !exclude {
filteredRepos = append(filteredRepos, repo)
}
}
return filteredRepos
}
// FilterByTargetReposPath filters repositories based on a file containing target repo names
func (rf *RepositoryFilter) FilterByTargetReposPath(cloneTargets []scm.Repo) []scm.Repo {
targetReposPath := os.Getenv("GHORG_TARGET_REPOS_PATH")
if targetReposPath == "" {
return cloneTargets
}
_, err := os.Stat(targetReposPath)
if err != nil {
colorlog.PrintErrorAndExit(fmt.Sprintf("Error finding your GHORG_TARGET_REPOS_PATH file, error: %v", err))
}
// Read target repos from file
toTarget, err := readTargetReposFile()
if err != nil {
colorlog.PrintErrorAndExit(fmt.Sprintf("Error parsing your GHORG_TARGET_REPOS_PATH file, error: %v", err))
}
colorlog.PrintInfo("Using GHORG_TARGET_REPOS_PATH, filtering repos down...")
filteredCloneTargets := []scm.Repo{}
targetRepoSeenOnOrg := make(map[string]bool)
for _, cloneTarget := range cloneTargets {
found := false
for _, targetRepo := range toTarget {
if _, ok := targetRepoSeenOnOrg[targetRepo]; !ok {
targetRepoSeenOnOrg[targetRepo] = false
}
clonedRepoName := strings.TrimSuffix(filepath.Base(cloneTarget.URL), ".git")
if strings.EqualFold(clonedRepoName, targetRepo) {
found = true
targetRepoSeenOnOrg[targetRepo] = true
}
// Handle wiki matching
if os.Getenv("GHORG_CLONE_WIKI") == "true" {
targetRepoWiki := targetRepo + ".wiki"
if strings.EqualFold(targetRepoWiki, clonedRepoName) {
found = true
targetRepoSeenOnOrg[targetRepo] = true
}
}
// Handle snippet matching
if os.Getenv("GHORG_CLONE_SNIPPETS") == "true" && cloneTarget.IsGitLabSnippet {
targetSnippetOriginalRepo := strings.TrimSuffix(filepath.Base(cloneTarget.GitLabSnippetInfo.URLOfRepo), ".git")
if strings.EqualFold(targetSnippetOriginalRepo, targetRepo) {
found = true
targetRepoSeenOnOrg[targetRepo] = true
}
}
}
if found {
filteredCloneTargets = append(filteredCloneTargets, cloneTarget)
}
}
// Print repos from the file that were not found in the org
for targetRepo, seen := range targetRepoSeenOnOrg {
if !seen {
msg := fmt.Sprintf("Target in GHORG_TARGET_REPOS_PATH was not found in the org, repo: %v", targetRepo)
cloneInfos = append(cloneInfos, msg)
}
}
return filteredCloneTargets
}
// FilterByGhorgignore filters out repositories listed in the ghorgignore file
func (rf *RepositoryFilter) FilterByGhorgignore(cloneTargets []scm.Repo) []scm.Repo {
ignoreLocation := os.Getenv("GHORG_IGNORE_PATH")
if ignoreLocation != "" {
_, err := os.Stat(ignoreLocation)
if os.IsNotExist(err) {
return cloneTargets
}
} else {
// Use default location
defaultIgnorePath := filepath.Join(os.Getenv("HOME"), ".config", "ghorg", "ghorgignore")
_, err := os.Stat(defaultIgnorePath)
if os.IsNotExist(err) {
return cloneTargets
}
}
// Read ghorgignore patterns
toIgnore, err := readGhorgIgnore()
if err != nil {
colorlog.PrintErrorAndExit(fmt.Sprintf("Error parsing your ghorgignore, error: %v", err))
}
colorlog.PrintInfo("Using ghorgignore, filtering repos down...")
filteredCloneTargets := []scm.Repo{}
for _, repo := range cloneTargets {
ignored := false
for _, ignorePattern := range toIgnore {
if strings.Contains(repo.URL, ignorePattern) {
ignored = true
break
}
}
if !ignored {
filteredCloneTargets = append(filteredCloneTargets, repo)
}
}
return filteredCloneTargets
}

View File

@ -0,0 +1,423 @@
package cmd
import (
	"os"
	"reflect"
	"strconv"
	"testing"

	"github.com/gabrie30/ghorg/scm"
)
// TestRepositoryFilter_FilterByRegexMatch verifies include-regex filtering
// driven by the GHORG_MATCH_REGEX environment variable.
func TestRepositoryFilter_FilterByRegexMatch(t *testing.T) {
	filter := NewRepositoryFilter()

	testCases := []struct {
		name          string
		regex         string
		repos         []scm.Repo
		expectedRepos []scm.Repo
	}{
		{
			name:  "matches repos with prefix",
			regex: "^test-",
			repos: []scm.Repo{
				{Name: "test-repo1"},
				{Name: "test-repo2"},
				{Name: "other-repo"},
			},
			expectedRepos: []scm.Repo{
				{Name: "test-repo1"},
				{Name: "test-repo2"},
			},
		},
		{
			name:  "matches repos with suffix",
			regex: "-lib$",
			repos: []scm.Repo{
				{Name: "utils-lib"},
				{Name: "core-lib"},
				{Name: "main-app"},
			},
			expectedRepos: []scm.Repo{
				{Name: "utils-lib"},
				{Name: "core-lib"},
			},
		},
		{
			name:          "no matches",
			regex:         "^nonexistent",
			repos:         []scm.Repo{{Name: "repo1"}, {Name: "repo2"}},
			expectedRepos: []scm.Repo{},
		},
		{
			name:          "empty regex returns all",
			regex:         "",
			repos:         []scm.Repo{{Name: "repo1"}, {Name: "repo2"}},
			expectedRepos: []scm.Repo{{Name: "repo1"}, {Name: "repo2"}},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// The filter reads the env var at call time; set per case and clean up.
			os.Setenv("GHORG_MATCH_REGEX", tc.regex)
			defer os.Unsetenv("GHORG_MATCH_REGEX")

			result := filter.FilterByRegexMatch(tc.repos)
			if !reflect.DeepEqual(result, tc.expectedRepos) {
				t.Errorf("Expected %v, got %v", tc.expectedRepos, result)
			}
		})
	}
}
// TestRepositoryFilter_FilterByExcludeRegexMatch verifies exclude-regex
// filtering driven by the GHORG_EXCLUDE_MATCH_REGEX environment variable.
func TestRepositoryFilter_FilterByExcludeRegexMatch(t *testing.T) {
	filter := NewRepositoryFilter()

	testCases := []struct {
		name          string
		regex         string
		repos         []scm.Repo
		expectedRepos []scm.Repo
	}{
		{
			name:  "excludes repos with prefix",
			regex: "^test-",
			repos: []scm.Repo{
				{Name: "test-repo1"},
				{Name: "test-repo2"},
				{Name: "other-repo"},
			},
			expectedRepos: []scm.Repo{
				{Name: "other-repo"},
			},
		},
		{
			name:  "excludes repos with suffix",
			regex: "-test$",
			repos: []scm.Repo{
				{Name: "utils-test"},
				{Name: "core-lib"},
				{Name: "main-test"},
			},
			expectedRepos: []scm.Repo{
				{Name: "core-lib"},
			},
		},
		{
			name:          "no exclusions",
			regex:         "^nonexistent",
			repos:         []scm.Repo{{Name: "repo1"}, {Name: "repo2"}},
			expectedRepos: []scm.Repo{{Name: "repo1"}, {Name: "repo2"}},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// The filter reads the env var at call time; set per case and clean up.
			os.Setenv("GHORG_EXCLUDE_MATCH_REGEX", tc.regex)
			defer os.Unsetenv("GHORG_EXCLUDE_MATCH_REGEX")

			result := filter.FilterByExcludeRegexMatch(tc.repos)
			if !reflect.DeepEqual(result, tc.expectedRepos) {
				t.Errorf("Expected %v, got %v", tc.expectedRepos, result)
			}
		})
	}
}
// TestRepositoryFilter_FilterByMatchPrefix verifies include-prefix filtering
// (GHORG_MATCH_PREFIX), including comma-separated lists and case-insensitivity.
func TestRepositoryFilter_FilterByMatchPrefix(t *testing.T) {
	filter := NewRepositoryFilter()

	testCases := []struct {
		name          string
		prefix        string
		repos         []scm.Repo
		expectedRepos []scm.Repo
	}{
		{
			name:   "matches single prefix",
			prefix: "test",
			repos: []scm.Repo{
				{Name: "test-repo1"},
				{Name: "Test-Repo2"}, // Should match case-insensitive
				{Name: "other-repo"},
			},
			expectedRepos: []scm.Repo{
				{Name: "test-repo1"},
				{Name: "Test-Repo2"},
			},
		},
		{
			name:   "matches multiple prefixes",
			prefix: "test,lib",
			repos: []scm.Repo{
				{Name: "test-repo1"},
				{Name: "lib-utils"},
				{Name: "other-repo"},
				{Name: "lib-core"},
			},
			expectedRepos: []scm.Repo{
				{Name: "test-repo1"},
				{Name: "lib-utils"},
				{Name: "lib-core"},
			},
		},
		{
			name:          "no matches",
			prefix:        "nonexistent",
			repos:         []scm.Repo{{Name: "repo1"}, {Name: "repo2"}},
			expectedRepos: []scm.Repo{},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// The filter reads the env var at call time; set per case and clean up.
			os.Setenv("GHORG_MATCH_PREFIX", tc.prefix)
			defer os.Unsetenv("GHORG_MATCH_PREFIX")

			result := filter.FilterByMatchPrefix(tc.repos)
			if !reflect.DeepEqual(result, tc.expectedRepos) {
				t.Errorf("Expected %v, got %v", tc.expectedRepos, result)
			}
		})
	}
}
// TestRepositoryFilter_FilterByExcludeMatchPrefix verifies exclude-prefix
// filtering (GHORG_EXCLUDE_MATCH_PREFIX), including comma-separated lists and
// case-insensitivity.
func TestRepositoryFilter_FilterByExcludeMatchPrefix(t *testing.T) {
	filter := NewRepositoryFilter()

	testCases := []struct {
		name          string
		prefix        string
		repos         []scm.Repo
		expectedRepos []scm.Repo
	}{
		{
			name:   "excludes single prefix",
			prefix: "test",
			repos: []scm.Repo{
				{Name: "test-repo1"},
				{Name: "Test-Repo2"}, // Should exclude case-insensitive
				{Name: "other-repo"},
			},
			expectedRepos: []scm.Repo{
				{Name: "other-repo"},
			},
		},
		{
			name:   "excludes multiple prefixes",
			prefix: "test,lib",
			repos: []scm.Repo{
				{Name: "test-repo1"},
				{Name: "lib-utils"},
				{Name: "other-repo"},
				{Name: "main-app"},
			},
			expectedRepos: []scm.Repo{
				{Name: "other-repo"},
				{Name: "main-app"},
			},
		},
		{
			name:          "no exclusions",
			prefix:        "nonexistent",
			repos:         []scm.Repo{{Name: "repo1"}, {Name: "repo2"}},
			expectedRepos: []scm.Repo{{Name: "repo1"}, {Name: "repo2"}},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// The filter reads the env var at call time; set per case and clean up.
			os.Setenv("GHORG_EXCLUDE_MATCH_PREFIX", tc.prefix)
			defer os.Unsetenv("GHORG_EXCLUDE_MATCH_PREFIX")

			result := filter.FilterByExcludeMatchPrefix(tc.repos)
			if !reflect.DeepEqual(result, tc.expectedRepos) {
				t.Errorf("Expected %v, got %v", tc.expectedRepos, result)
			}
		})
	}
}
// TestRepositoryFilter_FilterByGhorgignore verifies URL-substring filtering
// against a temporary ghorgignore file pointed to by GHORG_IGNORE_PATH.
func TestRepositoryFilter_FilterByGhorgignore(t *testing.T) {
	filter := NewRepositoryFilter()

	testCases := []struct {
		name          string
		ignoreContent string
		repos         []scm.Repo
		expectedRepos []scm.Repo
	}{
		{
			name:          "filters out matching URLs",
			ignoreContent: "shouldbeignored",
			repos: []scm.Repo{
				{Name: "repo1", URL: "https://github.com/org/repo1"},
				{Name: "shouldbeignored", URL: "https://github.com/org/shouldbeignored"},
			},
			expectedRepos: []scm.Repo{
				{Name: "repo1", URL: "https://github.com/org/repo1"},
			},
		},
		{
			name:          "filters multiple patterns",
			ignoreContent: "test-repo\nold-project",
			repos: []scm.Repo{
				{Name: "repo1", URL: "https://github.com/org/repo1"},
				{Name: "test-repo", URL: "https://github.com/org/test-repo"},
				{Name: "old-project", URL: "https://github.com/org/old-project"},
			},
			expectedRepos: []scm.Repo{
				{Name: "repo1", URL: "https://github.com/org/repo1"},
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// Create temporary ignore file; createTempFileWithContent is a
			// shared test helper defined elsewhere in this package.
			tmpfile, err := createTempFileWithContent(tc.ignoreContent)
			if err != nil {
				t.Fatalf("Failed to create temp file: %v", err)
			}
			defer os.Remove(tmpfile.Name())

			os.Setenv("GHORG_IGNORE_PATH", tmpfile.Name())
			defer os.Unsetenv("GHORG_IGNORE_PATH")

			result := filter.FilterByGhorgignore(tc.repos)
			if !reflect.DeepEqual(result, tc.expectedRepos) {
				t.Errorf("Expected %v, got %v", tc.expectedRepos, result)
			}
		})
	}
}
// TestRepositoryFilter_FilterByTargetReposPath verifies filtering against a
// temporary target-repos file (GHORG_TARGET_REPOS_PATH), matching repo names
// derived from clone URLs case-insensitively.
func TestRepositoryFilter_FilterByTargetReposPath(t *testing.T) {
	filter := NewRepositoryFilter()

	testCases := []struct {
		name          string
		targetContent string
		repos         []scm.Repo
		expectedRepos []scm.Repo
	}{
		{
			name:          "filters to target repos only",
			targetContent: "target-repo\nother-target",
			repos: []scm.Repo{
				{Name: "target-repo", URL: "https://github.com/org/target-repo.git"},
				{Name: "other-target", URL: "https://github.com/org/other-target.git"},
				{Name: "unwanted", URL: "https://github.com/org/unwanted.git"},
			},
			expectedRepos: []scm.Repo{
				{Name: "target-repo", URL: "https://github.com/org/target-repo.git"},
				{Name: "other-target", URL: "https://github.com/org/other-target.git"},
			},
		},
		{
			name:          "handles case insensitive matching",
			targetContent: "Target-Repo",
			repos: []scm.Repo{
				{Name: "target-repo", URL: "https://github.com/org/target-repo.git"},
				{Name: "other", URL: "https://github.com/org/other.git"},
			},
			expectedRepos: []scm.Repo{
				{Name: "target-repo", URL: "https://github.com/org/target-repo.git"},
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// Create temporary target file; createTempFileWithContent is a
			// shared test helper defined elsewhere in this package.
			tmpfile, err := createTempFileWithContent(tc.targetContent)
			if err != nil {
				t.Fatalf("Failed to create temp file: %v", err)
			}
			defer os.Remove(tmpfile.Name())

			os.Setenv("GHORG_TARGET_REPOS_PATH", tmpfile.Name())
			defer os.Unsetenv("GHORG_TARGET_REPOS_PATH")

			result := filter.FilterByTargetReposPath(tc.repos)
			if !reflect.DeepEqual(result, tc.expectedRepos) {
				t.Errorf("Expected %v, got %v", tc.expectedRepos, result)
			}
		})
	}
}
// TestRepositoryFilter_ApplyAllFilters exercises the combined pipeline: an
// include regex plus a ghorgignore file applied in sequence.
func TestRepositoryFilter_ApplyAllFilters(t *testing.T) {
	// UnsetEnv (shared test helper) clears GHORG_* vars on teardown so this
	// test's configuration cannot leak into other tests.
	defer UnsetEnv("GHORG_")()
	filter := NewRepositoryFilter()

	repos := []scm.Repo{
		{Name: "test-repo1", URL: "https://github.com/org/test-repo1.git"},
		{Name: "test-repo2", URL: "https://github.com/org/test-repo2.git"},
		{Name: "lib-utils", URL: "https://github.com/org/lib-utils.git"},
		{Name: "ignored", URL: "https://github.com/org/ignored.git"},
		{Name: "other", URL: "https://github.com/org/other.git"},
	}

	// Set up regex filter to match test- prefix
	os.Setenv("GHORG_MATCH_REGEX", "^test-")

	// Set up ghorgignore
	tmpfile, err := createTempFileWithContent("ignored")
	if err != nil {
		t.Fatalf("Failed to create temp file: %v", err)
	}
	defer os.Remove(tmpfile.Name())
	os.Setenv("GHORG_IGNORE_PATH", tmpfile.Name())

	result := filter.ApplyAllFilters(repos)

	// Only the regex matches survive; "ignored" would be dropped by the
	// ghorgignore even if it matched the regex.
	expected := []scm.Repo{
		{Name: "test-repo1", URL: "https://github.com/org/test-repo1.git"},
		{Name: "test-repo2", URL: "https://github.com/org/test-repo2.git"},
	}

	if !reflect.DeepEqual(result, expected) {
		t.Errorf("Expected %v, got %v", expected, result)
	}
}
// Benchmark tests for performance validation

// BenchmarkRepositoryFilter_FilterByRegexMatch measures regex filtering over
// 1000 repositories, half of which match the configured pattern.
func BenchmarkRepositoryFilter_FilterByRegexMatch(b *testing.B) {
	filter := NewRepositoryFilter()
	os.Setenv("GHORG_MATCH_REGEX", "^test-")
	defer os.Unsetenv("GHORG_MATCH_REGEX")

	// Create 1000 test repos. Use strconv.Itoa for readable numeric suffixes;
	// string(rune(i)) would produce arbitrary unicode code points, not digits.
	repos := make([]scm.Repo, 1000)
	for i := 0; i < 1000; i++ {
		if i%2 == 0 {
			repos[i] = scm.Repo{Name: "test-repo" + strconv.Itoa(i)}
		} else {
			repos[i] = scm.Repo{Name: "other-repo" + strconv.Itoa(i)}
		}
	}

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		filter.FilterByRegexMatch(repos)
	}
}
// BenchmarkRepositoryFilter_FilterByPrefix measures prefix filtering over
// 1000 repositories spread evenly across five name prefixes (three of which
// are configured to match).
func BenchmarkRepositoryFilter_FilterByPrefix(b *testing.B) {
	filter := NewRepositoryFilter()
	os.Setenv("GHORG_MATCH_PREFIX", "test,lib,core")
	defer os.Unsetenv("GHORG_MATCH_PREFIX")

	// Create 1000 test repos. Use strconv.Itoa for readable numeric suffixes;
	// string(rune(i)) would produce arbitrary unicode code points, not digits.
	repos := make([]scm.Repo, 1000)
	prefixes := []string{"test", "lib", "core", "other", "main"}
	for i := 0; i < 1000; i++ {
		repos[i] = scm.Repo{Name: prefixes[i%5] + "-repo" + strconv.Itoa(i)}
	}

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		filter.FilterByMatchPrefix(repos)
	}
}

446
cmd/repository_processor.go Normal file
View File

@ -0,0 +1,446 @@
package cmd
import (
"fmt"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
"github.com/gabrie30/ghorg/colorlog"
"github.com/gabrie30/ghorg/git"
"github.com/gabrie30/ghorg/scm"
)
// RepositoryProcessor handles the processing of individual repositories
type RepositoryProcessor struct {
git git.Gitter
stats *CloneStats
mutex *sync.RWMutex
untouchedRepos []string
}
// CloneStats tracks statistics during clone operations
type CloneStats struct {
CloneCount int
PulledCount int
UpdateRemoteCount int
NewCommits int
UntouchedPrunes int
CloneInfos []string
CloneErrors []string
}
// NewRepositoryProcessor creates a new repository processor backed by the
// given git client, with zeroed stats and a fresh lock.
func NewRepositoryProcessor(git git.Gitter) *RepositoryProcessor {
	rp := &RepositoryProcessor{git: git}
	rp.stats = &CloneStats{}
	rp.mutex = &sync.RWMutex{}
	return rp
}
// ProcessRepository handles the cloning or updating of a single repository
//
// repo is mutated in place: its HostPath is finalized here and, for pulled
// repos, its Commits counters are filled in by the pull path.
// repoNameWithCollisions marks repo names that collide; hasCollisions
// short-circuits collision handling when false. repoSlug is the candidate
// directory name and index disambiguates slugs that still collide after
// path flattening. Failures are recorded in stats rather than returned.
func (rp *RepositoryProcessor) ProcessRepository(repo *scm.Repo, repoNameWithCollisions map[string]bool, hasCollisions bool, repoSlug string, index int) {
	// Update repo slug for collisions if needed
	finalRepoSlug := rp.handleNameCollisions(*repo, repoNameWithCollisions, hasCollisions, repoSlug, index)
	// Set the final host path
	repo.HostPath = rp.buildHostPath(*repo, finalRepoSlug)
	// Handle prune untouched logic
	if rp.shouldPruneUntouched(repo) {
		return
	}
	// Skip if prune untouched is active (only prune, don't clone)
	if os.Getenv("GHORG_PRUNE_UNTOUCHED") == "true" {
		return
	}
	// Determine if this repo exists locally
	repoWillBePulled := repoExistsLocally(*repo)
	var action string
	// Process the repository (clone or update)
	if repoWillBePulled {
		success := rp.handleExistingRepository(repo, &action)
		if !success {
			return
		}
	} else {
		success := rp.handleNewRepository(repo, &action)
		if !success {
			return
		}
	}
	// Print unified success message (matching original behavior)
	if repoWillBePulled && repo.Commits.CountDiff > 0 {
		colorlog.PrintSuccess(fmt.Sprintf("Success %s %s, branch: %s, new commits: %d", action, repo.URL, repo.CloneBranch, repo.Commits.CountDiff))
	} else {
		colorlog.PrintSuccess(fmt.Sprintf("Success %s %s, branch: %s", action, repo.URL, repo.CloneBranch))
	}
}
// handleNameCollisions manages repository name collisions
//
// When hasCollisions is false the slug only receives wiki/snippet suffixes.
// Otherwise, if this repo's name is marked in repoNameWithCollisions, the slug
// is rebuilt from the repo's full path (slashes flattened to underscores) and,
// if that flattened slug also collides, prefixed with the repo's index to
// force uniqueness.
// NOTE(review): the map is read and later written under separate lock
// acquisitions (check-then-act), so two goroutines could both observe
// slugCollision == false for the same slug — confirm callers tolerate this.
func (rp *RepositoryProcessor) handleNameCollisions(repo scm.Repo, repoNameWithCollisions map[string]bool, hasCollisions bool, repoSlug string, index int) string {
	if !hasCollisions {
		return rp.addSuffixesIfNeeded(repo, repoSlug)
	}
	rp.mutex.Lock()
	var inHash bool
	// Non-root snippets collide on the owning repo's name, not the snippet name.
	if repo.IsGitLabSnippet && !repo.IsGitLabRootLevelSnippet {
		inHash = repoNameWithCollisions[repo.GitLabSnippetInfo.NameOfRepo]
	} else {
		inHash = repoNameWithCollisions[repo.Name]
	}
	rp.mutex.Unlock()
	if inHash {
		// Replace both forward slashes and backslashes with underscores for cross-platform compatibility
		pathWithUnderscores := strings.ReplaceAll(repo.Path, "/", "_")
		pathWithUnderscores = strings.ReplaceAll(pathWithUnderscores, "\\", "_")
		repoSlug = trimCollisionFilename(pathWithUnderscores)
		repoSlug = rp.addSuffixesIfNeeded(repo, repoSlug)
		rp.mutex.Lock()
		slugCollision := repoNameWithCollisions[repoSlug]
		rp.mutex.Unlock()
		if slugCollision {
			repoSlug = fmt.Sprintf("_%v_%v", strconv.Itoa(index), repoSlug)
		} else {
			rp.mutex.Lock()
			repoNameWithCollisions[repoSlug] = true
			rp.mutex.Unlock()
		}
	}
	return rp.addSuffixesIfNeeded(repo, repoSlug)
}
// addSuffixesIfNeeded adds appropriate suffixes for wikis and snippets.
// Suffixes are only appended when not already present, so the function is
// idempotent across repeated calls on the same slug.
func (rp *RepositoryProcessor) addSuffixesIfNeeded(repo scm.Repo, repoSlug string) string {
	slug := repoSlug
	if repo.IsWiki && !strings.HasSuffix(slug, ".wiki") {
		slug += ".wiki"
	}
	isNestedSnippet := repo.IsGitLabSnippet && !repo.IsGitLabRootLevelSnippet
	if isNestedSnippet && !strings.HasSuffix(slug, ".snippets") {
		slug += ".snippets"
	}
	return slug
}
// buildHostPath constructs the final host path for the repository.
// Root-level snippets are grouped under a dedicated "_ghorg_root_level_snippets"
// directory; other snippets nest a "<title>-<id>" directory under the slug.
func (rp *RepositoryProcessor) buildHostPath(repo scm.Repo, repoSlug string) string {
	snippetDir := repo.GitLabSnippetInfo.Title + "-" + repo.GitLabSnippetInfo.ID
	switch {
	case repo.IsGitLabRootLevelSnippet:
		return filepath.Join(outputDirAbsolutePath, "_ghorg_root_level_snippets", snippetDir)
	case repo.IsGitLabSnippet:
		return filepath.Join(outputDirAbsolutePath, repoSlug, snippetDir)
	default:
		return filepath.Join(outputDirAbsolutePath, repoSlug)
	}
}
// shouldPruneUntouched determines if a repository should be pruned as untouched
//
// Returns true (and records the host path for later pruning) only when
// GHORG_PRUNE_UNTOUCHED is enabled, the repo exists locally, and the local
// copy has no work worth keeping: either no branches at all, or exactly one
// branch with a clean working tree and no commits ahead of its upstream.
// Any error from the git backend is logged and treated as "do not prune".
func (rp *RepositoryProcessor) shouldPruneUntouched(repo *scm.Repo) bool {
	if os.Getenv("GHORG_PRUNE_UNTOUCHED") != "true" || !repoExistsLocally(*repo) {
		return false
	}
	// Fetch and check branches
	rp.git.FetchCloneBranch(*repo)
	branches, err := rp.git.Branch(*repo)
	if err != nil {
		colorlog.PrintError(fmt.Sprintf("Failed to list local branches for repository %s: %v", repo.Name, err))
		return false
	}
	// Delete if it has no branches
	if branches == "" {
		rp.untouchedRepos = append(rp.untouchedRepos, repo.HostPath)
		return true
	}
	// Skip if multiple branches
	if len(strings.Split(strings.TrimSpace(branches), "\n")) > 1 {
		return false
	}
	// Check for modified changes
	status, err := rp.git.ShortStatus(*repo)
	if err != nil {
		colorlog.PrintError(fmt.Sprintf("Failed to get short status for repository %s: %v", repo.Name, err))
		return false
	}
	if status != "" {
		return false
	}
	// Check for new commits on the branch that exist locally but not on the remote
	commits, err := rp.git.RevListCompare(*repo, "HEAD", "@{u}")
	if err != nil {
		colorlog.PrintError(fmt.Sprintf("Failed to get commit differences for repository %s. The repository may be empty or does not have a .git directory. Error: %v", repo.Name, err))
		return false
	}
	if commits != "" {
		return false
	}
	rp.untouchedRepos = append(rp.untouchedRepos, repo.HostPath)
	return true
}
// handleExistingRepository processes repositories that already exist locally
//
// It temporarily embeds credentials in origin, runs one of three update modes
// (backup, no-clean, or standard pull), then restores the credential-free
// origin and bumps PulledCount. action is set so the caller can report what
// was done. Returns false when any step failed; the failure has already been
// recorded in stats by the time this returns.
func (rp *RepositoryProcessor) handleExistingRepository(repo *scm.Repo, action *string) bool {
	*action = "pulling"
	// Set origin with credentials
	err := rp.git.SetOriginWithCredentials(*repo)
	if err != nil {
		rp.addError(fmt.Sprintf("Problem setting remote with credentials on: %s Error: %v", repo.Name, err))
		return false
	}
	if os.Getenv("GHORG_BACKUP") == "true" {
		*action = "updating remote"
		success := rp.handleBackupMode(repo)
		if !success {
			return false
		}
	} else if os.Getenv("GHORG_NO_CLEAN") == "true" {
		*action = "fetching"
		success := rp.handleNoCleanMode(repo)
		if !success {
			return false
		}
	} else {
		// Standard pull mode
		success := rp.handleStandardPull(repo)
		if !success {
			return false
		}
	}
	// Reset origin (strips the credentials set above)
	err = rp.git.SetOrigin(*repo)
	if err != nil {
		rp.addError(fmt.Sprintf("Problem resetting remote: %s Error: %v", repo.Name, err))
		return false
	}
	rp.mutex.Lock()
	rp.stats.PulledCount++
	rp.mutex.Unlock()
	return true
}
// handleNewRepository processes repositories that don't exist locally
//
// Clones the repo, optionally checks out GHORG_BRANCH, resets origin so
// credentials are not persisted in the URL, and optionally fetches all
// remotes when GHORG_FETCH_ALL is set. Wiki clone failures are downgraded
// to info messages since a wiki can be enabled with no content.
// Returns false when any step failed (already recorded in stats).
func (rp *RepositoryProcessor) handleNewRepository(repo *scm.Repo, action *string) bool {
	*action = "cloning"
	err := rp.git.Clone(*repo)
	// Handle wiki clone attempts that might fail
	if err != nil && repo.IsWiki {
		rp.addInfo(fmt.Sprintf("Wiki may be enabled but there was no content to clone: %s Error: %v", repo.URL, err))
		return false
	}
	if err != nil {
		rp.addError(fmt.Sprintf("Problem trying to clone: %s Error: %v", repo.URL, err))
		return false
	}
	// Checkout specific branch if specified
	if os.Getenv("GHORG_BRANCH") != "" {
		err := rp.git.Checkout(*repo)
		if err != nil {
			rp.addInfo(fmt.Sprintf("Could not checkout out %s, branch may not exist or may not have any contents/commits, no changes to: %s Error: %v", repo.CloneBranch, repo.URL, err))
			return false
		}
	}
	rp.mutex.Lock()
	rp.stats.CloneCount++
	rp.mutex.Unlock()
	// Set origin to remove credentials from URL
	err = rp.git.SetOrigin(*repo)
	if err != nil {
		rp.addError(fmt.Sprintf("Problem trying to set remote: %s Error: %v", repo.URL, err))
		return false
	}
	// Fetch all if enabled
	if os.Getenv("GHORG_FETCH_ALL") == "true" {
		err = rp.git.FetchAll(*repo)
		if err != nil {
			rp.addError(fmt.Sprintf("Could not fetch remotes: %s Error: %v", repo.URL, err))
			return false
		}
	}
	return true
}
// handleBackupMode processes repositories in backup mode: it only updates the
// configured remotes and never touches the working tree. Wiki failures are
// downgraded to info messages. Returns false on any recorded failure.
func (rp *RepositoryProcessor) handleBackupMode(repo *scm.Repo) bool {
	if err := rp.git.UpdateRemote(*repo); err != nil {
		if repo.IsWiki {
			rp.addInfo(fmt.Sprintf("Wiki may be enabled but there was no content to clone on: %s Error: %v", repo.URL, err))
		} else {
			rp.addError(fmt.Sprintf("Could not update remotes: %s Error: %v", repo.URL, err))
		}
		return false
	}
	rp.mutex.Lock()
	defer rp.mutex.Unlock()
	rp.stats.UpdateRemoteCount++
	return true
}
// handleNoCleanMode processes repositories in no-clean mode: it fetches all
// remotes without resetting or pulling, leaving local changes intact. Wiki
// failures are downgraded to info messages. Returns false on recorded failure.
func (rp *RepositoryProcessor) handleNoCleanMode(repo *scm.Repo) bool {
	if err := rp.git.FetchAll(*repo); err != nil {
		if repo.IsWiki {
			rp.addInfo(fmt.Sprintf("Wiki may be enabled but there was no content to clone on: %s Error: %v", repo.URL, err))
		} else {
			rp.addError(fmt.Sprintf("Could not fetch remotes: %s Error: %v", repo.URL, err))
		}
		return false
	}
	return true
}
// handleStandardPull processes repositories in standard pull mode
//
// Sequence: optional fetch-all, checkout (with one retry after fetching the
// clone branch, distinguishing empty repos from genuinely missing branches),
// record pre-pull commit count, clean, hard-reset, pull, record post-pull
// commit count, and accumulate the commit diff into stats. Commit-count
// errors are non-fatal (logged as info); all other failures abort with false.
func (rp *RepositoryProcessor) handleStandardPull(repo *scm.Repo) bool {
	// Fetch all if enabled
	if os.Getenv("GHORG_FETCH_ALL") == "true" {
		err := rp.git.FetchAll(*repo)
		if err != nil {
			rp.addError(fmt.Sprintf("Could not fetch remotes: %s Error: %v", repo.URL, err))
			return false
		}
	}
	// Checkout branch
	err := rp.git.Checkout(*repo)
	if err != nil {
		// The branch may simply not exist locally yet; fetch it and retry.
		rp.git.FetchCloneBranch(*repo)
		// Retry checkout
		errRetry := rp.git.Checkout(*repo)
		if errRetry != nil {
			// Distinguish an empty repository (no remote heads) from a
			// genuinely missing/broken branch to pick error vs info severity.
			hasRemoteHeads, errHasRemoteHeads := rp.git.HasRemoteHeads(*repo)
			if errHasRemoteHeads != nil {
				rp.addError(fmt.Sprintf("Could not checkout %s, branch may not exist or may not have any contents/commits, no changes made on: %s Errors: %v %v", repo.CloneBranch, repo.URL, errRetry, errHasRemoteHeads))
				return false
			}
			if hasRemoteHeads {
				rp.addError(fmt.Sprintf("Could not checkout %s, branch may not exist or may not have any contents/commits, no changes made on: %s Error: %v", repo.CloneBranch, repo.URL, errRetry))
				return false
			} else {
				rp.addInfo(fmt.Sprintf("Could not checkout %s due to repository being empty, no changes made on: %s", repo.CloneBranch, repo.URL))
				return false
			}
		}
	}
	// Get pre-pull commit count (non-fatal on error; count stays zero)
	count, err := rp.git.RepoCommitCount(*repo)
	if err != nil {
		rp.addInfo(fmt.Sprintf("Problem trying to get pre pull commit count for on repo: %s", repo.URL))
	}
	repo.Commits.CountPrePull = count
	// Clean
	err = rp.git.Clean(*repo)
	if err != nil {
		rp.addError(fmt.Sprintf("Problem running git clean: %s Error: %v", repo.URL, err))
		return false
	}
	// Reset
	err = rp.git.Reset(*repo)
	if err != nil {
		rp.addError(fmt.Sprintf("Problem resetting branch: %s for: %s Error: %v", repo.CloneBranch, repo.URL, err))
		return false
	}
	// Pull
	err = rp.git.Pull(*repo)
	if err != nil {
		rp.addError(fmt.Sprintf("Problem trying to pull branch: %v for: %s Error: %v", repo.CloneBranch, repo.URL, err))
		return false
	}
	// Get post-pull commit count (non-fatal on error)
	count, err = rp.git.RepoCommitCount(*repo)
	if err != nil {
		rp.addInfo(fmt.Sprintf("Problem trying to get post pull commit count for on repo: %s", repo.URL))
	}
	repo.Commits.CountPostPull = count
	repo.Commits.CountDiff = (repo.Commits.CountPostPull - repo.Commits.CountPrePull)
	rp.mutex.Lock()
	rp.stats.NewCommits += repo.Commits.CountDiff
	rp.mutex.Unlock()
	return true
}
// addError appends an error message to the stats under the write lock.
func (rp *RepositoryProcessor) addError(msg string) {
	rp.mutex.Lock()
	defer rp.mutex.Unlock()
	rp.stats.CloneErrors = append(rp.stats.CloneErrors, msg)
}
// addInfo appends an informational message to the stats under the write lock.
func (rp *RepositoryProcessor) addInfo(msg string) {
	rp.mutex.Lock()
	defer rp.mutex.Unlock()
	rp.stats.CloneInfos = append(rp.stats.CloneInfos, msg)
}
// GetStats returns a snapshot of the current statistics. The message slices
// are deep-copied so callers cannot mutate the processor's internal state.
func (rp *RepositoryProcessor) GetStats() CloneStats {
	rp.mutex.RLock()
	defer rp.mutex.RUnlock()
	snapshot := *rp.stats
	snapshot.CloneInfos = append([]string(nil), rp.stats.CloneInfos...)
	snapshot.CloneErrors = append([]string(nil), rp.stats.CloneErrors...)
	return snapshot
}
// GetUntouchedRepos returns the list of untouched repositories.
//
// It takes the read lock and returns a defensive copy so callers cannot
// mutate the internal slice, matching the copy semantics of GetStats.
// Previously the internal slice was returned directly, letting callers alias
// and mutate processor state.
func (rp *RepositoryProcessor) GetUntouchedRepos() []string {
	rp.mutex.RLock()
	defer rp.mutex.RUnlock()
	return append([]string(nil), rp.untouchedRepos...)
}

View File

@ -0,0 +1,550 @@
package cmd
import (
"errors"
"os"
"path/filepath"
"testing"
"github.com/gabrie30/ghorg/scm"
)
// ExtendedMockGitClient extends the existing MockGitClient with additional methods needed for RepositoryProcessor
type ExtendedMockGitClient struct {
	MockGitClient
	shouldFailClone       bool // when true, Clone returns a canned error
	shouldFailCheckout    bool // when true, Checkout returns a canned error
	shouldReturnEmptyRepo bool // when true, Checkout fails as if the repo were empty
	preCommitCount        int // value RepoCommitCount reports before a pull
	postCommitCount       int // value RepoCommitCount reports after a pull
}
// NewExtendedMockGit returns an ExtendedMockGitClient with default pre/post
// pull commit counts (5 and 7) and all failure flags cleared.
func NewExtendedMockGit() *ExtendedMockGitClient {
	mock := &ExtendedMockGitClient{MockGitClient: NewMockGit()}
	mock.preCommitCount = 5
	mock.postCommitCount = 7
	return mock
}
// Clone returns a canned error when shouldFailClone is set; otherwise it
// delegates to the embedded MockGitClient.
func (g *ExtendedMockGitClient) Clone(repo scm.Repo) error {
	if !g.shouldFailClone {
		return g.MockGitClient.Clone(repo)
	}
	return errors.New("mock clone error")
}
// Checkout simulates checkout failures: an explicit failure flag takes
// precedence over the empty-repository flag; otherwise it delegates to the
// embedded MockGitClient.
func (g *ExtendedMockGitClient) Checkout(repo scm.Repo) error {
	switch {
	case g.shouldFailCheckout:
		return errors.New("mock checkout error")
	case g.shouldReturnEmptyRepo:
		return errors.New("Cannot checkout any specific branch in an empty repository")
	default:
		return g.MockGitClient.Checkout(repo)
	}
}
// SetOrigin returns a canned error when shouldFailSetOrigin is set; otherwise
// it delegates to the embedded MockGitClient.
func (g *ExtendedMockGitClient) SetOrigin(repo scm.Repo) error {
	if !g.shouldFailSetOrigin {
		return g.MockGitClient.SetOrigin(repo)
	}
	return errors.New("mock set origin error")
}
// RepoCommitCount simulates commit counting around a pull: it infers the
// phase from the repo's recorded state, treating a zero CountPrePull as "the
// pre-pull count has not been taken yet".
// NOTE(review): this heuristic misfires if a repo legitimately has 0 commits
// before pulling — confirm that is acceptable for these tests.
func (g *ExtendedMockGitClient) RepoCommitCount(repo scm.Repo) (int, error) {
	// First call returns pre-pull count, second call returns post-pull count
	if repo.Commits.CountPrePull == 0 {
		return g.preCommitCount, nil
	}
	return g.postCommitCount, nil
}
// TestRepositoryProcessor_NewRepositoryProcessor verifies the constructor
// wires up the git client and initializes the stats and mutex fields.
func TestRepositoryProcessor_NewRepositoryProcessor(t *testing.T) {
	gitMock := NewExtendedMockGit()
	p := NewRepositoryProcessor(gitMock)
	if p == nil {
		t.Fatal("Expected processor to be created")
	}
	if p.git != gitMock {
		t.Error("Expected git client to be set correctly")
	}
	if p.stats == nil {
		t.Error("Expected stats to be initialized")
	}
	if p.mutex == nil {
		t.Error("Expected mutex to be initialized")
	}
}
// TestRepositoryProcessor_ProcessRepository_NewRepository processes a repo
// that does not exist locally and expects exactly one clone, no pulls, and
// no recorded errors.
func TestRepositoryProcessor_ProcessRepository_NewRepository(t *testing.T) {
	defer UnsetEnv("GHORG_")()
	// Set up temporary directory
	dir, err := os.MkdirTemp("", "ghorg_test_process_new_repo")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	// Point the package-level output dir at the temp dir so HostPath lands there.
	outputDirAbsolutePath = dir
	mockGit := NewExtendedMockGit()
	processor := NewRepositoryProcessor(mockGit)
	repo := scm.Repo{
		Name:        "test-repo",
		URL:         "https://github.com/org/test-repo",
		CloneBranch: "main",
	}
	repoNameWithCollisions := make(map[string]bool)
	processor.ProcessRepository(&repo, repoNameWithCollisions, false, "test-repo", 0)
	stats := processor.GetStats()
	if stats.CloneCount != 1 {
		t.Errorf("Expected clone count to be 1, got %d", stats.CloneCount)
	}
	if stats.PulledCount != 0 {
		t.Errorf("Expected pulled count to be 0, got %d", stats.PulledCount)
	}
	if len(stats.CloneErrors) != 0 {
		t.Errorf("Expected no clone errors, got %v", stats.CloneErrors)
	}
}
// TestRepositoryProcessor_ProcessRepository_ExistingRepository processes a
// repo that already exists on disk and verifies the pull counter and the
// commit-diff arithmetic driven by the mock's pre/post commit counts.
func TestRepositoryProcessor_ProcessRepository_ExistingRepository(t *testing.T) {
	defer UnsetEnv("GHORG_")()
	// Set up temporary directory with existing repo
	dir, err := os.MkdirTemp("", "ghorg_test_process_existing_repo")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	outputDirAbsolutePath = dir
	// Create existing repo directory so repoExistsLocally reports true
	repoDir := filepath.Join(dir, "test-repo")
	err = os.MkdirAll(repoDir, 0755)
	if err != nil {
		t.Fatal(err)
	}
	mockGit := NewExtendedMockGit()
	processor := NewRepositoryProcessor(mockGit)
	repo := scm.Repo{
		Name:        "test-repo",
		URL:         "https://github.com/org/test-repo",
		CloneBranch: "main",
		HostPath:    repoDir,
	}
	repoNameWithCollisions := make(map[string]bool)
	processor.ProcessRepository(&repo, repoNameWithCollisions, false, "test-repo", 0)
	stats := processor.GetStats()
	if stats.CloneCount != 0 {
		t.Errorf("Expected clone count to be 0, got %d", stats.CloneCount)
	}
	if stats.PulledCount != 1 {
		t.Errorf("Expected pulled count to be 1, got %d", stats.PulledCount)
	}
	// Check that commit diff was calculated
	if stats.NewCommits != (mockGit.postCommitCount - mockGit.preCommitCount) {
		t.Errorf("Expected new commits to be %d, got %d",
			mockGit.postCommitCount-mockGit.preCommitCount, stats.NewCommits)
	}
	// Verify that CountDiff was properly calculated on the repo
	if repo.Commits.CountDiff != (mockGit.postCommitCount - mockGit.preCommitCount) {
		t.Errorf("Expected repo CountDiff to be %d, got %d",
			mockGit.postCommitCount-mockGit.preCommitCount, repo.Commits.CountDiff)
	}
}
// TestRepositoryProcessor_ProcessRepository_CloneError forces the mock's
// Clone to fail and verifies the failure is recorded as an error message
// rather than counted as a clone.
func TestRepositoryProcessor_ProcessRepository_CloneError(t *testing.T) {
	defer UnsetEnv("GHORG_")()
	dir, err := os.MkdirTemp("", "ghorg_test_process_clone_error")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	outputDirAbsolutePath = dir
	mockGit := NewExtendedMockGit()
	mockGit.shouldFailClone = true
	processor := NewRepositoryProcessor(mockGit)
	repo := scm.Repo{
		Name:        "test-repo",
		URL:         "https://github.com/org/test-repo",
		CloneBranch: "main",
	}
	repoNameWithCollisions := make(map[string]bool)
	processor.ProcessRepository(&repo, repoNameWithCollisions, false, "test-repo", 0)
	stats := processor.GetStats()
	if stats.CloneCount != 0 {
		t.Errorf("Expected clone count to be 0, got %d", stats.CloneCount)
	}
	if len(stats.CloneErrors) != 1 {
		t.Errorf("Expected 1 clone error, got %d", len(stats.CloneErrors))
	}
	if stats.CloneErrors[0] == "" {
		t.Error("Expected error message to be set")
	}
}
// TestRepositoryProcessor_ProcessRepository_WikiHandling forces a clone
// failure on a wiki repo and verifies it is downgraded to an info message
// instead of being recorded as an error.
func TestRepositoryProcessor_ProcessRepository_WikiHandling(t *testing.T) {
	defer UnsetEnv("GHORG_")()
	dir, err := os.MkdirTemp("", "ghorg_test_process_wiki")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	outputDirAbsolutePath = dir
	mockGit := NewExtendedMockGit()
	mockGit.shouldFailClone = true // Simulate wiki with no content
	processor := NewRepositoryProcessor(mockGit)
	repo := scm.Repo{
		Name:        "test-repo",
		URL:         "https://github.com/org/test-repo.wiki",
		CloneBranch: "main",
		IsWiki:      true,
	}
	repoNameWithCollisions := make(map[string]bool)
	processor.ProcessRepository(&repo, repoNameWithCollisions, false, "test-repo.wiki", 0)
	stats := processor.GetStats()
	if len(stats.CloneInfos) != 1 {
		t.Errorf("Expected 1 clone info message, got %d", len(stats.CloneInfos))
	}
	if len(stats.CloneErrors) != 0 {
		t.Errorf("Expected no clone errors for wiki, got %d", len(stats.CloneErrors))
	}
}
// TestRepositoryProcessor_ProcessRepository_BackupMode runs an existing repo
// through GHORG_BACKUP mode and verifies only the remote-update counter moves.
func TestRepositoryProcessor_ProcessRepository_BackupMode(t *testing.T) {
	defer UnsetEnv("GHORG_")()
	os.Setenv("GHORG_BACKUP", "true")
	// Set up temporary directory with existing repo
	dir, err := os.MkdirTemp("", "ghorg_test_backup_mode")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	outputDirAbsolutePath = dir
	// Create existing repo directory so the backup (update-remote) path runs
	repoDir := filepath.Join(dir, "test-repo")
	err = os.MkdirAll(repoDir, 0755)
	if err != nil {
		t.Fatal(err)
	}
	mockGit := NewExtendedMockGit()
	processor := NewRepositoryProcessor(mockGit)
	repo := scm.Repo{
		Name:        "test-repo",
		URL:         "https://github.com/org/test-repo",
		CloneBranch: "main",
		HostPath:    repoDir,
	}
	repoNameWithCollisions := make(map[string]bool)
	processor.ProcessRepository(&repo, repoNameWithCollisions, false, "test-repo", 0)
	stats := processor.GetStats()
	if stats.UpdateRemoteCount != 1 {
		t.Errorf("Expected update remote count to be 1, got %d", stats.UpdateRemoteCount)
	}
}
// TestRepositoryProcessor_ProcessRepository_NoCleanMode runs an existing repo
// through GHORG_NO_CLEAN mode and verifies it is still counted as pulled.
func TestRepositoryProcessor_ProcessRepository_NoCleanMode(t *testing.T) {
	defer UnsetEnv("GHORG_")()
	os.Setenv("GHORG_NO_CLEAN", "true")
	// Set up temporary directory with existing repo
	dir, err := os.MkdirTemp("", "ghorg_test_no_clean_mode")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	outputDirAbsolutePath = dir
	// Create existing repo directory so the fetch-only path runs
	repoDir := filepath.Join(dir, "test-repo")
	err = os.MkdirAll(repoDir, 0755)
	if err != nil {
		t.Fatal(err)
	}
	mockGit := NewExtendedMockGit()
	processor := NewRepositoryProcessor(mockGit)
	repo := scm.Repo{
		Name:        "test-repo",
		URL:         "https://github.com/org/test-repo",
		CloneBranch: "main",
		HostPath:    repoDir,
	}
	repoNameWithCollisions := make(map[string]bool)
	processor.ProcessRepository(&repo, repoNameWithCollisions, false, "test-repo", 0)
	stats := processor.GetStats()
	// In no-clean mode, we still increment pulled count
	if stats.PulledCount != 1 {
		t.Errorf("Expected pulled count to be 1, got %d", stats.PulledCount)
	}
}
// TestRepositoryProcessor_ProcessRepository_NameCollisions verifies that a
// colliding repo name is replaced by its path flattened with underscores
// when building the host directory.
func TestRepositoryProcessor_ProcessRepository_NameCollisions(t *testing.T) {
	defer UnsetEnv("GHORG_")()
	dir, err := os.MkdirTemp("", "ghorg_test_name_collisions")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	outputDirAbsolutePath = dir
	mockGit := NewExtendedMockGit()
	processor := NewRepositoryProcessor(mockGit)
	repo := scm.Repo{
		Name:        "test-repo",
		URL:         "https://github.com/org/test-repo",
		CloneBranch: "main",
		Path:        "group/subgroup/test-repo",
	}
	// Mark the repo name as colliding so the path-flattening branch runs
	repoNameWithCollisions := map[string]bool{
		"test-repo": true,
	}
	processor.ProcessRepository(&repo, repoNameWithCollisions, true, "test-repo", 1)
	// Check that the repo was processed despite collisions
	stats := processor.GetStats()
	if stats.CloneCount != 1 {
		t.Errorf("Expected clone count to be 1, got %d", stats.CloneCount)
	}
	// The host path should be modified due to collision handling
	expectedPath := filepath.Join(outputDirAbsolutePath, "group_subgroup_test-repo")
	if repo.HostPath != expectedPath {
		t.Errorf("Expected host path to be modified for collisions, got %s", repo.HostPath)
	}
}
// TestRepositoryProcessor_ProcessRepository_CrossPlatformPaths verifies that
// collision handling flattens both forward slashes and backslashes in repo
// paths to underscores, producing the same directory shape on any OS.
func TestRepositoryProcessor_ProcessRepository_CrossPlatformPaths(t *testing.T) {
	defer UnsetEnv("GHORG_")()
	dir, err := os.MkdirTemp("", "ghorg_test_cross_platform")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	outputDirAbsolutePath = dir
	mockGit := NewExtendedMockGit()
	processor := NewRepositoryProcessor(mockGit)
	// Test with forward slashes (Unix-style)
	repoUnix := scm.Repo{
		Name:        "test-repo",
		URL:         "https://github.com/org/test-repo",
		CloneBranch: "main",
		Path:        "group/subgroup/test-repo",
	}
	// Test with backslashes (Windows-style)
	repoWindows := scm.Repo{
		Name:        "test-repo2",
		URL:         "https://github.com/org/test-repo2",
		CloneBranch: "main",
		Path:        "group\\subgroup\\test-repo2",
	}
	repoNameWithCollisions := map[string]bool{
		"test-repo":  true,
		"test-repo2": true,
	}
	// Process Unix-style path
	processor.ProcessRepository(&repoUnix, repoNameWithCollisions, true, "test-repo", 0)
	expectedUnixPath := filepath.Join(outputDirAbsolutePath, "group_subgroup_test-repo")
	if repoUnix.HostPath != expectedUnixPath {
		t.Errorf("Expected Unix-style path to be %s, got %s", expectedUnixPath, repoUnix.HostPath)
	}
	// Process Windows-style path
	processor.ProcessRepository(&repoWindows, repoNameWithCollisions, true, "test-repo2", 1)
	expectedWindowsPath := filepath.Join(outputDirAbsolutePath, "group_subgroup_test-repo2")
	if repoWindows.HostPath != expectedWindowsPath {
		t.Errorf("Expected Windows-style path to be %s, got %s", expectedWindowsPath, repoWindows.HostPath)
	}
	stats := processor.GetStats()
	if stats.CloneCount != 2 {
		t.Errorf("Expected clone count to be 2, got %d", stats.CloneCount)
	}
}
// TestRepositoryProcessor_ProcessRepository_GitLabSnippets verifies host-path
// construction for GitLab snippets: nested snippets live under
// "<slug>.snippets/<title>-<id>" and root-level snippets under
// "_ghorg_root_level_snippets/<title>-<id>".
func TestRepositoryProcessor_ProcessRepository_GitLabSnippets(t *testing.T) {
	defer UnsetEnv("GHORG_")()
	dir, err := os.MkdirTemp("", "ghorg_test_gitlab_snippets")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	outputDirAbsolutePath = dir
	mockGit := NewExtendedMockGit()
	processor := NewRepositoryProcessor(mockGit)
	// Test regular snippet
	repo := scm.Repo{
		Name:            "test-repo",
		URL:             "https://gitlab.com/org/test-repo",
		CloneBranch:     "main",
		IsGitLabSnippet: true,
		GitLabSnippetInfo: scm.GitLabSnippet{
			Title:     "My Snippet",
			ID:        "123",
			URLOfRepo: "https://gitlab.com/org/test-repo.git",
		},
	}
	repoNameWithCollisions := make(map[string]bool)
	processor.ProcessRepository(&repo, repoNameWithCollisions, false, "test-repo", 0)
	expectedPath := filepath.Join(outputDirAbsolutePath, "test-repo.snippets", "My Snippet-123")
	if repo.HostPath != expectedPath {
		t.Errorf("Expected host path %s, got %s", expectedPath, repo.HostPath)
	}
	// Test root level snippet
	rootSnippetRepo := scm.Repo{
		Name:                     "root-snippet",
		URL:                      "https://gitlab.com/snippets/456",
		CloneBranch:              "main",
		IsGitLabSnippet:          true,
		IsGitLabRootLevelSnippet: true,
		GitLabSnippetInfo: scm.GitLabSnippet{
			Title: "Root Snippet",
			ID:    "456",
		},
	}
	processor.ProcessRepository(&rootSnippetRepo, repoNameWithCollisions, false, "root-snippet", 0)
	expectedRootPath := filepath.Join(outputDirAbsolutePath, "_ghorg_root_level_snippets", "Root Snippet-456")
	if rootSnippetRepo.HostPath != expectedRootPath {
		t.Errorf("Expected host path %s, got %s", expectedRootPath, rootSnippetRepo.HostPath)
	}
	stats := processor.GetStats()
	if stats.CloneCount != 2 {
		t.Errorf("Expected clone count to be 2, got %d", stats.CloneCount)
	}
}
func TestRepositoryProcessor_GetStats(t *testing.T) {
mockGit := NewExtendedMockGit()
processor := NewRepositoryProcessor(mockGit)
// Add some stats manually
processor.addError("test error")
processor.addInfo("test info")
stats := processor.GetStats()
if len(stats.CloneErrors) != 1 {
t.Errorf("Expected 1 clone error, got %d", len(stats.CloneErrors))
}
if stats.CloneErrors[0] != "test error" {
t.Errorf("Expected error message 'test error', got '%s'", stats.CloneErrors[0])
}
if len(stats.CloneInfos) != 1 {
t.Errorf("Expected 1 clone info, got %d", len(stats.CloneInfos))
}
if stats.CloneInfos[0] != "test info" {
t.Errorf("Expected info message 'test info', got '%s'", stats.CloneInfos[0])
}
}
// TestRepositoryProcessor_ThreadSafety exercises addError/addInfo from many
// goroutines concurrently and checks that no messages are lost (run with
// -race to catch unsynchronized access).
func TestRepositoryProcessor_ThreadSafety(t *testing.T) {
	defer UnsetEnv("GHORG_")()
	dir, err := os.MkdirTemp("", "ghorg_test_thread_safety")
	if err != nil {
		t.Fatal(err)
	}
	defer os.RemoveAll(dir)
	outputDirAbsolutePath = dir
	mockGit := NewExtendedMockGit()
	processor := NewRepositoryProcessor(mockGit)
	// Simulate concurrent access
	numGoroutines := 10
	done := make(chan bool, numGoroutines)
	for i := 0; i < numGoroutines; i++ {
		go func(index int) {
			// Label messages with a printable digit; the previous
			// string(rune(index)) produced unprintable control characters
			// for indices 0-9.
			suffix := string(rune('0' + index))
			processor.addError("error " + suffix)
			processor.addInfo("info " + suffix)
			done <- true
		}(i)
	}
	// Wait for all goroutines to complete
	for i := 0; i < numGoroutines; i++ {
		<-done
	}
	stats := processor.GetStats()
	if len(stats.CloneErrors) != numGoroutines {
		t.Errorf("Expected %d clone errors, got %d", numGoroutines, len(stats.CloneErrors))
	}
	if len(stats.CloneInfos) != numGoroutines {
		t.Errorf("Expected %d clone infos, got %d", numGoroutines, len(stats.CloneInfos))
	}
}

View File

@ -43,7 +43,7 @@ if [ -z "$(ls -A $HOME/ghorg/github.com/$GITHUB_ORG)" ]
then
echo "Pass: github org clone preserving scm hostname prune untouched"
else
echo "Fail: github org clone preserving scm hostnamey prune untouched"
echo "Fail: github org clone preserving scm hostname prune untouched"
exit 1
fi

View File

@ -1,25 +1,413 @@
# Local GitLab
# Refactored GitLab Integration Tests
Allows you to spin up GitLab locally with Docker to test cloning. Would eventually like to turn these into integration tests.
This directory contains the refactored GitLab integration test system that replaces the monolithic bash scripts with modular, maintainable Go-based tools.
For enterprise GitLab, start docker then run the following command from the root of the repo
## Overview
The refactored system consists of:
1. **Configuration-based Seeding**: JSON configuration files define the seed data
2. **Go-based Seeder**: A Go tool that reads configuration and creates GitLab resources
3. **Test Framework**: A Go-based test runner that executes configurable test scenarios
4. **Modular Scripts**: Clean shell scripts that orchestrate the components
## Directory Structure
```
./scripts/local-gitlab/start-ee.sh [STOP_GITLAB_WHEN_FINISHED? (Default: true)] [PERSIST_GITLAB_LOCALLY? (Default: false)]
scripts/local-gitlab/
├── configs/
│ ├── seed-data.json # Defines GitLab resources to create
│ └── test-scenarios.json # Defines integration test scenarios
├── seeder/
│ ├── main.go # Go-based seeder implementation
│ └── go.mod # Seeder dependencies
├── test-runner/
│ ├── main.go # Go-based test runner implementation
│ └── go.mod # Test runner dependencies
├── start-ee.sh # Refactored main entry point
├── seed.sh # New seeding script using Go seeder
├── integration-tests.sh # New test script using Go test runner
├── add-test-scenario.sh # Utility to add new test scenarios
└── README-refactored.md # This file
```
TODO: Do the same for the community edition of GitLab
## Quick Start
If running locally you'll also need to update your /etc/hosts
### Running All Tests (Refactored Version)
`echo "127.0.0.1 gitlab.example.com" >> /etc/hosts`
Once GitLab is running you can visit
http://gitlab.example.com in your browser
You can get the root token by running
```bash
# Run the refactored integration tests
./start-ee.sh
# Or with custom parameters
./start-ee.sh true false latest
```
docker exec -it gitlab grep 'Password:' /etc/gitlab/initial_root_password | awk '{print $2}'
## Script Arguments
### Quick Reference
| **Script** | **Arguments** | **Purpose** |
|---|---|---|
| `start-ee.sh` | 7 optional args | Main entry point - runs entire test suite |
| `seed.sh` | 3 optional args | Seeds GitLab with test data |
| `integration-tests.sh` | 3 optional args | Runs integration tests only |
| `run-ee.sh` | 4 optional args | Starts GitLab container (internal) |
### `start-ee.sh` Arguments
The main entry point script accepts up to 7 optional arguments. All arguments have sensible defaults if not provided.
**Usage:**
```bash
./start-ee.sh [STOP_GITLAB_WHEN_FINISHED] [PERSIST_GITLAB_LOCALLY] [GITLAB_IMAGE_TAG] [GITLAB_HOME] [GITLAB_HOST] [GITLAB_URL] [LOCAL_GITLAB_GHORG_DIR]
```
| **Argument** | **Default** | **Description** |
|---|---|---|
| `STOP_GITLAB_WHEN_FINISHED` | `'true'` | Whether to stop and remove the GitLab container after tests complete. Set to `'false'` to keep GitLab running for debugging. |
| `PERSIST_GITLAB_LOCALLY` | `'false'` | Whether to persist GitLab data locally across container restarts. Set to `'true'` to keep data between runs. |
| `GITLAB_IMAGE_TAG` | `'latest'` | GitLab Docker image tag to use. Can be specific version like `'16.4.0-ce.0'` or `'latest'`. |
| `GITLAB_HOME` | `"$HOME/ghorg/local-gitlab-ee-data-${GITLAB_IMAGE_TAG}"` | Directory where GitLab stores persistent data on the host machine. |
| `GITLAB_HOST` | `'gitlab.example.com'` | Hostname for the GitLab instance. Used for container networking and /etc/hosts entries. |
| `GITLAB_URL` | `'http://gitlab.example.com'` | Full URL to access the GitLab instance. Used by ghorg and the test tools. |
| `LOCAL_GITLAB_GHORG_DIR` | `"${HOME}/ghorg"` | Local directory where ghorg will clone repositories and store its working files. |
**Examples:**
```bash
# Default behavior - run tests and clean up
./start-ee.sh
# Keep GitLab running after tests for debugging
./start-ee.sh false
# Use specific GitLab version and keep it running
./start-ee.sh false false 16.4.0-ce.0
# Full custom configuration
./start-ee.sh true true latest /tmp/gitlab-data gitlab.local http://gitlab.local /tmp/ghorg
```
**Common Scenarios:**
```bash
# Development - keep GitLab running for multiple test iterations
./start-ee.sh false false latest
# CI/CD - use clean environment and cleanup afterwards (default)
./start-ee.sh true false latest
# Testing specific GitLab version
./start-ee.sh true false 16.3.0-ce.0
# Custom data persistence for repeated testing
./start-ee.sh false true latest /data/gitlab-persistent
```
### Individual Component Arguments
#### `seed.sh` Arguments
Seeds the GitLab instance with test data using the Go-based seeder.
**Usage:**
```bash
./seed.sh [API_TOKEN] [GITLAB_URL] [LOCAL_GITLAB_GHORG_DIR]
```
| **Argument** | **Default** | **Description** |
|---|---|---|
| `API_TOKEN` | `"password"` | GitLab API token for authentication (default root password) |
| `GITLAB_URL` | `"http://gitlab.example.com"` | Full URL to the GitLab instance |
| `LOCAL_GITLAB_GHORG_DIR` | `"${HOME}/ghorg"` | Directory where ghorg stores its configuration and temp files |
**Example:**
```bash
# Use defaults
./seed.sh
# Custom parameters
./seed.sh "my-token" "http://gitlab.local:8080" "/tmp/ghorg"
```
#### `integration-tests.sh` Arguments
Runs the integration tests using the Go-based test runner.
**Usage:**
```bash
./integration-tests.sh [LOCAL_GITLAB_GHORG_DIR] [API_TOKEN] [GITLAB_URL]
```
| **Argument** | **Default** | **Description** |
|---|---|---|
| `LOCAL_GITLAB_GHORG_DIR` | `"${HOME}/ghorg"` | Directory where ghorg will clone repositories for testing |
| `API_TOKEN` | `"password"` | GitLab API token for authentication |
| `GITLAB_URL` | `"http://gitlab.example.com"` | Full URL to the GitLab instance |
**Example:**
```bash
# Use defaults
./integration-tests.sh
# Custom parameters
./integration-tests.sh "/tmp/ghorg" "my-token" "http://gitlab.local:8080"
```
#### `run-ee.sh` Arguments (Internal)
Starts the GitLab Docker container. Called internally by `start-ee.sh`.
**Usage:**
```bash
./run-ee.sh [GITLAB_IMAGE_TAG] [GITLAB_HOME] [GITLAB_HOST] [PERSIST_GITLAB_LOCALLY]
```
| **Argument** | **Default** | **Description** |
|---|---|---|
| `GITLAB_IMAGE_TAG` | `"latest"` | GitLab Docker image tag |
| `GITLAB_HOME` | Dynamic | Host directory for GitLab data persistence |
| `GITLAB_HOST` | `"gitlab.example.com"` | Container hostname |
| `PERSIST_GITLAB_LOCALLY` | `"false"` | Whether to persist data between container restarts |
#### Go Tool Arguments (Direct Usage)
For advanced usage, you can run the Go tools directly:
**Seeder (`seeder/gitlab-seeder`)**:
```bash
./gitlab-seeder [flags]
-config string
Path to seed data configuration file (default "configs/seed-data.json")
-token string
GitLab API token (required)
-base-url string
GitLab base URL (required)
```
**Test Runner (`test-runner/gitlab-test-runner`)**:
```bash
./gitlab-test-runner [flags]
-config string
Path to test scenarios configuration file (default "configs/test-scenarios.json")
-token string
GitLab API token (required)
-base-url string
GitLab base URL (required)
-ghorg-dir string
Ghorg directory path (default "${HOME}/ghorg")
-test string
Run specific test by name (optional)
-list
List all available tests and exit
```
**Examples:**
```bash
# List all available test scenarios
./test-runner/gitlab-test-runner -list -token="password"
# Run specific test
./test-runner/gitlab-test-runner -test="all-groups-preserve-dir-output-dir" -token="password" -base-url="http://gitlab.example.com"
# Seed with custom config
./seeder/gitlab-seeder -config="my-seed-data.json" -token="password" -base-url="http://gitlab.example.com"
```
### Running Individual Components
```bash
# Seed GitLab instance only
./seed.sh "password" "http://gitlab.example.com" "${HOME}/ghorg"
# Run integration tests only (assumes seeded instance)
./integration-tests.sh "${HOME}/ghorg" "password" "http://gitlab.example.com"
```
## Configuration
### Seed Data Configuration (`configs/seed-data.json`)
Defines the GitLab resources to create during seeding:
```json
{
"groups": [
{
"name": "my-group",
"path": "my-group",
"description": "My test group",
"repositories": [
{
"name": "my-repo",
"initialize_with_readme": true,
"snippets": [
{
"title": "My Snippet",
"file_name": "test.txt",
"content": "Test content",
"visibility": "public"
}
]
}
],
"subgroups": [...]
}
],
"users": [...],
"root_user": {...},
"root_snippets": [...]
}
```
### Test Scenarios Configuration (`configs/test-scenarios.json`)
Defines the integration test scenarios:
```json
{
"test_scenarios": [
{
"name": "my-test-scenario",
"description": "Test description",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --output-dir=test-output",
"run_twice": true,
"setup_commands": ["git init {{.GhorgDir}}/test-setup"],
"verify_commands": ["test -d '{{.GhorgDir}}/test-output'"],
"expected_structure": [
"test-output/group1/repo1",
"test-output/group2/repo2"
]
}
]
}
```
## Adding New Seed Data
1. **Edit the configuration**: Modify `configs/seed-data.json` to add new groups, repositories, users, or snippets
2. **Test the changes**: Run `./seed.sh` to verify the new seed data is created correctly
### Example: Adding a New Group
```json
{
"name": "new-group",
"path": "new-group",
"description": "Description of the new group",
"repositories": [
{
"name": "new-repo",
"initialize_with_readme": true
}
]
}
```
## Adding New Test Scenarios
### Method 1: Use the Helper Script
```bash
./add-test-scenario.sh
```
This interactive script will guide you through creating a new test scenario.
### Method 2: Manual Configuration
1. Edit `configs/test-scenarios.json`
2. Add a new test scenario object to the `test_scenarios` array
3. Test with: `./test-runner/gitlab-test-runner -test="your-test-name"`
### Method 3: Programmatically
```bash
# Build the test runner
cd test-runner && go build -o gitlab-test-runner main.go
# List available tests
./gitlab-test-runner -list
# Run a specific test
./gitlab-test-runner -test="specific-test-name" -token="password" -base-url="http://gitlab.example.com"
```
## Template Variables
Both seeder and test runner support template variables:
- `{{.BaseURL}}` - GitLab base URL
- `{{.Token}}` - GitLab API token
- `{{.GhorgDir}}` - Ghorg directory path
## Development
### Building the Components
```bash
# Build seeder
cd seeder && go build -o gitlab-seeder main.go
# Build test runner
cd test-runner && go build -o gitlab-test-runner main.go
```
### Running Tests in Development
```bash
# Run specific test scenario
cd test-runner
go run main.go -test="all-groups-preserve-dir-output-dir" -token="password" -base-url="http://gitlab.example.com"
# List all available test scenarios
go run main.go -list -token="password"
```
## Advantages of Refactored System
1. **Maintainability**: Configuration-driven approach makes it easy to modify tests and seed data
2. **Modularity**: Separate components for seeding and testing
3. **Reusability**: Test scenarios can be easily copied and modified
4. **Better Error Handling**: Go-based tools provide clearer error messages
5. **Extensibility**: Easy to add new test scenarios or seed data configurations
6. **Documentation**: Clear separation of concerns and self-documenting configuration
## Migration from Old System
The refactored system is designed to be fully backward-compatible. The original scripts (`seed.sh`, `integration-tests.sh`, `start-ee.sh`) remain unchanged and continue to work.
To migrate to the refactored system:
1. Use `start-ee.sh` for the refactored system
2. All existing test scenarios have been converted to the new configuration format
3. The test results should be identical between old and new systems
## Troubleshooting
### Build Errors
```bash
# Ensure Go modules are downloaded
cd seeder && go mod download
cd test-runner && go mod download
```
### Test Failures
```bash
# Check GitLab is accessible
curl -I http://gitlab.example.com
# Verify seeding completed
./seeder/gitlab-seeder -token="password" -base-url="http://gitlab.example.com"
# Run specific failing test
./test-runner/gitlab-test-runner -test="failing-test-name" -token="password"
```
### Configuration Issues
```bash
# Validate JSON configuration
python3 -m json.tool configs/seed-data.json
python3 -m json.tool configs/test-scenarios.json
```

View File

@ -0,0 +1,148 @@
#!/bin/bash
set -euo pipefail

# Interactively add a new test scenario to configs/test-scenarios.json.
# Prompts for the scenario fields, assembles a JSON object in a temp file,
# previews it, and on confirmation merges it into the existing config
# with python3.
# Usage: ./add-test-scenario.sh

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
CONFIG_PATH="${SCRIPT_DIR}/configs/test-scenarios.json"

# Build the scenario in a unique temp file (a fixed /tmp path could be
# clobbered by concurrent runs) and clean it up on any exit.
NEW_SCENARIO="$(mktemp)"
trap 'rm -f "$NEW_SCENARIO"' EXIT

echo "=== Add New Test Scenario ==="
echo ""

# Prompt for test details.
# NOTE(review): answers are embedded verbatim in the generated JSON; a double
# quote or backslash in an answer will produce invalid JSON.
read -p "Test name (kebab-case, e.g., 'my-new-test'): " test_name
read -p "Test description: " test_description
read -p "Ghorg command (use {{.BaseURL}}, {{.Token}}, {{.GhorgDir}} for templating): " test_command
echo ""
echo "Run command twice? (for testing clone then pull)"
read -p "Run twice (y/n): " run_twice_input
run_twice=$(if [[ "$run_twice_input" =~ ^[Yy]$ ]]; then echo "true"; else echo "false"; fi)

echo ""
echo "Expected structure (relative paths from ghorg directory):"
echo "Enter paths one by one, empty line to finish:"
expected_structure=()
while true; do
    read -p "Path (or empty to finish): " path
    if [[ -z "$path" ]]; then
        break
    fi
    expected_structure+=("$path")
done

echo ""
echo "Setup commands (optional, executed before main command):"
echo "Enter commands one by one, empty line to finish:"
setup_commands=()
while true; do
    read -p "Setup command (or empty to finish): " cmd
    if [[ -z "$cmd" ]]; then
        break
    fi
    setup_commands+=("$cmd")
done

echo ""
echo "Verification commands (optional, executed after main command):"
echo "Enter commands one by one, empty line to finish:"
verify_commands=()
while true; do
    read -p "Verify command (or empty to finish): " cmd
    if [[ -z "$cmd" ]]; then
        break
    fi
    verify_commands+=("$cmd")
done

# Create the new test scenario JSON (required fields first; optional arrays
# are appended below only when non-empty).
cat > "$NEW_SCENARIO" << EOF
{
  "name": "$test_name",
  "description": "$test_description",
  "command": "$test_command",
  "run_twice": $run_twice,
EOF

# NOTE(review): with set -u, expanding ${#arr[@]} on an empty array errors on
# bash < 4.4 (e.g. macOS system bash 3.2) — confirm target bash version.

# Add setup commands if any
if [[ ${#setup_commands[@]} -gt 0 ]]; then
    echo '  "setup_commands": [' >> "$NEW_SCENARIO"
    for i in "${!setup_commands[@]}"; do
        if [[ $i -eq $((${#setup_commands[@]} - 1)) ]]; then
            echo "    \"${setup_commands[$i]}\"" >> "$NEW_SCENARIO"
        else
            echo "    \"${setup_commands[$i]}\"," >> "$NEW_SCENARIO"
        fi
    done
    echo '  ],' >> "$NEW_SCENARIO"
fi

# Add verify commands if any
if [[ ${#verify_commands[@]} -gt 0 ]]; then
    echo '  "verify_commands": [' >> "$NEW_SCENARIO"
    for i in "${!verify_commands[@]}"; do
        if [[ $i -eq $((${#verify_commands[@]} - 1)) ]]; then
            echo "    \"${verify_commands[$i]}\"" >> "$NEW_SCENARIO"
        else
            echo "    \"${verify_commands[$i]}\"," >> "$NEW_SCENARIO"
        fi
    done
    echo '  ],' >> "$NEW_SCENARIO"
fi

# Add expected structure (always emitted; last element gets no trailing comma)
echo '  "expected_structure": [' >> "$NEW_SCENARIO"
for i in "${!expected_structure[@]}"; do
    if [[ $i -eq $((${#expected_structure[@]} - 1)) ]]; then
        echo "    \"${expected_structure[$i]}\"" >> "$NEW_SCENARIO"
    else
        echo "    \"${expected_structure[$i]}\"," >> "$NEW_SCENARIO"
    fi
done
echo '  ]' >> "$NEW_SCENARIO"
echo '}' >> "$NEW_SCENARIO"

echo ""
echo "=== Preview of New Test Scenario ==="
cat "$NEW_SCENARIO"
echo ""
read -p "Add this test scenario to the configuration? (y/n): " confirm

if [[ "$confirm" =~ ^[Yy]$ ]]; then
    # Parse the current config and append the new scenario with python3 so
    # the merged file is re-serialized as valid, consistently indented JSON.
    python3 << EOF
import json

# Read current config
with open('$CONFIG_PATH', 'r') as f:
    config = json.load(f)

# Read new scenario
with open('$NEW_SCENARIO', 'r') as f:
    new_scenario = json.load(f)

# Add to scenarios
config['test_scenarios'].append(new_scenario)

# Write back
with open('$CONFIG_PATH', 'w') as f:
    json.dump(config, f, indent=2)

print(f"Added test scenario '{new_scenario['name']}' to configuration")
EOF
    echo "Test scenario added successfully!"
    echo "You can now run it with:"
    echo "  ./integration-tests.sh  # (runs all tests)"
    echo "  or"
    echo "  ./test-runner/gitlab-test-runner -test=\"$test_name\"  # (runs specific test)"
else
    echo "Test scenario was not added."
fi

# Temp file is removed by the EXIT trap.

View File

@ -0,0 +1,273 @@
{
"groups": [
{
"name": "local-gitlab-group1",
"path": "local-gitlab-group1",
"description": "Test group 1 for GitLab integration tests",
"repositories": [
{
"name": "baz0",
"initialize_with_readme": true
},
{
"name": "baz1",
"initialize_with_readme": true
},
{
"name": "baz2",
"initialize_with_readme": true
},
{
"name": "baz3",
"initialize_with_readme": true
}
]
},
{
"name": "local-gitlab-group2",
"path": "local-gitlab-group2",
"description": "Test group 2 for GitLab integration tests",
"repositories": [
{
"name": "baz0",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for baz0",
"file_name": "snippet.txt",
"content": "This is a snippet for baz0",
"visibility": "public"
}
]
},
{
"name": "baz1",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for baz1",
"file_name": "snippet.txt",
"content": "This is a snippet for baz1",
"visibility": "public"
}
]
},
{
"name": "baz2",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for baz2",
"file_name": "snippet.txt",
"content": "This is a snippet for baz2",
"visibility": "public"
}
]
},
{
"name": "baz3",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for baz3",
"file_name": "snippet.txt",
"content": "This is a snippet for baz3",
"visibility": "public"
}
]
}
]
},
{
"name": "local-gitlab-group3",
"path": "local-gitlab-group3",
"description": "Test group 3 with nested subgroups",
"subgroups": [
{
"name": "subgroup-a",
"path": "subgroup-a",
"description": "Subgroup A for testing nested structures",
"repositories": [
{
"name": "subgroup_a_repo_0",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for subgroup_a_repo_0",
"file_name": "snippet.txt",
"content": "This is a snippet for subgroup_a_repo_0",
"visibility": "public"
}
]
},
{
"name": "subgroup_a_repo_1",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for subgroup_a_repo_1",
"file_name": "snippet.txt",
"content": "This is a snippet for subgroup_a_repo_1",
"visibility": "public"
}
]
},
{
"name": "subgroup_a_repo_2",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for subgroup_a_repo_2",
"file_name": "snippet.txt",
"content": "This is a snippet for subgroup_a_repo_2",
"visibility": "public"
}
]
},
{
"name": "subgroup_a_repo_3",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for subgroup_a_repo_3",
"file_name": "snippet.txt",
"content": "This is a snippet for subgroup_a_repo_3",
"visibility": "public"
}
]
}
],
"subgroups": [
{
"name": "subgroup-b",
"path": "subgroup-b",
"description": "Nested subgroup B for testing deeper structures",
"repositories": [
{
"name": "subgroup_b_repo_0",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for subgroup_b_repo_0",
"file_name": "snippet.txt",
"content": "This is a snippet for subgroup_b_repo_0",
"visibility": "public"
}
]
},
{
"name": "subgroup_b_repo_1",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for subgroup_b_repo_1",
"file_name": "snippet.txt",
"content": "This is a snippet for subgroup_b_repo_1",
"visibility": "public"
}
]
},
{
"name": "subgroup_b_repo_2",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for subgroup_b_repo_2",
"file_name": "snippet.txt",
"content": "This is a snippet for subgroup_b_repo_2",
"visibility": "public"
}
]
},
{
"name": "subgroup_b_repo_3",
"initialize_with_readme": true,
"snippets": [
{
"title": "Snippet for subgroup_b_repo_3",
"file_name": "snippet.txt",
"content": "This is a snippet for subgroup_b_repo_3",
"visibility": "public"
}
]
}
]
}
]
}
]
}
],
"users": [
{
"username": "testuser1",
"email": "testuser1@example.com",
"password": "adminadmin1",
"name": "testuser1",
"repositories": [
{
"name": "testuser1-repo",
"initialize_with_readme": true,
"snippets": [
{
"title": "my-first-snippet",
"file_name": "snippet.txt",
"content": "This is my first snippet",
"visibility": "public"
}
]
}
]
},
{
"username": "testuser2",
"email": "testuser2@example.com",
"password": "adminadmin1",
"name": "testuser2"
}
],
"root_user": {
"repositories": [
{
"name": "rootrepos0",
"initialize_with_readme": true
},
{
"name": "rootrepos1",
"initialize_with_readme": true,
"snippets": [
{
"title": "my-first-snippet",
"file_name": "snippet.txt",
"content": "This is my first snippet",
"visibility": "public"
}
]
},
{
"name": "rootrepos2",
"initialize_with_readme": true
},
{
"name": "rootrepos3",
"initialize_with_readme": true
}
]
},
"root_snippets": [
{
"title": "snippet1",
"file_name": "file1",
"content": "content1",
"description": "description1",
"visibility": "public"
},
{
"title": "snippet2",
"file_name": "file2",
"content": "content2",
"description": "description2",
"visibility": "public"
}
]
}

View File

@ -0,0 +1,562 @@
{
"test_scenarios": [
{
"name": "all-groups-preserve-dir-output-dir",
"description": "Test cloning all groups with preserve dir and output dir",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir --output-dir=local-gitlab-latest-repos",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-repos/local-gitlab-group1/baz0",
"local-gitlab-latest-repos/local-gitlab-group1/baz1",
"local-gitlab-latest-repos/local-gitlab-group1/baz2",
"local-gitlab-latest-repos/local-gitlab-group1/baz3",
"local-gitlab-latest-repos/local-gitlab-group2/baz0",
"local-gitlab-latest-repos/local-gitlab-group2/baz1",
"local-gitlab-latest-repos/local-gitlab-group2/baz2",
"local-gitlab-latest-repos/local-gitlab-group2/baz3",
"local-gitlab-latest-repos/local-gitlab-group3/subgroup-a/subgroup-b",
"local-gitlab-latest-repos/local-gitlab-group3/subgroup-a/subgroup_a_repo_0",
"local-gitlab-latest-repos/local-gitlab-group3/subgroup-a/subgroup_a_repo_1",
"local-gitlab-latest-repos/local-gitlab-group3/subgroup-a/subgroup_a_repo_2",
"local-gitlab-latest-repos/local-gitlab-group3/subgroup-a/subgroup_a_repo_3"
]
},
{
"name": "all-groups-preserve-dir-output-dir-snippets",
"description": "Test cloning all groups with preserve dir, output dir, and snippets",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir --output-dir=local-gitlab-latest-repos-snippets --clone-snippets",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-repos-snippets/_ghorg_root_level_snippets",
"local-gitlab-latest-repos-snippets/local-gitlab-group1/baz0",
"local-gitlab-latest-repos-snippets/local-gitlab-group1/baz1",
"local-gitlab-latest-repos-snippets/local-gitlab-group1/baz2",
"local-gitlab-latest-repos-snippets/local-gitlab-group1/baz3",
"local-gitlab-latest-repos-snippets/local-gitlab-group2/baz0",
"local-gitlab-latest-repos-snippets/local-gitlab-group2/baz0.snippets",
"local-gitlab-latest-repos-snippets/local-gitlab-group2/baz1",
"local-gitlab-latest-repos-snippets/local-gitlab-group2/baz1.snippets",
"local-gitlab-latest-repos-snippets/local-gitlab-group2/baz2",
"local-gitlab-latest-repos-snippets/local-gitlab-group2/baz2.snippets",
"local-gitlab-latest-repos-snippets/local-gitlab-group2/baz3",
"local-gitlab-latest-repos-snippets/local-gitlab-group2/baz3.snippets",
"local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup-b",
"local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_0",
"local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_0.snippets",
"local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_1",
"local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_1.snippets",
"local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_2",
"local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_2.snippets",
"local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_3",
"local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_3.snippets"
]
},
{
"name": "all-groups-preserve-dir-output-dir-snippets-preserve-scm-hostname",
"description": "Test cloning all groups with preserve dir, output dir, snippets, and preserve SCM hostname",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir --output-dir=local-gitlab-latest-repos-snippets --clone-snippets --preserve-scm-hostname",
"run_twice": true,
"expected_structure": [
"gitlab.example.com/local-gitlab-latest-repos-snippets/_ghorg_root_level_snippets",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group1/baz0",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group1/baz1",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group1/baz2",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group1/baz3",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group2/baz0",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group2/baz0.snippets",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group2/baz1",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group2/baz1.snippets",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group2/baz2",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group2/baz2.snippets",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group2/baz3",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group2/baz3.snippets",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup-b",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_0",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_0.snippets",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_1",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_1.snippets",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_2",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_2.snippets",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_3",
"gitlab.example.com/local-gitlab-latest-repos-snippets/local-gitlab-group3/subgroup-a/subgroup_a_repo_3.snippets"
]
},
{
"name": "all-groups-preserve-dir-no-output",
"description": "Test cloning all groups with preserve dir and no output dir",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir",
"run_twice": true,
"expected_structure": [
"gitlab.example.com/local-gitlab-group1/baz0",
"gitlab.example.com/local-gitlab-group1/baz1",
"gitlab.example.com/local-gitlab-group1/baz2",
"gitlab.example.com/local-gitlab-group1/baz3"
]
},
{
"name": "all-groups-flat-output-dir",
"description": "Test cloning all groups to flat output directory",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-repos-flat",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-repos-flat/local-gitlab-group1_baz0",
"local-gitlab-latest-repos-flat/local-gitlab-group1_baz1",
"local-gitlab-latest-repos-flat/local-gitlab-group1_baz2",
"local-gitlab-latest-repos-flat/local-gitlab-group1_baz3",
"local-gitlab-latest-repos-flat/local-gitlab-group2_baz0",
"local-gitlab-latest-repos-flat/local-gitlab-group2_baz1",
"local-gitlab-latest-repos-flat/local-gitlab-group2_baz2",
"local-gitlab-latest-repos-flat/local-gitlab-group2_baz3",
"local-gitlab-latest-repos-flat/subgroup_a_repo_0",
"local-gitlab-latest-repos-flat/subgroup_a_repo_1",
"local-gitlab-latest-repos-flat/subgroup_a_repo_2",
"local-gitlab-latest-repos-flat/subgroup_a_repo_3",
"local-gitlab-latest-repos-flat/subgroup_b_repo_0",
"local-gitlab-latest-repos-flat/subgroup_b_repo_1",
"local-gitlab-latest-repos-flat/subgroup_b_repo_2",
"local-gitlab-latest-repos-flat/subgroup_b_repo_3"
]
},
{
"name": "all-groups-flat-snippets",
"description": "Test cloning all groups to flat output directory with snippets",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-repos-all-groups-snippets --clone-snippets",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-repos-all-groups-snippets/_ghorg_root_level_snippets",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group1_baz0",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group1_baz1",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group1_baz2",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group1_baz3",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group2_baz0",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group2_baz0.snippets",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group2_baz1",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group2_baz1.snippets",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group2_baz2",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group2_baz2.snippets",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group2_baz3",
"local-gitlab-latest-repos-all-groups-snippets/local-gitlab-group2_baz3.snippets",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_a_repo_0",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_a_repo_0.snippets",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_a_repo_1",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_a_repo_1.snippets",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_a_repo_2",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_a_repo_2.snippets",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_a_repo_3",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_a_repo_3.snippets",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_b_repo_0",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_b_repo_0.snippets",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_b_repo_1",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_b_repo_1.snippets",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_b_repo_2",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_b_repo_2.snippets",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_b_repo_3",
"local-gitlab-latest-repos-all-groups-snippets/subgroup_b_repo_3.snippets"
]
},
{
"name": "all-groups-clone-wiki",
"description": "Test cloning all groups with wiki",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --clone-wiki --output-dir=local-gitlab-latest-repos-flat-wiki",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group1_baz0",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group1_baz0.wiki",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group1_baz1",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group1_baz1.wiki",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group1_baz2",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group1_baz2.wiki",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group1_baz3",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group1_baz3.wiki",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group2_baz0",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group2_baz0.wiki",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group2_baz1",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group2_baz1.wiki",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group2_baz2",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group2_baz2.wiki",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group2_baz3",
"local-gitlab-latest-repos-flat-wiki/local-gitlab-group2_baz3.wiki",
"local-gitlab-latest-repos-flat-wiki/subgroup_a_repo_0",
"local-gitlab-latest-repos-flat-wiki/subgroup_a_repo_0.wiki",
"local-gitlab-latest-repos-flat-wiki/subgroup_a_repo_1",
"local-gitlab-latest-repos-flat-wiki/subgroup_a_repo_1.wiki",
"local-gitlab-latest-repos-flat-wiki/subgroup_a_repo_2",
"local-gitlab-latest-repos-flat-wiki/subgroup_a_repo_2.wiki",
"local-gitlab-latest-repos-flat-wiki/subgroup_a_repo_3",
"local-gitlab-latest-repos-flat-wiki/subgroup_a_repo_3.wiki",
"local-gitlab-latest-repos-flat-wiki/subgroup_b_repo_0",
"local-gitlab-latest-repos-flat-wiki/subgroup_b_repo_0.wiki",
"local-gitlab-latest-repos-flat-wiki/subgroup_b_repo_1",
"local-gitlab-latest-repos-flat-wiki/subgroup_b_repo_1.wiki",
"local-gitlab-latest-repos-flat-wiki/subgroup_b_repo_2",
"local-gitlab-latest-repos-flat-wiki/subgroup_b_repo_2.wiki",
"local-gitlab-latest-repos-flat-wiki/subgroup_b_repo_3",
"local-gitlab-latest-repos-flat-wiki/subgroup_b_repo_3.wiki"
]
},
{
"name": "all-groups-flat-wiki-snippets",
"description": "Test cloning all groups with wiki and snippets to flat output directory",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --clone-wiki --clone-snippets --output-dir=local-gitlab-latest-repos-flat-wiki-snippets",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-repos-flat-wiki-snippets/_ghorg_root_level_snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group1_baz0",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group1_baz0.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group1_baz1",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group1_baz1.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group1_baz2",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group1_baz2.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group1_baz3",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group1_baz3.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz0",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz0.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz0.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz1",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz1.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz1.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz2",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz2.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz2.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz3",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz3.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/local-gitlab-group2_baz3.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_0",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_0.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_0.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_1",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_1.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_1.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_2",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_2.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_2.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_3",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_3.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_a_repo_3.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_0",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_0.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_0.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_1",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_1.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_1.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_2",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_2.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_2.wiki",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_3",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_3.snippets",
"local-gitlab-latest-repos-flat-wiki-snippets/subgroup_b_repo_3.wiki"
]
},
{
"name": "all-groups-backup-wiki",
"description": "Test cloning all groups with backup and wiki",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --backup --clone-wiki --output-dir=local-gitlab-latest-backup",
"run_twice": true,
"expected_structure": []
},
{
"name": "single-user-snippets-prune-untouched",
"description": "Test cloning single user with snippets and prune untouched",
"command": "ghorg clone root --clone-type=user --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --clone-snippets --output-dir=local-gitlab-latest-root-user-repos-snippets --prune-untouched --prune-untouched-no-confirm",
"run_twice": false,
"expected_structure": []
},
{
"name": "top-level-group-backup",
"description": "Test cloning top level group with backup",
"command": "ghorg clone local-gitlab-group1 --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --backup --output-dir=local-gitlab-latest-group1-backup",
"run_twice": true,
"expected_structure": []
},
{
"name": "top-level-group-basic-no-test",
"description": "Test cloning top level group basic (no structure verification)",
"command": "ghorg clone local-gitlab-group1 --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-group1",
"run_twice": true,
"expected_structure": []
},
{
"name": "single-group-basic-test",
"description": "Test cloning a single top-level group",
"command": "ghorg clone local-gitlab-group3 --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-top-level-group",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-top-level-group/subgroup_a_repo_0",
"local-gitlab-latest-top-level-group/subgroup_a_repo_1",
"local-gitlab-latest-top-level-group/subgroup_a_repo_2",
"local-gitlab-latest-top-level-group/subgroup_a_repo_3",
"local-gitlab-latest-top-level-group/subgroup_b_repo_0",
"local-gitlab-latest-top-level-group/subgroup_b_repo_1",
"local-gitlab-latest-top-level-group/subgroup_b_repo_2",
"local-gitlab-latest-top-level-group/subgroup_b_repo_3"
]
},
{
"name": "top-level-group-prune-untouched",
"description": "Test cloning top level group with prune untouched",
"command": "ghorg clone local-gitlab-group3 --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-top-level-group --prune-untouched --prune-untouched-no-confirm",
"run_twice": false,
"expected_structure": []
},
{
"name": "top-level-group-preserve-scm-hostname",
"description": "Test cloning top level group with preserve SCM hostname",
"command": "ghorg clone local-gitlab-group3 --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-top-level-group --preserve-scm-hostname",
"run_twice": true,
"expected_structure": [
"gitlab.example.com/local-gitlab-latest-top-level-group/subgroup_a_repo_0",
"gitlab.example.com/local-gitlab-latest-top-level-group/subgroup_a_repo_1",
"gitlab.example.com/local-gitlab-latest-top-level-group/subgroup_a_repo_2",
"gitlab.example.com/local-gitlab-latest-top-level-group/subgroup_a_repo_3",
"gitlab.example.com/local-gitlab-latest-top-level-group/subgroup_b_repo_0",
"gitlab.example.com/local-gitlab-latest-top-level-group/subgroup_b_repo_1",
"gitlab.example.com/local-gitlab-latest-top-level-group/subgroup_b_repo_2",
"gitlab.example.com/local-gitlab-latest-top-level-group/subgroup_b_repo_3"
]
},
{
"name": "top-level-group-nested-preserve-dir-output",
"description": "Test cloning top level group with nested subgroup, preserve dir, output dir",
"command": "ghorg clone local-gitlab-group3 --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir --output-dir=local-gitlab-latest-group3-preserve",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-group3-preserve/subgroup-a/subgroup-b",
"local-gitlab-latest-group3-preserve/subgroup-a/subgroup_a_repo_0",
"local-gitlab-latest-group3-preserve/subgroup-a/subgroup_a_repo_1",
"local-gitlab-latest-group3-preserve/subgroup-a/subgroup_a_repo_2",
"local-gitlab-latest-group3-preserve/subgroup-a/subgroup_a_repo_3"
]
},
{
"name": "top-level-group-nested-output-dir",
"description": "Test cloning top level group with nested subgroup, output dir",
"command": "ghorg clone local-gitlab-group3 --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-group3",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-group3/subgroup_a_repo_0",
"local-gitlab-latest-group3/subgroup_a_repo_1",
"local-gitlab-latest-group3/subgroup_a_repo_2",
"local-gitlab-latest-group3/subgroup_a_repo_3",
"local-gitlab-latest-group3/subgroup_b_repo_0",
"local-gitlab-latest-group3/subgroup_b_repo_1",
"local-gitlab-latest-group3/subgroup_b_repo_2",
"local-gitlab-latest-group3/subgroup_b_repo_3"
]
},
{
"name": "top-level-group-nested-preserve-dir",
"description": "Test cloning top level group with nested subgroup, preserve dir",
"command": "ghorg clone local-gitlab-group3 --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir",
"run_twice": true,
"expected_structure": [
"local-gitlab-group3/subgroup-a/subgroup-b",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_0",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_1",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_2",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_3"
]
},
{
"name": "subgroup-test",
"description": "Test cloning a subgroup with nested subgroups",
"command": "ghorg clone local-gitlab-group3/subgroup-a --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}}",
"run_twice": true,
"expected_structure": [
"local-gitlab-group3/subgroup-a/subgroup_a_repo_0",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_1",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_2",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_3",
"local-gitlab-group3/subgroup-a/subgroup_b_repo_0",
"local-gitlab-group3/subgroup-a/subgroup_b_repo_1",
"local-gitlab-group3/subgroup-a/subgroup_b_repo_2",
"local-gitlab-group3/subgroup-a/subgroup_b_repo_3"
]
},
{
"name": "subgroup-nested-preserve-dir",
"description": "Test cloning subgroup with nested subgroups, preserve dir",
"command": "ghorg clone local-gitlab-group3/subgroup-a --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir",
"run_twice": true,
"expected_structure": [
"local-gitlab-group3/subgroup-a/subgroup-b",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_0",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_1",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_2",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_3"
]
},
{
"name": "subgroup-nested-output-dir",
"description": "Test cloning subgroup with nested subgroups, output dir",
"command": "ghorg clone local-gitlab-group3/subgroup-a --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-group3-subgroup-a",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-group3-subgroup-a/subgroup_a_repo_0",
"local-gitlab-latest-group3-subgroup-a/subgroup_a_repo_1",
"local-gitlab-latest-group3-subgroup-a/subgroup_a_repo_2",
"local-gitlab-latest-group3-subgroup-a/subgroup_a_repo_3",
"local-gitlab-latest-group3-subgroup-a/subgroup_b_repo_0",
"local-gitlab-latest-group3-subgroup-a/subgroup_b_repo_1",
"local-gitlab-latest-group3-subgroup-a/subgroup_b_repo_2",
"local-gitlab-latest-group3-subgroup-a/subgroup_b_repo_3"
]
},
{
"name": "subgroup-nested-output-preserve-scm",
"description": "Test cloning subgroup with nested subgroups, output dir, preserve SCM hostname",
"command": "ghorg clone local-gitlab-group3/subgroup-a --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-group3-subgroup-a --preserve-scm-hostname",
"run_twice": true,
"expected_structure": [
"gitlab.example.com/local-gitlab-latest-group3-subgroup-a/subgroup_a_repo_0",
"gitlab.example.com/local-gitlab-latest-group3-subgroup-a/subgroup_a_repo_1",
"gitlab.example.com/local-gitlab-latest-group3-subgroup-a/subgroup_a_repo_2",
"gitlab.example.com/local-gitlab-latest-group3-subgroup-a/subgroup_a_repo_3",
"gitlab.example.com/local-gitlab-latest-group3-subgroup-a/subgroup_b_repo_0",
"gitlab.example.com/local-gitlab-latest-group3-subgroup-a/subgroup_b_repo_1",
"gitlab.example.com/local-gitlab-latest-group3-subgroup-a/subgroup_b_repo_2",
"gitlab.example.com/local-gitlab-latest-group3-subgroup-a/subgroup_b_repo_3"
]
},
{
"name": "subgroup-nested-no-output-preserve-scm",
"description": "Test cloning subgroup with nested subgroups, no output dir, preserve SCM hostname",
"command": "ghorg clone local-gitlab-group3/subgroup-a --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-scm-hostname",
"run_twice": true,
"expected_structure": [
"gitlab.example.com/local-gitlab-group3/subgroup-a/subgroup_a_repo_0",
"gitlab.example.com/local-gitlab-group3/subgroup-a/subgroup_a_repo_1",
"gitlab.example.com/local-gitlab-group3/subgroup-a/subgroup_a_repo_2",
"gitlab.example.com/local-gitlab-group3/subgroup-a/subgroup_a_repo_3",
"gitlab.example.com/local-gitlab-group3/subgroup-a/subgroup_b_repo_0",
"gitlab.example.com/local-gitlab-group3/subgroup-a/subgroup_b_repo_1",
"gitlab.example.com/local-gitlab-group3/subgroup-a/subgroup_b_repo_2",
"gitlab.example.com/local-gitlab-group3/subgroup-a/subgroup_b_repo_3"
]
},
{
"name": "subgroup-nested-preserve-output",
"description": "Test cloning subgroup with nested subgroups, preserve dir, output dir",
"command": "ghorg clone local-gitlab-group3/subgroup-a --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir --output-dir=local-gitlab-latest-subgroups-preserve-output",
"run_twice": true,
"expected_structure": [
"local-gitlab-latest-subgroups-preserve-output/local-gitlab-group3/subgroup-a/subgroup-b",
"local-gitlab-latest-subgroups-preserve-output/local-gitlab-group3/subgroup-a/subgroup_a_repo_0",
"local-gitlab-latest-subgroups-preserve-output/local-gitlab-group3/subgroup-a/subgroup_a_repo_1",
"local-gitlab-latest-subgroups-preserve-output/local-gitlab-group3/subgroup-a/subgroup_a_repo_2",
"local-gitlab-latest-subgroups-preserve-output/local-gitlab-group3/subgroup-a/subgroup_a_repo_3"
]
},
{
"name": "all-users-preserve-dir-basic",
"description": "Test cloning all users with preserve dir",
"command": "ghorg clone all-users --scm=gitlab --clone-type=user --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir",
"run_twice": false,
"expected_structure": [
"gitlab.example.com/root/rootrepos0",
"gitlab.example.com/root/rootrepos1",
"gitlab.example.com/root/rootrepos2",
"gitlab.example.com/root/rootrepos3"
]
},
{
"name": "all-users-preserve-dir-scm-hostname",
"description": "Test cloning all users with preserve dir and preserve SCM hostname",
"command": "ghorg clone all-users --scm=gitlab --clone-type=user --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir --preserve-scm-hostname",
"run_twice": false,
"expected_structure": [
"gitlab.example.com/root/rootrepos0",
"gitlab.example.com/root/rootrepos1",
"gitlab.example.com/root/rootrepos2",
"gitlab.example.com/root/rootrepos3"
]
},
{
"name": "all-users-preserve-dir-output",
"description": "Test cloning all users with preserve dir and output dir",
"command": "ghorg clone all-users --scm=gitlab --clone-type=user --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-all-users-preserve --preserve-dir",
"run_twice": false,
"expected_structure": [
"local-gitlab-latest-all-users-preserve/root/rootrepos0",
"local-gitlab-latest-all-users-preserve/root/rootrepos1",
"local-gitlab-latest-all-users-preserve/root/rootrepos2",
"local-gitlab-latest-all-users-preserve/root/rootrepos3"
]
},
{
"name": "all-users-test",
"description": "Test cloning all users",
"command": "ghorg clone all-users --scm=gitlab --clone-type=user --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-all-users",
"run_twice": false,
"expected_structure": [
"local-gitlab-latest-all-users/rootrepos0",
"local-gitlab-latest-all-users/rootrepos1",
"local-gitlab-latest-all-users/rootrepos2",
"local-gitlab-latest-all-users/rootrepos3",
"local-gitlab-latest-all-users/testuser1-repo"
]
},
{
"_comment": "COMMENTED OUT TEST FROM ORIGINAL SCRIPT - TODO FIXME",
"name": "root-level-snippets-test-disabled",
"description": "Test root level snippets (DISABLED - was commented in original)",
"command": "ghorg clone all-groups --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir --clone-snippets --output-dir=local-gitlab-latest-snippets-preserve-dir-output-dir-all-groups",
"run_twice": true,
"disabled": true,
"expected_structure": [
"local-gitlab-latest-snippets-preserve-dir-output-dir-all-groups/_ghorg_root_level_snippets/snippet1-2",
"local-gitlab-latest-snippets-preserve-dir-output-dir-all-groups/_ghorg_root_level_snippets/snippet2-3"
]
},
{
"_comment": "COMMENTED OUT TEST FROM ORIGINAL SCRIPT",
"name": "top-level-group-preserve-snippets-disabled",
"description": "Test cloning top level group with nested subgroup, preserve dir, snippets (DISABLED - was commented in original)",
"command": "ghorg clone local-gitlab-group3 --scm=gitlab --base-url={{.BaseURL}} --token={{.Token}} --preserve-dir --clone-snippets --output-dir=local-gitlab-latest-group-3-perserve-snippets",
"run_twice": true,
"disabled": true,
"expected_structure": [
"local-gitlab-group3/subgroup-a/subgroup-b",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_0",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_1",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_2",
"local-gitlab-group3/subgroup-a/subgroup_a_repo_3"
]
},
{
"_comment": "COMMENTED OUT TEST FROM ORIGINAL SCRIPT",
"name": "all-users-snippets-disabled",
"description": "Test cloning all users with output dir and snippets (DISABLED - was commented in original)",
"command": "ghorg clone all-users --scm=gitlab --clone-type=user --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-all-users-snippets --clone-snippets",
"run_twice": false,
"disabled": true,
"expected_structure": [
"local-gitlab-latest-all-users-snippets/_ghorg_root_level_snippets",
"local-gitlab-latest-all-users-snippets/rootrepos0",
"local-gitlab-latest-all-users-snippets/rootrepos1",
"local-gitlab-latest-all-users-snippets/rootrepos1.snippets",
"local-gitlab-latest-all-users-snippets/rootrepos2",
"local-gitlab-latest-all-users-snippets/rootrepos3",
"local-gitlab-latest-all-users-snippets/testuser1-repo",
"local-gitlab-latest-all-users-snippets/testuser1-repo.snippets"
]
},
{
"_comment": "COMMENTED OUT TEST FROM ORIGINAL SCRIPT",
"name": "all-users-snippets-preserve-scm-disabled",
"description": "Test cloning all users with output dir, snippets, and preserve SCM hostname (DISABLED - was commented in original)",
"command": "ghorg clone all-users --scm=gitlab --clone-type=user --base-url={{.BaseURL}} --token={{.Token}} --output-dir=local-gitlab-latest-all-users-snippets --clone-snippets --preserve-scm-hostname",
"run_twice": false,
"disabled": true,
"expected_structure": [
"gitlab.example.com/local-gitlab-latest-all-users-snippets/_ghorg_root_level_snippets",
"gitlab.example.com/local-gitlab-latest-all-users-snippets/rootrepos0",
"gitlab.example.com/local-gitlab-latest-all-users-snippets/rootrepos1",
"gitlab.example.com/local-gitlab-latest-all-users-snippets/rootrepos1.snippets",
"gitlab.example.com/local-gitlab-latest-all-users-snippets/rootrepos2",
"gitlab.example.com/local-gitlab-latest-all-users-snippets/rootrepos3",
"gitlab.example.com/local-gitlab-latest-all-users-snippets/testuser1-repo",
"gitlab.example.com/local-gitlab-latest-all-users-snippets/testuser1-repo.snippets"
]
}
]
}

File diff suppressed because it is too large Load Diff

View File

@ -19,7 +19,7 @@ PERSIST_GITLAB_LOCALLY=$4
echo ""
echo "Starting fresh install of GitLab Enterprise Edition, using tag: ${GITLAB_IMAGE_TAG}"
if [ "${GHORG_GHA_CI}" == "true" ]; then
if [ "${GHORG_GHA_CI:-}" == "true" ]; then
GHORG_SSH_PORT=2222
else
GHORG_SSH_PORT=22

View File

@ -1,243 +1,64 @@
#! /bin/bash
#!/bin/bash
set -xv
set -euo pipefail
# https://docs.gitlab.com/ee/install/docker.html#install-gitlab-using-docker-engine
# New Go-based seeding script
# Usage: ./seed.sh <TOKEN> <GITLAB_URL> <LOCAL_GITLAB_GHORG_DIR>
TOKEN=$1
GITLAB_URL=$2
LOCAL_GITLAB_GHORG_DIR=$3
LOCAL_GITLAB_GHORG_DIR=${3:-"${HOME}/ghorg"}
# Create 3 groups, namespace_id will start at 2 (same thing as Group ID you can find in the UI)
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data '{"path": "local-gitlab-group1", "name": "local-gitlab-group1" }' \
"${GITLAB_URL}/api/v4/groups"
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
SEEDER_DIR="${SCRIPT_DIR}/seeder"
CONFIG_PATH="${SCRIPT_DIR}/configs/seed-data.json"
echo ""
echo ""
echo ""
sleep 1
echo "Starting GitLab seeding with Go-based seeder..."
echo "GitLab URL: ${GITLAB_URL}"
echo "Config: ${CONFIG_PATH}"
GROUP1_NAMESPACE_ID=$(curl --request GET --header "PRIVATE-TOKEN: $TOKEN" \
"${GITLAB_URL}/api/v4/namespaces/local-gitlab-group1" | jq '.id')
# Build the seeder if it doesn't exist or if source files are newer
SEEDER_BINARY="${SEEDER_DIR}/gitlab-seeder"
# Force rebuild in CI environments or if binary doesn't exist or is newer
FORCE_BUILD=false
if [[ "${CI:-}" == "true" ]] || [[ "${GITHUB_ACTIONS:-}" == "true" ]]; then
echo "CI environment detected - forcing clean build of seeder..."
FORCE_BUILD=true
fi
echo ""
echo ""
echo ""
sleep 1
if [[ ! -f "${SEEDER_BINARY}" ]] || [[ "${SEEDER_DIR}/main.go" -nt "${SEEDER_BINARY}" ]] || [[ "${FORCE_BUILD}" == "true" ]]; then
echo "Building GitLab seeder..."
cd "${SEEDER_DIR}"
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data '{"path": "local-gitlab-group2", "name": "local-gitlab-group2" }' \
"${GITLAB_URL}/api/v4/groups"
# Remove existing binary to ensure clean build
rm -f gitlab-seeder
echo ""
echo ""
echo ""
sleep 1
go mod download
go build -o gitlab-seeder main.go
GROUP2_NAMESPACE_ID=$(curl --request GET --header "PRIVATE-TOKEN: $TOKEN" \
"${GITLAB_URL}/api/v4/namespaces/local-gitlab-group2" | jq '.id')
# Verify binary was created and is executable
if [[ ! -f "gitlab-seeder" ]]; then
echo "Error: Failed to build gitlab-seeder binary"
exit 1
fi
chmod +x gitlab-seeder
cd -
fi
echo ""
echo ""
echo ""
sleep 1
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data '{"path": "local-gitlab-group3", "name": "local-gitlab-group3" }' \
"${GITLAB_URL}/api/v4/groups"
echo ""
echo ""
echo ""
sleep 1
GROUP3_NAMESPACE_ID=$(curl --request GET --header "PRIVATE-TOKEN: $TOKEN" \
"${GITLAB_URL}/api/v4/namespaces/local-gitlab-group3" | jq '.id')
echo ""
echo ""
echo ""
sleep 1
# group3/subgroup-a
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data '{"path": "subgroup-a", "name": "subgroup-a" }' \
"${GITLAB_URL}/api/v4/groups?parent_id=${GROUP3_NAMESPACE_ID}"
echo ""
echo ""
echo ""
sleep 1
GROUP3_SUBGROUPA_NAMESPACE_ID=$(curl --request GET --header "PRIVATE-TOKEN: $TOKEN" \
"${GITLAB_URL}/api/v4/namespaces/local-gitlab-group3%2Fsubgroup-a" | jq '.id')
echo ""
echo ""
echo ""
sleep 1
# group3/subgroup-a/subgroup-b
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data '{"path": "subgroup-b", "name": "subgroup-b" }' \
"${GITLAB_URL}/api/v4/groups?parent_id=${GROUP3_SUBGROUPA_NAMESPACE_ID}"
echo ""
echo ""
echo ""
sleep 2
GROUP3_SUBGROUPA_SUBGROUPB_NAMESPACE_ID=$(curl --request GET --header "PRIVATE-TOKEN: $TOKEN" \
"${GITLAB_URL}/api/v4/namespaces/local-gitlab-group3%2Fsubgroup-a%2Fsubgroup-b" | jq '.id')
echo ""
echo ""
echo ""
sleep 1
# Create 2 users
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data '{"email": "testuser1@example.com", "password": "adminadmin1","name": "testuser1","username": "testuser1"}' \
"${GITLAB_URL}/api/v4/users"
echo ""
echo ""
echo ""
sleep 1
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data '{"email": "testuser2@example.com", "password": "adminadmin1","name": "testuser2","username": "testuser2"}' \
"${GITLAB_URL}/api/v4/users"
echo ""
echo ""
echo ""
sleep 1
# create repos for root user
for ((a=0; a <= 3 ; a++))
do
curl --header "PRIVATE-TOKEN: $TOKEN" -X POST "${GITLAB_URL}/api/v4/projects?name=rootrepos${a}&initialize_with_readme=true"
done
sleep 1
SNIPPET_DATA='{"title": "my-first-snippet", "file_name": "snippet.txt", "content": "This is my first snippet", "visibility": "public"}'
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data "${SNIPPET_DATA}" \
"${GITLAB_URL}/api/v4/projects/root%2Frootrepos1/snippets"
echo -e "\n\n\n"
echo ""
echo ""
echo ""
sleep 1
# create a repo for testuser1, this user has an id of 2
curl --header "PRIVATE-TOKEN: $TOKEN" -X POST "${GITLAB_URL}/api/v4/projects/user/2?name=testuser1-repo&initialize_with_readme=true"
echo -e "\n\n\n"
sleep 1
# create a snippet for testuser1's repo
SNIPPET_DATA='{"title": "my-first-snippet", "file_name": "snippet.txt", "content": "This is my first snippet", "visibility": "public"}'
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data "${SNIPPET_DATA}" \
"${GITLAB_URL}/api/v4/projects/testuser1%2Ftestuser1-repo/snippets"
echo -e "\n\n\n"
sleep 1
# create repos in group1
for ((a=0; a <= 3 ; a++))
do
curl --header "PRIVATE-TOKEN: $TOKEN" -X POST "${GITLAB_URL}/api/v4/projects?name=baz${a}&namespace_id=${GROUP1_NAMESPACE_ID}&initialize_with_readme=true"
done
echo ""
echo ""
echo ""
sleep 1
# create snippets at the root level
for ((a=1; a <= 2 ; a++))
do
curl --header "PRIVATE-TOKEN: $TOKEN" -X POST "${GITLAB_URL}/api/v4/snippets?title=snippet${a}&file_name=file${a}&content=content${a}&description=description${a}&visibility=public"
done
echo ""
echo ""
echo ""
sleep 1
# create repos and snippets in group2
for ((a=0; a <= 3 ; a++))
do
curl --header "PRIVATE-TOKEN: $TOKEN" -X POST "${GITLAB_URL}/api/v4/projects?name=baz${a}&namespace_id=${GROUP2_NAMESPACE_ID}&initialize_with_readme=true"
sleep 1
# Create non-empty snippet for the repo
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data '{"title": "Snippet for subgroup_a_repo_'${a}'", "file_name": "snippet.txt", "content": "This is a snippet for subgroup_a_repo_'${a}'", "visibility": "public"}' \
"${GITLAB_URL}/api/v4/projects/local-gitlab-group2%2Fbaz${a}/snippets"
done
echo ""
echo ""
echo ""
sleep 1
# create repos and snippets in group3/subgroup-a
for ((a=0; a <= 3 ; a++))
do
# Create repo
curl --header "PRIVATE-TOKEN: $TOKEN" -X POST "${GITLAB_URL}/api/v4/projects?name=subgroup_a_repo_${a}&namespace_id=${GROUP3_SUBGROUPA_NAMESPACE_ID}&initialize_with_readme=true"
echo ""
sleep 1
# Create non-empty snippet for the repo
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data '{"title": "Snippet for subgroup_a_repo_'${a}'", "file_name": "snippet.txt", "content": "This is a snippet for subgroup_a_repo_'${a}'", "visibility": "public"}' \
"${GITLAB_URL}/api/v4/projects/local-gitlab-group3%2Fsubgroup-a%2Fsubgroup_a_repo_${a}/snippets"
done
echo ""
echo ""
echo ""
sleep 1
# create repos and snippets in group3/subgroup-a/subgroup-b
for ((a=0; a <= 3 ; a++))
do
# Create repo
curl --header "PRIVATE-TOKEN: $TOKEN" -X POST "${GITLAB_URL}/api/v4/projects?name=subgroup_b_repo_${a}&namespace_id=${GROUP3_SUBGROUPA_SUBGROUPB_NAMESPACE_ID}&initialize_with_readme=true"
echo ""
sleep 1
# Create non-empty snippet for the repo
curl --request POST --header "PRIVATE-TOKEN: $TOKEN" \
--header "Content-Type: application/json" \
--data '{"title": "Snippet for subgroup_b_repo_'${a}'", "file_name": "snippet.txt", "content": "This is a snippet for subgroup_b_repo_'${a}'", "visibility": "public"}' \
"${GITLAB_URL}/api/v4/projects/local-gitlab-group3%2Fsubgroup-a%2Fsubgroup-b%2Fsubgroup_b_repo_${a}/snippets"
done
echo ""
echo ""
echo ""
sleep 1
echo "sleeping before running integration tests, to ensure all resources are created"
sleep 5
# Run the seeder
echo "Running GitLab seeder..."
"${SEEDER_BINARY}" \
-token="${TOKEN}" \
-base-url="${GITLAB_URL}" \
-config="${CONFIG_PATH}"
if [[ $? -eq 0 ]]; then
echo "GitLab seeding completed successfully!"
echo "Sleeping 5 seconds to ensure all resources are ready..."
sleep 5
else
echo "GitLab seeding failed!"
exit 1
fi

View File

@ -0,0 +1,18 @@
module gitlab-seeder
go 1.20
require (
github.com/xanzy/go-gitlab v0.95.2
)
require (
github.com/golang/protobuf v1.5.3 // indirect
github.com/google/go-querystring v1.1.0 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-retryablehttp v0.7.4 // indirect
golang.org/x/oauth2 v0.13.0 // indirect
golang.org/x/time v0.3.0 // indirect
google.golang.org/appengine v1.6.8 // indirect
google.golang.org/protobuf v1.31.0 // indirect
)

View File

@ -0,0 +1,53 @@
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-retryablehttp v0.7.4 h1:ZQgVdpTdAL7WpMIwLzCfbalOcSUdkDZnpUv3/+BxzFA=
github.com/hashicorp/go-retryablehttp v0.7.4/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/xanzy/go-gitlab v0.95.2 h1:4p0IirHqEp5f0baK/aQqr4TR57IsD+8e4fuyAA1yi88=
github.com/xanzy/go-gitlab v0.95.2/go.mod h1:ETg8tcj4OhrB84UEgeE8dSuV/0h4BBL1uOV/qK0vlyI=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/oauth2 v0.13.0 h1:jDDenyj+WgFtmV3zYVoi8aE2BwtXFLWOA67ZfNWftiY=
golang.org/x/oauth2 v0.13.0/go.mod h1:/JMhi4ZRXAf4HG9LiNmxvk+45+96RUlVThiH8FzNBn0=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=

View File

@ -0,0 +1,378 @@
package main
import (
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"log"
"github.com/xanzy/go-gitlab"
)
// Snippet describes a GitLab snippet to create, either attached to a
// project or at the instance root level.
type Snippet struct {
	Title       string `json:"title"`
	FileName    string `json:"file_name"`
	Content     string `json:"content"`
	Description string `json:"description,omitempty"`
	Visibility  string `json:"visibility"` // "public", "private", or "internal"; anything else is treated as public
}

// Repository describes a project to create, with optional attached snippets.
type Repository struct {
	Name                 string    `json:"name"`
	InitializeWithReadme bool      `json:"initialize_with_readme"`
	Snippets             []Snippet `json:"snippets,omitempty"`
}

// Group describes a GitLab group, its repositories, and nested subgroups
// (created recursively by createGroup).
type Group struct {
	Name         string       `json:"name"`
	Path         string       `json:"path"`
	Description  string       `json:"description"`
	Repositories []Repository `json:"repositories,omitempty"`
	Subgroups    []Group      `json:"subgroups,omitempty"`
}

// User describes a GitLab user account plus any personal repositories to
// create in the user's namespace.
type User struct {
	Username     string       `json:"username"`
	Email        string       `json:"email"`
	Password     string       `json:"password"`
	Name         string       `json:"name"`
	Repositories []Repository `json:"repositories,omitempty"`
}

// RootUser holds repositories to create under the root (admin) account.
type RootUser struct {
	Repositories []Repository `json:"repositories"`
}

// SeedData is the top-level schema of the JSON seed configuration file.
type SeedData struct {
	Groups       []Group   `json:"groups"`
	Users        []User    `json:"users"`
	RootUser     RootUser  `json:"root_user"`
	RootSnippets []Snippet `json:"root_snippets"`
}
// GitLabSeeder creates groups, users, repositories, and snippets on a
// GitLab instance according to loaded SeedData.
type GitLabSeeder struct {
	client   *gitlab.Client // authenticated API client
	seedData *SeedData      // populated by LoadSeedData
	baseURL  string         // GitLab instance URL the client was built for
}
// NewGitLabSeeder builds a seeder backed by a go-gitlab client that talks
// to the instance at baseURL using the given API token. Seed data must be
// loaded separately via LoadSeedData.
func NewGitLabSeeder(token, baseURL string) (*GitLabSeeder, error) {
	c, err := gitlab.NewClient(token, gitlab.WithBaseURL(baseURL))
	if err != nil {
		return nil, fmt.Errorf("failed to create GitLab client: %w", err)
	}
	s := &GitLabSeeder{
		client:  c,
		baseURL: baseURL,
	}
	return s, nil
}
// LoadSeedData reads the JSON seed configuration at configPath and
// unmarshals it into g.seedData, replacing any previously loaded data.
func (g *GitLabSeeder) LoadSeedData(configPath string) error {
	// NOTE(review): ioutil.ReadFile is deprecated since Go 1.16; os.ReadFile
	// is the drop-in replacement (would require importing "os").
	data, err := ioutil.ReadFile(configPath)
	if err != nil {
		return fmt.Errorf("failed to read seed config: %w", err)
	}
	g.seedData = &SeedData{}
	if err := json.Unmarshal(data, g.seedData); err != nil {
		return fmt.Errorf("failed to parse seed config: %w", err)
	}
	return nil
}
// CreateGroups creates every top-level group from the seed data; each
// group's repositories and subgroups are created recursively.
func (g *GitLabSeeder) CreateGroups() error {
	log.Println("Creating groups...")
	for i := range g.seedData.Groups {
		grp := &g.seedData.Groups[i]
		if err := g.createGroup(grp, nil); err != nil {
			return fmt.Errorf("failed to create group %s: %w", grp.Name, err)
		}
	}
	return nil
}
// createGroup creates a single GitLab group — nested under parentID when
// non-nil, top-level otherwise — then creates the group's repositories and
// recurses into its subgroups.
func (g *GitLabSeeder) createGroup(group *Group, parentID *int) error {
	log.Printf("Creating group: %s", group.Name)
	createOptions := &gitlab.CreateGroupOptions{
		Name:        gitlab.String(group.Name),
		Path:        gitlab.String(group.Path),
		Description: gitlab.String(group.Description),
	}
	// Only set ParentID for subgroups; top-level groups leave it unset.
	if parentID != nil {
		createOptions.ParentID = parentID
	}
	createdGroup, _, err := g.client.Groups.CreateGroup(createOptions)
	if err != nil {
		return fmt.Errorf("failed to create group: %w", err)
	}
	log.Printf("Created group: %s (ID: %d)", createdGroup.Name, createdGroup.ID)
	// Create repositories in this group, using the new group's ID as the
	// project namespace.
	for _, repo := range group.Repositories {
		if err := g.createRepository(&repo, &createdGroup.ID, group.Path); err != nil {
			return fmt.Errorf("failed to create repository %s in group %s: %w", repo.Name, group.Name, err)
		}
	}
	// Create subgroups recursively, parented to the group just created.
	for _, subgroup := range group.Subgroups {
		if err := g.createGroup(&subgroup, &createdGroup.ID); err != nil {
			return fmt.Errorf("failed to create subgroup %s: %w", subgroup.Name, err)
		}
	}
	return nil
}
// createRepository creates one project, placing it under namespaceID when
// given (nil leaves the namespace to the API default), then seeds the
// project's snippets. groupPath is forwarded to snippet creation.
func (g *GitLabSeeder) createRepository(repo *Repository, namespaceID *int, groupPath string) error {
	log.Printf("Creating repository: %s", repo.Name)
	opts := &gitlab.CreateProjectOptions{
		Name:                 gitlab.String(repo.Name),
		InitializeWithReadme: gitlab.Bool(repo.InitializeWithReadme),
	}
	if namespaceID != nil {
		opts.NamespaceID = namespaceID
	}
	proj, _, err := g.client.Projects.CreateProject(opts)
	if err != nil {
		return fmt.Errorf("failed to create repository: %w", err)
	}
	log.Printf("Created repository: %s (ID: %d)", proj.Name, proj.ID)
	// Attach any snippets configured for this repository.
	for i := range repo.Snippets {
		if err := g.createProjectSnippet(&repo.Snippets[i], proj.ID, groupPath, repo.Name); err != nil {
			return fmt.Errorf("failed to create snippet for repository %s: %w", repo.Name, err)
		}
	}
	return nil
}
// createProjectSnippet attaches one snippet to the project identified by
// projectID. Visibility strings other than "private"/"internal" map to
// public. groupPath and repoName are accepted for call-site symmetry but
// are not used here.
func (g *GitLabSeeder) createProjectSnippet(snippet *Snippet, projectID int, groupPath, repoName string) error {
	log.Printf("Creating project snippet: %s for project %d", snippet.Title, projectID)
	var vis gitlab.VisibilityValue
	switch snippet.Visibility {
	case "private":
		vis = gitlab.PrivateVisibility
	case "internal":
		vis = gitlab.InternalVisibility
	default:
		vis = gitlab.PublicVisibility
	}
	opts := &gitlab.CreateProjectSnippetOptions{
		Title:       gitlab.String(snippet.Title),
		FileName:    gitlab.String(snippet.FileName),
		Content:     gitlab.String(snippet.Content),
		Visibility:  &vis,
		Description: gitlab.String(snippet.Description),
	}
	if _, _, err := g.client.ProjectSnippets.CreateSnippet(projectID, opts); err != nil {
		return fmt.Errorf("failed to create project snippet: %w", err)
	}
	log.Printf("Created project snippet: %s", snippet.Title)
	return nil
}
// CreateUsers provisions every user account defined in the seed data,
// along with each user's personal repositories.
func (g *GitLabSeeder) CreateUsers() error {
	log.Println("Creating users...")
	for i := range g.seedData.Users {
		u := &g.seedData.Users[i]
		if err := g.createUser(u); err != nil {
			return fmt.Errorf("failed to create user %s: %w", u.Username, err)
		}
	}
	return nil
}
// createUser creates one GitLab user account and then creates each of the
// user's personal repositories in that user's namespace.
func (g *GitLabSeeder) createUser(user *User) error {
	log.Printf("Creating user: %s", user.Username)
	createOptions := &gitlab.CreateUserOptions{
		Username: gitlab.String(user.Username),
		Email:    gitlab.String(user.Email),
		Password: gitlab.String(user.Password),
		Name:     gitlab.String(user.Name),
	}
	createdUser, _, err := g.client.Users.CreateUser(createOptions)
	if err != nil {
		return fmt.Errorf("failed to create user: %w", err)
	}
	log.Printf("Created user: %s (ID: %d)", createdUser.Username, createdUser.ID)
	// Create repositories for this user, keyed by the new account's ID.
	for _, repo := range user.Repositories {
		if err := g.createUserRepository(&repo, createdUser.ID, user.Username); err != nil {
			return fmt.Errorf("failed to create repository %s for user %s: %w", repo.Name, user.Username, err)
		}
	}
	return nil
}
// createUserRepository creates repo in the personal namespace of the user
// with the given userID, then seeds the repository's snippets. username is
// used only for log/error messages; the namespace match uses the username
// fetched from the API.
func (g *GitLabSeeder) createUserRepository(repo *Repository, userID int, username string) error {
	log.Printf("Creating user repository: %s for user %s", repo.Name, username)
	// Create project for user using the correct API format
	createOptions := &gitlab.CreateProjectOptions{
		Name:                 gitlab.String(repo.Name),
		InitializeWithReadme: gitlab.Bool(repo.InitializeWithReadme),
	}
	// We need to get the user's namespace first
	user, _, err := g.client.Users.GetUser(userID, gitlab.GetUsersOptions{})
	if err != nil {
		return fmt.Errorf("failed to get user: %w", err)
	}
	// Find the user's personal namespace. The namespace list is paginated;
	// the previous implementation only examined the first (default-size)
	// page, which silently misses the namespace once the instance has more
	// than one page of users/groups. Walk every page until found.
	var userNamespaceID *int
	listOpts := &gitlab.ListNamespacesOptions{
		ListOptions: gitlab.ListOptions{PerPage: 100, Page: 1},
	}
	for {
		namespaces, resp, err := g.client.Namespaces.ListNamespaces(listOpts)
		if err != nil {
			return fmt.Errorf("failed to list namespaces: %w", err)
		}
		for _, ns := range namespaces {
			if ns.Kind == "user" && ns.Path == user.Username {
				id := ns.ID
				userNamespaceID = &id
				break
			}
		}
		if userNamespaceID != nil || resp.NextPage == 0 {
			break
		}
		listOpts.Page = resp.NextPage
	}
	if userNamespaceID == nil {
		return fmt.Errorf("could not find user namespace for user %s", username)
	}
	createOptions.NamespaceID = userNamespaceID
	project, _, err := g.client.Projects.CreateProject(createOptions)
	if err != nil {
		return fmt.Errorf("failed to create user repository: %w", err)
	}
	log.Printf("Created user repository: %s (ID: %d)", project.Name, project.ID)
	// Create snippets for this repository
	for _, snippet := range repo.Snippets {
		if err := g.createProjectSnippet(&snippet, project.ID, username, repo.Name); err != nil {
			return fmt.Errorf("failed to create snippet for user repository %s: %w", repo.Name, err)
		}
	}
	return nil
}
// CreateRootUserRepositories creates each repository owned by the root
// (admin) account, using the token owner's default namespace.
func (g *GitLabSeeder) CreateRootUserRepositories() error {
	log.Println("Creating root user repositories...")
	for i := range g.seedData.RootUser.Repositories {
		r := &g.seedData.RootUser.Repositories[i]
		if err := g.createRepository(r, nil, "root"); err != nil {
			return fmt.Errorf("failed to create root repository %s: %w", r.Name, err)
		}
	}
	return nil
}
// CreateRootSnippets creates the instance-level (personal) snippets from
// the seed data.
func (g *GitLabSeeder) CreateRootSnippets() error {
	log.Println("Creating root-level snippets...")
	for i := range g.seedData.RootSnippets {
		s := &g.seedData.RootSnippets[i]
		if err := g.createRootSnippet(s); err != nil {
			return fmt.Errorf("failed to create root snippet %s: %w", s.Title, err)
		}
	}
	return nil
}
// createRootSnippet creates one instance-level snippet. Visibility
// strings other than "private"/"internal" map to public, matching
// createProjectSnippet.
func (g *GitLabSeeder) createRootSnippet(snippet *Snippet) error {
	log.Printf("Creating root snippet: %s", snippet.Title)
	var vis gitlab.VisibilityValue
	switch snippet.Visibility {
	case "private":
		vis = gitlab.PrivateVisibility
	case "internal":
		vis = gitlab.InternalVisibility
	default:
		vis = gitlab.PublicVisibility
	}
	opts := &gitlab.CreateSnippetOptions{
		Title:       gitlab.String(snippet.Title),
		FileName:    gitlab.String(snippet.FileName),
		Content:     gitlab.String(snippet.Content),
		Visibility:  &vis,
		Description: gitlab.String(snippet.Description),
	}
	if _, _, err := g.client.Snippets.CreateSnippet(opts); err != nil {
		return fmt.Errorf("failed to create root snippet: %w", err)
	}
	log.Printf("Created root snippet: %s", snippet.Title)
	return nil
}
// SeedAll runs the full seeding pipeline in order: groups, users,
// root-user repositories, then root-level snippets. It stops at the
// first failing step.
func (g *GitLabSeeder) SeedAll() error {
	log.Println("Starting GitLab seeding process...")
	steps := []func() error{
		g.CreateGroups,
		g.CreateUsers,
		g.CreateRootUserRepositories,
		g.CreateRootSnippets,
	}
	for _, step := range steps {
		if err := step(); err != nil {
			return err
		}
	}
	log.Println("GitLab seeding completed successfully!")
	return nil
}
// main parses CLI flags, loads the seed configuration, and seeds the
// target GitLab instance, exiting non-zero on any failure.
func main() {
	token := flag.String("token", "", "GitLab API token")
	baseURL := flag.String("base-url", "http://gitlab.example.com", "GitLab base URL")
	configPath := flag.String("config", "configs/seed-data.json", "Path to seed data configuration file")
	flag.Parse()

	// The API token is the only flag without a usable default.
	if *token == "" {
		log.Fatal("Token is required")
	}

	seeder, err := NewGitLabSeeder(*token, *baseURL)
	if err != nil {
		log.Fatalf("Failed to create seeder: %v", err)
	}
	if err := seeder.LoadSeedData(*configPath); err != nil {
		log.Fatalf("Failed to load seed data: %v", err)
	}
	if err := seeder.SeedAll(); err != nil {
		log.Fatalf("Failed to seed GitLab: %v", err)
	}
}

View File

@ -1,6 +1,9 @@
#!/bin/bash
set -xv
set -euo pipefail
# Refactored GitLab EE integration test script
# Usage: ./start-ee.sh [STOP_GITLAB_WHEN_FINISHED] [PERSIST_GITLAB_LOCALLY] [GITLAB_IMAGE_TAG] [GITLAB_HOME] [GITLAB_HOST] [GITLAB_URL] [LOCAL_GITLAB_GHORG_DIR]
STOP_GITLAB_WHEN_FINISHED=${1:-'true'}
PERSIST_GITLAB_LOCALLY=${2:-'false'}
@ -11,40 +14,73 @@ GITLAB_URL=${6:-'http://gitlab.example.com'}
LOCAL_GITLAB_GHORG_DIR=${7:-"${HOME}/ghorg"}
API_TOKEN="password"
if [ "${ENV}" == "ci" ];then
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
echo "=== GitLab EE Integration Test (Refactored) ==="
echo "Stop when finished: ${STOP_GITLAB_WHEN_FINISHED}"
echo "Persist locally: ${PERSIST_GITLAB_LOCALLY}"
echo "GitLab tag: ${GITLAB_IMAGE_TAG}"
echo "GitLab home: ${GITLAB_HOME}"
echo "GitLab host: ${GITLAB_HOST}"
echo "GitLab URL: ${GITLAB_URL}"
echo "Ghorg dir: ${LOCAL_GITLAB_GHORG_DIR}"
if [ "${ENV:-}" == "ci" ];then
echo "127.0.0.1 gitlab.example.com" >> /etc/hosts
fi
docker rm gitlab --force --volumes
echo "Stopping and removing any existing GitLab containers..."
docker rm gitlab --force --volumes || true
rm -rf $HOME/ghorg/local-gitlab-*
echo "Cleaning up old data..."
rm -rf "$HOME/ghorg/local-gitlab-*" || true
echo ""
echo "To follow gitlab container logs use the following command in a new window"
echo "To follow gitlab container logs use the following command in a new window:"
echo "$ docker logs -f gitlab"
echo ""
./scripts/local-gitlab/run-ee.sh "${GITLAB_IMAGE_TAG}" "${GITLAB_HOME}" "${GITLAB_HOST}" "${PERSIST_GITLAB_LOCALLY}"
echo "=== Starting GitLab Container ==="
"${SCRIPT_DIR}/run-ee.sh" "${GITLAB_IMAGE_TAG}" "${GITLAB_HOME}" "${GITLAB_HOST}" "${PERSIST_GITLAB_LOCALLY}"
if [ $? -ne 0 ]; then
echo "Failed to start GitLab container"
exit 1
fi
./scripts/local-gitlab/get_credentials.sh "${GITLAB_URL}" "${LOCAL_GITLAB_GHORG_DIR}"
echo "=== Waiting for GitLab to be Ready and Getting Credentials ==="
"${SCRIPT_DIR}/get_credentials.sh" "${GITLAB_URL}" "${LOCAL_GITLAB_GHORG_DIR}"
if [ $? -ne 0 ]; then
echo "Failed to get GitLab credentials"
exit 1
fi
# seed new instance using
./scripts/local-gitlab/seed.sh "${API_TOKEN}" "${GITLAB_URL}" "${LOCAL_GITLAB_GHORG_DIR}"
echo "=== Seeding GitLab Instance (Using Go Seeder) ==="
"${SCRIPT_DIR}/seed.sh" "${API_TOKEN}" "${GITLAB_URL}" "${LOCAL_GITLAB_GHORG_DIR}"
if [ $? -ne 0 ]; then
echo "Failed to seed GitLab instance"
exit 1
fi
./scripts/local-gitlab/integration-tests.sh "${LOCAL_GITLAB_GHORG_DIR}" "${TOKEN}" "${GITLAB_URL}"
echo "=== Running Integration Tests (Using Go Test Runner) ==="
"${SCRIPT_DIR}/integration-tests.sh" "${LOCAL_GITLAB_GHORG_DIR}" "${API_TOKEN}" "${GITLAB_URL}"
if [ $? -ne 0 ]; then
echo "Integration tests failed"
if [ "${STOP_GITLAB_WHEN_FINISHED}" == "true" ];then
docker rm gitlab --force --volumes
fi
exit 1
fi
echo "=== Integration Tests Completed Successfully ==="
if [ "${STOP_GITLAB_WHEN_FINISHED}" == "true" ];then
echo "Stopping and removing GitLab container..."
docker rm gitlab --force --volumes
echo "GitLab container stopped and removed"
else
echo "GitLab container is still running. You can access it at: ${GITLAB_URL}"
echo "To stop it manually, run: docker stop gitlab && docker rm gitlab"
fi
echo ""
echo "🎉 GitLab EE integration tests completed successfully!"

View File

@ -0,0 +1,3 @@
module gitlab-test-runner
go 1.20

View File

@ -0,0 +1,342 @@
package main
import (
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"path/filepath"
"strings"
"text/template"
)
// TestScenario is one integration test case: a ghorg command written as a
// Go template, optional setup/verify commands, and the directory layout
// expected after the command runs.
type TestScenario struct {
	Name              string   `json:"name"`
	Description       string   `json:"description"`
	Command           string   `json:"command"`
	RunTwice          bool     `json:"run_twice"` // re-run the same command to exercise clone-then-pull
	SetupCommands     []string `json:"setup_commands,omitempty"`
	VerifyCommands    []string `json:"verify_commands,omitempty"`
	ExpectedStructure []string `json:"expected_structure"` // paths relative to the ghorg dir
	Disabled          bool     `json:"disabled,omitempty"` // skip without counting as a failure
}

// TestConfig is the top-level schema of the test configuration file.
type TestConfig struct {
	TestScenarios []TestScenario `json:"test_scenarios"`
}

// TestContext carries the values substituted into command templates
// ({{.BaseURL}}, {{.Token}}, {{.GhorgDir}}).
type TestContext struct {
	BaseURL  string
	Token    string
	GhorgDir string
}

// TestRunner executes the configured scenarios against a live GitLab.
type TestRunner struct {
	config  *TestConfig
	context *TestContext
}
// NewTestRunner loads the JSON test configuration at configPath and
// returns a runner bound to the given template context.
func NewTestRunner(configPath string, context *TestContext) (*TestRunner, error) {
	raw, err := ioutil.ReadFile(configPath)
	if err != nil {
		return nil, fmt.Errorf("failed to read test config: %w", err)
	}
	cfg := &TestConfig{}
	if err := json.Unmarshal(raw, cfg); err != nil {
		return nil, fmt.Errorf("failed to parse test config: %w", err)
	}
	tr := &TestRunner{
		config:  cfg,
		context: context,
	}
	return tr, nil
}
// RunAllTests executes every scenario in the loaded config in order,
// skipping disabled ones and tallying pass/fail/skip counts. It returns
// an error if any scenario failed; a single failure does not stop the
// remaining scenarios from running.
func (tr *TestRunner) RunAllTests() error {
	log.Printf("Starting integration tests with %d scenarios...", len(tr.config.TestScenarios))
	// Ensure the ghorg directory exists
	if err := tr.ensureGhorgDirectoryExists(); err != nil {
		return fmt.Errorf("failed to create ghorg directory: %w", err)
	}
	// Clean up any existing test directories; failure here is only a
	// warning since stale directories may simply not exist.
	if err := tr.cleanupTestDirectories(); err != nil {
		log.Printf("Warning: Failed to clean up test directories: %v", err)
	}
	passed := 0
	failed := 0
	skipped := 0
	for i, scenario := range tr.config.TestScenarios {
		log.Printf("\n=== Running Test %d/%d: %s ===", i+1, len(tr.config.TestScenarios), scenario.Name)
		log.Printf("Description: %s", scenario.Description)
		if scenario.Disabled {
			log.Printf("⏭️ SKIPPED: %s (test is disabled)", scenario.Name)
			skipped++
			continue
		}
		if err := tr.runTest(&scenario); err != nil {
			log.Printf("❌ FAILED: %s - %v", scenario.Name, err)
			failed++
		} else {
			log.Printf("✅ PASSED: %s", scenario.Name)
			passed++
		}
	}
	log.Printf("\n=== Test Results ===")
	log.Printf("Passed: %d", passed)
	log.Printf("Failed: %d", failed)
	log.Printf("Skipped: %d", skipped)
	log.Printf("Total: %d", len(tr.config.TestScenarios))
	if failed > 0 {
		return fmt.Errorf("%d tests failed", failed)
	}
	log.Println("All integration tests passed successfully!")
	return nil
}
// runTest executes a single scenario in order: setup commands, the main
// command (optionally twice), expected-structure verification, then any
// verify commands. Every command string is rendered through the
// TestContext template before execution; the first failure aborts.
func (tr *TestRunner) runTest(scenario *TestScenario) error {
	// Execute setup commands if any
	for _, setupCmd := range scenario.SetupCommands {
		renderedCmd, err := tr.renderTemplate(setupCmd)
		if err != nil {
			return fmt.Errorf("failed to render setup command: %w", err)
		}
		log.Printf("Setup: %s", renderedCmd)
		if err := tr.executeCommand(renderedCmd); err != nil {
			return fmt.Errorf("setup command failed: %w", err)
		}
	}
	// Render the main command
	renderedCmd, err := tr.renderTemplate(scenario.Command)
	if err != nil {
		return fmt.Errorf("failed to render command: %w", err)
	}
	// Execute the command once
	log.Printf("Executing: %s", renderedCmd)
	if err := tr.executeCommand(renderedCmd); err != nil {
		return fmt.Errorf("first execution failed: %w", err)
	}
	// Execute the command twice if specified (for testing clone then pull)
	if scenario.RunTwice {
		log.Printf("Executing (second time): %s", renderedCmd)
		if err := tr.executeCommand(renderedCmd); err != nil {
			return fmt.Errorf("second execution failed: %w", err)
		}
	}
	// Verify the expected structure
	if err := tr.verifyExpectedStructure(scenario.ExpectedStructure); err != nil {
		return fmt.Errorf("structure verification failed: %w", err)
	}
	// Execute verification commands if any
	for _, verifyCmd := range scenario.VerifyCommands {
		renderedCmd, err := tr.renderTemplate(verifyCmd)
		if err != nil {
			return fmt.Errorf("failed to render verify command: %w", err)
		}
		log.Printf("Verify: %s", renderedCmd)
		if err := tr.executeCommand(renderedCmd); err != nil {
			return fmt.Errorf("verification command failed: %w", err)
		}
	}
	return nil
}
// renderTemplate expands the TestContext placeholders ({{.BaseURL}},
// {{.Token}}, {{.GhorgDir}}) in tmplText and returns the result.
func (tr *TestRunner) renderTemplate(tmplText string) (string, error) {
	parsed, err := template.New("command").Parse(tmplText)
	if err != nil {
		return "", err
	}
	var out strings.Builder
	if execErr := parsed.Execute(&out, tr.context); execErr != nil {
		return "", execErr
	}
	return out.String(), nil
}
// executeCommand runs a rendered command with the ghorg directory as its
// working directory. On failure the combined stdout/stderr is included in
// the returned error.
func (tr *TestRunner) executeCommand(command string) error {
	// NOTE(review): strings.Fields splits on whitespace only, so quoted
	// arguments or shell operators in a command are not supported —
	// confirm the test configs never need them.
	parts := strings.Fields(command)
	if len(parts) == 0 {
		return fmt.Errorf("empty command")
	}
	cmd := exec.Command(parts[0], parts[1:]...)
	cmd.Dir = tr.context.GhorgDir
	output, err := cmd.CombinedOutput()
	if err != nil {
		return fmt.Errorf("command failed: %s\nOutput: %s", err, string(output))
	}
	return nil
}
// verifyExpectedStructure confirms that every relative path in
// expectedPaths exists beneath the ghorg directory, failing on the first
// path that is missing or cannot be stat'd.
func (tr *TestRunner) verifyExpectedStructure(expectedPaths []string) error {
	log.Printf("Verifying expected structure (%d paths)...", len(expectedPaths))
	for _, rel := range expectedPaths {
		abs := filepath.Join(tr.context.GhorgDir, rel)
		_, statErr := os.Stat(abs)
		switch {
		case statErr == nil:
			log.Printf("✓ Found: %s", rel)
		case os.IsNotExist(statErr):
			return fmt.Errorf("expected path does not exist: %s", rel)
		default:
			return fmt.Errorf("failed to check path %s: %w", rel, statErr)
		}
	}
	return nil
}
// ensureGhorgDirectoryExists creates the ghorg directory (and any missing
// parents) unless it is already present.
func (tr *TestRunner) ensureGhorgDirectoryExists() error {
	log.Printf("Ensuring ghorg directory exists: %s", tr.context.GhorgDir)

	// Nothing to do when the directory can already be stat'd.
	if _, statErr := os.Stat(tr.context.GhorgDir); statErr == nil {
		log.Printf("Ghorg directory already exists: %s", tr.context.GhorgDir)
		return nil
	}

	if mkErr := os.MkdirAll(tr.context.GhorgDir, 0755); mkErr != nil {
		return fmt.Errorf("failed to create directory %s: %w", tr.context.GhorgDir, mkErr)
	}
	log.Printf("Created ghorg directory: %s", tr.context.GhorgDir)
	return nil
}
// cleanupTestDirectories removes artifacts left in the ghorg directory by
// previous runs: every local-gitlab-* folder plus the gitlab.example.com
// folder when it exists. Individual removal failures are logged, not fatal.
func (tr *TestRunner) cleanupTestDirectories() error {
	log.Println("Cleaning up test directories...")

	pattern := filepath.Join(tr.context.GhorgDir, "local-gitlab-*")
	leftovers, globErr := filepath.Glob(pattern)
	if globErr != nil {
		return globErr
	}
	for _, dir := range leftovers {
		if rmErr := os.RemoveAll(dir); rmErr != nil {
			log.Printf("Warning: Failed to remove %s: %v", dir, rmErr)
			continue
		}
		log.Printf("Removed: %s", dir)
	}

	// The gitlab.example.com directory is only removed when present, so the
	// "Removed" log line is not emitted for a directory that never existed.
	gitlabDir := filepath.Join(tr.context.GhorgDir, "gitlab.example.com")
	if _, statErr := os.Stat(gitlabDir); statErr == nil {
		if rmErr := os.RemoveAll(gitlabDir); rmErr != nil {
			log.Printf("Warning: Failed to remove %s: %v", gitlabDir, rmErr)
		} else {
			log.Printf("Removed: %s", gitlabDir)
		}
	}
	return nil
}
// RunSpecificTest runs the single configured scenario whose Name matches
// testName, after making sure the ghorg directory exists. It errors if the
// scenario is disabled or not found.
func (tr *TestRunner) RunSpecificTest(testName string) error {
	if err := tr.ensureGhorgDirectoryExists(); err != nil {
		return fmt.Errorf("failed to create ghorg directory: %w", err)
	}
	for i := range tr.config.TestScenarios {
		scenario := tr.config.TestScenarios[i]
		if scenario.Name != testName {
			continue
		}
		if scenario.Disabled {
			return fmt.Errorf("test '%s' is disabled and cannot be run", testName)
		}
		log.Printf("Running specific test: %s", testName)
		return tr.runTest(&scenario)
	}
	return fmt.Errorf("test not found: %s", testName)
}
// ListTests logs every configured test scenario, numbered from 1, marking
// disabled scenarios with a "(DISABLED)" suffix.
func (tr *TestRunner) ListTests() {
	log.Printf("Available tests:")
	for idx, sc := range tr.config.TestScenarios {
		suffix := ""
		if sc.Disabled {
			suffix = " (DISABLED)"
		}
		log.Printf("%d. %s - %s%s", idx+1, sc.Name, sc.Description, suffix)
	}
}
// main parses CLI flags and dispatches to list, single-test, or full-suite
// execution against the local GitLab instance.
func main() {
	var (
		configPath = flag.String("config", "configs/test-scenarios.json", "Path to test scenarios configuration file")
		baseURL    = flag.String("base-url", "http://gitlab.example.com", "GitLab base URL")
		token      = flag.String("token", "", "GitLab API token")
		ghorgDir   = flag.String("ghorg-dir", "", "Ghorg directory (default: $HOME/ghorg)")
		testName   = flag.String("test", "", "Run specific test by name")
		listTests  = flag.Bool("list", false, "List available tests")
	)
	flag.Parse()

	if *token == "" {
		log.Fatal("Token is required")
	}

	// Default the ghorg directory to $HOME/ghorg when the flag is omitted.
	if *ghorgDir == "" {
		home, homeErr := os.UserHomeDir()
		if homeErr != nil {
			log.Fatalf("Failed to get home directory: %v", homeErr)
		}
		*ghorgDir = filepath.Join(home, "ghorg")
	}

	testCtx := &TestContext{
		BaseURL:  *baseURL,
		Token:    *token,
		GhorgDir: *ghorgDir,
	}
	runner, err := NewTestRunner(*configPath, testCtx)
	if err != nil {
		log.Fatalf("Failed to create test runner: %v", err)
	}

	switch {
	case *listTests:
		runner.ListTests()
	case *testName != "":
		if runErr := runner.RunSpecificTest(*testName); runErr != nil {
			log.Fatalf("Test failed: %v", runErr)
		}
	default:
		if runErr := runner.RunAllTests(); runErr != nil {
			log.Fatalf("Integration tests failed: %v", runErr)
		}
	}
}