Merge with upstream

Florian Scholdei
2019-11-12 14:31:10 +01:00
86 changed files with 1576 additions and 717 deletions

View File

@@ -131,6 +131,12 @@
<artifactId>mapstruct-jdk8</artifactId>
</dependency>
<dependency>
<groupId>org.mapstruct</groupId>
<artifactId>mapstruct-processor</artifactId>
<scope>provided</scope>
</dependency>
<!-- rest documentation -->
<dependency>
<groupId>com.webcohesion.enunciate</groupId>
@@ -208,6 +214,12 @@
<artifactId>shiro-unit</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator</artifactId>
<version>5.3.6.Final</version>
<scope>compile</scope>
</dependency>
</dependencies>

View File

@@ -68,6 +68,10 @@ public final class BranchCommandBuilder {
return command.branch(request);
}
public void delete(String branchName) {
command.deleteOrClose(branchName);
}
private BranchCommand command;
private BranchRequest request = new BranchRequest();
}

View File

@@ -178,7 +178,7 @@ public final class LogCommandBuilder
logger.debug("get changeset for {} with disabled cache", id);
}
changeset = logCommand.getChangeset(id);
changeset = logCommand.getChangeset(id, request);
}
else
{
@@ -192,7 +192,7 @@ public final class LogCommandBuilder
logger.debug("get changeset for {}", id);
}
changeset = logCommand.getChangeset(id);
changeset = logCommand.getChangeset(id, request);
if (changeset != null)
{

View File

@@ -6,6 +6,8 @@ import sonia.scm.repository.spi.MergeCommand;
import sonia.scm.repository.spi.MergeCommandRequest;
import sonia.scm.repository.util.AuthorUtil;
import java.util.Set;
/**
* Use this {@link MergeCommandBuilder} to merge two branches of a repository ({@link #executeMerge()}) or to check if
* the branches could be merged without conflicts ({@link #dryRun()}). To do so, you have to specify the name of
@@ -55,6 +57,24 @@ public class MergeCommandBuilder {
this.mergeCommand = mergeCommand;
}
/**
* Use this to check whether the given merge strategy is supported by the underlying merge command.
*
* @return {@code true} if the strategy is supported.
*/
public boolean isSupported(MergeStrategy strategy) {
return mergeCommand.isSupported(strategy);
}
/**
* Use this to get the set of all merge strategies supported by the underlying merge command.
*
* @return the set of supported merge strategies.
*/
public Set<MergeStrategy> getSupportedMergeStrategies() {
return mergeCommand.getSupportedMergeStrategies();
}
/**
* Use this to set the branch that should be merged into the target branch.
*
@@ -92,6 +112,21 @@ public class MergeCommandBuilder {
return this;
}
/**
* Use this to set the merge strategy manually.
*
* This is optional and applies to {@link #executeMerge()} only.
*
* @return This builder instance.
*/
public MergeCommandBuilder setMergeStrategy(MergeStrategy strategy) {
if (!mergeCommand.isSupported(strategy)) {
throw new IllegalArgumentException("merge strategy not supported: " + strategy);
}
request.setMergeStrategy(strategy);
return this;
}
/**
* Use this to set a template for the commit message. If no message is set, a default message will be used.
*
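A minimal usage sketch of the extended builder: only setMergeStrategy, isSupported and getSupportedMergeStrategies are taken verbatim from the hunk above; the fluent branch setters and the MergeCommandResult return type of executeMerge() are assumptions based on the surrounding diff and may differ in the real API.
import sonia.scm.repository.api.MergeCommandBuilder;
import sonia.scm.repository.api.MergeCommandResult;
import sonia.scm.repository.api.MergeStrategy;
class MergeStrategyUsageSketch {
  // Prefer a squash merge when the backing MergeCommand supports it,
  // otherwise fall back to a regular merge commit.
  MergeCommandResult mergeFeature(MergeCommandBuilder merge) {
    MergeStrategy strategy = merge.isSupported(MergeStrategy.SQUASH)
      ? MergeStrategy.SQUASH
      : MergeStrategy.MERGE_COMMIT;
    return merge
      .setBranchToMerge("feature")  // assumed fluent setter, see "set the branch that should be merged" above
      .setTargetBranch("master")    // assumed counterpart setter for the target branch
      .setMergeStrategy(strategy)   // throws IllegalArgumentException for unsupported strategies
      .executeMerge();              // assumed to return MergeCommandResult, mirroring MergeCommand#merge
  }
}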

View File

@@ -0,0 +1,7 @@
package sonia.scm.repository.api;
public enum MergeStrategy {
MERGE_COMMIT,
FAST_FORWARD_IF_POSSIBLE,
SQUASH
}

View File

@@ -0,0 +1,27 @@
package sonia.scm.repository.api;
import sonia.scm.BadRequestException;
import sonia.scm.repository.Repository;
import static sonia.scm.ContextEntry.ContextBuilder.entity;
@SuppressWarnings("squid:MaximumInheritanceDepth") // exceptions have a deep inheritance depth themselves; therefore we accept this here
public class MergeStrategyNotSupportedException extends BadRequestException {
private static final long serialVersionUID = 256498734456613496L;
private static final String CODE = "6eRhF9gU41";
public MergeStrategyNotSupportedException(Repository repository, MergeStrategy strategy) {
super(entity(repository).build(), createMessage(strategy));
}
@Override
public String getCode() {
return CODE;
}
private static String createMessage(MergeStrategy strategy) {
return "merge strategy " + strategy + " is not supported by this repository";
}
}

View File

@@ -41,4 +41,6 @@ import sonia.scm.repository.api.BranchRequest;
*/
public interface BranchCommand {
Branch branch(BranchRequest name);
void deleteOrClose(String branchName);
}

View File

@@ -0,0 +1,19 @@
package sonia.scm.repository.spi;
import sonia.scm.ContextEntry;
import sonia.scm.ExceptionWithContext;
import sonia.scm.repository.Repository;
public class CannotDeleteDefaultBranchException extends ExceptionWithContext {
public static final String CODE = "78RhWxTIw1";
public CannotDeleteDefaultBranchException(Repository repository, String branchName) {
super(ContextEntry.ContextBuilder.entity("Branch", branchName).in(repository).build(), "default branch cannot be deleted");
}
@Override
public String getCode() {
return CODE;
}
}

View File

@@ -49,7 +49,7 @@ import java.io.IOException;
*/
public interface LogCommand {
Changeset getChangeset(String id) throws IOException;
Changeset getChangeset(String id, LogCommandRequest request) throws IOException;
ChangesetPagingResult getChangesets(LogCommandRequest request) throws IOException;
}

View File

@@ -2,9 +2,16 @@ package sonia.scm.repository.spi;
import sonia.scm.repository.api.MergeCommandResult;
import sonia.scm.repository.api.MergeDryRunCommandResult;
import sonia.scm.repository.api.MergeStrategy;
import java.util.Set;
public interface MergeCommand {
MergeCommandResult merge(MergeCommandRequest request);
MergeDryRunCommandResult dryRun(MergeCommandRequest request);
boolean isSupported(MergeStrategy strategy);
Set<MergeStrategy> getSupportedMergeStrategies();
}

View File

@@ -5,6 +5,7 @@ import com.google.common.base.Objects;
import com.google.common.base.Strings;
import sonia.scm.Validateable;
import sonia.scm.repository.Person;
import sonia.scm.repository.api.MergeStrategy;
import sonia.scm.repository.util.AuthorUtil.CommandWithAuthor;
import java.io.Serializable;
@@ -17,6 +18,7 @@ public class MergeCommandRequest implements Validateable, Resetable, Serializabl
private String targetBranch;
private Person author;
private String messageTemplate;
private MergeStrategy mergeStrategy;
public String getBranchToMerge() {
return branchToMerge;
@@ -50,6 +52,14 @@ public class MergeCommandRequest implements Validateable, Resetable, Serializabl
this.messageTemplate = messageTemplate;
}
public MergeStrategy getMergeStrategy() {
return mergeStrategy;
}
public void setMergeStrategy(MergeStrategy mergeStrategy) {
this.mergeStrategy = mergeStrategy;
}
public boolean isValid() {
return !Strings.isNullOrEmpty(getBranchToMerge())
&& !Strings.isNullOrEmpty(getTargetBranch());
@@ -74,12 +84,13 @@ public class MergeCommandRequest implements Validateable, Resetable, Serializabl
return Objects.equal(branchToMerge, other.branchToMerge)
&& Objects.equal(targetBranch, other.targetBranch)
&& Objects.equal(author, other.author);
&& Objects.equal(author, other.author)
&& Objects.equal(mergeStrategy, other.mergeStrategy);
}
@Override
public int hashCode() {
return Objects.hashCode(branchToMerge, targetBranch, author);
return Objects.hashCode(branchToMerge, targetBranch, author, mergeStrategy);
}
@Override
@@ -88,6 +99,7 @@ public class MergeCommandRequest implements Validateable, Resetable, Serializabl
.add("branchToMerge", branchToMerge)
.add("targetBranch", targetBranch)
.add("author", author)
.add("mergeStrategy", mergeStrategy)
.toString();
}
}

View File

@@ -8,7 +8,7 @@ import sonia.scm.repository.Repository;
import java.io.File;
import java.io.IOException;
public abstract class SimpleWorkdirFactory<R, C> implements WorkdirFactory<R, C> {
public abstract class SimpleWorkdirFactory<R, W, C> implements WorkdirFactory<R, W, C> {
private static final Logger logger = LoggerFactory.getLogger(SimpleWorkdirFactory.class);
@@ -19,11 +19,11 @@ public abstract class SimpleWorkdirFactory<R, C> implements WorkdirFactory<R, C>
}
@Override
public WorkingCopy<R> createWorkingCopy(C context, String initialBranch) {
public WorkingCopy<R, W> createWorkingCopy(C context, String initialBranch) {
try {
File directory = workdirProvider.createNewWorkdir();
ParentAndClone<R> parentAndClone = cloneRepository(context, directory, initialBranch);
return new WorkingCopy<>(parentAndClone.getClone(), parentAndClone.getParent(), this::close, directory);
ParentAndClone<R, W> parentAndClone = cloneRepository(context, directory, initialBranch);
return new WorkingCopy<>(parentAndClone.getClone(), parentAndClone.getParent(), this::closeWorkdir, this::closeCentral, directory);
} catch (IOException e) {
throw new InternalRepositoryException(getScmRepository(context), "could not clone repository in temporary directory", e);
}
@@ -32,12 +32,15 @@ public abstract class SimpleWorkdirFactory<R, C> implements WorkdirFactory<R, C>
protected abstract Repository getScmRepository(C context);
@SuppressWarnings("squid:S00112")
// We do allow implementations to throw arbitrary exceptions here, so that we can handle them in close
// We do allow implementations to throw arbitrary exceptions here, so that we can handle them in closeCentral
protected abstract void closeRepository(R repository) throws Exception;
@SuppressWarnings("squid:S00112")
// We do allow implementations to throw arbitrary exceptions here, so that we can handle them in closeWorkdir
protected abstract void closeWorkdirInternal(W workdir) throws Exception;
protected abstract ParentAndClone<R> cloneRepository(C context, File target, String initialBranch) throws IOException;
protected abstract ParentAndClone<R, W> cloneRepository(C context, File target, String initialBranch) throws IOException;
private void close(R repository) {
private void closeCentral(R repository) {
try {
closeRepository(repository);
} catch (Exception e) {
@@ -45,11 +48,19 @@ public abstract class SimpleWorkdirFactory<R, C> implements WorkdirFactory<R, C>
}
}
protected static class ParentAndClone<R> {
private final R parent;
private final R clone;
private void closeWorkdir(W repository) {
try {
closeWorkdirInternal(repository);
} catch (Exception e) {
logger.warn("could not close temporary repository clone", e);
}
}
public ParentAndClone(R parent, R clone) {
protected static class ParentAndClone<R, W> {
private final R parent;
private final W clone;
public ParentAndClone(R parent, W clone) {
this.parent = parent;
this.clone = clone;
}
@@ -58,7 +69,7 @@ public abstract class SimpleWorkdirFactory<R, C> implements WorkdirFactory<R, C>
return parent;
}
public R getClone() {
public W getClone() {
return clone;
}
}
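To make the split cleanup contract concrete, here is a minimal sketch of a concrete factory, modeled on the anonymous subclass in SimpleWorkdirFactoryTest further down: the central repository (R) is released through closeRepository/closeCentral, the temporary clone (W) through closeWorkdirInternal/closeWorkdir. The Closeable type arguments, the Context placeholder and the package names of the imports are illustrative assumptions; real implementations plug in their own types (see SimpleGitWorkdirFactory below).
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import sonia.scm.repository.Repository;
import sonia.scm.repository.util.SimpleWorkdirFactory;
import sonia.scm.repository.util.WorkdirProvider;
class ExampleWorkdirFactory extends SimpleWorkdirFactory<Closeable, Closeable, ExampleWorkdirFactory.Context> {
  ExampleWorkdirFactory(WorkdirProvider workdirProvider) {
    super(workdirProvider);
  }
  @Override
  protected Repository getScmRepository(Context context) {
    return context.repository; // SCM-Manager repository backing this working copy
  }
  @Override
  protected void closeRepository(Closeable central) throws Exception {
    central.close(); // central repository handle, released via closeCentral
  }
  @Override
  protected void closeWorkdirInternal(Closeable workdir) throws Exception {
    workdir.close(); // temporary clone handle, released via closeWorkdir
  }
  @Override
  protected ParentAndClone<Closeable, Closeable> cloneRepository(Context context, File target, String initialBranch) throws IOException {
    // Clone the central repository into 'target' and check out 'initialBranch'; placeholder handles here.
    return new ParentAndClone<>(() -> {}, () -> {});
  }
  static class Context {
    Repository repository; // illustrative context type, real code uses e.g. GitContext
  }
}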

View File

@@ -1,5 +1,5 @@
package sonia.scm.repository.util;
public interface WorkdirFactory<R, C> {
WorkingCopy<R> createWorkingCopy(C context, String initialBranch);
public interface WorkdirFactory<R, W, C> {
WorkingCopy<R, W> createWorkingCopy(C context, String initialBranch);
}

View File

@@ -8,23 +8,25 @@ import java.io.File;
import java.io.IOException;
import java.util.function.Consumer;
public class WorkingCopy<R> implements AutoCloseable {
public class WorkingCopy<R, W> implements AutoCloseable {
private static final Logger LOG = LoggerFactory.getLogger(WorkingCopy.class);
private final File directory;
private final R workingRepository;
private final W workingRepository;
private final R centralRepository;
private final Consumer<R> cleanup;
private final Consumer<W> cleanupWorkdir;
private final Consumer<R> cleanupCentral;
public WorkingCopy(R workingRepository, R centralRepository, Consumer<R> cleanup, File directory) {
public WorkingCopy(W workingRepository, R centralRepository, Consumer<W> cleanupWorkdir, Consumer<R> cleanupCentral, File directory) {
this.directory = directory;
this.workingRepository = workingRepository;
this.centralRepository = centralRepository;
this.cleanup = cleanup;
this.cleanupCentral = cleanupCentral;
this.cleanupWorkdir = cleanupWorkdir;
}
public R getWorkingRepository() {
public W getWorkingRepository() {
return workingRepository;
}
@@ -39,8 +41,8 @@ public class WorkingCopy<R> implements AutoCloseable {
@Override
public void close() {
try {
cleanup.accept(workingRepository);
cleanup.accept(centralRepository);
cleanupWorkdir.accept(workingRepository);
cleanupCentral.accept(centralRepository);
IOUtil.delete(directory);
} catch (IOException e) {
LOG.warn("could not delete temporary workdir '{}'", directory, e);

View File

@@ -24,14 +24,14 @@ public class SimpleWorkdirFactoryTest {
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
private SimpleWorkdirFactory<Closeable, Context> simpleWorkdirFactory;
private SimpleWorkdirFactory<Closeable, Closeable, Context> simpleWorkdirFactory;
private String initialBranchForLastCloneCall;
@Before
public void initFactory() throws IOException {
WorkdirProvider workdirProvider = new WorkdirProvider(temporaryFolder.newFolder());
simpleWorkdirFactory = new SimpleWorkdirFactory<Closeable, Context>(workdirProvider) {
simpleWorkdirFactory = new SimpleWorkdirFactory<Closeable, Closeable, Context>(workdirProvider) {
@Override
protected Repository getScmRepository(Context context) {
return REPOSITORY;
@@ -43,7 +43,12 @@ public class SimpleWorkdirFactoryTest {
}
@Override
protected ParentAndClone<Closeable> cloneRepository(Context context, File target, String initialBranch) {
protected void closeWorkdirInternal(Closeable workdir) throws Exception {
workdir.close();
}
@Override
protected ParentAndClone<Closeable, Closeable> cloneRepository(Context context, File target, String initialBranch) {
initialBranchForLastCloneCall = initialBranch;
return new ParentAndClone<>(parent, clone);
}
@@ -53,7 +58,7 @@ public class SimpleWorkdirFactoryTest {
@Test
public void shouldCreateParentAndClone() {
Context context = new Context();
try (WorkingCopy<Closeable> workingCopy = simpleWorkdirFactory.createWorkingCopy(context, null)) {
try (WorkingCopy<Closeable, Closeable> workingCopy = simpleWorkdirFactory.createWorkingCopy(context, null)) {
assertThat(workingCopy.getCentralRepository()).isSameAs(parent);
assertThat(workingCopy.getWorkingRepository()).isSameAs(clone);
}
@@ -62,7 +67,7 @@ public class SimpleWorkdirFactoryTest {
@Test
public void shouldCloseParent() throws IOException {
Context context = new Context();
try (WorkingCopy<Closeable> workingCopy = simpleWorkdirFactory.createWorkingCopy(context, null)) {}
try (WorkingCopy<Closeable, Closeable> workingCopy = simpleWorkdirFactory.createWorkingCopy(context, null)) {}
verify(parent).close();
}
@@ -70,7 +75,7 @@ public class SimpleWorkdirFactoryTest {
@Test
public void shouldCloseClone() throws IOException {
Context context = new Context();
try (WorkingCopy<Closeable> workingCopy = simpleWorkdirFactory.createWorkingCopy(context, null)) {}
try (WorkingCopy<Closeable, Closeable> workingCopy = simpleWorkdirFactory.createWorkingCopy(context, null)) {}
verify(clone).close();
}
@@ -78,7 +83,7 @@ public class SimpleWorkdirFactoryTest {
@Test
public void shouldPropagateInitialBranch() {
Context context = new Context();
try (WorkingCopy<Closeable> workingCopy = simpleWorkdirFactory.createWorkingCopy(context, "some")) {
try (WorkingCopy<Closeable, Closeable> workingCopy = simpleWorkdirFactory.createWorkingCopy(context, "some")) {
assertThat(initialBranchForLastCloneCall).isEqualTo("some");
}
}

View File

@@ -4,5 +4,5 @@ import org.eclipse.jgit.lib.Repository;
import sonia.scm.repository.spi.GitContext;
import sonia.scm.repository.util.WorkdirFactory;
public interface GitWorkdirFactory extends WorkdirFactory<Repository, GitContext> {
public interface GitWorkdirFactory extends WorkdirFactory<Repository, Repository, GitContext> {
}

View File

@@ -142,7 +142,7 @@ class AbstractGitCommand
}
<R, W extends GitCloneWorker<R>> R inClone(Function<Git, W> workerSupplier, GitWorkdirFactory workdirFactory, String initialBranch) {
try (WorkingCopy<Repository> workingCopy = workdirFactory.createWorkingCopy(context, initialBranch)) {
try (WorkingCopy<Repository, Repository> workingCopy = workdirFactory.createWorkingCopy(context, initialBranch)) {
Repository repository = workingCopy.getWorkingRepository();
logger.debug("cloned repository to folder {}", repository.getWorkTree());
return workerSupplier.apply(new Git(repository)).run();
@@ -152,19 +152,28 @@ class AbstractGitCommand
}
ObjectId resolveRevisionOrThrowNotFound(Repository repository, String revision) throws IOException {
sonia.scm.repository.Repository scmRepository = context.getRepository();
return resolveRevisionOrThrowNotFound(repository, revision, scmRepository);
}
static ObjectId resolveRevisionOrThrowNotFound(Repository repository, String revision, sonia.scm.repository.Repository scmRepository) throws IOException {
ObjectId resolved = repository.resolve(revision);
if (resolved == null) {
throw notFound(entity("Revision", revision).in(context.getRepository()));
throw notFound(entity("Revision", revision).in(scmRepository));
} else {
return resolved;
}
}
abstract class GitCloneWorker<R> {
abstract static class GitCloneWorker<R> {
private final Git clone;
private final GitContext context;
private final sonia.scm.repository.Repository repository;
GitCloneWorker(Git clone) {
GitCloneWorker(Git clone, GitContext context, sonia.scm.repository.Repository repository) {
this.clone = clone;
this.context = context;
this.repository = repository;
}
abstract R run() throws IOException;
@@ -173,6 +182,10 @@ class AbstractGitCommand
return clone;
}
GitContext getContext() {
return context;
}
void checkOutBranch(String branchName) throws IOException {
try {
clone.checkout().setName(branchName).call();
@@ -199,7 +212,7 @@ class AbstractGitCommand
ObjectId resolveRevision(String revision) throws IOException {
ObjectId resolved = clone.getRepository().resolve(revision);
if (resolved == null) {
return resolveRevisionOrThrowNotFound(clone.getRepository(), "origin/" + revision);
return resolveRevisionOrThrowNotFound(clone.getRepository(), "origin/" + revision, context.getRepository());
} else {
return resolved;
}

View File

@@ -33,51 +33,120 @@
package sonia.scm.repository.spi;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.errors.CannotDeleteCurrentBranchException;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.transport.PushResult;
import org.eclipse.jgit.transport.RemoteRefUpdate;
import sonia.scm.event.ScmEventBus;
import sonia.scm.repository.Branch;
import sonia.scm.repository.GitUtil;
import sonia.scm.repository.GitWorkdirFactory;
import sonia.scm.repository.InternalRepositoryException;
import sonia.scm.repository.PostReceiveRepositoryHookEvent;
import sonia.scm.repository.PreReceiveRepositoryHookEvent;
import sonia.scm.repository.Repository;
import sonia.scm.repository.RepositoryHookEvent;
import sonia.scm.repository.RepositoryHookType;
import sonia.scm.repository.api.BranchRequest;
import sonia.scm.repository.util.WorkingCopy;
import sonia.scm.repository.api.HookBranchProvider;
import sonia.scm.repository.api.HookContext;
import sonia.scm.repository.api.HookContextFactory;
import sonia.scm.repository.api.HookFeature;
import java.util.stream.StreamSupport;
import java.io.IOException;
import java.util.List;
import java.util.Set;
import static java.util.Collections.emptyList;
import static java.util.Collections.singleton;
import static java.util.Collections.singletonList;
import static sonia.scm.ContextEntry.ContextBuilder.entity;
public class GitBranchCommand extends AbstractGitCommand implements BranchCommand {
private final GitWorkdirFactory workdirFactory;
private final HookContextFactory hookContextFactory;
private final ScmEventBus eventBus;
GitBranchCommand(GitContext context, Repository repository, GitWorkdirFactory workdirFactory) {
GitBranchCommand(GitContext context, Repository repository, HookContextFactory hookContextFactory, ScmEventBus eventBus) {
super(context, repository);
this.workdirFactory = workdirFactory;
this.hookContextFactory = hookContextFactory;
this.eventBus = eventBus;
}
@Override
public Branch branch(BranchRequest request) {
try (WorkingCopy<org.eclipse.jgit.lib.Repository> workingCopy = workdirFactory.createWorkingCopy(context, request.getParentBranch())) {
Git clone = new Git(workingCopy.getWorkingRepository());
Ref ref = clone.branchCreate().setName(request.getNewBranch()).call();
Iterable<PushResult> call = clone.push().add(request.getNewBranch()).call();
StreamSupport.stream(call.spliterator(), false)
.flatMap(pushResult -> pushResult.getRemoteUpdates().stream())
.filter(remoteRefUpdate -> remoteRefUpdate.getStatus() != RemoteRefUpdate.Status.OK)
.findFirst()
.ifPresent(r -> this.handlePushError(r, request, context.getRepository()));
try (Git git = new Git(context.open())) {
RepositoryHookEvent hookEvent = createBranchHookEvent(BranchHookContextProvider.createHookEvent(request.getNewBranch()));
eventBus.post(new PreReceiveRepositoryHookEvent(hookEvent));
Ref ref = git.branchCreate().setStartPoint(request.getParentBranch()).setName(request.getNewBranch()).call();
eventBus.post(new PostReceiveRepositoryHookEvent(hookEvent));
return Branch.normalBranch(request.getNewBranch(), GitUtil.getId(ref.getObjectId()));
} catch (GitAPIException ex) {
} catch (GitAPIException | IOException ex) {
throw new InternalRepositoryException(repository, "could not create branch " + request.getNewBranch(), ex);
}
}
private void handlePushError(RemoteRefUpdate remoteRefUpdate, BranchRequest request, Repository repository) {
if (remoteRefUpdate.getStatus() != RemoteRefUpdate.Status.OK) {
// TODO handle failed remote update
throw new IntegrateChangesFromWorkdirException(repository,
String.format("Could not push new branch '%s' into central repository", request.getNewBranch()));
@Override
public void deleteOrClose(String branchName) {
try (Git gitRepo = new Git(context.open())) {
RepositoryHookEvent hookEvent = createBranchHookEvent(BranchHookContextProvider.deleteHookEvent(branchName));
eventBus.post(new PreReceiveRepositoryHookEvent(hookEvent));
gitRepo
.branchDelete()
.setBranchNames(branchName)
.setForce(true)
.call();
eventBus.post(new PostReceiveRepositoryHookEvent(hookEvent));
} catch (CannotDeleteCurrentBranchException e) {
throw new CannotDeleteDefaultBranchException(context.getRepository(), branchName);
} catch (GitAPIException | IOException ex) {
throw new InternalRepositoryException(context.getRepository(), String.format("Could not delete branch: %s", branchName), ex);
}
}
private RepositoryHookEvent createBranchHookEvent(BranchHookContextProvider hookEvent) {
HookContext context = hookContextFactory.createContext(hookEvent, this.context.getRepository());
return new RepositoryHookEvent(context, this.context.getRepository(), RepositoryHookType.PRE_RECEIVE);
}
private static class BranchHookContextProvider extends HookContextProvider {
private final List<String> newBranches;
private final List<String> deletedBranches;
private BranchHookContextProvider(List<String> newBranches, List<String> deletedBranches) {
this.newBranches = newBranches;
this.deletedBranches = deletedBranches;
}
static BranchHookContextProvider createHookEvent(String newBranch) {
return new BranchHookContextProvider(singletonList(newBranch), emptyList());
}
static BranchHookContextProvider deleteHookEvent(String deletedBranch) {
return new BranchHookContextProvider(emptyList(), singletonList(deletedBranch));
}
@Override
public Set<HookFeature> getSupportedFeatures() {
return singleton(HookFeature.BRANCH_PROVIDER);
}
@Override
public HookBranchProvider getBranchProvider() {
return new HookBranchProvider() {
@Override
public List<String> getCreatedOrModified() {
return newBranches;
}
@Override
public List<String> getDeletedOrClosed() {
return deletedBranches;
}
};
}
@Override
public HookChangesetProvider getChangesetProvider() {
return r -> new HookChangesetResponse(emptyList());
}
}
}

View File

@@ -0,0 +1,36 @@
package sonia.scm.repository.spi;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.MergeCommand;
import org.eclipse.jgit.api.MergeResult;
import sonia.scm.repository.Repository;
import sonia.scm.repository.api.MergeCommandResult;
import java.io.IOException;
class GitFastForwardIfPossible extends GitMergeStrategy {
private GitMergeStrategy fallbackMerge;
GitFastForwardIfPossible(Git clone, MergeCommandRequest request, GitContext context, Repository repository) {
super(clone, request, context, repository);
fallbackMerge = new GitMergeCommit(clone, request, context, repository);
}
@Override
MergeCommandResult run() throws IOException {
MergeResult fastForwardResult = mergeWithFastForwardOnlyMode();
if (fastForwardResult.getMergeStatus().isSuccessful()) {
push();
return MergeCommandResult.success();
} else {
return fallbackMerge.run();
}
}
private MergeResult mergeWithFastForwardOnlyMode() throws IOException {
MergeCommand mergeCommand = getClone().merge();
mergeCommand.setFastForward(MergeCommand.FastForwardMode.FF_ONLY);
return doMergeInClone(mergeCommand);
}
}

View File

@@ -106,7 +106,7 @@ public class GitLogCommand extends AbstractGitCommand implements LogCommand
* @return
*/
@Override
public Changeset getChangeset(String revision)
public Changeset getChangeset(String revision, LogCommandRequest request)
{
if (logger.isDebugEnabled())
{
@@ -131,7 +131,18 @@ public class GitLogCommand extends AbstractGitCommand implements LogCommand
if (commit != null)
{
converter = new GitChangesetConverter(gr, revWalk);
changeset = converter.createChangeset(commit);
if (isBranchRequested(request)) {
String branch = request.getBranch();
if (isMergedIntoBranch(gr, revWalk, commit, branch)) {
logger.trace("returning commit {} with branch {}", commit.getId(), branch);
changeset = converter.createChangeset(commit, branch);
} else {
logger.debug("returning null, because commit {} was not merged into branch {}", commit.getId(), branch);
}
} else {
changeset = converter.createChangeset(commit);
}
}
else if (logger.isWarnEnabled())
{
@@ -157,6 +168,18 @@ public class GitLogCommand extends AbstractGitCommand implements LogCommand
return changeset;
}
private boolean isMergedIntoBranch(Repository repository, RevWalk revWalk, RevCommit commit, String branchName) throws IOException {
return revWalk.isMergedInto(commit, findHeadCommitOfBranch(repository, revWalk, branchName));
}
private boolean isBranchRequested(LogCommandRequest request) {
return request != null && !Strings.isNullOrEmpty(request.getBranch());
}
private RevCommit findHeadCommitOfBranch(Repository repository, RevWalk revWalk, String branchName) throws IOException {
return revWalk.parseCommit(GitUtil.getCommit(repository, revWalk, repository.findRef(branchName)));
}
/**
* Method description
*

View File

@@ -1,36 +1,30 @@
package sonia.scm.repository.spi;
import com.google.common.base.Strings;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.MergeCommand.FastForwardMode;
import org.eclipse.jgit.api.MergeResult;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.ObjectId;
import com.google.common.collect.ImmutableSet;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.merge.MergeStrategy;
import org.eclipse.jgit.merge.ResolveMerger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sonia.scm.repository.GitWorkdirFactory;
import sonia.scm.repository.InternalRepositoryException;
import sonia.scm.repository.Person;
import sonia.scm.repository.api.MergeCommandResult;
import sonia.scm.repository.api.MergeDryRunCommandResult;
import sonia.scm.repository.api.MergeStrategy;
import sonia.scm.repository.api.MergeStrategyNotSupportedException;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Set;
import static org.eclipse.jgit.merge.MergeStrategy.RECURSIVE;
public class GitMergeCommand extends AbstractGitCommand implements MergeCommand {
private static final Logger logger = LoggerFactory.getLogger(GitMergeCommand.class);
private static final String MERGE_COMMIT_MESSAGE_TEMPLATE = String.join("\n",
"Merge of branch {0} into {1}",
"",
"Automatic merge by SCM-Manager.");
private final GitWorkdirFactory workdirFactory;
private static final Set<MergeStrategy> STRATEGIES = ImmutableSet.of(
MergeStrategy.MERGE_COMMIT,
MergeStrategy.FAST_FORWARD_IF_POSSIBLE,
MergeStrategy.SQUASH
);
GitMergeCommand(GitContext context, sonia.scm.repository.Repository repository, GitWorkdirFactory workdirFactory) {
super(context, repository);
this.workdirFactory = workdirFactory;
@@ -38,14 +32,30 @@ public class GitMergeCommand extends AbstractGitCommand implements MergeCommand
@Override
public MergeCommandResult merge(MergeCommandRequest request) {
return inClone(clone -> new MergeWorker(clone, request), workdirFactory, request.getTargetBranch());
return mergeWithStrategy(request);
}
private MergeCommandResult mergeWithStrategy(MergeCommandRequest request) {
switch(request.getMergeStrategy()) {
case SQUASH:
return inClone(clone -> new GitMergeWithSquash(clone, request, context, repository), workdirFactory, request.getTargetBranch());
case FAST_FORWARD_IF_POSSIBLE:
return inClone(clone -> new GitFastForwardIfPossible(clone, request, context, repository), workdirFactory, request.getTargetBranch());
case MERGE_COMMIT:
return inClone(clone -> new GitMergeCommit(clone, request, context, repository), workdirFactory, request.getTargetBranch());
default:
throw new MergeStrategyNotSupportedException(repository, request.getMergeStrategy());
}
}
@Override
public MergeDryRunCommandResult dryRun(MergeCommandRequest request) {
try {
Repository repository = context.open();
ResolveMerger merger = (ResolveMerger) MergeStrategy.RECURSIVE.newMerger(repository, true);
ResolveMerger merger = (ResolveMerger) RECURSIVE.newMerger(repository, true);
return new MergeDryRunCommandResult(
merger.merge(
resolveRevisionOrThrowNotFound(repository, request.getBranchToMerge()),
@@ -55,64 +65,14 @@ public class GitMergeCommand extends AbstractGitCommand implements MergeCommand
}
}
private class MergeWorker extends GitCloneWorker<MergeCommandResult> {
private final String target;
private final String toMerge;
private final Person author;
private final String messageTemplate;
private MergeWorker(Git clone, MergeCommandRequest request) {
super(clone);
this.target = request.getTargetBranch();
this.toMerge = request.getBranchToMerge();
this.author = request.getAuthor();
this.messageTemplate = request.getMessageTemplate();
}
@Override
MergeCommandResult run() throws IOException {
MergeResult result = doMergeInClone();
if (result.getMergeStatus().isSuccessful()) {
doCommit();
push();
return MergeCommandResult.success();
} else {
return analyseFailure(result);
}
}
private MergeResult doMergeInClone() throws IOException {
MergeResult result;
try {
ObjectId sourceRevision = resolveRevision(toMerge);
result = getClone().merge()
.setFastForward(FastForwardMode.NO_FF)
.setCommit(false) // we want to set the author manually
.include(toMerge, sourceRevision)
.call();
} catch (GitAPIException e) {
throw new InternalRepositoryException(context.getRepository(), "could not merge branch " + toMerge + " into " + target, e);
}
return result;
}
private void doCommit() {
logger.debug("merged branch {} into {}", toMerge, target);
doCommit(MessageFormat.format(determineMessageTemplate(), toMerge, target), author);
}
private String determineMessageTemplate() {
if (Strings.isNullOrEmpty(messageTemplate)) {
return MERGE_COMMIT_MESSAGE_TEMPLATE;
} else {
return messageTemplate;
}
}
private MergeCommandResult analyseFailure(MergeResult result) {
logger.info("could not merged branch {} into {} due to conflict in paths {}", toMerge, target, result.getConflicts().keySet());
return MergeCommandResult.failure(result.getConflicts().keySet());
}
@Override
public boolean isSupported(MergeStrategy strategy) {
return STRATEGIES.contains(strategy);
}
@Override
public Set<MergeStrategy> getSupportedMergeStrategies() {
return STRATEGIES;
}
}

View File

@@ -0,0 +1,31 @@
package sonia.scm.repository.spi;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.MergeCommand;
import org.eclipse.jgit.api.MergeResult;
import sonia.scm.repository.Repository;
import sonia.scm.repository.api.MergeCommandResult;
import java.io.IOException;
class GitMergeCommit extends GitMergeStrategy {
GitMergeCommit(Git clone, MergeCommandRequest request, GitContext context, Repository repository) {
super(clone, request, context, repository);
}
@Override
MergeCommandResult run() throws IOException {
MergeCommand mergeCommand = getClone().merge();
mergeCommand.setFastForward(MergeCommand.FastForwardMode.NO_FF);
MergeResult result = doMergeInClone(mergeCommand);
if (result.getMergeStatus().isSuccessful()) {
doCommit();
push();
return MergeCommandResult.success();
} else {
return analyseFailure(result);
}
}
}

View File

@@ -0,0 +1,72 @@
package sonia.scm.repository.spi;
import com.google.common.base.Strings;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.MergeCommand;
import org.eclipse.jgit.api.MergeResult;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.ObjectId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sonia.scm.repository.InternalRepositoryException;
import sonia.scm.repository.Person;
import sonia.scm.repository.api.MergeCommandResult;
import java.io.IOException;
import java.text.MessageFormat;
abstract class GitMergeStrategy extends AbstractGitCommand.GitCloneWorker<MergeCommandResult> {
private static final Logger logger = LoggerFactory.getLogger(GitMergeStrategy.class);
private static final String MERGE_COMMIT_MESSAGE_TEMPLATE = String.join("\n",
"Merge of branch {0} into {1}",
"",
"Automatic merge by SCM-Manager.");
private final String target;
private final String toMerge;
private final Person author;
private final String messageTemplate;
GitMergeStrategy(Git clone, MergeCommandRequest request, GitContext context, sonia.scm.repository.Repository repository) {
super(clone, context, repository);
this.target = request.getTargetBranch();
this.toMerge = request.getBranchToMerge();
this.author = request.getAuthor();
this.messageTemplate = request.getMessageTemplate();
}
MergeResult doMergeInClone(MergeCommand mergeCommand) throws IOException {
MergeResult result;
try {
ObjectId sourceRevision = resolveRevision(toMerge);
mergeCommand
.setCommit(false) // we want to set the author manually
.include(toMerge, sourceRevision);
result = mergeCommand.call();
} catch (GitAPIException e) {
throw new InternalRepositoryException(getContext().getRepository(), "could not merge branch " + toMerge + " into " + target, e);
}
return result;
}
void doCommit() {
logger.debug("merged branch {} into {}", toMerge, target);
doCommit(MessageFormat.format(determineMessageTemplate(), toMerge, target), author);
}
private String determineMessageTemplate() {
if (Strings.isNullOrEmpty(messageTemplate)) {
return MERGE_COMMIT_MESSAGE_TEMPLATE;
} else {
return messageTemplate;
}
}
MergeCommandResult analyseFailure(MergeResult result) {
logger.info("could not merge branch {} into {} due to conflict in paths {}", toMerge, target, result.getConflicts().keySet());
return MergeCommandResult.failure(result.getConflicts().keySet());
}
}

View File

@@ -0,0 +1,31 @@
package sonia.scm.repository.spi;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.MergeResult;
import sonia.scm.repository.Repository;
import sonia.scm.repository.api.MergeCommandResult;
import org.eclipse.jgit.api.MergeCommand;
import java.io.IOException;
class GitMergeWithSquash extends GitMergeStrategy {
GitMergeWithSquash(Git clone, MergeCommandRequest request, GitContext context, Repository repository) {
super(clone, request, context, repository);
}
@Override
MergeCommandResult run() throws IOException {
MergeCommand mergeCommand = getClone().merge();
mergeCommand.setSquash(true);
MergeResult result = doMergeInClone(mergeCommand);
if (result.getMergeStatus().isSuccessful()) {
doCommit();
push();
return MergeCommandResult.success();
} else {
return analyseFailure(result);
}
}
}

View File

@@ -46,7 +46,7 @@ public class GitModifyCommand extends AbstractGitCommand implements ModifyComman
private final ModifyCommandRequest request;
ModifyWorker(Git clone, ModifyCommandRequest request) {
super(clone);
super(clone, context, repository);
this.workDir = clone.getRepository().getWorkTree();
this.request = request;
}

View File

@@ -35,10 +35,12 @@ package sonia.scm.repository.spi;
import com.google.common.collect.ImmutableSet;
import sonia.scm.api.v2.resources.GitRepositoryConfigStoreProvider;
import sonia.scm.event.ScmEventBus;
import sonia.scm.repository.Feature;
import sonia.scm.repository.GitRepositoryHandler;
import sonia.scm.repository.Repository;
import sonia.scm.repository.api.Command;
import sonia.scm.repository.api.HookContextFactory;
import sonia.scm.web.lfs.LfsBlobStoreFactory;
import java.io.IOException;
@@ -64,6 +66,7 @@ public class GitRepositoryServiceProvider extends RepositoryServiceProvider
Command.DIFF_RESULT,
Command.LOG,
Command.TAGS,
Command.BRANCH,
Command.BRANCHES,
Command.INCOMING,
Command.OUTGOING,
@@ -77,10 +80,12 @@ public class GitRepositoryServiceProvider extends RepositoryServiceProvider
//~--- constructors ---------------------------------------------------------
public GitRepositoryServiceProvider(GitRepositoryHandler handler, Repository repository, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory) {
public GitRepositoryServiceProvider(GitRepositoryHandler handler, Repository repository, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory, HookContextFactory hookContextFactory, ScmEventBus eventBus) {
this.handler = handler;
this.repository = repository;
this.lfsBlobStoreFactory = lfsBlobStoreFactory;
this.hookContextFactory = hookContextFactory;
this.eventBus = eventBus;
this.context = new GitContext(handler.getDirectory(repository.getId()), repository, storeProvider);
}
@@ -133,7 +138,7 @@ public class GitRepositoryServiceProvider extends RepositoryServiceProvider
@Override
public BranchCommand getBranchCommand()
{
return new GitBranchCommand(context, repository, handler.getWorkdirFactory());
return new GitBranchCommand(context, repository, hookContextFactory, eventBus);
}
/**
@@ -292,4 +297,8 @@ public class GitRepositoryServiceProvider extends RepositoryServiceProvider
private final Repository repository;
private final LfsBlobStoreFactory lfsBlobStoreFactory;
private final HookContextFactory hookContextFactory;
private final ScmEventBus eventBus;
}

View File

@@ -36,9 +36,11 @@ package sonia.scm.repository.spi;
import com.google.inject.Inject;
import sonia.scm.api.v2.resources.GitRepositoryConfigStoreProvider;
import sonia.scm.event.ScmEventBus;
import sonia.scm.plugin.Extension;
import sonia.scm.repository.GitRepositoryHandler;
import sonia.scm.repository.Repository;
import sonia.scm.repository.api.HookContextFactory;
import sonia.scm.web.lfs.LfsBlobStoreFactory;
/**
@@ -51,12 +53,16 @@ public class GitRepositoryServiceResolver implements RepositoryServiceResolver {
private final GitRepositoryHandler handler;
private final GitRepositoryConfigStoreProvider storeProvider;
private final LfsBlobStoreFactory lfsBlobStoreFactory;
private final HookContextFactory hookContextFactory;
private final ScmEventBus eventBus;
@Inject
public GitRepositoryServiceResolver(GitRepositoryHandler handler, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory) {
public GitRepositoryServiceResolver(GitRepositoryHandler handler, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory, HookContextFactory hookContextFactory, ScmEventBus eventBus) {
this.handler = handler;
this.storeProvider = storeProvider;
this.lfsBlobStoreFactory = lfsBlobStoreFactory;
this.hookContextFactory = hookContextFactory;
this.eventBus = eventBus;
}
@Override
@@ -64,7 +70,7 @@ public class GitRepositoryServiceResolver implements RepositoryServiceResolver {
GitRepositoryServiceProvider provider = null;
if (GitRepositoryHandler.TYPE_NAME.equalsIgnoreCase(repository.getType())) {
provider = new GitRepositoryServiceProvider(handler, repository, storeProvider, lfsBlobStoreFactory);
provider = new GitRepositoryServiceProvider(handler, repository, storeProvider, lfsBlobStoreFactory, hookContextFactory, eventBus);
}
return provider;

View File

@@ -18,7 +18,7 @@ import java.io.IOException;
import static sonia.scm.ContextEntry.ContextBuilder.entity;
import static sonia.scm.NotFoundException.notFound;
public class SimpleGitWorkdirFactory extends SimpleWorkdirFactory<Repository, GitContext> implements GitWorkdirFactory {
public class SimpleGitWorkdirFactory extends SimpleWorkdirFactory<Repository, Repository, GitContext> implements GitWorkdirFactory {
@Inject
public SimpleGitWorkdirFactory(WorkdirProvider workdirProvider) {
@@ -26,7 +26,7 @@ public class SimpleGitWorkdirFactory extends SimpleWorkdirFactory<Repository, Gi
}
@Override
public ParentAndClone<Repository> cloneRepository(GitContext context, File target, String initialBranch) {
public ParentAndClone<Repository, Repository> cloneRepository(GitContext context, File target, String initialBranch) {
try {
Repository clone = Git.cloneRepository()
.setURI(createScmTransportProtocolUri(context.getDirectory()))
@@ -60,6 +60,13 @@ public class SimpleGitWorkdirFactory extends SimpleWorkdirFactory<Repository, Gi
}
}
@Override
protected void closeWorkdirInternal(Repository workdir) throws Exception {
if (workdir != null) {
workdir.close();
}
}
@Override
protected sonia.scm.repository.Repository getScmRepository(GitContext context) {
return context.getRepository();

View File

@@ -1,20 +1,37 @@
package sonia.scm.repository.spi;
import org.assertj.core.api.Assertions;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.junit.MockitoJUnitRunner;
import sonia.scm.event.ScmEventBus;
import sonia.scm.repository.Branch;
import sonia.scm.repository.PostReceiveRepositoryHookEvent;
import sonia.scm.repository.PreReceiveRepositoryHookEvent;
import sonia.scm.repository.api.BranchRequest;
import sonia.scm.repository.util.WorkdirProvider;
import sonia.scm.repository.api.HookContext;
import sonia.scm.repository.api.HookContextFactory;
import java.io.IOException;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class GitBranchCommandTest extends AbstractGitCommandTestBase {
@Rule
public BindTransportProtocolRule transportProtocolRule = new BindTransportProtocolRule();
@Mock
private HookContextFactory hookContextFactory;
@Mock
private ScmEventBus eventBus;
@Test
public void shouldCreateBranchWithDefinedSourceBranch() throws IOException {
@@ -26,10 +43,10 @@ public class GitBranchCommandTest extends AbstractGitCommandTestBase {
branchRequest.setParentBranch(source.getName());
branchRequest.setNewBranch("new_branch");
new GitBranchCommand(context, repository, new SimpleGitWorkdirFactory(new WorkdirProvider())).branch(branchRequest);
createCommand().branch(branchRequest);
Branch newBranch = findBranch(context, "new_branch");
Assertions.assertThat(newBranch.getRevision()).isEqualTo(source.getRevision());
assertThat(newBranch.getRevision()).isEqualTo(source.getRevision());
}
private Branch findBranch(GitContext context, String name) throws IOException {
@@ -41,17 +58,79 @@ public class GitBranchCommandTest extends AbstractGitCommandTestBase {
public void shouldCreateBranch() throws IOException {
GitContext context = createContext();
Assertions.assertThat(readBranches(context)).filteredOn(b -> b.getName().equals("new_branch")).isEmpty();
assertThat(readBranches(context)).filteredOn(b -> b.getName().equals("new_branch")).isEmpty();
BranchRequest branchRequest = new BranchRequest();
branchRequest.setNewBranch("new_branch");
new GitBranchCommand(context, repository, new SimpleGitWorkdirFactory(new WorkdirProvider())).branch(branchRequest);
createCommand().branch(branchRequest);
Assertions.assertThat(readBranches(context)).filteredOn(b -> b.getName().equals("new_branch")).isNotEmpty();
assertThat(readBranches(context)).filteredOn(b -> b.getName().equals("new_branch")).isNotEmpty();
}
@Test
public void shouldDeleteBranch() throws IOException {
GitContext context = createContext();
String branchToBeDeleted = "squash";
createCommand().deleteOrClose(branchToBeDeleted);
assertThat(readBranches(context)).filteredOn(b -> b.getName().equals(branchToBeDeleted)).isEmpty();
}
@Test
public void shouldThrowExceptionWhenDeletingDefaultBranch() {
String branchToBeDeleted = "master";
assertThrows(CannotDeleteDefaultBranchException.class, () -> createCommand().deleteOrClose(branchToBeDeleted));
}
private GitBranchCommand createCommand() {
return new GitBranchCommand(createContext(), repository, hookContextFactory, eventBus);
}
private List<Branch> readBranches(GitContext context) throws IOException {
return new GitBranchesCommand(context, repository).getBranches();
}
@Test
public void shouldPostCreateEvents() {
ArgumentCaptor<Object> captor = ArgumentCaptor.forClass(Object.class);
doNothing().when(eventBus).post(captor.capture());
when(hookContextFactory.createContext(any(), any())).thenAnswer(this::createMockedContext);
BranchRequest branchRequest = new BranchRequest();
branchRequest.setParentBranch("mergeable");
branchRequest.setNewBranch("new_branch");
createCommand().branch(branchRequest);
List<Object> events = captor.getAllValues();
assertThat(events.get(0)).isInstanceOf(PreReceiveRepositoryHookEvent.class);
assertThat(events.get(1)).isInstanceOf(PostReceiveRepositoryHookEvent.class);
PreReceiveRepositoryHookEvent event = (PreReceiveRepositoryHookEvent) events.get(0);
assertThat(event.getContext().getBranchProvider().getCreatedOrModified()).containsExactly("new_branch");
assertThat(event.getContext().getBranchProvider().getDeletedOrClosed()).isEmpty();
}
@Test
public void shouldPostDeleteEvents() {
ArgumentCaptor<Object> captor = ArgumentCaptor.forClass(Object.class);
doNothing().when(eventBus).post(captor.capture());
when(hookContextFactory.createContext(any(), any())).thenAnswer(this::createMockedContext);
createCommand().deleteOrClose("squash");
List<Object> events = captor.getAllValues();
assertThat(events.get(0)).isInstanceOf(PreReceiveRepositoryHookEvent.class);
assertThat(events.get(1)).isInstanceOf(PostReceiveRepositoryHookEvent.class);
PreReceiveRepositoryHookEvent event = (PreReceiveRepositoryHookEvent) events.get(0);
assertThat(event.getContext().getBranchProvider().getDeletedOrClosed()).containsExactly("squash");
assertThat(event.getContext().getBranchProvider().getCreatedOrModified()).isEmpty();
}
private HookContext createMockedContext(InvocationOnMock invocation) {
HookContext mock = mock(HookContext.class);
when(mock.getBranchProvider()).thenReturn(((HookContextProvider) invocation.getArgument(0)).getBranchProvider());
return mock;
}
}

View File

@@ -35,7 +35,11 @@
package sonia.scm.repository.spi;
import com.google.common.io.Files;
import org.assertj.core.api.Assertions;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import sonia.scm.repository.Changeset;
import sonia.scm.repository.ChangesetPagingResult;
import sonia.scm.repository.GitRepositoryConfig;
@@ -51,14 +55,18 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;
/**
* Unit tests for {@link GitLogCommand}.
*
* @author Sebastian Sdorra
*/
@RunWith(MockitoJUnitRunner.class)
public class GitLogCommandTest extends AbstractGitCommandTestBase
{
@Mock
LogCommandRequest request;
/**
* Tests log command with the usage of a default branch.
@@ -171,7 +179,7 @@ public class GitLogCommandTest extends AbstractGitCommandTestBase
public void testGetCommit()
{
GitLogCommand command = createCommand();
Changeset c = command.getChangeset("435df2f061add3589cb3");
Changeset c = command.getChangeset("435df2f061add3589cb3", null);
assertNotNull(c);
String revision = "435df2f061add3589cb326cc64be9b9c3897ceca";
@@ -193,6 +201,23 @@ public class GitLogCommandTest extends AbstractGitCommandTestBase
assertThat(modifications.getAdded(), contains("a.txt", "b.txt"));
}
@Test
public void commitShouldContainBranchIfLogCommandRequestHasBranch()
{
when(request.getBranch()).thenReturn("master");
GitLogCommand command = createCommand();
Changeset c = command.getChangeset("435df2f061add3589cb3", request);
Assertions.assertThat(c.getBranches()).containsOnly("master");
}
@Test
public void shouldNotReturnCommitFromDifferentBranch() {
when(request.getBranch()).thenReturn("master");
Changeset changeset = createCommand().getChangeset("3f76a12f08a6ba0dc988c68b7f0b2cd190efc3c4", request);
Assertions.assertThat(changeset).isNull();
}
@Test
public void testGetRange()
{

View File

@@ -15,10 +15,12 @@ import org.junit.Test;
import sonia.scm.NotFoundException;
import sonia.scm.repository.Person;
import sonia.scm.repository.api.MergeCommandResult;
import sonia.scm.repository.api.MergeStrategy;
import sonia.scm.repository.util.WorkdirProvider;
import sonia.scm.user.User;
import java.io.IOException;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
@@ -62,6 +64,7 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
MergeCommandRequest request = new MergeCommandRequest();
request.setTargetBranch("master");
request.setBranchToMerge("mergeable");
request.setMergeStrategy(MergeStrategy.MERGE_COMMIT);
request.setAuthor(new Person("Dirk Gently", "dirk@holistic.det"));
MergeCommandResult mergeCommandResult = command.merge(request);
@@ -88,6 +91,7 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
MergeCommandRequest request = new MergeCommandRequest();
request.setTargetBranch("master");
request.setBranchToMerge("empty_merge");
request.setMergeStrategy(MergeStrategy.MERGE_COMMIT);
request.setAuthor(new Person("Dirk Gently", "dirk@holistic.det"));
MergeCommandResult mergeCommandResult = command.merge(request);
@@ -109,6 +113,7 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
request.setTargetBranch("master");
request.setBranchToMerge("mergeable");
request.setAuthor(new Person("Dirk Gently", "dirk@holistic.det"));
request.setMergeStrategy(MergeStrategy.MERGE_COMMIT);
MergeCommandResult mergeCommandResult = command.merge(request);
@@ -132,6 +137,7 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
MergeCommandRequest request = new MergeCommandRequest();
request.setTargetBranch("master");
request.setBranchToMerge("mergeable");
request.setMergeStrategy(MergeStrategy.MERGE_COMMIT);
request.setAuthor(new Person("Dirk Gently", "dirk@holistic.det"));
request.setMessageTemplate("simple");
@@ -152,6 +158,7 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
MergeCommandRequest request = new MergeCommandRequest();
request.setBranchToMerge("test-branch");
request.setTargetBranch("master");
request.setMergeStrategy(MergeStrategy.MERGE_COMMIT);
MergeCommandResult mergeCommandResult = command.merge(request);
@@ -173,6 +180,7 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
MergeCommandRequest request = new MergeCommandRequest();
request.setTargetBranch("master");
request.setBranchToMerge("mergeable");
request.setMergeStrategy(MergeStrategy.MERGE_COMMIT);
MergeCommandResult mergeCommandResult = command.merge(request);
@@ -192,6 +200,7 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
request.setAuthor(new Person("Dirk Gently", "dirk@holistic.det"));
request.setTargetBranch("mergeable");
request.setBranchToMerge("master");
request.setMergeStrategy(MergeStrategy.MERGE_COMMIT);
MergeCommandResult mergeCommandResult = command.merge(request);
@@ -211,12 +220,112 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
assertThat(new String(contentOfFileB)).isEqualTo("b\ncontent from branch\n");
}
@Test
public void shouldSquashCommitsIfSquashIsEnabled() throws IOException, GitAPIException {
GitMergeCommand command = createCommand();
MergeCommandRequest request = new MergeCommandRequest();
request.setAuthor(new Person("Dirk Gently", "dirk@holistic.det"));
request.setBranchToMerge("squash");
request.setTargetBranch("master");
request.setMessageTemplate("this is a squash");
request.setMergeStrategy(MergeStrategy.SQUASH);
MergeCommandResult mergeCommandResult = command.merge(request);
Repository repository = createContext().open();
assertThat(mergeCommandResult.isSuccess()).isTrue();
Iterable<RevCommit> commits = new Git(repository).log().add(repository.resolve("master")).setMaxCount(1).call();
RevCommit mergeCommit = commits.iterator().next();
PersonIdent mergeAuthor = mergeCommit.getAuthorIdent();
String message = mergeCommit.getFullMessage();
assertThat(mergeAuthor.getName()).isEqualTo("Dirk Gently");
assertThat(message).isEqualTo("this is a squash");
}
@Test
public void shouldSquashThreeCommitsIntoOne() throws IOException, GitAPIException {
GitMergeCommand command = createCommand();
MergeCommandRequest request = new MergeCommandRequest();
request.setAuthor(new Person("Dirk Gently", "dirk@holistic.det"));
request.setBranchToMerge("squash");
request.setTargetBranch("master");
request.setMessageTemplate("squash three commits");
request.setMergeStrategy(MergeStrategy.SQUASH);
Repository gitRepository = createContext().open();
MergeCommandResult mergeCommandResult = command.merge(request);
assertThat(mergeCommandResult.isSuccess()).isTrue();
Iterable<RevCommit> commits = new Git(gitRepository).log().add(gitRepository.resolve("master")).setMaxCount(1).call();
RevCommit mergeCommit = commits.iterator().next();
PersonIdent mergeAuthor = mergeCommit.getAuthorIdent();
String message = mergeCommit.getFullMessage();
assertThat(mergeAuthor.getName()).isEqualTo("Dirk Gently");
assertThat(message).isEqualTo("squash three commits");
GitModificationsCommand modificationsCommand = new GitModificationsCommand(createContext(), repository);
List<String> changes = modificationsCommand.getModifications("master").getAdded();
assertThat(changes.size()).isEqualTo(3);
}
@Test
public void shouldMergeWithFastForward() throws IOException, GitAPIException {
Repository repository = createContext().open();
ObjectId featureBranchHead = new Git(repository).log().add(repository.resolve("squash")).setMaxCount(1).call().iterator().next().getId();
GitMergeCommand command = createCommand();
MergeCommandRequest request = new MergeCommandRequest();
request.setBranchToMerge("squash");
request.setTargetBranch("master");
request.setMergeStrategy(MergeStrategy.FAST_FORWARD_IF_POSSIBLE);
request.setAuthor(new Person("Dirk Gently", "dirk@holistic.det"));
MergeCommandResult mergeCommandResult = command.merge(request);
assertThat(mergeCommandResult.isSuccess()).isTrue();
Iterable<RevCommit> commits = new Git(repository).log().add(repository.resolve("master")).setMaxCount(1).call();
RevCommit mergeCommit = commits.iterator().next();
assertThat(mergeCommit.getParentCount()).isEqualTo(1);
PersonIdent mergeAuthor = mergeCommit.getAuthorIdent();
assertThat(mergeAuthor.getName()).isEqualTo("Philip J Fry");
assertThat(mergeCommit.getId()).isEqualTo(featureBranchHead);
}
@Test
public void shouldDoMergeCommitIfFastForwardIsNotPossible() throws IOException, GitAPIException {
GitMergeCommand command = createCommand();
MergeCommandRequest request = new MergeCommandRequest();
request.setTargetBranch("master");
request.setBranchToMerge("mergeable");
request.setMergeStrategy(MergeStrategy.FAST_FORWARD_IF_POSSIBLE);
request.setAuthor(new Person("Dirk Gently", "dirk@holistic.det"));
MergeCommandResult mergeCommandResult = command.merge(request);
assertThat(mergeCommandResult.isSuccess()).isTrue();
Repository repository = createContext().open();
Iterable<RevCommit> commits = new Git(repository).log().add(repository.resolve("master")).setMaxCount(1).call();
RevCommit mergeCommit = commits.iterator().next();
PersonIdent mergeAuthor = mergeCommit.getAuthorIdent();
assertThat(mergeCommit.getParentCount()).isEqualTo(2);
String message = mergeCommit.getFullMessage();
assertThat(mergeAuthor.getName()).isEqualTo("Dirk Gently");
assertThat(mergeAuthor.getEmailAddress()).isEqualTo("dirk@holistic.det");
assertThat(message).contains("master", "mergeable");
}
@Test(expected = NotFoundException.class)
public void shouldHandleNotExistingSourceBranchInMerge() {
GitMergeCommand command = createCommand();
MergeCommandRequest request = new MergeCommandRequest();
request.setTargetBranch("mergeable");
request.setBranchToMerge("not_existing");
request.setMergeStrategy(MergeStrategy.MERGE_COMMIT);
command.merge(request);
}
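The squash and fast-forward tests above drive the new MergeStrategy handling entirely through MergeCommandRequest. A minimal caller sketch under the same assumptions (the createCommand() helper, the author, and the branch names are placeholders taken from the test setup, not a prescribed API):
// hedged sketch: helper, author and branch names are placeholders
GitMergeCommand command = createCommand();                     // test helper, assumed to exist as above
MergeCommandRequest request = new MergeCommandRequest();
request.setAuthor(new Person("Jane Doe", "jane@example.com")); // placeholder author
request.setBranchToMerge("feature");                           // placeholder source branch
request.setTargetBranch("master");
request.setMessageTemplate("squash feature into master");      // used for the resulting commit message
request.setMergeStrategy(MergeStrategy.SQUASH);                // or FAST_FORWARD_IF_POSSIBLE / MERGE_COMMIT
MergeCommandResult result = command.merge(request);
if (!result.isSuccess()) {
  // conflicts occurred; handle the unsuccessful result
}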
@@ -225,6 +334,7 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
public void shouldHandleNotExistingTargetBranchInMerge() {
GitMergeCommand command = createCommand();
MergeCommandRequest request = new MergeCommandRequest();
request.setMergeStrategy(MergeStrategy.MERGE_COMMIT);
request.setTargetBranch("not_existing");
request.setBranchToMerge("master");

View File

@@ -45,7 +45,7 @@ public class SimpleGitWorkdirFactoryTest extends AbstractGitCommandTestBase {
SimpleGitWorkdirFactory factory = new SimpleGitWorkdirFactory(workdirProvider);
File masterRepo = createRepositoryDirectory();
try (WorkingCopy<Repository> workingCopy = factory.createWorkingCopy(createContext(), null)) {
try (WorkingCopy<Repository, Repository> workingCopy = factory.createWorkingCopy(createContext(), null)) {
assertThat(workingCopy.getDirectory())
.exists()
@@ -62,7 +62,7 @@ public class SimpleGitWorkdirFactoryTest extends AbstractGitCommandTestBase {
public void shouldCheckoutInitialBranch() {
SimpleGitWorkdirFactory factory = new SimpleGitWorkdirFactory(workdirProvider);
try (WorkingCopy<Repository> workingCopy = factory.createWorkingCopy(createContext(), "test-branch")) {
try (WorkingCopy<Repository, Repository> workingCopy = factory.createWorkingCopy(createContext(), "test-branch")) {
assertThat(new File(workingCopy.getWorkingRepository().getWorkTree(), "a.txt"))
.exists()
.isFile()
@@ -75,10 +75,10 @@ public class SimpleGitWorkdirFactoryTest extends AbstractGitCommandTestBase {
SimpleGitWorkdirFactory factory = new SimpleGitWorkdirFactory(workdirProvider);
File firstDirectory;
try (WorkingCopy<Repository> workingCopy = factory.createWorkingCopy(createContext(), null)) {
try (WorkingCopy<Repository, Repository> workingCopy = factory.createWorkingCopy(createContext(), null)) {
firstDirectory = workingCopy.getDirectory();
}
try (WorkingCopy<Repository> workingCopy = factory.createWorkingCopy(createContext(), null)) {
try (WorkingCopy<Repository, Repository> workingCopy = factory.createWorkingCopy(createContext(), null)) {
File secondDirectory = workingCopy.getDirectory();
assertThat(secondDirectory).isNotEqualTo(firstDirectory);
}
@@ -89,7 +89,7 @@ public class SimpleGitWorkdirFactoryTest extends AbstractGitCommandTestBase {
SimpleGitWorkdirFactory factory = new SimpleGitWorkdirFactory(workdirProvider);
File directory;
try (WorkingCopy<Repository> workingCopy = factory.createWorkingCopy(createContext(), null)) {
try (WorkingCopy<Repository, Repository> workingCopy = factory.createWorkingCopy(createContext(), null)) {
directory = workingCopy.getWorkingRepository().getWorkTree();
}
assertThat(directory).doesNotExist();

View File

@@ -36,7 +36,9 @@ import com.aragost.javahg.commands.PullCommand;
import org.apache.shiro.SecurityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sonia.scm.ContextEntry;
import sonia.scm.repository.Branch;
import sonia.scm.repository.InternalRepositoryException;
import sonia.scm.repository.Repository;
import sonia.scm.repository.api.BranchRequest;
import sonia.scm.repository.util.WorkingCopy;
@@ -59,7 +61,7 @@ public class HgBranchCommand extends AbstractCommand implements BranchCommand {
@Override
public Branch branch(BranchRequest request) {
try (WorkingCopy<com.aragost.javahg.Repository> workingCopy = workdirFactory.createWorkingCopy(getContext(), request.getParentBranch())) {
try (WorkingCopy<com.aragost.javahg.Repository, com.aragost.javahg.Repository> workingCopy = workdirFactory.createWorkingCopy(getContext(), request.getParentBranch())) {
com.aragost.javahg.Repository repository = workingCopy.getWorkingRepository();
Changeset emptyChangeset = createNewBranchWithEmptyCommit(request, repository);
@@ -67,23 +69,46 @@ public class HgBranchCommand extends AbstractCommand implements BranchCommand {
LOG.debug("Created new branch '{}' in repository {} with changeset {}",
request.getNewBranch(), getRepository().getNamespaceAndName(), emptyChangeset.getNode());
pullNewBranchIntoCentralRepository(request, workingCopy);
pullChangesIntoCentralRepository(workingCopy, request.getNewBranch());
return Branch.normalBranch(request.getNewBranch(), emptyChangeset.getNode());
}
}
@Override
public void deleteOrClose(String branchName) {
try (WorkingCopy<com.aragost.javahg.Repository, com.aragost.javahg.Repository> workingCopy = workdirFactory.createWorkingCopy(getContext(), branchName)) {
User currentUser = SecurityUtils.getSubject().getPrincipals().oneByType(User.class);
LOG.debug("Closing branch '{}' in repository {}", branchName, getRepository().getNamespaceAndName());
com.aragost.javahg.commands.CommitCommand
.on(workingCopy.getWorkingRepository())
.user(getFormattedUser(currentUser))
.message(String.format("Close branch: %s", branchName))
.closeBranch()
.execute();
pullChangesIntoCentralRepository(workingCopy, branchName);
} catch (Exception ex) {
throw new InternalRepositoryException(ContextEntry.ContextBuilder.entity(getContext().getScmRepository()), String.format("Could not close branch: %s", branchName));
}
}
private String getFormattedUser(User currentUser) {
return String.format("%s <%s>", currentUser.getDisplayName(), currentUser.getMail());
}
private Changeset createNewBranchWithEmptyCommit(BranchRequest request, com.aragost.javahg.Repository repository) {
com.aragost.javahg.commands.BranchCommand.on(repository).set(request.getNewBranch());
User currentUser = SecurityUtils.getSubject().getPrincipals().oneByType(User.class);
return CommitCommand
.on(repository)
.user(String.format("%s <%s>", currentUser.getDisplayName(), currentUser.getMail()))
.user(getFormattedUser(currentUser))
.message("Create new branch " + request.getNewBranch())
.execute();
}
private void pullNewBranchIntoCentralRepository(BranchRequest request, WorkingCopy<com.aragost.javahg.Repository> workingCopy) {
private void pullChangesIntoCentralRepository(WorkingCopy<com.aragost.javahg.Repository, com.aragost.javahg.Repository> workingCopy, String branch) {
try {
PullCommand pullCommand = PullCommand.on(workingCopy.getCentralRepository());
workdirFactory.configure(pullCommand);
@@ -91,7 +116,7 @@ public class HgBranchCommand extends AbstractCommand implements BranchCommand {
} catch (Exception e) {
// TODO handle failed update
throw new IntegrateChangesFromWorkdirException(getRepository(),
String.format("Could not pull new branch '%s' into central repository", request.getNewBranch()),
String.format("Could not pull changes '%s' into central repository", branch),
e);
}
}
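deleteOrClose closes a Mercurial branch by committing a close marker in a temporary working copy and pulling it back into the central repository. A minimal caller sketch, assuming the command is wired as in HgBranchCommandTest further below (the branch name is a placeholder):
// hedged sketch: cmdContext, repository and workdirFactory assumed to be set up as in the test
HgBranchCommand branchCommand = new HgBranchCommand(cmdContext, repository, workdirFactory);
try {
  branchCommand.deleteOrClose("feature/obsolete");   // placeholder branch name
} catch (InternalRepositoryException ex) {
  // thrown when the close commit fails, e.g. when the branch was already closed (see the test below)
}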

View File

@@ -35,6 +35,8 @@ package sonia.scm.repository.spi;
//~--- non-JDK imports --------------------------------------------------------
import com.aragost.javahg.Changeset;
import com.aragost.javahg.commands.LogCommand;
import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
import sonia.scm.repository.BrowserResult;
@@ -72,10 +74,10 @@ public class HgBrowseCommand extends AbstractCommand implements BrowseCommand
public BrowserResult getBrowserResult(BrowseCommandRequest request) throws IOException {
HgFileviewCommand cmd = HgFileviewCommand.on(open());
if (!Strings.isNullOrEmpty(request.getRevision()))
{
cmd.rev(request.getRevision());
}
String revision = MoreObjects.firstNonNull(request.getRevision(), "tip");
Changeset c = LogCommand.on(getContext().open()).rev(revision).limit(1).single();
cmd.rev(c.getNode());
if (!Strings.isNullOrEmpty(request.getPath()))
{
@@ -98,6 +100,6 @@ public class HgBrowseCommand extends AbstractCommand implements BrowseCommand
}
FileObject file = cmd.execute();
return new BrowserResult(MoreObjects.firstNonNull(request.getRevision(), "tip"), file);
return new BrowserResult(c.getNode(), revision, file);
}
}

View File

@@ -68,7 +68,7 @@ public class HgLogCommand extends AbstractCommand implements LogCommand
//~--- get methods ----------------------------------------------------------
@Override
public Changeset getChangeset(String id) {
public Changeset getChangeset(String id, LogCommandRequest request) {
com.aragost.javahg.Repository repository = open();
HgLogChangesetCommand cmd = on(repository);

View File

@@ -29,7 +29,7 @@ public class HgModifyCommand implements ModifyCommand {
@Override
public String execute(ModifyCommandRequest request) {
try (WorkingCopy<com.aragost.javahg.Repository> workingCopy = workdirFactory.createWorkingCopy(context, request.getBranch())) {
try (WorkingCopy<com.aragost.javahg.Repository, com.aragost.javahg.Repository> workingCopy = workdirFactory.createWorkingCopy(context, request.getBranch())) {
Repository workingRepository = workingCopy.getWorkingRepository();
request.getRequests().forEach(
partialRequest -> {
@@ -85,7 +85,7 @@ public class HgModifyCommand implements ModifyCommand {
}
}
private List<Changeset> pullModifyChangesToCentralRepository(ModifyCommandRequest request, WorkingCopy<com.aragost.javahg.Repository> workingCopy) {
private List<Changeset> pullModifyChangesToCentralRepository(ModifyCommandRequest request, WorkingCopy<com.aragost.javahg.Repository, com.aragost.javahg.Repository> workingCopy) {
try {
com.aragost.javahg.commands.PullCommand pullCommand = PullCommand.on(workingCopy.getCentralRepository());
workdirFactory.configure(pullCommand);

View File

@@ -62,6 +62,7 @@ public class HgRepositoryServiceProvider extends RepositoryServiceProvider
Command.DIFF,
Command.LOG,
Command.TAGS,
Command.BRANCH,
Command.BRANCHES,
Command.INCOMING,
Command.OUTGOING,

View File

@@ -4,6 +4,6 @@ import com.aragost.javahg.Repository;
import com.aragost.javahg.commands.PullCommand;
import sonia.scm.repository.util.WorkdirFactory;
public interface HgWorkdirFactory extends WorkdirFactory<Repository, HgCommandContext> {
public interface HgWorkdirFactory extends WorkdirFactory<Repository, Repository, HgCommandContext> {
void configure(PullCommand pullCommand);
}

View File

@@ -16,7 +16,7 @@ import java.io.IOException;
import java.util.Map;
import java.util.function.BiConsumer;
public class SimpleHgWorkdirFactory extends SimpleWorkdirFactory<Repository, HgCommandContext> implements HgWorkdirFactory {
public class SimpleHgWorkdirFactory extends SimpleWorkdirFactory<Repository, Repository, HgCommandContext> implements HgWorkdirFactory {
private final Provider<HgRepositoryEnvironmentBuilder> hgRepositoryEnvironmentBuilder;
@@ -26,7 +26,7 @@ public class SimpleHgWorkdirFactory extends SimpleWorkdirFactory<Repository, HgC
this.hgRepositoryEnvironmentBuilder = hgRepositoryEnvironmentBuilder;
}
@Override
public ParentAndClone<Repository> cloneRepository(HgCommandContext context, File target, String initialBranch) throws IOException {
public ParentAndClone<Repository, Repository> cloneRepository(HgCommandContext context, File target, String initialBranch) throws IOException {
BiConsumer<sonia.scm.repository.Repository, Map<String, String>> repositoryMapBiConsumer =
(repository, environment) -> hgRepositoryEnvironmentBuilder.get().buildFor(repository, null, environment);
Repository centralRepository = context.openWithSpecialEnvironment(repositoryMapBiConsumer);
@@ -46,6 +46,11 @@ public class SimpleHgWorkdirFactory extends SimpleWorkdirFactory<Repository, HgC
repository.close();
}
@Override
protected void closeWorkdirInternal(Repository workdir) throws Exception {
workdir.close();
}
@Override
protected sonia.scm.repository.Repository getScmRepository(HgCommandContext context) {
return context.getScmRepository();

View File

@@ -6,6 +6,7 @@ import org.junit.Before;
import org.junit.Test;
import sonia.scm.repository.Branch;
import sonia.scm.repository.HgTestUtil;
import sonia.scm.repository.InternalRepositoryException;
import sonia.scm.repository.api.BranchRequest;
import sonia.scm.repository.util.WorkdirProvider;
import sonia.scm.web.HgRepositoryEnvironmentBuilder;
@@ -13,6 +14,7 @@ import sonia.scm.web.HgRepositoryEnvironmentBuilder;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class HgBranchCommandTest extends AbstractHgCommandTestBase {
@@ -54,6 +56,22 @@ public class HgBranchCommandTest extends AbstractHgCommandTestBase {
assertThat(cmdContext.open().changeset(newBranch.getRevision()).getParent1().getBranch()).isEqualTo("test-branch");
}
@Test
public void shouldCloseBranch() {
String branchToBeClosed = "test-branch";
new HgBranchCommand(cmdContext, repository, workdirFactory).deleteOrClose(branchToBeClosed);
assertThat(readBranches()).filteredOn(b -> b.getName().equals(branchToBeClosed)).isEmpty();
}
@Test
public void shouldThrowInternalRepositoryException() {
String branchToBeClosed = "default";
new HgBranchCommand(cmdContext, repository, workdirFactory).deleteOrClose(branchToBeClosed);
assertThrows(InternalRepositoryException.class, () -> new HgBranchCommand(cmdContext, repository, workdirFactory).deleteOrClose(branchToBeClosed));
}
private List<Branch> readBranches() {
return new HgBranchesCommand(cmdContext, repository).getBranches();
}

View File

@@ -33,6 +33,7 @@
package sonia.scm.repository.spi;
import com.aragost.javahg.commands.LogCommand;
import org.junit.Test;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
@@ -40,6 +41,7 @@ import sonia.scm.repository.FileObject;
import java.io.IOException;
import java.util.Collection;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
@@ -79,6 +81,19 @@ public class HgBrowseCommandTest extends AbstractHgCommandTestBase {
assertEquals("c", c.getPath());
}
@Test
public void testBrowseShouldResolveBranchForRevision() throws IOException {
String defaultBranchRevision = new LogCommand(cmdContext.open()).rev("default").single().getNode();
BrowseCommandRequest browseCommandRequest = new BrowseCommandRequest();
browseCommandRequest.setRevision("default");
BrowserResult result = new HgBrowseCommand(cmdContext,
repository).getBrowserResult(browseCommandRequest);
assertThat(result.getRevision()).isEqualTo(defaultBranchRevision);
}
@Test
public void testBrowseSubDirectory() throws IOException {
BrowseCommandRequest request = new BrowseCommandRequest();

View File

@@ -154,7 +154,7 @@ public class HgLogCommandTest extends AbstractHgCommandTestBase
HgLogCommand command = createComamnd();
String revision = "a9bacaf1b7fa0cebfca71fed4e59ed69a6319427";
Changeset c =
command.getChangeset(revision);
command.getChangeset(revision, null);
assertNotNull(c);
assertEquals(revision, c.getId());

View File

@@ -0,0 +1,9 @@
package sonia.scm.repository;
import sonia.scm.repository.spi.SvnContext;
import sonia.scm.repository.util.WorkdirFactory;
import java.io.File;
public interface SvnWorkDirFactory extends WorkdirFactory<File, File, SvnContext> {
}

View File

@@ -0,0 +1,63 @@
package sonia.scm.repository.spi;
import org.apache.commons.lang.exception.CloneFailedException;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.wc2.SvnCheckout;
import org.tmatesoft.svn.core.wc2.SvnOperationFactory;
import org.tmatesoft.svn.core.wc2.SvnTarget;
import sonia.scm.repository.Repository;
import sonia.scm.repository.SvnWorkDirFactory;
import sonia.scm.repository.util.SimpleWorkdirFactory;
import sonia.scm.repository.util.WorkdirProvider;
import javax.inject.Inject;
import java.io.File;
import java.io.IOException;
public class SimpleSvnWorkDirFactory extends SimpleWorkdirFactory<File, File, SvnContext> implements SvnWorkDirFactory {
@Inject
public SimpleSvnWorkDirFactory(WorkdirProvider workdirProvider) {
super(workdirProvider);
}
@Override
protected Repository getScmRepository(SvnContext context) {
return context.getRepository();
}
@Override
protected ParentAndClone<File, File> cloneRepository(SvnContext context, File workingCopy, String initialBranch) {
final SvnOperationFactory svnOperationFactory = new SvnOperationFactory();
SVNURL source;
try {
source = SVNURL.fromFile(context.getDirectory());
} catch (SVNException ex) {
throw new CloneFailedException(ex.getMessage());
}
try {
final SvnCheckout checkout = svnOperationFactory.createCheckout();
checkout.setSingleTarget(SvnTarget.fromFile(workingCopy));
checkout.setSource(SvnTarget.fromURL(source));
checkout.run();
} catch (SVNException ex) {
throw new CloneFailedException(ex.getMessage());
} finally {
svnOperationFactory.dispose();
}
return new ParentAndClone<>(context.getDirectory(), workingCopy);
}
@Override
protected void closeRepository(File workingCopy) {
}
@Override
protected void closeWorkdirInternal(File workdir) {
}
}
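For Subversion both type parameters of WorkingCopy are plain directories, the first apparently being the central repository directory and the second the temporary checkout (see ParentAndClone above). A minimal usage sketch, assuming an SvnContext and a WorkdirProvider are available as in the tests further below:
// hedged sketch: 'workdirProvider' and 'context' are assumed to exist
SimpleSvnWorkDirFactory factory = new SimpleSvnWorkDirFactory(workdirProvider);
try (WorkingCopy<File, File> workingCopy = factory.createWorkingCopy(context, null)) {
  File central = workingCopy.getCentralRepository();   // the repository directory itself
  File checkout = workingCopy.getWorkingRepository();  // the checked-out working copy to modify
  // ... change files under 'checkout', then commit them back as SvnModifyCommand does
}
// on close the checkout is removed again (see shouldDeleteCloneOnClose below)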

View File

@@ -42,113 +42,57 @@ import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.io.SVNRepository;
import org.tmatesoft.svn.core.io.SVNRepositoryFactory;
import sonia.scm.repository.Repository;
import sonia.scm.repository.SvnUtil;
//~--- JDK imports ------------------------------------------------------------
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
/**
*
* @author Sebastian Sdorra
*/
public class SvnContext implements Closeable
{
public class SvnContext implements Closeable {
/**
* the logger for SvnContext
*/
private static final Logger logger =
LoggerFactory.getLogger(SvnContext.class);
private static final Logger LOG = LoggerFactory.getLogger(SvnContext.class);
//~--- constructors ---------------------------------------------------------
private final Repository repository;
private final File directory;
/**
* Constructs ...
*
*
* @param directory
*/
public SvnContext(File directory)
{
private SVNRepository svnRepository;
public SvnContext(Repository repository, File directory) {
this.repository = repository;
this.directory = directory;
}
//~--- methods --------------------------------------------------------------
/**
* Method description
*
*
* @throws IOException
*/
@Override
public void close() throws IOException
{
if (logger.isTraceEnabled())
{
logger.trace("close svn repository {}", directory);
}
SvnUtil.closeSession(repository);
}
/**
* Method description
*
*
* @return
*
* @throws SVNException
*/
public SVNURL createUrl() throws SVNException
{
return SVNURL.fromFile(directory);
}
/**
* Method description
*
*
* @return
*
* @throws SVNException
*/
public SVNRepository open() throws SVNException
{
if (repository == null)
{
if (logger.isTraceEnabled())
{
logger.trace("open svn repository {}", directory);
}
repository = SVNRepositoryFactory.create(createUrl());
}
public Repository getRepository() {
return repository;
}
//~--- get methods ----------------------------------------------------------
/**
* Method description
*
*
* @return
*/
public File getDirectory()
{
public File getDirectory() {
return directory;
}
//~--- fields ---------------------------------------------------------------
public SVNURL createUrl() throws SVNException {
return SVNURL.fromFile(directory);
}
/** Field description */
private File directory;
public SVNRepository open() throws SVNException {
if (svnRepository == null) {
LOG.trace("open svn repository {}", directory);
svnRepository = SVNRepositoryFactory.create(createUrl());
}
return svnRepository;
}
@Override
public void close() {
LOG.trace("close svn repository {}", directory);
SvnUtil.closeSession(svnRepository);
}
/** Field description */
private SVNRepository repository;
}
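The reworked SvnContext now carries the SCM repository next to its directory and caches the opened SVNRepository. A minimal sketch of the new call pattern (scmRepository and repositoryDirectory are placeholders, not values from this diff):
// hedged sketch: 'scmRepository' and 'repositoryDirectory' are placeholders
try (SvnContext context = new SvnContext(scmRepository, repositoryDirectory)) {
  SVNRepository svn = context.open();          // lazily created from SVNURL.fromFile(directory) and cached
  long head = svn.getLatestRevision();         // any SVNKit call against the opened repository
} catch (SVNException ex) {
  // open() and most SVNKit calls throw SVNException
}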

View File

@@ -75,7 +75,7 @@ public class SvnLogCommand extends AbstractSvnCommand implements LogCommand
@Override
@SuppressWarnings("unchecked")
public Changeset getChangeset(String revision) {
public Changeset getChangeset(String revision, LogCommandRequest request) {
Changeset changeset = null;
if (logger.isDebugEnabled())

View File

@@ -0,0 +1,118 @@
package sonia.scm.repository.spi;
import org.tmatesoft.svn.core.SVNCommitInfo;
import org.tmatesoft.svn.core.SVNDepth;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.wc.SVNClientManager;
import org.tmatesoft.svn.core.wc.SVNWCClient;
import sonia.scm.repository.InternalRepositoryException;
import sonia.scm.repository.Repository;
import sonia.scm.repository.SvnWorkDirFactory;
import sonia.scm.repository.util.WorkingCopy;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
public class SvnModifyCommand implements ModifyCommand {
private SvnContext context;
private SvnWorkDirFactory workDirFactory;
private Repository repository;
SvnModifyCommand(SvnContext context, Repository repository, SvnWorkDirFactory workDirFactory) {
this.context = context;
this.repository = repository;
this.workDirFactory = workDirFactory;
}
@Override
public String execute(ModifyCommandRequest request) {
SVNClientManager clientManager = SVNClientManager.newInstance();
try (WorkingCopy<File, File> workingCopy = workDirFactory.createWorkingCopy(context, null)) {
File workingDirectory = workingCopy.getDirectory();
modifyWorkingDirectory(request, clientManager, workingDirectory);
return commitChanges(clientManager, workingDirectory, request.getCommitMessage());
}
}
private String commitChanges(SVNClientManager clientManager, File workingDirectory, String commitMessage) {
try {
SVNCommitInfo svnCommitInfo = clientManager.getCommitClient().doCommit(
new File[]{workingDirectory},
false,
commitMessage,
null,
null,
false,
true,
SVNDepth.INFINITY
);
return String.valueOf(svnCommitInfo.getNewRevision());
} catch (SVNException e) {
throw new InternalRepositoryException(repository, "could not commit changes on repository");
}
}
private void modifyWorkingDirectory(ModifyCommandRequest request, SVNClientManager clientManager, File workingDirectory) {
for (ModifyCommandRequest.PartialRequest partialRequest : request.getRequests()) {
try {
SVNWCClient wcClient = clientManager.getWCClient();
partialRequest.execute(new ModifyWorker(wcClient, workingDirectory));
} catch (IOException e) {
throw new InternalRepositoryException(repository, "could not read files from repository");
}
}
}
private class ModifyWorker implements ModifyWorkerHelper {
private final SVNWCClient wcClient;
private final File workingDirectory;
private ModifyWorker(SVNWCClient wcClient, File workingDirectory) {
this.wcClient = wcClient;
this.workingDirectory = workingDirectory;
}
@Override
public void doScmDelete(String toBeDeleted) {
try {
wcClient.doDelete(new File(workingDirectory, toBeDeleted), true, true, false);
} catch (SVNException e) {
throw new InternalRepositoryException(repository, "could not delete file from repository");
}
}
@Override
public void addFileToScm(String name, Path file) {
try {
wcClient.doAdd(
file.toFile(),
true,
false,
true,
SVNDepth.INFINITY,
false,
true
);
} catch (SVNException e) {
throw new InternalRepositoryException(repository, "could not add file to repository");
}
}
@Override
public File getWorkDir() {
return workingDirectory;
}
@Override
public Repository getRepository() {
return repository;
}
@Override
public String getBranch() {
return null;
}
}
}
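SvnModifyCommand checks out a fresh working copy, applies each partial request, and commits the result as one revision. A minimal caller sketch mirroring SvnModifyCommandTest further below (file names, content file, message, and author are placeholders):
// hedged sketch: 'context', 'repository' and 'workDirFactory' assumed to be wired as in the test
SvnModifyCommand modifyCommand = new SvnModifyCommand(context, repository, workDirFactory);
ModifyCommandRequest request = new ModifyCommandRequest();
request.addRequest(new ModifyCommandRequest.CreateFileRequest("README.md", someFile, false)); // placeholder name and File
request.addRequest(new ModifyCommandRequest.DeleteFileRequest("obsolete.txt"));               // placeholder path
request.setCommitMessage("update sources");
request.setAuthor(new Person("Jane Doe", "jane@example.com"));
String newRevision = modifyCommand.execute(request);          // the new revision number as a string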

View File

@@ -37,8 +37,10 @@ import com.google.common.collect.ImmutableSet;
import com.google.common.io.Closeables;
import sonia.scm.repository.Repository;
import sonia.scm.repository.SvnRepositoryHandler;
import sonia.scm.repository.SvnWorkDirFactory;
import sonia.scm.repository.api.Command;
import javax.inject.Inject;
import java.io.IOException;
import java.util.Set;
@@ -53,17 +55,19 @@ public class SvnRepositoryServiceProvider extends RepositoryServiceProvider
//J-
public static final Set<Command> COMMANDS = ImmutableSet.of(
Command.BLAME, Command.BROWSE, Command.CAT, Command.DIFF,
Command.LOG, Command.BUNDLE, Command.UNBUNDLE
Command.LOG, Command.BUNDLE, Command.UNBUNDLE, Command.MODIFY
);
//J+
//~--- constructors ---------------------------------------------------------
@Inject
SvnRepositoryServiceProvider(SvnRepositoryHandler handler,
Repository repository)
Repository repository, SvnWorkDirFactory workdirFactory)
{
this.repository = repository;
this.context = new SvnContext(handler.getDirectory(repository.getId()));
this.context = new SvnContext(repository, handler.getDirectory(repository.getId()));
this.workDirFactory = workdirFactory;
}
//~--- methods --------------------------------------------------------------
@@ -158,6 +162,10 @@ public class SvnRepositoryServiceProvider extends RepositoryServiceProvider
return new SvnModificationsCommand(context, repository);
}
public ModifyCommand getModifyCommand() {
return new SvnModifyCommand(context, repository, workDirFactory);
}
/**
* Method description
*
@@ -189,4 +197,6 @@ public class SvnRepositoryServiceProvider extends RepositoryServiceProvider
/** Field description */
private final Repository repository;
private final SvnWorkDirFactory workDirFactory;
}

View File

@@ -36,15 +36,18 @@ import com.google.inject.Inject;
import sonia.scm.plugin.Extension;
import sonia.scm.repository.Repository;
import sonia.scm.repository.SvnRepositoryHandler;
import sonia.scm.repository.SvnWorkDirFactory;
@Extension
public class SvnRepositoryServiceResolver implements RepositoryServiceResolver {
private SvnRepositoryHandler handler;
private SvnWorkDirFactory workdirFactory;
@Inject
public SvnRepositoryServiceResolver(SvnRepositoryHandler handler) {
public SvnRepositoryServiceResolver(SvnRepositoryHandler handler, SvnWorkDirFactory workdirFactory) {
this.handler = handler;
this.workdirFactory = workdirFactory;
}
@Override
@@ -52,7 +55,7 @@ public class SvnRepositoryServiceResolver implements RepositoryServiceResolver {
SvnRepositoryServiceProvider provider = null;
if (SvnRepositoryHandler.TYPE_NAME.equalsIgnoreCase(repository.getType())) {
provider = new SvnRepositoryServiceProvider(handler, repository);
provider = new SvnRepositoryServiceProvider(handler, repository, workdirFactory);
}
return provider;

View File

@@ -38,6 +38,8 @@ import org.mapstruct.factory.Mappers;
import sonia.scm.api.v2.resources.SvnConfigDtoToSvnConfigMapper;
import sonia.scm.api.v2.resources.SvnConfigToSvnConfigDtoMapper;
import sonia.scm.plugin.Extension;
import sonia.scm.repository.SvnWorkDirFactory;
import sonia.scm.repository.spi.SimpleSvnWorkDirFactory;
/**
*
@@ -50,5 +52,6 @@ public class SvnServletModule extends ServletModule {
protected void configureServlets() {
bind(SvnConfigDtoToSvnConfigMapper.class).to(Mappers.getMapper(SvnConfigDtoToSvnConfigMapper.class).getClass());
bind(SvnConfigToSvnConfigDtoMapper.class).to(Mappers.getMapper(SvnConfigToSvnConfigDtoMapper.class).getClass());
bind(SvnWorkDirFactory.class).to(SimpleSvnWorkDirFactory.class);
}
}

View File

@@ -72,7 +72,7 @@ public class AbstractSvnCommandTestBase extends ZippedRepositoryTestBase
{
if (context == null)
{
context = new SvnContext(repositoryDirectory);
context = new SvnContext(repository, repositoryDirectory);
}
return context;

View File

@@ -0,0 +1,77 @@
package sonia.scm.repository.spi;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.tmatesoft.svn.core.SVNException;
import sonia.scm.repository.Repository;
import sonia.scm.repository.util.WorkdirProvider;
import sonia.scm.repository.util.WorkingCopy;
import java.io.File;
import java.io.IOException;
import static org.assertj.core.api.Assertions.assertThat;
public class SimpleSvnWorkDirFactoryTest extends AbstractSvnCommandTestBase {
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
// keep this so that it will not be garbage collected (Transport keeps this in a weak reference)
private WorkdirProvider workdirProvider;
@Before
public void initWorkDirProvider() throws IOException {
workdirProvider = new WorkdirProvider(temporaryFolder.newFolder());
}
@Test
public void shouldCheckoutLatestRevision() throws SVNException, IOException {
SimpleSvnWorkDirFactory factory = new SimpleSvnWorkDirFactory(workdirProvider);
try (WorkingCopy<File, File> workingCopy = factory.createWorkingCopy(createContext(), null)) {
assertThat(new File(workingCopy.getWorkingRepository(), "a.txt"))
.exists()
.isFile()
.hasContent("a and b\nline for blame test");
}
}
@Test
public void cloneFromPoolShouldNotBeReused() {
SimpleSvnWorkDirFactory factory = new SimpleSvnWorkDirFactory(workdirProvider);
File firstDirectory;
try (WorkingCopy<File, File> workingCopy = factory.createWorkingCopy(createContext(), null)) {
firstDirectory = workingCopy.getDirectory();
}
try (WorkingCopy<File, File> workingCopy = factory.createWorkingCopy(createContext(), null)) {
File secondDirectory = workingCopy.getDirectory();
assertThat(secondDirectory).isNotEqualTo(firstDirectory);
}
}
@Test
public void shouldDeleteCloneOnClose() {
SimpleSvnWorkDirFactory factory = new SimpleSvnWorkDirFactory(workdirProvider);
File directory;
File workingRepository;
try (WorkingCopy<File, File> workingCopy = factory.createWorkingCopy(createContext(), null)) {
directory = workingCopy.getDirectory();
workingRepository = workingCopy.getWorkingRepository();
}
assertThat(directory).doesNotExist();
assertThat(workingRepository).doesNotExist();
}
@Test
public void shouldReturnRepository() {
SimpleSvnWorkDirFactory factory = new SimpleSvnWorkDirFactory(workdirProvider);
Repository scmRepository = factory.getScmRepository(createContext());
assertThat(scmRepository).isSameAs(repository);
}
}

View File

@@ -128,7 +128,7 @@ public class SvnLogCommandTest extends AbstractSvnCommandTestBase
@Test
public void testGetCommit() {
Changeset c = createCommand().getChangeset("3");
Changeset c = createCommand().getChangeset("3", null);
assertNotNull(c);
assertEquals("3", c.getId());

View File

@@ -0,0 +1,89 @@
package sonia.scm.repository.spi;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import sonia.scm.AlreadyExistsException;
import sonia.scm.repository.Person;
import sonia.scm.repository.util.WorkdirProvider;
import sonia.scm.repository.util.WorkingCopy;
import java.io.File;
import java.io.IOException;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
public class SvnModifyCommandTest extends AbstractSvnCommandTestBase {
private SvnModifyCommand svnModifyCommand;
private SvnContext context;
private SimpleSvnWorkDirFactory workDirFactory;
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
@Before
public void initSvnModifyCommand() {
context = createContext();
workDirFactory = new SimpleSvnWorkDirFactory(new WorkdirProvider(context.getDirectory()));
svnModifyCommand = new SvnModifyCommand(context, createRepository(), workDirFactory);
}
@Test
public void shouldRemoveFiles() {
ModifyCommandRequest request = new ModifyCommandRequest();
request.addRequest(new ModifyCommandRequest.DeleteFileRequest("a.txt"));
request.setCommitMessage("this is great");
request.setAuthor(new Person("Arthur Dent", "dent@hitchhiker.com"));
svnModifyCommand.execute(request);
WorkingCopy<File, File> workingCopy = workDirFactory.createWorkingCopy(context, null);
assertThat(new File(workingCopy.getWorkingRepository().getAbsolutePath() + "/a.txt")).doesNotExist();
assertThat(new File(workingCopy.getWorkingRepository().getAbsolutePath() + "/c")).exists();
}
@Test
public void shouldAddNewFile() throws IOException {
File testfile = temporaryFolder.newFile("Test123");
ModifyCommandRequest request = new ModifyCommandRequest();
request.addRequest(new ModifyCommandRequest.CreateFileRequest("Test123", testfile, false));
request.setCommitMessage("this is great");
request.setAuthor(new Person("Arthur Dent", "dent@hitchhiker.com"));
svnModifyCommand.execute(request);
WorkingCopy<File, File> workingCopy = workDirFactory.createWorkingCopy(context, null);
assertThat(new File(workingCopy.getWorkingRepository(), "Test123")).exists();
}
@Test
public void shouldThrowFileAlreadyExistsException() throws IOException {
File testfile = temporaryFolder.newFile("a.txt");
ModifyCommandRequest request = new ModifyCommandRequest();
request.addRequest(new ModifyCommandRequest.CreateFileRequest("a.txt", testfile, false));
request.setCommitMessage("this is great");
request.setAuthor(new Person("Arthur Dent", "dent@hitchhiker.com"));
assertThrows(AlreadyExistsException.class, () -> svnModifyCommand.execute(request));
}
@Test
public void shouldUpdateExistingFile() throws IOException {
File testfile = temporaryFolder.newFile("a.txt");
ModifyCommandRequest request = new ModifyCommandRequest();
request.addRequest(new ModifyCommandRequest.CreateFileRequest("a.txt", testfile, true));
request.setCommitMessage("this is great");
request.setAuthor(new Person("Arthur Dent", "dent@hitchhiker.com"));
svnModifyCommand.execute(request);
WorkingCopy<File, File> workingCopy = workDirFactory.createWorkingCopy(context, null);
assertThat(new File(workingCopy.getWorkingRepository(), "a.txt")).exists();
assertThat(new File(workingCopy.getWorkingRepository(), "a.txt")).hasContent("");
}
}

View File

@@ -116,6 +116,6 @@ public class SvnUnbundleCommandTest extends AbstractSvnCommandTestBase
SVNRepositoryFactory.createLocalRepository(folder, true, true);
return new SvnContext(folder);
return new SvnContext(repository, folder);
}
}

View File

@@ -212,8 +212,8 @@ public final class MockUtil
{
SCMContextProvider provider = mock(SCMContextProvider.class);
when(provider.getBaseDirectory()).thenReturn(directory);
when(provider.resolve(any(Path.class))).then(ic -> {
lenient().when(provider.getBaseDirectory()).thenReturn(directory);
lenient().when(provider.resolve(any(Path.class))).then(ic -> {
Path p = ic.getArgument(0);
return directory.toPath().resolve(p);
});

View File

@@ -11,10 +11,10 @@ type Props = WithTranslation & {
repository: Repository;
branch: Branch;
defaultBranch: Branch;
branches: Branch[];
revision: string;
path: string;
baseUrl: string;
sources: File;
};
const FlexStartNav = styled.nav`
@@ -59,7 +59,12 @@ class Breadcrumb extends React.Component<Props> {
}
render() {
const { baseUrl, branch, defaultBranch, branches, revision, path, repository, t } = this.props;
const { repository, baseUrl, branch, defaultBranch, sources, revision, path, t } = this.props;
let homeUrl = baseUrl + "/";
if (revision) {
homeUrl += encodeURIComponent(revision) + "/";
}
return (
<>
@@ -67,7 +72,7 @@ class Breadcrumb extends React.Component<Props> {
<FlexStartNav className={classNames("breadcrumb", "sources-breadcrumb")} aria-label="breadcrumbs">
<ul>
<li>
<Link to={baseUrl + "/" + revision + "/"}>
<Link to={homeUrl}>
<HomeIcon title={t("breadcrumb.home")} name="home" color="inherit" />
</Link>
</li>
@@ -80,9 +85,10 @@ class Breadcrumb extends React.Component<Props> {
name="repos.sources.actionbar"
props={{
baseUrl,
revision,
branch: branch ? branch : defaultBranch,
path,
isBranchUrl: branches && branches.filter(b => b.name.replace("/", "%2F") === revision).length > 0,
sources,
repository
}}
renderAll={true}

View File

@@ -113,7 +113,10 @@
"description": "Beschreibung",
"size": "Größe"
},
"noSources": "Keine Sources in diesem Branch gefunden."
"noSources": "Keine Sources in diesem Branch gefunden.",
"extension" : {
"notBound": "Keine Erweiterung angebunden."
}
},
"permission": {
"title": "Berechtigungen bearbeiten",

View File

@@ -113,7 +113,10 @@
"description": "Description",
"size": "Size"
},
"noSources": "No sources found for this branch."
"noSources": "No sources found for this branch.",
"extension" : {
"notBound": "No extension bound."
}
},
"permission": {
"title": "Edit Permissions",

View File

@@ -113,7 +113,10 @@
"description": "Discripción",
"size": "tamaño"
},
"noSources": "No se han encontrado fuentes para esta rama."
"noSources": "No se han encontrado fuentes para esta rama.",
"extension" : {
"notBound": "Sin extensión conectada."
}
},
"permission": {
"title": "Editar permisos",

View File

@@ -20,6 +20,7 @@ import PermissionsNavLink from "../components/PermissionsNavLink";
import Sources from "../sources/containers/Sources";
import RepositoryNavLink from "../components/RepositoryNavLink";
import { getLinks, getRepositoriesLink } from "../../modules/indexResource";
import SourceExtensions from "../sources/containers/SourceExtensions";
type Props = WithTranslation & {
namespace: string;
@@ -67,6 +68,12 @@ class RepositoryRoot extends React.Component<Props> {
return route.location.pathname.match(regex);
};
matchesSources = (route: any) => {
const url = this.matchedUrl();
const regex = new RegExp(`${url}(/sources|/sourceext)/.*`);
return route.location.pathname.match(regex);
};
render() {
const { loading, error, indexLinks, repository, t } = this.props;
@@ -120,6 +127,15 @@ class RepositoryRoot extends React.Component<Props> {
path={`${url}/sources/:revision/:path*`}
render={() => <Sources repository={repository} baseUrl={`${url}/sources`} />}
/>
<Route
path={`${url}/sourceext/:extension`}
exact={true}
render={() => <SourceExtensions repository={repository} />}
/>
<Route
path={`${url}/sourceext/:extension/:revision/:path*`}
render={() => <SourceExtensions repository={repository} />}
/>
<Route
path={`${url}/changesets`}
render={() => (
@@ -186,6 +202,7 @@ class RepositoryRoot extends React.Component<Props> {
to={`${url}/sources`}
icon="fas fa-code"
label={t("repositoryRoot.menu.sourcesNavLink")}
activeWhenMatch={this.matchesSources}
activeOnlyWhenExact={false}
/>
<ExtensionPoint name="repository.navigation" props={extensionProps} renderAll={true} />

View File

@@ -75,7 +75,7 @@ class Content extends React.Component<Props, State> {
};
showHeader() {
const { file, revision } = this.props;
const { repository, file, revision } = this.props;
const { showHistory, collapsed } = this.state;
const icon = collapsed ? "angle-right" : "angle-down";
@@ -99,6 +99,7 @@ class Content extends React.Component<Props, State> {
<ExtensionPoint
name="repos.sources.content.actionbar"
props={{
repository,
file,
revision,
handleExtensionError: this.handleExtensionError

View File

@@ -0,0 +1,92 @@
import React from "react";
import { Repository, File } from "@scm-manager/ui-types";
import { withRouter, RouteComponentProps } from "react-router-dom";
import { ExtensionPoint, binder } from "@scm-manager/ui-extensions";
import { fetchSources, getFetchSourcesFailure, getSources, isFetchSourcesPending } from "../modules/sources";
import { connect } from "react-redux";
import { Loading, ErrorNotification } from "@scm-manager/ui-components";
import Notification from "@scm-manager/ui-components/src/Notification";
import { WithTranslation, withTranslation } from "react-i18next";
type Props = WithTranslation & RouteComponentProps & {
repository: Repository;
// url params
extension: string;
revision?: string;
path?: string;
// redux state
loading: boolean;
error?: Error | null;
sources?: File | null;
// dispatch props
fetchSources: (repository: Repository, revision: string, path: string) => void;
};
const extensionPointName = "repos.sources.extensions";
class SourceExtensions extends React.Component<Props> {
componentDidMount() {
const { fetchSources, repository, revision, path } = this.props;
// TODO get typing right
fetchSources(repository, revision || "", path || "");
}
render() {
const { loading, error, repository, extension, revision, path, sources, t } = this.props;
if (error) {
return <ErrorNotification error={error} />;
}
if (loading) {
return <Loading />;
}
const extprops = { extension, repository, revision, path, sources };
if (!binder.hasExtension(extensionPointName, extprops)) {
return <Notification type="warning">{t("sources.extension.notBound")}</Notification>;
}
return <ExtensionPoint name={extensionPointName} props={extprops} />;
}
}
const mapStateToProps = (state: any, ownProps: Props): Partial<Props> => {
const { repository, match } = ownProps;
// @ts-ignore
const revision: string = match.params.revision;
// @ts-ignore
const path: string = match.params.path;
// @ts-ignore
const extension: string = match.params.extension;
const loading = isFetchSourcesPending(state, repository, revision, path);
const error = getFetchSourcesFailure(state, repository, revision, path);
const sources = getSources(state, repository, revision, path);
return {
repository,
extension,
revision,
path,
loading,
error,
sources
};
};
const mapDispatchToProps = (dispatch: any) => {
return {
fetchSources: (repository: Repository, revision: string, path: string) => {
dispatch(fetchSources(repository, decodeURIComponent(revision), path));
}
};
};
export default withRouter(
connect(
mapStateToProps,
mapDispatchToProps
)(withTranslation("repos")(SourceExtensions))
);

View File

@@ -13,7 +13,7 @@ import {
} from "../../branches/modules/branches";
import { compose } from "redux";
import Content from "./Content";
import { fetchSources, isDirectory } from "../modules/sources";
import { fetchSources, getSources, isDirectory } from "../modules/sources";
type Props = WithTranslation & {
repository: Repository;
@@ -24,6 +24,7 @@ type Props = WithTranslation & {
revision: string;
path: string;
currentFileIsDirectory: boolean;
sources: File;
// dispatch props
fetchBranches: (p: Repository) => void;
@@ -52,7 +53,7 @@ class Sources extends React.Component<Props, State> {
const { fetchBranches, repository, revision, path, fetchSources } = this.props;
fetchBranches(repository);
fetchSources(repository, revision, path);
fetchSources(repository, this.decodeRevision(revision), path);
this.redirectToDefaultBranch();
}
@@ -60,12 +61,16 @@ class Sources extends React.Component<Props, State> {
componentDidUpdate(prevProps) {
const { fetchSources, repository, revision, path } = this.props;
if (prevProps.revision !== revision || prevProps.path !== path) {
fetchSources(repository, revision, path);
fetchSources(repository, this.decodeRevision(revision), path);
}
this.redirectToDefaultBranch();
}
decodeRevision = (revision: string) => {
return revision ? decodeURIComponent(revision) : revision;
};
redirectToDefaultBranch = () => {
const { branches } = this.props;
if (this.shouldRedirectToDefaultBranch()) {
@@ -148,23 +153,22 @@ class Sources extends React.Component<Props, State> {
};
renderBreadcrumb = () => {
const { revision, path, baseUrl, branches, repository } = this.props;
const { revision, path, baseUrl, branches, sources, repository } = this.props;
const { selectedBranch } = this.state;
if (revision) {
return (
<Breadcrumb
revision={encodeURIComponent(revision)}
path={path}
baseUrl={baseUrl}
branch={selectedBranch}
defaultBranch={branches && branches.filter(b => b.defaultBranch === true)[0]}
branches={branches}
repository={repository}
/>
);
}
return null;
return (
<Breadcrumb
repository={repository}
revision={revision}
path={path}
baseUrl={baseUrl}
branch={selectedBranch}
defaultBranch={
branches && branches.filter(b => b.defaultBranch === true)[0]
}
sources={sources}
/>
);
};
}
@@ -178,15 +182,17 @@ const mapStateToProps = (state, ownProps) => {
const currentFileIsDirectory = decodedRevision
? isDirectory(state, repository, decodedRevision, path)
: isDirectory(state, repository, revision, path);
const sources = getSources(state, repository, decodedRevision, path);
return {
repository,
revision: decodedRevision,
revision,
path,
loading,
error,
branches,
currentFileIsDirectory
currentFileIsDirectory,
sources
};
};

View File

@@ -1,5 +1,5 @@
import * as types from "../../../modules/types";
import { Repository, File, Action } from "@scm-manager/ui-types";
import { Repository, File, Action, Link } from "@scm-manager/ui-types";
import { apiClient } from "@scm-manager/ui-components";
import { isPending } from "../../../modules/pending";
import { getFailure } from "../../../modules/failure";
@@ -25,7 +25,7 @@ export function fetchSources(repository: Repository, revision: string, path: str
}
function createUrl(repository: Repository, revision: string, path: string) {
const base = repository._links.sources.href;
const base = (repository._links.sources as Link).href;
if (!revision && !path) {
return base;
}
@@ -61,7 +61,7 @@ export function fetchSourcesFailure(repository: Repository, revision: string, pa
function createItemId(repository: Repository, revision: string, path: string) {
const revPart = revision ? revision : "_";
const pathPart = path ? path : "";
return `${repository.namespace}/${repository.name}/${revPart}/${pathPart}`;
return `${repository.namespace}/${repository.name}/${decodeURIComponent(revPart)}/${pathPart}`;
}
// reducer

View File

@@ -0,0 +1,55 @@
package sonia.scm.api.v2.resources;
import com.google.common.annotations.VisibleForTesting;
import de.otto.edison.hal.Embedded;
import de.otto.edison.hal.Links;
import org.mapstruct.Context;
import org.mapstruct.MapperConfig;
import org.mapstruct.ObjectFactory;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.NamespaceAndName;
import sonia.scm.repository.SubRepository;
import javax.inject.Inject;
import static de.otto.edison.hal.Embedded.embeddedBuilder;
import static de.otto.edison.hal.Link.link;
@MapperConfig
abstract class BaseFileObjectDtoMapper extends HalAppenderMapper implements InstantAttributeMapper {
@Inject
private ResourceLinks resourceLinks;
@VisibleForTesting
void setResourceLinks(ResourceLinks resourceLinks) {
this.resourceLinks = resourceLinks;
}
abstract SubRepositoryDto mapSubrepository(SubRepository subRepository);
@ObjectFactory
FileObjectDto createDto(@Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult, FileObject fileObject) {
String path = removeFirstSlash(fileObject.getPath());
Links.Builder links = Links.linkingTo();
if (fileObject.isDirectory()) {
links.self(resourceLinks.source().sourceWithPath(namespaceAndName.getNamespace(), namespaceAndName.getName(), browserResult.getRevision(), path));
} else {
links.self(resourceLinks.source().content(namespaceAndName.getNamespace(), namespaceAndName.getName(), browserResult.getRevision(), path));
links.single(link("history", resourceLinks.fileHistory().self(namespaceAndName.getNamespace(), namespaceAndName.getName(), browserResult.getRevision(), path)));
}
Embedded.Builder embeddedBuilder = embeddedBuilder();
applyEnrichers(links, embeddedBuilder, namespaceAndName, browserResult, fileObject);
return new FileObjectDto(links.build(), embeddedBuilder.build());
}
abstract void applyEnrichers(Links.Builder links, Embedded.Builder embeddedBuilder, NamespaceAndName namespaceAndName, BrowserResult browserResult, FileObject fileObject);
private String removeFirstSlash(String source) {
return source.startsWith("/") ? source.substring(1) : source;
}
}

View File

@@ -1,22 +1,60 @@
package sonia.scm.api.v2.resources;
import com.google.common.annotations.VisibleForTesting;
import de.otto.edison.hal.Embedded;
import de.otto.edison.hal.Links;
import org.mapstruct.Context;
import org.mapstruct.Mapper;
import org.mapstruct.Mapping;
import org.mapstruct.Qualifier;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.NamespaceAndName;
import javax.inject.Inject;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
public class BrowserResultToFileObjectDtoMapper {
private final FileObjectToFileObjectDtoMapper fileObjectToFileObjectDtoMapper;
@Mapper
public abstract class BrowserResultToFileObjectDtoMapper extends BaseFileObjectDtoMapper {
@Inject
public BrowserResultToFileObjectDtoMapper(FileObjectToFileObjectDtoMapper fileObjectToFileObjectDtoMapper) {
this.fileObjectToFileObjectDtoMapper = fileObjectToFileObjectDtoMapper;
private FileObjectToFileObjectDtoMapper childrenMapper;
@VisibleForTesting
void setChildrenMapper(FileObjectToFileObjectDtoMapper childrenMapper) {
this.childrenMapper = childrenMapper;
}
public FileObjectDto map(BrowserResult browserResult, NamespaceAndName namespaceAndName) {
FileObjectDto fileObjectDto = fileObjectToFileObjectDtoMapper.map(browserResult.getFile(), namespaceAndName, browserResult);
fileObjectDto.setRevision( browserResult.getRevision() );
FileObjectDto map(BrowserResult browserResult, @Context NamespaceAndName namespaceAndName) {
FileObjectDto fileObjectDto = fileObjectToDto(browserResult.getFile(), namespaceAndName, browserResult);
fileObjectDto.setRevision(browserResult.getRevision());
return fileObjectDto;
}
@Mapping(target = "attributes", ignore = true) // We do not map HAL attributes
@Mapping(target = "children", qualifiedBy = Children.class)
protected abstract FileObjectDto fileObjectToDto(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult);
@Children
protected FileObjectDto childrenToDto(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult) {
return childrenMapper.map(fileObject, namespaceAndName, browserResult);
}
@Override
void applyEnrichers(Links.Builder links, Embedded.Builder embeddedBuilder, NamespaceAndName namespaceAndName, BrowserResult browserResult, FileObject fileObject) {
EdisonHalAppender appender = new EdisonHalAppender(links, embeddedBuilder);
// call enrichers that are only responsible for the top-level browse result
applyEnrichers(appender, browserResult, namespaceAndName);
// call enrichers that are responsible for every file object, i.e. the top-level file of the browse result and its children
applyEnrichers(appender, fileObject, namespaceAndName, browserResult, browserResult.getRevision());
}
@Qualifier
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.CLASS)
@interface Children {
}
}

View File

@@ -5,46 +5,18 @@ import de.otto.edison.hal.Links;
import org.mapstruct.Context;
import org.mapstruct.Mapper;
import org.mapstruct.Mapping;
import org.mapstruct.ObjectFactory;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.NamespaceAndName;
import sonia.scm.repository.SubRepository;
import javax.inject.Inject;
import static de.otto.edison.hal.Embedded.embeddedBuilder;
import static de.otto.edison.hal.Link.link;
@Mapper
public abstract class FileObjectToFileObjectDtoMapper extends HalAppenderMapper implements InstantAttributeMapper {
@Inject
private ResourceLinks resourceLinks;
public abstract class FileObjectToFileObjectDtoMapper extends BaseFileObjectDtoMapper {
@Mapping(target = "attributes", ignore = true) // We do not map HAL attributes
protected abstract FileObjectDto map(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult);
abstract SubRepositoryDto mapSubrepository(SubRepository subRepository);
@ObjectFactory
FileObjectDto createDto(@Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult, FileObject fileObject) {
String path = removeFirstSlash(fileObject.getPath());
Links.Builder links = Links.linkingTo();
if (fileObject.isDirectory()) {
links.self(resourceLinks.source().sourceWithPath(namespaceAndName.getNamespace(), namespaceAndName.getName(), browserResult.getRevision(), path));
} else {
links.self(resourceLinks.source().content(namespaceAndName.getNamespace(), namespaceAndName.getName(), browserResult.getRevision(), path));
links.single(link("history", resourceLinks.fileHistory().self(namespaceAndName.getNamespace(), namespaceAndName.getName(), browserResult.getRevision(), path)));
}
Embedded.Builder embeddedBuilder = embeddedBuilder();
@Override
void applyEnrichers(Links.Builder links, Embedded.Builder embeddedBuilder, NamespaceAndName namespaceAndName, BrowserResult browserResult, FileObject fileObject) {
applyEnrichers(new EdisonHalAppender(links, embeddedBuilder), fileObject, namespaceAndName, browserResult, browserResult.getRevision());
return new FileObjectDto(links.build(), embeddedBuilder.build());
}
private String removeFirstSlash(String source) {
return source.startsWith("/") ? source.substring(1) : source;
}
}

View File

@@ -38,6 +38,7 @@ public class MapperModule extends AbstractModule {
bind(TagToTagDtoMapper.class).to(Mappers.getMapper(TagToTagDtoMapper.class).getClass());
bind(FileObjectToFileObjectDtoMapper.class).to(Mappers.getMapper(FileObjectToFileObjectDtoMapper.class).getClass());
bind(BrowserResultToFileObjectDtoMapper.class).to(Mappers.getMapper(BrowserResultToFileObjectDtoMapper.class).getClass());
bind(ModificationsToDtoMapper.class).to(Mappers.getMapper(ModificationsToDtoMapper.class).getClass());
bind(ReducedObjectModelToDtoMapper.class).to(Mappers.getMapper(ReducedObjectModelToDtoMapper.class).getClass());
@@ -46,8 +47,6 @@ public class MapperModule extends AbstractModule {
bind(ScmViolationExceptionToErrorDtoMapper.class).to(Mappers.getMapper(ScmViolationExceptionToErrorDtoMapper.class).getClass());
bind(ExceptionWithContextToErrorDtoMapper.class).to(Mappers.getMapper(ExceptionWithContextToErrorDtoMapper.class).getClass());
bind(MergeResultToDtoMapper.class).to(Mappers.getMapper(MergeResultToDtoMapper.class).getClass());
// no mapstruct required
bind(MeDtoFactory.class);
bind(UIPluginDtoMapper.class);

View File

@@ -1,95 +0,0 @@
package sonia.scm.api.v2.resources;
import com.webcohesion.enunciate.metadata.rs.ResponseCode;
import com.webcohesion.enunciate.metadata.rs.StatusCodes;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.HttpStatus;
import sonia.scm.ConcurrentModificationException;
import sonia.scm.repository.NamespaceAndName;
import sonia.scm.repository.RepositoryPermissions;
import sonia.scm.repository.api.MergeCommandBuilder;
import sonia.scm.repository.api.MergeCommandResult;
import sonia.scm.repository.api.MergeDryRunCommandResult;
import sonia.scm.repository.api.RepositoryService;
import sonia.scm.repository.api.RepositoryServiceFactory;
import sonia.scm.web.VndMediaType;
import javax.inject.Inject;
import javax.validation.Valid;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
@Slf4j
public class MergeResource {
private final RepositoryServiceFactory serviceFactory;
private final MergeResultToDtoMapper mapper;
@Inject
public MergeResource(RepositoryServiceFactory serviceFactory, MergeResultToDtoMapper mapper) {
this.serviceFactory = serviceFactory;
this.mapper = mapper;
}
@POST
@Path("")
@Produces(VndMediaType.MERGE_RESULT)
@Consumes(VndMediaType.MERGE_COMMAND)
@StatusCodes({
@ResponseCode(code = 204, condition = "merge has been executed successfully"),
@ResponseCode(code = 401, condition = "not authenticated / invalid credentials"),
@ResponseCode(code = 403, condition = "not authorized, the current user does not have the privilege to write the repository"),
@ResponseCode(code = 409, condition = "The branches could not be merged automatically due to conflicts (conflicting files will be returned)"),
@ResponseCode(code = 500, condition = "internal server error")
})
public Response merge(@PathParam("namespace") String namespace, @PathParam("name") String name, @Valid MergeCommandDto mergeCommand) {
NamespaceAndName namespaceAndName = new NamespaceAndName(namespace, name);
log.info("Merge in Repository {}/{} from {} to {}", namespace, name, mergeCommand.getSourceRevision(), mergeCommand.getTargetRevision());
try (RepositoryService repositoryService = serviceFactory.create(namespaceAndName)) {
RepositoryPermissions.push(repositoryService.getRepository()).check();
MergeCommandResult mergeCommandResult = createMergeCommand(mergeCommand, repositoryService).executeMerge();
if (mergeCommandResult.isSuccess()) {
return Response.noContent().build();
} else {
return Response.status(HttpStatus.SC_CONFLICT).entity(mapper.map(mergeCommandResult)).build();
}
}
}
@POST
@Path("dry-run/")
@StatusCodes({
@ResponseCode(code = 204, condition = "merge can be done automatically"),
@ResponseCode(code = 401, condition = "not authenticated / invalid credentials"),
@ResponseCode(code = 409, condition = "The branches can not be merged automatically due to conflicts"),
@ResponseCode(code = 500, condition = "internal server error")
})
public Response dryRun(@PathParam("namespace") String namespace, @PathParam("name") String name, @Valid MergeCommandDto mergeCommand) {
NamespaceAndName namespaceAndName = new NamespaceAndName(namespace, name);
log.info("Merge in Repository {}/{} from {} to {}", namespace, name, mergeCommand.getSourceRevision(), mergeCommand.getTargetRevision());
try (RepositoryService repositoryService = serviceFactory.create(namespaceAndName)) {
if (RepositoryPermissions.push(repositoryService.getRepository()).isPermitted()) {
MergeDryRunCommandResult mergeCommandResult = createMergeCommand(mergeCommand, repositoryService).dryRun();
if (mergeCommandResult.isMergeable()) {
return Response.noContent().build();
} else {
throw new ConcurrentModificationException("revision", mergeCommand.getTargetRevision());
}
} else {
return Response.noContent().build();
}
}
}
private MergeCommandBuilder createMergeCommand(MergeCommandDto mergeCommand, RepositoryService repositoryService) {
return repositoryService
.getMergeCommand()
.setBranchToMerge(mergeCommand.getSourceRevision())
.setTargetBranch(mergeCommand.getTargetRevision());
}
}
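
For illustration only (not part of this change set): a minimal sketch of driving the same merge directly through the repository service API that MergeResource wraps here. The namespace, repository name and branch names are placeholders, and error handling is omitted.

import sonia.scm.repository.NamespaceAndName;
import sonia.scm.repository.api.MergeCommandResult;
import sonia.scm.repository.api.RepositoryService;
import sonia.scm.repository.api.RepositoryServiceFactory;

class MergeUsageSketch {
  // serviceFactory would normally be injected, just as in MergeResource above
  static boolean mergeFeatureIntoDefault(RepositoryServiceFactory serviceFactory) {
    try (RepositoryService service = serviceFactory.create(new NamespaceAndName("space", "repo"))) {
      MergeCommandResult result = service
        .getMergeCommand()
        .setBranchToMerge("feature/x") // source branch, placeholder name
        .setTargetBranch("master")     // target branch, placeholder name
        .executeMerge();
      return result.isSuccess();
    }
  }
}

A dry run follows the same pattern, using dryRun() and isMergeable() instead of executeMerge() and isSuccess().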

View File

@@ -1,12 +0,0 @@
package sonia.scm.api.v2.resources;
import lombok.Getter;
import lombok.Setter;
import java.util.Collection;
@Getter
@Setter
public class MergeResultDto {
private Collection<String> filesWithConflict;
}

View File

@@ -1,9 +0,0 @@
package sonia.scm.api.v2.resources;
import org.mapstruct.Mapper;
import sonia.scm.repository.api.MergeCommandResult;
@Mapper
public interface MergeResultToDtoMapper {
MergeResultDto map(MergeCommandResult result);
}
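
A minimal sketch of obtaining and using the generated mapper outside of dependency injection, for example in a test. MergeResultToDtoMapperImpl is the implementation MapStruct generates for the interface above; the conflicting file names are illustrative.

package sonia.scm.api.v2.resources; // same package as the mapper, purely for the sketch

import org.mapstruct.factory.Mappers;
import sonia.scm.repository.api.MergeCommandResult;

import static java.util.Arrays.asList;

class MergeResultMappingSketch {
  static MergeResultDto mapConflicts() {
    // Mappers.getMapper resolves the generated MergeResultToDtoMapperImpl;
    // instantiating it directly with new MergeResultToDtoMapperImpl() works as well
    MergeResultToDtoMapper mapper = Mappers.getMapper(MergeResultToDtoMapper.class);
    MergeCommandResult result = MergeCommandResult.failure(asList("file1", "file2"));
    return mapper.map(result); // the DTO's filesWithConflict should then contain both paths
  }
}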

View File

@@ -42,7 +42,6 @@ public class RepositoryResource {
private final Provider<DiffRootResource> diffRootResource;
private final Provider<ModificationsRootResource> modificationsRootResource;
private final Provider<FileHistoryRootResource> fileHistoryRootResource;
private final Provider<MergeResource> mergeResource;
private final Provider<IncomingRootResource> incomingRootResource;
@Inject
@@ -57,8 +56,8 @@ public class RepositoryResource {
Provider<DiffRootResource> diffRootResource,
Provider<ModificationsRootResource> modificationsRootResource,
Provider<FileHistoryRootResource> fileHistoryRootResource,
Provider<IncomingRootResource> incomingRootResource,
Provider<MergeResource> mergeResource) {
Provider<IncomingRootResource> incomingRootResource
) {
this.dtoToRepositoryMapper = dtoToRepositoryMapper;
this.manager = manager;
this.repositoryToDtoMapper = repositoryToDtoMapper;
@@ -72,8 +71,8 @@ public class RepositoryResource {
this.diffRootResource = diffRootResource;
this.modificationsRootResource = modificationsRootResource;
this.fileHistoryRootResource = fileHistoryRootResource;
this.mergeResource = mergeResource;
this.incomingRootResource = incomingRootResource;
}
/**
@@ -208,9 +207,6 @@ public class RepositoryResource {
return incomingRootResource.get();
}
@Path("merge/")
public MergeResource merge() { return mergeResource.get(); }
private Supplier<Repository> loadBy(String namespace, String name) {
NamespaceAndName namespaceAndName = new NamespaceAndName(namespace, name);
return () -> Optional.ofNullable(manager.get(namespaceAndName)).orElseThrow(() -> notFound(entity(namespaceAndName)));

View File

@@ -63,12 +63,6 @@ public abstract class RepositoryToRepositoryDtoMapper extends BaseMapper<Reposit
linksBuilder.single(link("incomingChangesets", resourceLinks.incoming().changesets(repository.getNamespace(), repository.getName())));
linksBuilder.single(link("incomingDiff", resourceLinks.incoming().diff(repository.getNamespace(), repository.getName())));
}
if (repositoryService.isSupported(Command.MERGE)) {
if (RepositoryPermissions.push(repository).isPermitted()) {
linksBuilder.single(link("merge", resourceLinks.merge().merge(repository.getNamespace(), repository.getName())));
}
linksBuilder.single(link("mergeDryRun", resourceLinks.merge().dryRun(repository.getNamespace(), repository.getName())));
}
}
linksBuilder.single(link("changesets", resourceLinks.changeset().all(repository.getNamespace(), repository.getName())));
linksBuilder.single(link("sources", resourceLinks.source().selfWithoutRevision(repository.getNamespace(), repository.getName())));

View File

@@ -792,26 +792,6 @@ class ResourceLinks {
}
}
public MergeLinks merge() {
return new MergeLinks(scmPathInfoStore.get());
}
static class MergeLinks {
private final LinkBuilder mergeLinkBuilder;
MergeLinks(ScmPathInfo pathInfo) {
this.mergeLinkBuilder = new LinkBuilder(pathInfo, RepositoryRootResource.class, RepositoryResource.class, MergeResource.class);
}
String merge(String namespace, String name) {
return mergeLinkBuilder.method("getRepositoryResource").parameters(namespace, name).method("merge").parameters().method("merge").parameters().href();
}
String dryRun(String namespace, String name) {
return mergeLinkBuilder.method("getRepositoryResource").parameters(namespace, name).method("merge").parameters().method("dryRun").parameters().href();
}
}
public PermissionsLinks permissions() {
return new PermissionsLinks(scmPathInfoStore.get());
}

View File

@@ -14,6 +14,7 @@ import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.net.URLDecoder;
import static sonia.scm.ContextEntry.ContextBuilder.entity;
import static sonia.scm.NotFoundException.notFound;
@@ -57,7 +58,7 @@ public class SourceRootResource {
BrowseCommandBuilder browseCommand = repositoryService.getBrowseCommand();
browseCommand.setPath(path);
if (revision != null && !revision.isEmpty()) {
browseCommand.setRevision(revision);
browseCommand.setRevision(URLDecoder.decode(revision, "UTF-8"));
}
browseCommand.setDisableCache(true);
BrowserResult browserResult = browseCommand.getBrowserResult();
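
The decoding added above means a percent-encoded revision in the request path (for example a branch name containing a slash) reaches the browse command in its original form. A minimal, self-contained sketch of that step; the branch name is illustrative.

import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;

class RevisionDecodingSketch {
  static String decodeRevision(String revisionFromPath) throws UnsupportedEncodingException {
    // "feature%2Fx" in the URL becomes the branch name "feature/x"
    return URLDecoder.decode(revisionFromPath, "UTF-8");
  }
}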

View File

@@ -82,8 +82,6 @@ public class XsrfAccessTokenEnricher implements AccessTokenEnricher {
if (configuration.isEnabledXsrfProtection()) {
if (isEnrichable()) {
builder.custom(Xsrf.TOKEN_KEY, createToken());
} else {
LOG.trace("skip xsrf enrichment, because jwt session is started from a non wui client");
}
} else {
LOG.trace("xsrf is disabled, skip xsrf enrichment");
@@ -98,15 +96,16 @@ public class XsrfAccessTokenEnricher implements AccessTokenEnricher {
return true;
} else {
LOG.trace("skip xsrf enrichment, because jwt session is started from a non wui client");
return false;
}
} catch (ProvisionException ex) {
if (ex.getCause() instanceof OutOfScopeException) {
LOG.trace("skip xsrf enrichment, because no request scope is available");
return false;
} else {
throw ex;
}
}
return false;
}
@VisibleForTesting

View File

@@ -183,6 +183,14 @@
"65RdZ5atX1": {
"displayName": "Fehler beim Löschen von Plugin-Dateien",
"description": "Einige Dateien für die Plugin-Deinstallation konnten nicht gelöscht werden. Dieses kann zu Inkonsistenzen führen, so dass der SCM-Manager nicht mehr korrekt starten kann. Bitte prüfen Sie die Logs und bereinigen Sie das Plugin-Verzeichnis des SCM-Managers manuell. Um die Installation eines Plugins abzubrechen, löschen Sie die zugehörige smp Datei aus dem Plugin-Verzeichnis. Um ein Entfernen eines Plugins zu verhindern, entfernen Sie die Datei namens 'uninstall' aus dem entsprechenden Verzeichnis des Plugins."
},
"6eRhF9gU41": {
"displayName": "Nicht unterstützte Merge-Strategie",
"description": "Die gewählte Merge-Strategie wird von dem Repository nicht unterstützt."
},
"78RhWxTIw1": {
"displayName": "Der Default-Branch kann nicht gelöscht werden",
"description": "Der Default-Branch kann nicht gelöscht werden. Bitte wählen Sie zuerst einen neuen Default-Branch."
}
},
"namespaceStrategies": {

View File

@@ -183,6 +183,14 @@
"65RdZ5atX1": {
"displayName": "Error removing plugin files",
"description": "Some files to cancel the plugin (un)installation could not be deleted. This can lead to inconsistencies so that the SCM-Manager cannot restart properly. Please check the logs and clean up the plugin folder manually. To cancel the installation of a plugin, remove the corresponding smp file. To cancel the uninstallation, remove the file named 'uninstall' inside the directory for this plugin."
},
"6eRhF9gU41": {
"displayName": "Merge strategy not supported",
"description": "The selected merge strategy is not supported by the repository."
},
"78RhWxTIw1": {
"displayName": "Default branch cannot be deleted",
"description": "The default branch of a repository cannot be deleted. Please select another default branch first."
}
},
"namespaceStrategies": {

View File

@@ -7,6 +7,7 @@ import org.apache.shiro.util.ThreadState;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mapstruct.factory.Mappers;
import org.mockito.InjectMocks;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
@@ -21,7 +22,6 @@ import static org.mockito.MockitoAnnotations.initMocks;
public class BrowserResultToFileObjectDtoMapperTest {
private final URI baseUri = URI.create("http://example.com/base/");
@SuppressWarnings("unused") // Is injected
private final ResourceLinks resourceLinks = ResourceLinksMock.createMock(baseUri);
@InjectMocks
@@ -39,7 +39,10 @@ public class BrowserResultToFileObjectDtoMapperTest {
@Before
public void init() {
initMocks(this);
mapper = new BrowserResultToFileObjectDtoMapper(fileObjectToFileObjectDtoMapper);
mapper = Mappers.getMapper(BrowserResultToFileObjectDtoMapper.class);
mapper.setChildrenMapper(fileObjectToFileObjectDtoMapper);
mapper.setResourceLinks(resourceLinks);
subjectThreadState.bind();
ThreadContext.bind(subject);

View File

@@ -1,205 +0,0 @@
package sonia.scm.api.v2.resources;
import com.github.sdorra.shiro.SubjectAware;
import com.google.common.io.Resources;
import com.google.inject.util.Providers;
import org.apache.shiro.subject.PrincipalCollection;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.util.ThreadContext;
import org.jboss.resteasy.core.Dispatcher;
import org.jboss.resteasy.mock.MockHttpRequest;
import org.jboss.resteasy.mock.MockHttpResponse;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import sonia.scm.repository.NamespaceAndName;
import sonia.scm.repository.Repository;
import sonia.scm.repository.api.MergeCommandBuilder;
import sonia.scm.repository.api.MergeCommandResult;
import sonia.scm.repository.api.MergeDryRunCommandResult;
import sonia.scm.repository.api.RepositoryService;
import sonia.scm.repository.api.RepositoryServiceFactory;
import sonia.scm.repository.spi.MergeCommand;
import sonia.scm.user.User;
import sonia.scm.web.VndMediaType;
import java.net.URL;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.lenient;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static sonia.scm.repository.RepositoryTestData.createHeartOfGold;
@ExtendWith(MockitoExtension.class)
@SubjectAware(
configuration = "classpath:sonia/scm/shiro-001.ini",
username = "trillian",
password = "secret"
)
public class MergeResourceTest extends RepositoryTestBase {
public static final String MERGE_URL = "/" + RepositoryRootResource.REPOSITORIES_PATH_V2 + "space/repo/merge/";
private Repository repository = createHeartOfGold();
private Dispatcher dispatcher;
@Mock
private RepositoryServiceFactory serviceFactory;
@Mock
private RepositoryService repositoryService;
@Mock
private MergeCommand mergeCommand;
@InjectMocks
private MergeCommandBuilder mergeCommandBuilder;
private MergeResultToDtoMapperImpl mapper = new MergeResultToDtoMapperImpl();
private MergeResource mergeResource;
@BeforeEach
void init() {
mergeResource = new MergeResource(serviceFactory, mapper);
super.mergeResource = Providers.of(mergeResource);
dispatcher = DispatcherMock.createDispatcher(getRepositoryRootResource());
}
@Test
void shouldHandleIllegalInput() throws Exception {
URL url = Resources.getResource("sonia/scm/api/v2/mergeCommand_invalid.json");
byte[] mergeCommandJson = Resources.toByteArray(url);
MockHttpRequest request = MockHttpRequest
.post(MERGE_URL + "dry-run/")
.content(mergeCommandJson)
.contentType(VndMediaType.MERGE_COMMAND);
MockHttpResponse response = new MockHttpResponse();
dispatcher.invoke(request, response);
assertThat(response.getStatus()).isEqualTo(400);
System.out.println(response.getContentAsString());
}
@Nested
class ExecutingMergeCommand {
@Mock
private Subject subject;
@BeforeEach
void initRepository() {
when(serviceFactory.create(new NamespaceAndName("space", "repo"))).thenReturn(repositoryService);
lenient().when(repositoryService.getMergeCommand()).thenReturn(mergeCommandBuilder);
when(repositoryService.getRepository()).thenReturn(repository);
ThreadContext.bind(subject);
}
@AfterEach
void tearDownShiro() {
ThreadContext.unbindSubject();
}
@Test
void shouldHandleSuccessfulMerge() throws Exception {
when(mergeCommand.merge(any())).thenReturn(MergeCommandResult.success());
mockUser();
URL url = Resources.getResource("sonia/scm/api/v2/mergeCommand.json");
byte[] mergeCommandJson = Resources.toByteArray(url);
MockHttpRequest request = MockHttpRequest
.post(MERGE_URL)
.content(mergeCommandJson)
.contentType(VndMediaType.MERGE_COMMAND);
MockHttpResponse response = new MockHttpResponse();
dispatcher.invoke(request, response);
assertThat(response.getStatus()).isEqualTo(204);
}
@Test
void shouldHandleFailedMerge() throws Exception {
when(mergeCommand.merge(any())).thenReturn(MergeCommandResult.failure(asList("file1", "file2")));
mockUser();
URL url = Resources.getResource("sonia/scm/api/v2/mergeCommand.json");
byte[] mergeCommandJson = Resources.toByteArray(url);
MockHttpRequest request = MockHttpRequest
.post(MERGE_URL)
.content(mergeCommandJson)
.contentType(VndMediaType.MERGE_COMMAND);
MockHttpResponse response = new MockHttpResponse();
dispatcher.invoke(request, response);
assertThat(response.getStatus()).isEqualTo(409);
assertThat(response.getContentAsString()).contains("file1", "file2");
}
@Test
void shouldHandleSuccessfulDryRun() throws Exception {
when(subject.isPermitted("repository:push:" + repositoryService.getRepository().getId())).thenReturn(true);
when(mergeCommand.dryRun(any())).thenReturn(new MergeDryRunCommandResult(true));
URL url = Resources.getResource("sonia/scm/api/v2/mergeCommand.json");
byte[] mergeCommandJson = Resources.toByteArray(url);
MockHttpRequest request = MockHttpRequest
.post(MERGE_URL + "dry-run/")
.content(mergeCommandJson)
.contentType(VndMediaType.MERGE_COMMAND);
MockHttpResponse response = new MockHttpResponse();
dispatcher.invoke(request, response);
assertThat(response.getStatus()).isEqualTo(204);
}
@Test
void shouldHandleFailedDryRun() throws Exception {
when(subject.isPermitted("repository:push:" + repositoryService.getRepository().getId())).thenReturn(true);
when(mergeCommand.dryRun(any())).thenReturn(new MergeDryRunCommandResult(false));
URL url = Resources.getResource("sonia/scm/api/v2/mergeCommand.json");
byte[] mergeCommandJson = Resources.toByteArray(url);
MockHttpRequest request = MockHttpRequest
.post(MERGE_URL + "dry-run/")
.content(mergeCommandJson)
.contentType(VndMediaType.MERGE_COMMAND);
MockHttpResponse response = new MockHttpResponse();
dispatcher.invoke(request, response);
assertThat(response.getStatus()).isEqualTo(409);
}
@Test
void shouldSkipDryRunIfSubjectHasNoPushPermission() throws Exception {
when(subject.isPermitted("repository:push:" + repositoryService.getRepository().getId())).thenReturn(false);
URL url = Resources.getResource("sonia/scm/api/v2/mergeCommand.json");
byte[] mergeCommandJson = Resources.toByteArray(url);
MockHttpRequest request = MockHttpRequest
.post(MERGE_URL + "dry-run/")
.content(mergeCommandJson)
.contentType(VndMediaType.MERGE_COMMAND);
MockHttpResponse response = new MockHttpResponse();
dispatcher.invoke(request, response);
assertThat(response.getStatus()).isEqualTo(204);
}
private void mockUser() {
PrincipalCollection collection = mock(PrincipalCollection.class);
when(subject.getPrincipals()).thenReturn(collection);
when(collection.oneByType(User.class)).thenReturn(new User("dummy"));
}
}
}

View File

@@ -22,7 +22,6 @@ public abstract class RepositoryTestBase {
protected Provider<FileHistoryRootResource> fileHistoryRootResource;
protected Provider<RepositoryCollectionResource> repositoryCollectionResource;
protected Provider<IncomingRootResource> incomingRootResource;
protected Provider<MergeResource> mergeResource;
RepositoryRootResource getRepositoryRootResource() {
@@ -39,8 +38,7 @@ public abstract class RepositoryTestBase {
diffRootResource,
modificationsRootResource,
fileHistoryRootResource,
incomingRootResource,
mergeResource)), repositoryCollectionResource);
incomingRootResource)), repositoryCollectionResource);
}

View File

@@ -45,7 +45,6 @@ public class ResourceLinksMock {
when(resourceLinks.uiPlugin()).thenReturn(new ResourceLinks.UIPluginLinks(uriInfo));
when(resourceLinks.authentication()).thenReturn(new ResourceLinks.AuthenticationLinks(uriInfo));
when(resourceLinks.index()).thenReturn(new ResourceLinks.IndexLinks(uriInfo));
when(resourceLinks.merge()).thenReturn(new ResourceLinks.MergeLinks(uriInfo));
when(resourceLinks.permissions()).thenReturn(new ResourceLinks.PermissionsLinks(uriInfo));
when(resourceLinks.repositoryVerbs()).thenReturn(new ResourceLinks.RepositoryVerbLinks(uriInfo));
when(resourceLinks.repositoryRole()).thenReturn(new ResourceLinks.RepositoryRoleLinks(uriInfo));

View File

@@ -7,6 +7,7 @@ import org.jboss.resteasy.mock.MockHttpResponse;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mapstruct.factory.Mappers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
@@ -49,7 +50,9 @@ public class SourceRootResourceTest extends RepositoryTestBase {
@Before
public void prepareEnvironment() throws Exception {
browserResultToFileObjectDtoMapper = new BrowserResultToFileObjectDtoMapper(fileObjectToFileObjectDtoMapper);
browserResultToFileObjectDtoMapper = Mappers.getMapper(BrowserResultToFileObjectDtoMapper.class);
browserResultToFileObjectDtoMapper.setChildrenMapper(fileObjectToFileObjectDtoMapper);
browserResultToFileObjectDtoMapper.setResourceLinks(resourceLinks);
when(serviceFactory.create(new NamespaceAndName("space", "repo"))).thenReturn(service);
when(service.getBrowseCommand()).thenReturn(browseCommandBuilder);