Merge with default

This commit is contained in:
Rene Pfeuffer
2020-01-08 11:22:47 +01:00
66 changed files with 2676 additions and 2494 deletions

8
Jenkinsfile vendored
View File

@@ -55,7 +55,13 @@ node('docker') {
if (isMainBranch()) {
stage('Lifecycle') {
nexusPolicyEvaluation iqApplication: selectedApplication('scm'), iqScanPatterns: [[scanPattern: 'scm-server/target/scm-server-app.zip']], iqStage: 'build'
try {
// failBuildOnNetworkError -> so we can catch the exception and neither fail nor make our build unstable
nexusPolicyEvaluation iqApplication: selectedApplication('scm'), iqScanPatterns: [[scanPattern: 'scm-server/target/scm-server-app.zip']], iqStage: 'build', failBuildOnNetworkError: true
} catch (Exception e) {
echo "ERROR: iQ Server policy eval failed. Not marking build unstable for now."
echo "ERROR: iQ Server Exception: ${e.getMessage()}"
}
}
stage('Archive') {

View File

@@ -46,8 +46,11 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.OptionalLong;
import static java.util.Collections.unmodifiableCollection;
import static java.util.Optional.ofNullable;
/**
* The FileObject represents a file or a directory in a repository.
@@ -90,7 +93,9 @@ public class FileObject implements LastModifiedAware, Serializable
&& Objects.equal(description, other.description)
&& Objects.equal(length, other.length)
&& Objects.equal(subRepository, other.subRepository)
&& Objects.equal(lastModified, other.lastModified);
&& Objects.equal(commitDate, other.commitDate)
&& Objects.equal(partialResult, other.partialResult)
&& Objects.equal(computationAborted, other.computationAborted);
//J+
}
@@ -100,8 +105,16 @@ public class FileObject implements LastModifiedAware, Serializable
@Override
public int hashCode()
{
return Objects.hashCode(name, path, directory, description, length,
subRepository, lastModified);
return Objects.hashCode(
name,
path,
directory,
description,
length,
subRepository,
commitDate,
partialResult,
computationAborted);
}
/**
@@ -118,7 +131,9 @@ public class FileObject implements LastModifiedAware, Serializable
.add("description", description)
.add("length", length)
.add("subRepository", subRepository)
.add("lastModified", lastModified)
.add("commitDate", commitDate)
.add("partialResult", partialResult)
.add("computationAborted", computationAborted)
.toString();
//J+
}
@@ -130,35 +145,44 @@ public class FileObject implements LastModifiedAware, Serializable
* if the repository provider is not able to get the last commit for the path.
*
*
* @return last commit message
* @return Last commit message or an empty optional, when this value has not been computed
* (see {@link #isPartialResult()}).
*/
public String getDescription()
public Optional<String> getDescription()
{
return description;
return ofNullable(description);
}
/**
* Returns the last commit date for this. The method will return null,
* if the repository provider is not able to get the last commit for the path.
* if the repository provider is not able to get the last commit for the path
* or it has not been computed.
*
*
* @return last commit date
*/
@Override
public Long getLastModified()
{
return lastModified;
public Long getLastModified() {
return this.isPartialResult()? null: this.commitDate;
}
/**
* Returns the length of the file.
*
*
* @return length of file
* Returns the last commit date for this file. The method will return {@link OptionalLong#empty()},
* if the repository provider is not able to get the last commit for the path or if this value has not been computed
* (see {@link #isPartialResult()} and {@link #isComputationAborted()}).
*/
public long getLength()
public OptionalLong getCommitDate()
{
return length;
return commitDate == null? OptionalLong.empty(): OptionalLong.of(commitDate);
}
/**
* Returns the length of the file or {@link OptionalLong#empty()}, when this value has not been computed
* (see {@link #isPartialResult()} and {@link #isComputationAborted()}).
*/
public OptionalLong getLength()
{
return length == null? OptionalLong.empty(): OptionalLong.of(length);
}
/**
@@ -200,7 +224,7 @@ public class FileObject implements LastModifiedAware, Serializable
}
/**
* Return sub repository informations or null if the file is not
* Return sub repository information or null if the file is not
* sub repository.
*
* @since 1.10
@@ -222,6 +246,42 @@ public class FileObject implements LastModifiedAware, Serializable
return directory;
}
/**
* Returns the children of this file.
*
* @return The children of this file if it is a directory.
*/
public Collection<FileObject> getChildren() {
return children == null? null: unmodifiableCollection(children);
}
/**
* If this is <code>true</code>, some values for this object have not been computed, yet. These values (like
* {@link #getLength()}, {@link #getDescription()} or {@link #getCommitDate()})
* will return {@link Optional#empty()} (or {@link OptionalLong#empty()} respectively), unless they are computed.
* There may be an asynchronous task running, that will set these values in the future.
*
* @since 2.0.0
*
* @return <code>true</code>, whenever some values of this object have not been computed, yet.
*/
public boolean isPartialResult() {
return partialResult;
}
/**
* If this is <code>true</code>, some values for this object have not been computed and will not be computed. These
* values (like {@link #getLength()}, {@link #getDescription()} or {@link #getCommitDate()})
* will return {@link Optional#empty()} (or {@link OptionalLong#empty()} respectively), unless they are computed.
*
* @since 2.0.0
*
* @return <code>true</code>, whenever some values of this object finally are not computed.
*/
public boolean isComputationAborted() {
return computationAborted;
}
//~--- set methods ----------------------------------------------------------
/**
@@ -247,14 +307,14 @@ public class FileObject implements LastModifiedAware, Serializable
}
/**
* Sets the last modified date of the file.
* Sets the commit date of the file.
*
*
* @param lastModified last modified date
* @param commitDate commit date
*/
public void setLastModified(Long lastModified)
public void setCommitDate(Long commitDate)
{
this.lastModified = lastModified;
this.commitDate = commitDate;
}
/**
@@ -263,7 +323,7 @@ public class FileObject implements LastModifiedAware, Serializable
*
* @param length file length
*/
public void setLength(long length)
public void setLength(Long length)
{
this.length = length;
}
@@ -302,22 +362,47 @@ public class FileObject implements LastModifiedAware, Serializable
this.subRepository = subRepository;
}
public Collection<FileObject> getChildren() {
return unmodifiableCollection(children);
/**
* Set marker, that some values for this object are not computed, yet.
*
* @since 2.0.0
*
* @param partialResult Set this to <code>true</code>, whenever some values of this object are not computed, yet.
*/
public void setPartialResult(boolean partialResult) {
this.partialResult = partialResult;
}
/**
* Set marker, that computation of some values for this object has been aborted.
*
* @since 2.0.0
*
* @param computationAborted Set this to <code>true</code>, whenever some values of this object are not computed and
* will not be computed in the future.
*/
public void setComputationAborted(boolean computationAborted) {
this.computationAborted = computationAborted;
}
/**
* Set the children for this file.
*
* @param children The new children.
*/
public void setChildren(List<FileObject> children) {
this.children = new ArrayList<>(children);
}
/**
* Adds a child to the list of children.
*
* @param child The additional child.
*/
public void addChild(FileObject child) {
this.children.add(child);
}
public boolean hasChildren() {
return !children.isEmpty();
}
//~--- fields ---------------------------------------------------------------
/** file description */
@@ -326,11 +411,11 @@ public class FileObject implements LastModifiedAware, Serializable
/** directory indicator */
private boolean directory;
/** last modified date */
private Long lastModified;
/** commit date */
private Long commitDate;
/** file length */
private long length;
private Long length;
/** filename */
private String name;
@@ -338,9 +423,16 @@ public class FileObject implements LastModifiedAware, Serializable
/** file path */
private String path;
/** Marker for partial result. */
private boolean partialResult = false;
/** Marker for aborted computation. */
private boolean computationAborted = false;
/** sub repository information */
@XmlElement(name = "subrepository")
private SubRepository subRepository;
/** Children of this file (aka directory). */
private Collection<FileObject> children = new ArrayList<>();
}

View File

@@ -300,6 +300,13 @@ public final class BrowseCommandBuilder
return this;
}
private void updateCache(BrowserResult updatedResult) {
if (!disableCache) {
CacheKey key = new CacheKey(repository, request);
cache.put(key, updatedResult);
}
}
//~--- inner classes --------------------------------------------------------
/**
@@ -416,5 +423,5 @@ public final class BrowseCommandBuilder
private final Repository repository;
/** request for the command */
private final BrowseCommandRequest request = new BrowseCommandRequest();
private final BrowseCommandRequest request = new BrowseCommandRequest(this::updateCache);
}

View File

@@ -12,18 +12,25 @@ import static java.util.Collections.unmodifiableCollection;
* case you can use {@link #getFilesWithConflict()} to get a list of files with merge conflicts.
*/
public class MergeCommandResult {
private final Collection<String> filesWithConflict;
private final String newHeadRevision;
private final String targetRevision;
private final String revisionToMerge;
private MergeCommandResult(Collection<String> filesWithConflict) {
private MergeCommandResult(Collection<String> filesWithConflict, String targetRevision, String revisionToMerge, String newHeadRevision) {
this.filesWithConflict = filesWithConflict;
this.targetRevision = targetRevision;
this.revisionToMerge = revisionToMerge;
this.newHeadRevision = newHeadRevision;
}
public static MergeCommandResult success() {
return new MergeCommandResult(emptyList());
public static MergeCommandResult success(String targetRevision, String revisionToMerge, String newHeadRevision) {
return new MergeCommandResult(emptyList(), targetRevision, revisionToMerge, newHeadRevision);
}
public static MergeCommandResult failure(Collection<String> filesWithConflict) {
return new MergeCommandResult(new HashSet<>(filesWithConflict));
public static MergeCommandResult failure(String targetRevision, String revisionToMerge, Collection<String> filesWithConflict) {
return new MergeCommandResult(new HashSet<>(filesWithConflict), targetRevision, revisionToMerge, null);
}
/**
@@ -31,7 +38,7 @@ public class MergeCommandResult {
* merge conflicts. In this case you can use {@link #getFilesWithConflict()} to check what files could not be merged.
*/
public boolean isSuccess() {
return filesWithConflict.isEmpty();
return filesWithConflict.isEmpty() && newHeadRevision != null;
}
/**
@@ -41,4 +48,26 @@ public class MergeCommandResult {
public Collection<String> getFilesWithConflict() {
return unmodifiableCollection(filesWithConflict);
}
/**
* Returns the revision of the new head of the target branch, if the merge was successful ({@link #isSuccess()})
*/
public String getNewHeadRevision() {
return newHeadRevision;
}
/**
* Returns the revision of the target branch prior to the merge.
*/
public String getTargetRevision() {
return targetRevision;
}
/**
* Returns the revision of the branch that was merged into the target (or in case of a conflict of the revision that
* should have been merged).
*/
public String getRevisionToMerge() {
return revisionToMerge;
}
}

View File

@@ -37,6 +37,10 @@ package sonia.scm.repository.spi;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import sonia.scm.repository.BrowserResult;
import java.util.function.Consumer;
/**
*
* @author Sebastian Sdorra
@@ -48,6 +52,14 @@ public final class BrowseCommandRequest extends FileBaseCommandRequest
/** Field description */
private static final long serialVersionUID = 7956624623516803183L;
public BrowseCommandRequest() {
this(null);
}
public BrowseCommandRequest(Consumer<BrowserResult> updater) {
this.updater = updater;
}
//~--- methods --------------------------------------------------------------
/**
@@ -220,6 +232,12 @@ public final class BrowseCommandRequest extends FileBaseCommandRequest
return recursive;
}
public void updateCache(BrowserResult update) {
if (updater != null) {
updater.accept(update);
}
}
//~--- fields ---------------------------------------------------------------
/** disable last commit */
@@ -230,4 +248,8 @@ public final class BrowseCommandRequest extends FileBaseCommandRequest
/** browse file objects recursive */
private boolean recursive = false;
// WARNING / TODO: This field creates a reverse channel from the implementation to the API. This will break
// whenever the API runs in a different process than the SPI (for example to run explicit hosts for git repositories).
private final transient Consumer<BrowserResult> updater;
}

View File

@@ -7,6 +7,12 @@ import sonia.scm.repository.api.MergeStrategy;
import java.util.Set;
public interface MergeCommand {
/**
* Executes the merge.
* @param request The parameters specifying the merge.
* @return Result holding either the new revision or a list of conflicting files.
* @throws sonia.scm.NoChangesMadeException If the merge neither had a conflict nor made any change.
*/
MergeCommandResult merge(MergeCommandRequest request);
MergeDryRunCommandResult dryRun(MergeCommandRequest request);

View File

@@ -0,0 +1,83 @@
package sonia.scm.repository.spi;
import java.util.function.Consumer;
/**
 * Executor that runs short-lived tasks synchronously at first and switches to asynchronous
 * processing once a configured timespan has passed. After a further maximum amount of
 * accumulated asynchronous runtime, remaining tasks are skipped entirely. Note that this
 * strategy only works well for short-living tasks.
 * <p>
 * Instances are obtained from a {@link SyncAsyncExecutorProvider}.
 */
public interface SyncAsyncExecutor {

  /**
   * Runs the given task, either synchronously or asynchronously. Nothing at all happens if the
   * task is skipped because of timeouts.
   *
   * @param task The {@link Runnable} to be executed.
   * @return {@link ExecutionType#SYNCHRONOUS} if the {@link Runnable} was executed immediately,
   *         {@link ExecutionType#ASYNCHRONOUS} if it was queued for asynchronous execution in the future.
   */
  default ExecutionType execute(Runnable task) {
    Consumer<ExecutionType> wrapped = ignored -> task.run();
    return execute(wrapped, () -> {});
  }

  /**
   * Runs the given <code>task</code>, either synchronously or asynchronously. If the task is
   * skipped because of timeouts, the given <code>abortionFallback</code> is invoked instead.
   *
   * @param task The {@link Runnable} to be executed.
   * @param abortionFallback Runs only when this and all remaining tasks are aborted. It should
   *                         consume no more than a negligible amount of time.
   * @return {@link ExecutionType#SYNCHRONOUS} if the {@link Runnable} was executed immediately,
   *         {@link ExecutionType#ASYNCHRONOUS} if it was queued for asynchronous execution in the future.
   */
  default ExecutionType execute(Runnable task, Runnable abortionFallback) {
    Consumer<ExecutionType> wrapped = ignored -> task.run();
    return execute(wrapped, abortionFallback);
  }

  /**
   * Runs the given <code>task</code>, either synchronously or asynchronously. Nothing at all
   * happens if the task is skipped because of timeouts.
   *
   * @param task The {@link Consumer} to be executed. It receives {@link ExecutionType#SYNCHRONOUS}
   *             when invoked immediately, or {@link ExecutionType#ASYNCHRONOUS} when it had been
   *             queued and now runs asynchronously.
   * @return {@link ExecutionType#SYNCHRONOUS} if the {@link Consumer} was executed immediately,
   *         {@link ExecutionType#ASYNCHRONOUS} if it was queued for asynchronous execution in the future.
   */
  default ExecutionType execute(Consumer<ExecutionType> task) {
    Runnable noopFallback = () -> {};
    return execute(task, noopFallback);
  }

  /**
   * Runs the given <code>task</code>, either synchronously or asynchronously. If the task is
   * skipped because of timeouts, the given <code>abortionFallback</code> is invoked instead.
   *
   * @param task The {@link Consumer} to be executed. It receives {@link ExecutionType#SYNCHRONOUS}
   *             when invoked immediately, or {@link ExecutionType#ASYNCHRONOUS} when it had been
   *             queued and now runs asynchronously.
   * @param abortionFallback Runs only when this and all remaining tasks are aborted. It should
   *                         consume no more than a negligible amount of time.
   * @return {@link ExecutionType#SYNCHRONOUS} if the {@link Consumer} was executed immediately,
   *         {@link ExecutionType#ASYNCHRONOUS} if it was queued for asynchronous execution in the future.
   */
  ExecutionType execute(Consumer<ExecutionType> task, Runnable abortionFallback);

  /**
   * Returns <code>true</code> if every submitted task was executed synchronously, and
   * <code>false</code> as soon as at least one task was enqueued for asynchronous execution
   * (even if none of the enqueued tasks has been run, yet).
   */
  boolean hasExecutedAllSynchronously();

  /** Distinguishes immediate (synchronous) from deferred (asynchronous) execution. */
  enum ExecutionType {
    SYNCHRONOUS, ASYNCHRONOUS
  }
}

View File

@@ -0,0 +1,56 @@
package sonia.scm.repository.spi;
/**
 * Provides {@link SyncAsyncExecutor} instances for executing a number of normally short-lived
 * tasks that should be run asynchronously (or even be skipped) whenever they take too long in
 * summary.
 * <p>
 * The goal of this is a "best effort" approach: Submitted tasks run immediately on submission
 * until a given timespan (<code>switchToAsyncInSeconds</code>) has passed. From that moment on,
 * tasks are put into a queue and processed asynchronously. If even then they take too long and
 * their accumulated asynchronous runtime exceeds another limit
 * (<code>maxAsyncAbortSeconds</code>), the tasks are skipped.
 * <p>
 * Note that once a task has been started, either synchronously or asynchronously, it is neither
 * terminated nor switched from foreground to background execution. So this only works well for
 * short-living tasks; a long running task can still block this for longer than the configured
 * amount of seconds.
 */
public interface SyncAsyncExecutorProvider {

  int DEFAULT_SWITCH_TO_ASYNC_IN_SECONDS = 2;

  /**
   * Creates a {@link SyncAsyncExecutor} that runs tasks synchronously for
   * {@link #DEFAULT_SWITCH_TO_ASYNC_IN_SECONDS} seconds. The limit of asynchronous runtime is
   * implementation dependent.
   *
   * @return The executor.
   */
  default SyncAsyncExecutor createExecutorWithDefaultTimeout() {
    int switchToAsyncInSeconds = DEFAULT_SWITCH_TO_ASYNC_IN_SECONDS;
    return createExecutorWithSecondsToTimeout(switchToAsyncInSeconds);
  }

  /**
   * Creates a {@link SyncAsyncExecutor} that runs tasks synchronously for
   * <code>switchToAsyncInSeconds</code> seconds. The limit of asynchronous runtime is
   * implementation dependent.
   *
   * @param switchToAsyncInSeconds The amount of seconds submitted tasks will be run synchronously.
   *                               After this time, further tasks will be run asynchronously. To run
   *                               all tasks asynchronously no matter what, set this to
   *                               <code>0</code>.
   * @return The executor.
   */
  SyncAsyncExecutor createExecutorWithSecondsToTimeout(int switchToAsyncInSeconds);

  /**
   * Creates a {@link SyncAsyncExecutor} that runs tasks synchronously for
   * <code>switchToAsyncInSeconds</code> seconds and aborts tasks after they ran
   * <code>maxAsyncAbortSeconds</code> asynchronously.
   *
   * @param switchToAsyncInSeconds The amount of seconds submitted tasks will be run synchronously.
   *                               After this time, further tasks will be run asynchronously. To run
   *                               all tasks asynchronously no matter what, set this to
   *                               <code>0</code>.
   * @param maxAsyncAbortSeconds   The amount of seconds, tasks that were started asynchronously may
   *                               run in summary before remaining tasks will not be executed at all
   *                               anymore. To abort all tasks that are submitted after
   *                               <code>switchToAsyncInSeconds</code> immediately, set this to
   *                               <code>0</code>.
   * @return The executor.
   */
  SyncAsyncExecutor createExecutorWithSecondsToTimeout(int switchToAsyncInSeconds, int maxAsyncAbortSeconds);
}

View File

@@ -8,6 +8,14 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.util.UUID;
/**
* CopyOnWrite creates a copy of the target file, before it is modified. This should prevent empty or incomplete files
* on errors such as full disk.
*
* javasecurity:S2083: SonarQube thinks that the path (targetFile) is generated from an http header (HttpUtil), but
* this is not true. It looks like a false-positive, so we suppress the warning for now.
*/
@SuppressWarnings("javasecurity:S2083")
public final class CopyOnWrite {
private static final Logger LOG = LoggerFactory.getLogger(CopyOnWrite.class);

View File

@@ -81,19 +81,19 @@ public class GitGcTask implements Runnable {
{
if (repository.isValid() && repository.isHealthy())
{
logger.info("start git gc for repository {}", repository.getName());
logger.info("start git gc for repository {}", repository.getNamespaceAndName());
Stopwatch sw = Stopwatch.createStarted();
gc(repository);
logger.debug("gc of repository {} has finished after {}", repository.getName(), sw.stop());
logger.debug("gc of repository {} has finished after {}", repository.getNamespaceAndName(), sw.stop());
}
else
{
logger.debug("skip non valid/healthy repository {}", repository.getName());
logger.debug("skip non valid/healthy repository {}", repository.getNamespaceAndName());
}
}
else
{
logger.trace("skip non git repository {}", repository.getName());
logger.trace("skip non git repository {}", repository.getNamespaceAndName());
}
}

View File

@@ -745,6 +745,10 @@ public final class GitUtil
public static Optional<LfsPointer> getLfsPointer(org.eclipse.jgit.lib.Repository repo, String path, RevCommit commit, TreeWalk treeWalk) throws IOException {
Attributes attributes = LfsFactory.getAttributesForPath(repo, path, commit);
return getLfsPointer(repo, treeWalk, attributes);
}
public static Optional<LfsPointer> getLfsPointer(org.eclipse.jgit.lib.Repository repo, TreeWalk treeWalk, Attributes attributes) throws IOException {
Attribute filter = attributes.get("filter");
if (filter != null && "lfs".equals(filter.getValue())) {
ObjectId blobId = treeWalk.getObjectId(0);

View File

@@ -186,6 +186,10 @@ class AbstractGitCommand
return context;
}
sonia.scm.repository.Repository getRepository() {
return repository;
}
void checkOutBranch(String branchName) throws IOException {
try {
clone.checkout().setName(branchName).call();

View File

@@ -2,6 +2,7 @@ package sonia.scm.repository.spi;
import com.google.common.base.Strings;
import org.eclipse.jgit.diff.DiffEntry;
import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
@@ -10,6 +11,8 @@ import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.treewalk.EmptyTreeIterator;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.filter.PathFilter;
import sonia.scm.ContextEntry;
import sonia.scm.NotFoundException;
import sonia.scm.repository.GitUtil;
import sonia.scm.util.Util;
@@ -38,7 +41,15 @@ final class Differ implements AutoCloseable {
RevWalk walk = new RevWalk(repository);
ObjectId revision = repository.resolve(request.getRevision());
RevCommit commit = walk.parseCommit(revision);
if (revision == null) {
throw NotFoundException.notFound(ContextEntry.ContextBuilder.entity("revision not found", request.getRevision()));
}
RevCommit commit;
try {
commit = walk.parseCommit(revision);
} catch (MissingObjectException ex) {
throw NotFoundException.notFound(ContextEntry.ContextBuilder.entity("revision not found", request.getRevision()));
}
walk.markStart(commit);
commit = walk.next();
@@ -46,34 +57,25 @@ final class Differ implements AutoCloseable {
treeWalk.reset();
treeWalk.setRecursive(true);
if (Util.isNotEmpty(request.getPath()))
{
if (Util.isNotEmpty(request.getPath())) {
treeWalk.setFilter(PathFilter.create(request.getPath()));
}
if (!Strings.isNullOrEmpty(request.getAncestorChangeset()))
{
if (!Strings.isNullOrEmpty(request.getAncestorChangeset())) {
ObjectId otherRevision = repository.resolve(request.getAncestorChangeset());
ObjectId ancestorId = GitUtil.computeCommonAncestor(repository, revision, otherRevision);
RevTree tree = walk.parseCommit(ancestorId).getTree();
treeWalk.addTree(tree);
}
else if (commit.getParentCount() > 0)
{
} else if (commit.getParentCount() > 0) {
RevTree tree = commit.getParent(0).getTree();
if (tree != null)
{
if (tree != null) {
treeWalk.addTree(tree);
}
else
{
} else {
treeWalk.addTree(new EmptyTreeIterator());
}
}
else
{
} else {
treeWalk.addTree(new EmptyTreeIterator());
}

View File

@@ -35,9 +35,11 @@ package sonia.scm.repository.spi;
//~--- non-JDK imports --------------------------------------------------------
import com.google.common.base.Stopwatch;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.eclipse.jgit.attributes.Attributes;
import org.eclipse.jgit.lfs.LfsPointer;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
@@ -49,6 +51,7 @@ import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.filter.AndTreeFilter;
import org.eclipse.jgit.treewalk.filter.PathFilter;
import org.eclipse.jgit.treewalk.filter.TreeFilter;
import org.eclipse.jgit.util.LfsFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sonia.scm.NotFoundException;
@@ -56,6 +59,7 @@ import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.GitSubModuleParser;
import sonia.scm.repository.GitUtil;
import sonia.scm.repository.InternalRepositoryException;
import sonia.scm.repository.Repository;
import sonia.scm.repository.SubRepository;
import sonia.scm.store.Blob;
@@ -69,10 +73,13 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import static java.util.Optional.empty;
import static java.util.Optional.of;
import static sonia.scm.ContextEntry.ContextBuilder.entity;
import static sonia.scm.NotFoundException.notFound;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.ASYNCHRONOUS;
//~--- JDK imports ------------------------------------------------------------
@@ -90,71 +97,56 @@ public class GitBrowseCommand extends AbstractGitCommand
/**
* the logger for GitBrowseCommand
*/
private static final Logger logger =
LoggerFactory.getLogger(GitBrowseCommand.class);
private static final Logger logger = LoggerFactory.getLogger(GitBrowseCommand.class);
/** sub repository cache */
private final Map<ObjectId, Map<String, SubRepository>> subrepositoryCache = Maps.newHashMap();
private final Object asyncMonitor = new Object();
private final LfsBlobStoreFactory lfsBlobStoreFactory;
//~--- constructors ---------------------------------------------------------
private final SyncAsyncExecutor executor;
/**
* Constructs ...
* @param context
* @param repository
* @param lfsBlobStoreFactory
*/
public GitBrowseCommand(GitContext context, Repository repository, LfsBlobStoreFactory lfsBlobStoreFactory)
{
private BrowserResult browserResult;
public GitBrowseCommand(GitContext context, Repository repository, LfsBlobStoreFactory lfsBlobStoreFactory, SyncAsyncExecutor executor) {
super(context, repository);
this.lfsBlobStoreFactory = lfsBlobStoreFactory;
this.executor = executor;
}
//~--- get methods ----------------------------------------------------------
@Override
@SuppressWarnings("unchecked")
public BrowserResult getBrowserResult(BrowseCommandRequest request)
throws IOException {
logger.debug("try to create browse result for {}", request);
BrowserResult result;
org.eclipse.jgit.lib.Repository repo = open();
ObjectId revId;
ObjectId revId = computeRevIdToBrowse(request, repo);
if (Util.isEmpty(request.getRevision()))
{
revId = getDefaultBranch(repo);
if (revId != null) {
browserResult = new BrowserResult(revId.getName(), request.getRevision(), getEntry(repo, request, revId));
return browserResult;
} else {
logger.warn("could not find head of repository {}, empty?", repository.getNamespaceAndName());
return new BrowserResult(Constants.HEAD, request.getRevision(), createEmptyRoot());
}
else
{
revId = GitUtil.getRevisionId(repo, request.getRevision());
}
if (revId != null)
{
result = new BrowserResult(revId.getName(), request.getRevision(), getEntry(repo, request, revId));
}
else
{
if (Util.isNotEmpty(request.getRevision()))
{
private ObjectId computeRevIdToBrowse(BrowseCommandRequest request, org.eclipse.jgit.lib.Repository repo) throws IOException {
if (Util.isEmpty(request.getRevision())) {
return getDefaultBranch(repo);
} else {
ObjectId revId = GitUtil.getRevisionId(repo, request.getRevision());
if (revId == null) {
logger.error("could not find revision {}", request.getRevision());
throw notFound(entity("Revision", request.getRevision()).in(this.repository));
}
else if (logger.isWarnEnabled())
{
logger.warn("could not find head of repository, empty?");
return revId;
}
}
result = new BrowserResult(Constants.HEAD, request.getRevision(), createEmtpyRoot());
}
return result;
}
//~--- methods --------------------------------------------------------------
private FileObject createEmtpyRoot() {
private FileObject createEmptyRoot() {
FileObject fileObject = new FileObject();
fileObject.setName("");
fileObject.setPath("");
@@ -162,18 +154,6 @@ public class GitBrowseCommand extends AbstractGitCommand
return fileObject;
}
/**
* Method description
*
* @param repo
* @param request
* @param revId
* @param treeWalk
*
* @return
*
* @throws IOException
*/
private FileObject createFileObject(org.eclipse.jgit.lib.Repository repo,
BrowseCommandRequest request, ObjectId revId, TreeWalk treeWalk)
throws IOException {
@@ -207,128 +187,63 @@ public class GitBrowseCommand extends AbstractGitCommand
// don't show message and date for directories to improve performance
if (!file.isDirectory() &&!request.isDisableLastCommit())
{
logger.trace("fetch last commit for {} at {}", path, revId.getName());
RevCommit commit = getLatestCommit(repo, revId, path);
Optional<LfsPointer> lfsPointer = commit == null? empty(): GitUtil.getLfsPointer(repo, path, commit, treeWalk);
file.setPartialResult(true);
RevCommit commit;
try (RevWalk walk = new RevWalk(repo)) {
commit = walk.parseCommit(revId);
}
Optional<LfsPointer> lfsPointer = getLfsPointer(repo, path, commit, treeWalk);
if (lfsPointer.isPresent()) {
BlobStore lfsBlobStore = lfsBlobStoreFactory.getLfsBlobStore(repository);
String oid = lfsPointer.get().getOid().getName();
Blob blob = lfsBlobStore.get(oid);
if (blob == null) {
logger.error("lfs blob for lob id {} not found in lfs store of repository {}", oid, repository.getNamespaceAndName());
file.setLength(-1);
} else {
file.setLength(blob.getSize());
}
setFileLengthFromLfsBlob(lfsPointer.get(), file);
} else {
file.setLength(loader.getSize());
}
if (commit != null)
{
file.setLastModified(GitUtil.getCommitTime(commit));
file.setDescription(commit.getShortMessage());
}
else if (logger.isWarnEnabled())
{
logger.warn("could not find latest commit for {} on {}", path,
revId);
}
executor.execute(
new CompleteFileInformation(path, revId, repo, file, request),
new AbortFileInformation(request)
);
}
}
return file;
}
//~--- get methods ----------------------------------------------------------
/**
* Method description
*
*
*
* @param repo
* @param revId
* @param path
*
* @return
*/
private RevCommit getLatestCommit(org.eclipse.jgit.lib.Repository repo,
ObjectId revId, String path)
{
RevCommit result = null;
RevWalk walk = null;
try
{
walk = new RevWalk(repo);
walk.setTreeFilter(AndTreeFilter.create(PathFilter.create(path),
TreeFilter.ANY_DIFF));
RevCommit commit = walk.parseCommit(revId);
walk.markStart(commit);
result = Util.getFirst(walk);
}
catch (IOException ex)
{
logger.error("could not parse commit for file", ex);
}
finally
{
GitUtil.release(walk);
}
return result;
private void updateCache(BrowseCommandRequest request) {
request.updateCache(browserResult);
logger.info("updated browser result for repository {}", repository.getNamespaceAndName());
}
private FileObject getEntry(org.eclipse.jgit.lib.Repository repo, BrowseCommandRequest request, ObjectId revId) throws IOException {
RevWalk revWalk = null;
TreeWalk treeWalk = null;
FileObject result;
try {
try (RevWalk revWalk = new RevWalk(repo); TreeWalk treeWalk = new TreeWalk(repo)) {
logger.debug("load repository browser for revision {}", revId.name());
treeWalk = new TreeWalk(repo);
if (!isRootRequest(request)) {
treeWalk.setFilter(PathFilter.create(request.getPath()));
}
revWalk = new RevWalk(repo);
RevTree tree = revWalk.parseTree(revId);
if (tree != null)
{
if (tree != null) {
treeWalk.addTree(tree);
}
else
{
} else {
throw new IllegalStateException("could not find tree for " + revId.name());
}
if (isRootRequest(request)) {
result = createEmtpyRoot();
FileObject result = createEmptyRoot();
findChildren(result, repo, request, revId, treeWalk);
return result;
} else {
result = findFirstMatch(repo, request, revId, treeWalk);
FileObject result = findFirstMatch(repo, request, revId, treeWalk);
if ( result.isDirectory() ) {
treeWalk.enterSubtree();
findChildren(result, repo, request, revId, treeWalk);
}
}
}
finally
{
GitUtil.release(revWalk);
GitUtil.release(treeWalk);
}
return result;
}
}
}
private boolean isRootRequest(BrowseCommandRequest request) {
return Strings.isNullOrEmpty(request.getPath()) || "/".equals(request.getPath());
@@ -384,56 +299,144 @@ public class GitBrowseCommand extends AbstractGitCommand
throw notFound(entity("File", request.getPath()).in("Revision", revId.getName()).in(this.repository));
}
@SuppressWarnings("unchecked")
private Map<String,
SubRepository> getSubRepositories(org.eclipse.jgit.lib.Repository repo,
ObjectId revision)
private Map<String, SubRepository> getSubRepositories(org.eclipse.jgit.lib.Repository repo, ObjectId revision)
throws IOException {
if (logger.isDebugEnabled())
{
logger.debug("read submodules of {} at {}", repository.getName(),
revision);
}
Map<String, SubRepository> subRepositories;
try ( ByteArrayOutputStream baos = new ByteArrayOutputStream() )
{
logger.debug("read submodules of {} at {}", repository.getName(), revision);
try ( ByteArrayOutputStream baos = new ByteArrayOutputStream() ) {
new GitCatCommand(context, repository, lfsBlobStoreFactory).getContent(repo, revision,
PATH_MODULES, baos);
subRepositories = GitSubModuleParser.parse(baos.toString());
return GitSubModuleParser.parse(baos.toString());
} catch (NotFoundException ex) {
logger.trace("could not find .gitmodules: {}", ex.getMessage());
return Collections.emptyMap();
}
catch (NotFoundException ex)
{
logger.trace("could not find .gitmodules", ex);
subRepositories = Collections.EMPTY_MAP;
}
return subRepositories;
}
private SubRepository getSubRepository(org.eclipse.jgit.lib.Repository repo,
ObjectId revId, String path)
private SubRepository getSubRepository(org.eclipse.jgit.lib.Repository repo, ObjectId revId, String path)
throws IOException {
Map<String, SubRepository> subRepositories = subrepositoryCache.get(revId);
if (subRepositories == null)
{
if (subRepositories == null) {
subRepositories = getSubRepositories(repo, revId);
subrepositoryCache.put(revId, subRepositories);
}
SubRepository sub = null;
if (subRepositories != null)
{
sub = subRepositories.get(path);
if (subRepositories != null) {
return subRepositories.get(path);
}
return null;
}
return sub;
private Optional<LfsPointer> getLfsPointer(org.eclipse.jgit.lib.Repository repo, String path, RevCommit commit, TreeWalk treeWalk) {
try {
Attributes attributes = LfsFactory.getAttributesForPath(repo, path, commit);
return GitUtil.getLfsPointer(repo, treeWalk, attributes);
} catch (IOException e) {
throw new InternalRepositoryException(repository, "could not read lfs pointer", e);
}
}
//~--- fields ---------------------------------------------------------------
private void setFileLengthFromLfsBlob(LfsPointer lfsPointer, FileObject file) {
BlobStore lfsBlobStore = lfsBlobStoreFactory.getLfsBlobStore(repository);
String oid = lfsPointer.getOid().getName();
Blob blob = lfsBlobStore.get(oid);
if (blob == null) {
logger.error("lfs blob for lob id {} not found in lfs store of repository {}", oid, repository.getNamespaceAndName());
file.setLength(null);
} else {
file.setLength(blob.getSize());
}
}
/** sub repository cache */
private final Map<ObjectId, Map<String, SubRepository>> subrepositoryCache = Maps.newHashMap();
private class CompleteFileInformation implements Consumer<SyncAsyncExecutor.ExecutionType> {
private final String path;
private final ObjectId revId;
private final org.eclipse.jgit.lib.Repository repo;
private final FileObject file;
private final BrowseCommandRequest request;
public CompleteFileInformation(String path, ObjectId revId, org.eclipse.jgit.lib.Repository repo, FileObject file, BrowseCommandRequest request) {
this.path = path;
this.revId = revId;
this.repo = repo;
this.file = file;
this.request = request;
}
@Override
public void accept(SyncAsyncExecutor.ExecutionType executionType) {
logger.trace("fetch last commit for {} at {}", path, revId.getName());
Stopwatch sw = Stopwatch.createStarted();
Optional<RevCommit> commit = getLatestCommit(repo, revId, path);
synchronized (asyncMonitor) {
file.setPartialResult(false);
if (commit.isPresent()) {
applyValuesFromCommit(executionType, commit.get());
} else {
logger.warn("could not find latest commit for {} on {}", path, revId);
}
}
logger.trace("finished loading of last commit {} of {} in {}", revId.getName(), path, sw.stop());
}
private Optional<RevCommit> getLatestCommit(org.eclipse.jgit.lib.Repository repo,
ObjectId revId, String path) {
try (RevWalk walk = new RevWalk(repo)) {
walk.setTreeFilter(AndTreeFilter.create(TreeFilter.ANY_DIFF, PathFilter.create(path)));
RevCommit commit = walk.parseCommit(revId);
walk.markStart(commit);
return of(Util.getFirst(walk));
} catch (IOException ex) {
logger.error("could not parse commit for file", ex);
return empty();
}
}
private void applyValuesFromCommit(SyncAsyncExecutor.ExecutionType executionType, RevCommit commit) {
file.setCommitDate(GitUtil.getCommitTime(commit));
file.setDescription(commit.getShortMessage());
if (executionType == ASYNCHRONOUS && browserResult != null) {
updateCache(request);
}
}
}
private class AbortFileInformation implements Runnable {
private final BrowseCommandRequest request;
public AbortFileInformation(BrowseCommandRequest request) {
this.request = request;
}
@Override
public void run() {
synchronized (asyncMonitor) {
if (markPartialAsAborted(browserResult.getFile())) {
updateCache(request);
}
}
}
private boolean markPartialAsAborted(FileObject file) {
boolean changed = false;
if (file.isPartialResult()) {
file.setPartialResult(false);
file.setComputationAborted(true);
changed = true;
}
for (FileObject child : file.getChildren()) {
changed |= markPartialAsAborted(child);
}
return changed;
}
}
}

View File

@@ -7,6 +7,7 @@ import sonia.scm.repository.Repository;
import sonia.scm.repository.api.MergeCommandResult;
import java.io.IOException;
import java.util.Collections;
class GitFastForwardIfPossible extends GitMergeStrategy {
@@ -22,7 +23,7 @@ class GitFastForwardIfPossible extends GitMergeStrategy {
MergeResult fastForwardResult = mergeWithFastForwardOnlyMode();
if (fastForwardResult.getMergeStatus().isSuccessful()) {
push();
return MergeCommandResult.success();
return createSuccessResult(fastForwardResult.getNewHead().name());
} else {
return fallbackMerge.run();
}

View File

@@ -3,10 +3,16 @@ package sonia.scm.repository.spi;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.MergeCommand;
import org.eclipse.jgit.api.MergeResult;
import org.eclipse.jgit.revwalk.RevCommit;
import sonia.scm.NoChangesMadeException;
import sonia.scm.repository.Repository;
import sonia.scm.repository.api.MergeCommandResult;
import java.io.IOException;
import java.util.Collections;
import java.util.Optional;
import static sonia.scm.repository.spi.GitRevisionExtractor.extractRevisionFromRevCommit;
class GitMergeCommit extends GitMergeStrategy {
@@ -21,11 +27,12 @@ class GitMergeCommit extends GitMergeStrategy {
MergeResult result = doMergeInClone(mergeCommand);
if (result.getMergeStatus().isSuccessful()) {
doCommit();
RevCommit revCommit = doCommit().orElseThrow(() -> new NoChangesMadeException(getRepository()));
push();
return MergeCommandResult.success();
return createSuccessResult(extractRevisionFromRevCommit(revCommit));
} else {
return analyseFailure(result);
}
}
}

View File

@@ -6,6 +6,7 @@ import org.eclipse.jgit.api.MergeCommand;
import org.eclipse.jgit.api.MergeResult;
import org.eclipse.jgit.api.errors.GitAPIException;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.revwalk.RevCommit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sonia.scm.repository.InternalRepositoryException;
@@ -14,6 +15,7 @@ import sonia.scm.repository.api.MergeCommandResult;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Optional;
abstract class GitMergeStrategy extends AbstractGitCommand.GitCloneWorker<MergeCommandResult> {
@@ -24,37 +26,57 @@ abstract class GitMergeStrategy extends AbstractGitCommand.GitCloneWorker<MergeC
"",
"Automatic merge by SCM-Manager.");
private final String target;
private final String toMerge;
private final String targetBranch;
private final ObjectId targetRevision;
private final String branchToMerge;
private final ObjectId revisionToMerge;
private final Person author;
private final String messageTemplate;
GitMergeStrategy(Git clone, MergeCommandRequest request, GitContext context, sonia.scm.repository.Repository repository) {
super(clone, context, repository);
this.target = request.getTargetBranch();
this.toMerge = request.getBranchToMerge();
this.targetBranch = request.getTargetBranch();
this.branchToMerge = request.getBranchToMerge();
this.author = request.getAuthor();
this.messageTemplate = request.getMessageTemplate();
try {
this.targetRevision = resolveRevision(request.getTargetBranch());
this.revisionToMerge = resolveRevision(request.getBranchToMerge());
} catch (IOException e) {
throw new InternalRepositoryException(repository, "Could not resolve revisions of target branch or branch to merge", e);
}
}
MergeResult doMergeInClone(MergeCommand mergeCommand) throws IOException {
MergeResult result;
try {
ObjectId sourceRevision = resolveRevision(toMerge);
ObjectId sourceRevision = resolveRevision(branchToMerge);
mergeCommand
.setCommit(false) // we want to set the author manually
.include(toMerge, sourceRevision);
.include(branchToMerge, sourceRevision);
result = mergeCommand.call();
} catch (GitAPIException e) {
throw new InternalRepositoryException(getContext().getRepository(), "could not merge branch " + toMerge + " into " + target, e);
throw new InternalRepositoryException(getContext().getRepository(), "could not merge branch " + branchToMerge + " into " + targetBranch, e);
}
return result;
}
void doCommit() {
logger.debug("merged branch {} into {}", toMerge, target);
doCommit(MessageFormat.format(determineMessageTemplate(), toMerge, target), author);
Optional<RevCommit> doCommit() {
logger.debug("merged branch {} into {}", branchToMerge, targetBranch);
return doCommit(MessageFormat.format(determineMessageTemplate(), branchToMerge, targetBranch), author);
}
MergeCommandResult createSuccessResult(String newRevision) {
return MergeCommandResult.success(targetRevision.name(), revisionToMerge.name(), newRevision);
}
ObjectId getTargetRevision() {
return targetRevision;
}
ObjectId getRevisionToMerge() {
return revisionToMerge;
}
private String determineMessageTemplate() {
@@ -66,7 +88,7 @@ abstract class GitMergeStrategy extends AbstractGitCommand.GitCloneWorker<MergeC
}
MergeCommandResult analyseFailure(MergeResult result) {
logger.info("could not merge branch {} into {} due to conflict in paths {}", toMerge, target, result.getConflicts().keySet());
return MergeCommandResult.failure(result.getConflicts().keySet());
logger.info("could not merge branch {} into {} due to conflict in paths {}", branchToMerge, targetBranch, result.getConflicts().keySet());
return MergeCommandResult.failure(targetRevision.name(), revisionToMerge.name(), result.getConflicts().keySet());
}
}

View File

@@ -1,13 +1,17 @@
package sonia.scm.repository.spi;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.api.MergeCommand;
import org.eclipse.jgit.api.MergeResult;
import org.eclipse.jgit.revwalk.RevCommit;
import sonia.scm.NoChangesMadeException;
import sonia.scm.repository.Repository;
import sonia.scm.repository.api.MergeCommandResult;
import org.eclipse.jgit.api.MergeCommand;
import java.io.IOException;
import static sonia.scm.repository.spi.GitRevisionExtractor.extractRevisionFromRevCommit;
class GitMergeWithSquash extends GitMergeStrategy {
GitMergeWithSquash(Git clone, MergeCommandRequest request, GitContext context, Repository repository) {
@@ -21,9 +25,9 @@ class GitMergeWithSquash extends GitMergeStrategy {
MergeResult result = doMergeInClone(mergeCommand);
if (result.getMergeStatus().isSuccessful()) {
doCommit();
RevCommit revCommit = doCommit().orElseThrow(() -> new NoChangesMadeException(getRepository()));
push();
return MergeCommandResult.success();
return MergeCommandResult.success(getTargetRevision().name(), revCommit.name(), extractRevisionFromRevCommit(revCommit));
} else {
return analyseFailure(result);
}

View File

@@ -80,12 +80,13 @@ public class GitRepositoryServiceProvider extends RepositoryServiceProvider
//~--- constructors ---------------------------------------------------------
public GitRepositoryServiceProvider(GitRepositoryHandler handler, Repository repository, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory, HookContextFactory hookContextFactory, ScmEventBus eventBus) {
public GitRepositoryServiceProvider(GitRepositoryHandler handler, Repository repository, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory, HookContextFactory hookContextFactory, ScmEventBus eventBus, SyncAsyncExecutorProvider executorProvider) {
this.handler = handler;
this.repository = repository;
this.lfsBlobStoreFactory = lfsBlobStoreFactory;
this.hookContextFactory = hookContextFactory;
this.eventBus = eventBus;
this.executorProvider = executorProvider;
this.context = new GitContext(handler.getDirectory(repository.getId()), repository, storeProvider);
}
@@ -150,7 +151,7 @@ public class GitRepositoryServiceProvider extends RepositoryServiceProvider
@Override
public BrowseCommand getBrowseCommand()
{
return new GitBrowseCommand(context, repository, lfsBlobStoreFactory);
return new GitBrowseCommand(context, repository, lfsBlobStoreFactory, executorProvider.createExecutorWithDefaultTimeout());
}
/**
@@ -301,4 +302,6 @@ public class GitRepositoryServiceProvider extends RepositoryServiceProvider
private final HookContextFactory hookContextFactory;
private final ScmEventBus eventBus;
private final SyncAsyncExecutorProvider executorProvider;
}

View File

@@ -55,14 +55,16 @@ public class GitRepositoryServiceResolver implements RepositoryServiceResolver {
private final LfsBlobStoreFactory lfsBlobStoreFactory;
private final HookContextFactory hookContextFactory;
private final ScmEventBus eventBus;
private final SyncAsyncExecutorProvider executorProvider;
@Inject
public GitRepositoryServiceResolver(GitRepositoryHandler handler, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory, HookContextFactory hookContextFactory, ScmEventBus eventBus) {
public GitRepositoryServiceResolver(GitRepositoryHandler handler, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory, HookContextFactory hookContextFactory, ScmEventBus eventBus, SyncAsyncExecutorProvider executorProvider) {
this.handler = handler;
this.storeProvider = storeProvider;
this.lfsBlobStoreFactory = lfsBlobStoreFactory;
this.hookContextFactory = hookContextFactory;
this.eventBus = eventBus;
this.executorProvider = executorProvider;
}
@Override
@@ -70,7 +72,7 @@ public class GitRepositoryServiceResolver implements RepositoryServiceResolver {
GitRepositoryServiceProvider provider = null;
if (GitRepositoryHandler.TYPE_NAME.equalsIgnoreCase(repository.getType())) {
provider = new GitRepositoryServiceProvider(handler, repository, storeProvider, lfsBlobStoreFactory, hookContextFactory, eventBus);
provider = new GitRepositoryServiceProvider(handler, repository, storeProvider, lfsBlobStoreFactory, hookContextFactory, eventBus, executorProvider);
}
return provider;

View File

@@ -0,0 +1,12 @@
package sonia.scm.repository.spi;
import org.eclipse.jgit.revwalk.RevCommit;
import java.util.Optional;
public class GitRevisionExtractor {
static String extractRevisionFromRevCommit(RevCommit revCommit) {
return revCommit.toString().split(" ")[1];
}
}

View File

@@ -35,15 +35,21 @@ import org.junit.Test;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.GitRepositoryConfig;
import sonia.scm.repository.spi.SyncAsyncExecutors.AsyncExecutorStepper;
import java.io.IOException;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static sonia.scm.repository.spi.SyncAsyncExecutors.stepperAsynchronousExecutor;
import static sonia.scm.repository.spi.SyncAsyncExecutors.synchronousExecutor;
/**
* Unit tests for {@link GitBrowseCommand}.
@@ -102,15 +108,55 @@ public class GitBrowseCommandTest extends AbstractGitCommandTestBase {
assertFalse(a.isDirectory());
assertEquals("a.txt", a.getName());
assertEquals("a.txt", a.getPath());
assertEquals("added new line for blame", a.getDescription());
assertTrue(a.getLength() > 0);
checkDate(a.getLastModified());
assertEquals("added new line for blame", a.getDescription().get());
assertTrue(a.getLength().getAsLong() > 0);
checkDate(a.getCommitDate().getAsLong());
assertTrue(c.isDirectory());
assertEquals("c", c.getName());
assertEquals("c", c.getPath());
}
@Test
public void testAsynchronousBrowse() throws IOException {
try (AsyncExecutorStepper executor = stepperAsynchronousExecutor()) {
GitBrowseCommand command = new GitBrowseCommand(createContext(), repository, null, executor);
List<BrowserResult> updatedResults = new LinkedList<>();
BrowseCommandRequest request = new BrowseCommandRequest(updatedResults::add);
FileObject root = command.getBrowserResult(request).getFile();
assertNotNull(root);
Collection<FileObject> foList = root.getChildren();
FileObject a = findFile(foList, "a.txt");
FileObject b = findFile(foList, "b.txt");
assertTrue(a.isPartialResult());
assertFalse("expected empty name before commit could have been read", a.getDescription().isPresent());
assertFalse("expected empty date before commit could have been read", a.getCommitDate().isPresent());
assertTrue(b.isPartialResult());
assertFalse("expected empty name before commit could have been read", b.getDescription().isPresent());
assertFalse("expected empty date before commit could have been read", b.getCommitDate().isPresent());
executor.next();
assertEquals(1, updatedResults.size());
assertFalse(a.isPartialResult());
assertNotNull("expected correct name after commit could have been read", a.getDescription());
assertTrue("expected correct date after commit could have been read", a.getCommitDate().isPresent());
assertTrue(b.isPartialResult());
assertFalse("expected empty name before commit could have been read", b.getDescription().isPresent());
assertFalse("expected empty date before commit could have been read", b.getCommitDate().isPresent());
executor.next();
assertEquals(2, updatedResults.size());
assertFalse(b.isPartialResult());
assertNotNull("expected correct name after commit could have been read", b.getDescription());
assertTrue("expected correct date after commit could have been read", b.getCommitDate().isPresent());
}
}
@Test
public void testBrowseSubDirectory() throws IOException {
BrowseCommandRequest request = new BrowseCommandRequest();
@@ -129,20 +175,20 @@ public class GitBrowseCommandTest extends AbstractGitCommandTestBase {
assertFalse(d.isDirectory());
assertEquals("d.txt", d.getName());
assertEquals("c/d.txt", d.getPath());
assertEquals("added file d and e in folder c", d.getDescription());
assertTrue(d.getLength() > 0);
checkDate(d.getLastModified());
assertEquals("added file d and e in folder c", d.getDescription().get());
assertTrue(d.getLength().getAsLong() > 0);
checkDate(d.getCommitDate().getAsLong());
assertFalse(e.isDirectory());
assertEquals("e.txt", e.getName());
assertEquals("c/e.txt", e.getPath());
assertEquals("added file d and e in folder c", e.getDescription());
assertTrue(e.getLength() > 0);
checkDate(e.getLastModified());
assertEquals("added file d and e in folder c", e.getDescription().get());
assertTrue(e.getLength().getAsLong() > 0);
checkDate(e.getCommitDate().getAsLong());
}
@Test
public void testRecusive() throws IOException {
public void testRecursive() throws IOException {
BrowseCommandRequest request = new BrowseCommandRequest();
request.setRecursive(true);
@@ -171,6 +217,6 @@ public class GitBrowseCommandTest extends AbstractGitCommandTestBase {
}
private GitBrowseCommand createCommand() {
return new GitBrowseCommand(createContext(), repository, null);
return new GitBrowseCommand(createContext(), repository, null, synchronousExecutor());
}
}

View File

@@ -37,6 +37,20 @@ public class GitDiffCommandTest extends AbstractGitCommandTestBase {
"+++ b/f.txt\n" +
"@@ -0,0 +1 @@\n" +
"+f\n";
public static final String DIFF_FILE_PARTIAL_MERGE = "diff --git a/a.txt b/a.txt\n" +
"index 7898192..8cd63ec 100644\n" +
"--- a/a.txt\n" +
"+++ b/a.txt\n" +
"@@ -1 +1,2 @@\n" +
" a\n" +
"+change\n" +
"diff --git a/b.txt b/b.txt\n" +
"index 6178079..09ccdf0 100644\n" +
"--- a/b.txt\n" +
"+++ b/b.txt\n" +
"@@ -1 +1,2 @@\n" +
" b\n" +
"+change\n";
@Test
public void diffForOneRevisionShouldCreateDiff() throws IOException {
@@ -91,4 +105,15 @@ public class GitDiffCommandTest extends AbstractGitCommandTestBase {
gitDiffCommand.getDiffResult(diffCommandRequest).accept(output);
assertEquals(DIFF_FILE_A_MULTIPLE_REVISIONS, output.toString());
}
@Test
public void diffBetweenTwoBranchesWithMergedIntegrationBranchShouldCreateDiffOfAllIncomingChanges() throws IOException {
GitDiffCommand gitDiffCommand = new GitDiffCommand(createContext(), repository);
DiffCommandRequest diffCommandRequest = new DiffCommandRequest();
diffCommandRequest.setRevision("partially_merged");
diffCommandRequest.setAncestorChangeset("master");
ByteArrayOutputStream output = new ByteArrayOutputStream();
gitDiffCommand.getDiffResult(diffCommandRequest).accept(output);
assertEquals(DIFF_FILE_PARTIAL_MERGE, output.toString());
}
}

View File

@@ -12,6 +12,7 @@ import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.junit.Rule;
import org.junit.Test;
import sonia.scm.NoChangesMadeException;
import sonia.scm.NotFoundException;
import sonia.scm.repository.Person;
import sonia.scm.repository.api.MergeCommandResult;
@@ -70,6 +71,8 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
MergeCommandResult mergeCommandResult = command.merge(request);
assertThat(mergeCommandResult.isSuccess()).isTrue();
assertThat(mergeCommandResult.getRevisionToMerge()).isEqualTo("91b99de908fcd04772798a31c308a64aea1a5523");
assertThat(mergeCommandResult.getTargetRevision()).isEqualTo("fcd0ef1831e4002ac43ea539f4094334c79ea9ec");
Repository repository = createContext().open();
Iterable<RevCommit> commits = new Git(repository).log().add(repository.resolve("master")).setMaxCount(1).call();
@@ -106,7 +109,7 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
assertThat(mergeCommit.getParent(1).name()).isEqualTo("d81ad6c63d7e2162308d69637b339dedd1d9201c");
}
@Test
@Test(expected = NoChangesMadeException.class)
public void shouldNotMergeTwice() throws IOException, GitAPIException {
GitMergeCommand command = createCommand();
MergeCommandRequest request = new MergeCommandRequest();
@@ -120,15 +123,9 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
assertThat(mergeCommandResult.isSuccess()).isTrue();
Repository repository = createContext().open();
ObjectId firstMergeCommit = new Git(repository).log().add(repository.resolve("master")).setMaxCount(1).call().iterator().next().getId();
new Git(repository).log().add(repository.resolve("master")).setMaxCount(1).call().iterator().next().getId();
MergeCommandResult secondMergeCommandResult = command.merge(request);
assertThat(secondMergeCommandResult.isSuccess()).isTrue();
ObjectId secondMergeCommit = new Git(repository).log().add(repository.resolve("master")).setMaxCount(1).call().iterator().next().getId();
assertThat(secondMergeCommit).isEqualTo(firstMergeCommit);
command.merge(request);
}
@Test
@@ -234,6 +231,8 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
Repository repository = createContext().open();
assertThat(mergeCommandResult.isSuccess()).isTrue();
assertThat(mergeCommandResult.getRevisionToMerge()).isEqualTo(mergeCommandResult.getNewHeadRevision());
assertThat(mergeCommandResult.getTargetRevision()).isEqualTo("fcd0ef1831e4002ac43ea539f4094334c79ea9ec");
Iterable<RevCommit> commits = new Git(repository).log().add(repository.resolve("master")).setMaxCount(1).call();
RevCommit mergeCommit = commits.iterator().next();
@@ -284,6 +283,9 @@ public class GitMergeCommandTest extends AbstractGitCommandTestBase {
request.setAuthor(new Person("Dirk Gently", "dirk@holistic.det"));
MergeCommandResult mergeCommandResult = command.merge(request);
assertThat(mergeCommandResult.getNewHeadRevision()).isEqualTo("35597e9e98fe53167266583848bfef985c2adb27");
assertThat(mergeCommandResult.getRevisionToMerge()).isEqualTo("35597e9e98fe53167266583848bfef985c2adb27");
assertThat(mergeCommandResult.getTargetRevision()).isEqualTo("fcd0ef1831e4002ac43ea539f4094334c79ea9ec");
assertThat(mergeCommandResult.isSuccess()).isTrue();

View File

@@ -17,16 +17,18 @@ public class GitMergeCommand_Conflict_Test extends AbstractGitCommandTestBase {
static final String DIFF_HEADER = "diff --git a/Main.java b/Main.java";
static final String DIFF_FILE_CONFLICT = "--- a/Main.java\n" +
"+++ b/Main.java\n" +
"@@ -3,7 +3,11 @@\n" +
"@@ -1,6 +1,13 @@\n" +
"+import java.util.Arrays;\n" +
"+\n" +
" class Main {\n" +
" public static void main(String[] args) {\n" +
" System.out.println(\"Expect nothing more to happen.\");\n" +
"+<<<<<<< HEAD\n" +
" System.out.println(\"Parameters:\");\n" +
" Arrays.stream(args).map(arg -> \"- \" + arg).forEach(System.out::println);\n" +
" System.out.println(\"This is for demonstration, only.\");\n" +
"+=======\n" +
"+ System.out.println(\"This is for demonstration, only.\");\n" +
"+>>>>>>> integration\n" +
"+ System.out.println(\"Parameters:\");\n" +
"+ Arrays.stream(args).map(arg -> \"- \" + arg).forEach(System.out::println);\n" +
"+>>>>>>> feature/print_args\n" +
" }\n" +
" }";

View File

@@ -0,0 +1,21 @@
package sonia.scm.repository.spi;
import org.eclipse.jgit.revwalk.RevCommit;
import org.junit.jupiter.api.Test;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class GitRevisionExtractorTest {
@Test
void shouldReturnRevisionFromRevCommit() {
RevCommit revCommit = mock(RevCommit.class);
when(revCommit.toString()).thenReturn("commit 123456abcdef -t 4561");
String revision = GitRevisionExtractor.extractRevisionFromRevCommit(revCommit);
assertThat(revision).isEqualTo("123456abcdef");
}
}

View File

@@ -231,13 +231,13 @@ public class HgFileviewCommand extends AbstractCommand
file.setName(getNameFromPath(path));
file.setPath(path);
file.setDirectory(false);
file.setLength(stream.decimalIntUpTo(' '));
file.setLength((long) stream.decimalIntUpTo(' '));
DateTime timestamp = stream.dateTimeUpTo(' ');
String description = stream.textUpTo('\0');
if (!disableLastCommit) {
file.setLastModified(timestamp.getDate().getTime());
file.setCommitDate(timestamp.getDate().getTime());
file.setDescription(description);
}

View File

@@ -61,7 +61,7 @@ public class HgBrowseCommandTest extends AbstractHgCommandTestBase {
FileObject file = new HgBrowseCommand(cmdContext, repository).getBrowserResult(request).getFile();
assertEquals("a.txt", file.getName());
assertFalse(file.isDirectory());
assertTrue(file.getChildren().isEmpty());
assertTrue(file.getChildren() == null || file.getChildren().isEmpty());
}
@Test
@@ -73,9 +73,9 @@ public class HgBrowseCommandTest extends AbstractHgCommandTestBase {
assertFalse(a.isDirectory());
assertEquals("a.txt", a.getName());
assertEquals("a.txt", a.getPath());
assertEquals("added new line for blame", a.getDescription());
assertTrue(a.getLength() > 0);
checkDate(a.getLastModified());
assertEquals("added new line for blame", a.getDescription().get());
assertTrue(a.getLength().getAsLong() > 0);
checkDate(a.getCommitDate().getAsLong());
assertTrue(c.isDirectory());
assertEquals("c", c.getName());
assertEquals("c", c.getPath());
@@ -132,16 +132,16 @@ public class HgBrowseCommandTest extends AbstractHgCommandTestBase {
assertFalse(d.isDirectory());
assertEquals("d.txt", d.getName());
assertEquals("c/d.txt", d.getPath());
assertEquals("added file d and e in folder c", d.getDescription());
assertTrue(d.getLength() > 0);
checkDate(d.getLastModified());
assertEquals("added file d and e in folder c", d.getDescription().get());
assertTrue(d.getLength().getAsLong() > 0);
checkDate(d.getCommitDate().getAsLong());
assertNotNull(e);
assertFalse(e.isDirectory());
assertEquals("e.txt", e.getName());
assertEquals("c/e.txt", e.getPath());
assertEquals("added file d and e in folder c", e.getDescription());
assertTrue(e.getLength() > 0);
checkDate(e.getLastModified());
assertEquals("added file d and e in folder c", e.getDescription().get());
assertTrue(e.getLength().getAsLong() > 0);
checkDate(e.getCommitDate().getAsLong());
}
@Test
@@ -154,8 +154,8 @@ public class HgBrowseCommandTest extends AbstractHgCommandTestBase {
FileObject a = getFileObject(foList, "a.txt");
assertNull(a.getDescription());
assertNull(a.getLastModified());
assertFalse(a.getDescription().isPresent());
assertFalse(a.getCommitDate().isPresent());
}
@Test

View File

@@ -173,7 +173,7 @@ public class SvnBrowseCommand extends AbstractSvnCommand
{
if (entry.getDate() != null)
{
fileObject.setLastModified(entry.getDate().getTime());
fileObject.setCommitDate(entry.getDate().getTime());
}
fileObject.setDescription(entry.getCommitMessage());

View File

@@ -60,7 +60,7 @@ public class SvnBrowseCommandTest extends AbstractSvnCommandTestBase
FileObject file = createCommand().getBrowserResult(request).getFile();
assertEquals("a.txt", file.getName());
assertFalse(file.isDirectory());
assertTrue(file.getChildren().isEmpty());
assertTrue(file.getChildren() == null || file.getChildren().isEmpty());
}
@Test
@@ -73,9 +73,9 @@ public class SvnBrowseCommandTest extends AbstractSvnCommandTestBase
assertFalse(a.isDirectory());
assertEquals("a.txt", a.getName());
assertEquals("a.txt", a.getPath());
assertEquals("added line for blame test", a.getDescription());
assertTrue(a.getLength() > 0);
checkDate(a.getLastModified());
assertEquals("added line for blame test", a.getDescription().get());
assertTrue(a.getLength().getAsLong() > 0);
checkDate(a.getCommitDate().getAsLong());
assertTrue(c.isDirectory());
assertEquals("c", c.getName());
assertEquals("c", c.getPath());
@@ -122,16 +122,16 @@ public class SvnBrowseCommandTest extends AbstractSvnCommandTestBase
assertFalse(d.isDirectory());
assertEquals("d.txt", d.getName());
assertEquals("c/d.txt", d.getPath());
assertEquals("added d and e in folder c", d.getDescription());
assertTrue(d.getLength() > 0);
checkDate(d.getLastModified());
assertEquals("added d and e in folder c", d.getDescription().get());
assertTrue(d.getLength().getAsLong() > 0);
checkDate(d.getCommitDate().getAsLong());
assertNotNull(e);
assertFalse(e.isDirectory());
assertEquals("e.txt", e.getName());
assertEquals("c/e.txt", e.getPath());
assertEquals("added d and e in folder c", e.getDescription());
assertTrue(e.getLength() > 0);
checkDate(e.getLastModified());
assertEquals("added d and e in folder c", e.getDescription().get());
assertTrue(e.getLength().getAsLong() > 0);
checkDate(e.getCommitDate().getAsLong());
}
@Test
@@ -144,8 +144,8 @@ public class SvnBrowseCommandTest extends AbstractSvnCommandTestBase
FileObject a = getFileObject(foList, "a.txt");
assertNull(a.getDescription());
assertNull(a.getLastModified());
assertFalse(a.getDescription().isPresent());
assertFalse(a.getCommitDate().isPresent());
}
@Test

View File

@@ -0,0 +1,107 @@
package sonia.scm.repository.spi;
import java.io.Closeable;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.function.Consumer;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.ASYNCHRONOUS;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.SYNCHRONOUS;
/**
 * Factory methods creating {@link SyncAsyncExecutor} implementations for tests.
 * Offers a strictly synchronous executor, a fire-and-forget asynchronous one,
 * and a steppable asynchronous one whose task execution can be driven (and
 * aborted) manually from the test thread.
 */
public final class SyncAsyncExecutors {
/**
 * Returns an executor that runs every submitted task immediately on the
 * calling thread and reports the execution as {@code SYNCHRONOUS}.
 */
public static SyncAsyncExecutor synchronousExecutor() {
return new SyncAsyncExecutor() {
@Override
public ExecutionType execute(Consumer<ExecutionType> runnable, Runnable abortionFallback) {
// run inline on the caller's thread; the fallback is never needed here
runnable.accept(SYNCHRONOUS);
return SYNCHRONOUS;
}
@Override
public boolean hasExecutedAllSynchronously() {
return true;
}
};
}
/**
 * Returns an executor that hands every task to a single background thread
 * and reports the execution as {@code ASYNCHRONOUS}.
 */
public static SyncAsyncExecutor asynchronousExecutor() {
Executor executor = Executors.newSingleThreadExecutor();
return new SyncAsyncExecutor() {
@Override
public ExecutionType execute(Consumer<ExecutionType> runnable, Runnable abortionFallback) {
// fire and forget on the background thread; abortionFallback is ignored
executor.execute(() -> runnable.accept(ASYNCHRONOUS));
return ASYNCHRONOUS;
}
@Override
public boolean hasExecutedAllSynchronously() {
// NOTE(review): returns true even though every task ran asynchronously;
// looks copy-pasted from synchronousExecutor() — confirm this is intended.
return true;
}
};
}
/**
 * Returns a steppable asynchronous executor: each submitted task blocks on a
 * semaphore until the test calls {@link AsyncExecutorStepper#next()}, which
 * lets exactly one task run and waits for it to finish. Calling
 * {@link AsyncExecutorStepper#timeout()} makes all pending and future tasks
 * run their abortion fallback instead.
 */
public static AsyncExecutorStepper stepperAsynchronousExecutor() {
return new AsyncExecutorStepper() {
Executor executor = Executors.newSingleThreadExecutor();
// released by next()/close(); gates the start of each queued task
Semaphore enterSemaphore = new Semaphore(0);
// released by a finished task; next() waits on it before returning
Semaphore exitSemaphore = new Semaphore(0);
boolean timedOut = false;
@Override
public void close() {
// unblock everything that is still waiting (Integer.MAX_VALUE/2 avoids
// overflowing the permit count on repeated release calls)
enterSemaphore.release(Integer.MAX_VALUE/2);
exitSemaphore.release(Integer.MAX_VALUE/2);
}
@Override
public ExecutionType execute(Consumer<ExecutionType> runnable, Runnable abortionFallback) {
executor.execute(() -> {
try {
// wait until the test grants this task a step via next()
enterSemaphore.acquire();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
if (timedOut) {
// timeout() was called before this task got its turn
abortionFallback.run();
} else {
runnable.accept(ASYNCHRONOUS);
// signal next() that the step has completed
exitSemaphore.release();
}
});
return ASYNCHRONOUS;
}
@Override
public void next() {
// allow one queued task to start, then block until it has finished
enterSemaphore.release();
try {
exitSemaphore.acquire();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
@Override
public void timeout() {
// from now on tasks run their abortion fallback; close() releases them
timedOut = true;
close();
}
@Override
public boolean hasExecutedAllSynchronously() {
// NOTE(review): true despite asynchronous execution — confirm intent
return true;
}
};
}
/**
 * A {@link SyncAsyncExecutor} whose task execution is driven step by step
 * from the test; closing it releases all blocked tasks.
 */
public interface AsyncExecutorStepper extends SyncAsyncExecutor, Closeable {
void next();
void timeout();
}
}

View File

@@ -8,8 +8,8 @@
"private": false,
"prettier": "@scm-manager/prettier-config",
"dependencies": {
"@typescript-eslint/eslint-plugin": "^2.4.0",
"@typescript-eslint/parser": "^2.4.0",
"@typescript-eslint/eslint-plugin": "^2.12.0",
"@typescript-eslint/parser": "^2.12.0",
"babel-eslint": "^10.0.3",
"eslint": "^6.5.1",
"eslint-config-prettier": "^6.4.0",

View File

@@ -37,8 +37,6 @@
"enzyme-context": "^1.1.2",
"enzyme-context-react-router-4": "^2.0.0",
"fetch-mock": "^7.5.1",
"flow-bin": "^0.109.0",
"flow-typed": "^2.5.1",
"raf": "^3.4.0",
"react-test-renderer": "^16.10.2",
"storybook-addon-i18next": "^1.2.1",

View File

@@ -336,7 +336,7 @@ exports[`Storyshots DateFromNow Default 1`] = `
exports[`Storyshots Forms|Checkbox Default 1`] = `
<div
className="sc-caSCKo brLbbv"
className="sc-gisBJw jHakbY"
>
<div
className="field"
@@ -381,7 +381,7 @@ exports[`Storyshots Forms|Checkbox Default 1`] = `
exports[`Storyshots Forms|Checkbox Disabled 1`] = `
<div
className="sc-caSCKo brLbbv"
className="sc-gisBJw jHakbY"
>
<div
className="field"
@@ -409,10 +409,10 @@ exports[`Storyshots Forms|Checkbox Disabled 1`] = `
exports[`Storyshots Forms|Radio Default 1`] = `
<div
className="sc-gisBJw jHakbY"
className="sc-kjoXOD hVPZau"
>
<label
className="radio"
className="sc-cMljjf kOqpHe radio"
>
<input
checked={false}
@@ -423,7 +423,7 @@ exports[`Storyshots Forms|Radio Default 1`] = `
Not checked
</label>
<label
className="radio"
className="sc-cMljjf kOqpHe radio"
>
<input
checked={true}
@@ -438,10 +438,10 @@ exports[`Storyshots Forms|Radio Default 1`] = `
exports[`Storyshots Forms|Radio Disabled 1`] = `
<div
className="sc-gisBJw jHakbY"
className="sc-kjoXOD hVPZau"
>
<label
className="radio"
className="sc-cMljjf kOqpHe radio"
disabled={true}
>
<input
@@ -458,7 +458,7 @@ exports[`Storyshots Forms|Radio Disabled 1`] = `
exports[`Storyshots Forms|Textarea OnCancel 1`] = `
<div
className="sc-kjoXOD hVPZau"
className="sc-cHGsZl klfJMr"
>
<div
className="field"
@@ -481,7 +481,7 @@ exports[`Storyshots Forms|Textarea OnCancel 1`] = `
exports[`Storyshots Forms|Textarea OnChange 1`] = `
<div
className="sc-kjoXOD hVPZau"
className="sc-cHGsZl klfJMr"
>
<div
className="field"
@@ -508,7 +508,7 @@ exports[`Storyshots Forms|Textarea OnChange 1`] = `
exports[`Storyshots Forms|Textarea OnSubmit 1`] = `
<div
className="sc-kjoXOD hVPZau"
className="sc-cHGsZl klfJMr"
>
<div
className="field"
@@ -2391,7 +2391,7 @@ PORT_NUMBER =
exports[`Storyshots Table|Table Default 1`] = `
<table
className="sc-jhAzac hmXDXQ table content is-hoverable"
className="sc-fBuWsC eeihxG table content is-hoverable"
>
<thead>
<tr>
@@ -2409,7 +2409,7 @@ exports[`Storyshots Table|Table Default 1`] = `
>
Last Name
<i
className="fas fa-sort-amount-down has-text-grey-light sc-hzDkRC escBde"
className="fas fa-sort-amount-down has-text-grey-light sc-jhAzac gDbcZp"
/>
</th>
<th
@@ -2482,7 +2482,7 @@ exports[`Storyshots Table|Table Empty 1`] = `
exports[`Storyshots Table|Table TextColumn 1`] = `
<table
className="sc-jhAzac hmXDXQ table content is-hoverable"
className="sc-fBuWsC eeihxG table content is-hoverable"
>
<thead>
<tr>
@@ -2494,7 +2494,7 @@ exports[`Storyshots Table|Table TextColumn 1`] = `
>
Id
<i
className="fas fa-sort-alpha-down has-text-grey-light sc-hzDkRC escBde"
className="fas fa-sort-alpha-down has-text-grey-light sc-jhAzac gDbcZp"
/>
</th>
<th
@@ -2505,7 +2505,7 @@ exports[`Storyshots Table|Table TextColumn 1`] = `
>
Name
<i
className="fas fa-sort-alpha-down has-text-grey-light sc-hzDkRC escBde"
className="fas fa-sort-alpha-down has-text-grey-light sc-jhAzac gDbcZp"
/>
</th>
<th
@@ -2516,7 +2516,7 @@ exports[`Storyshots Table|Table TextColumn 1`] = `
>
Description
<i
className="fas fa-sort-alpha-down has-text-grey-light sc-hzDkRC escBde"
className="fas fa-sort-alpha-down has-text-grey-light sc-jhAzac gDbcZp"
/>
</th>
</tr>

View File

@@ -1,5 +1,11 @@
import React, { ChangeEvent } from "react";
import { Help } from "../index";
import styled from "styled-components";
const StyledRadio = styled.label`
margin-right: 0.5em;
`;
type Props = {
label?: string;
@@ -33,7 +39,7 @@ class Radio extends React.Component<Props> {
because jsx label does not the custom disabled attribute
but bulma does.
// @ts-ignore */}
<label className="radio" disabled={this.props.disabled}>
<StyledRadio className="radio" disabled={this.props.disabled}>
<input
type="radio"
name={this.props.name}
@@ -44,7 +50,7 @@ class Radio extends React.Component<Props> {
/>{" "}
{this.props.label}
{this.renderHelp()}
</label>
</StyledRadio>
</>
);
}

View File

@@ -7,8 +7,11 @@ import parser from "gitdiff-parser";
import Loading from "../Loading";
import Diff from "./Diff";
import { DiffObjectProps, File } from "./DiffTypes";
import { NotFoundError } from "../errors";
import { Notification } from "../index";
import {withTranslation, WithTranslation} from "react-i18next";
type Props = DiffObjectProps & {
type Props = WithTranslation & DiffObjectProps & {
url: string;
defaultCollapse?: boolean;
};
@@ -43,7 +46,7 @@ class LoadingDiff extends React.Component<Props, State> {
fetchDiff = () => {
const { url } = this.props;
this.setState({loading: true});
this.setState({ loading: true });
apiClient
.get(url)
.then(response => response.text())
@@ -66,6 +69,9 @@ class LoadingDiff extends React.Component<Props, State> {
render() {
const { diff, loading, error } = this.state;
if (error) {
if (error instanceof NotFoundError) {
return <Notification type="info">{this.props.t("changesets.noChangesets")}</Notification>;
}
return <ErrorNotification error={error} />;
} else if (loading) {
return <Loading />;
@@ -77,4 +83,4 @@ class LoadingDiff extends React.Component<Props, State> {
}
}
export default LoadingDiff;
export default withTranslation("repos")(LoadingDiff);

View File

@@ -28,7 +28,7 @@ class ChangesetDiff extends React.Component<Props> {
return <Notification type="danger">{t("changeset.diffNotSupported")}</Notification>;
} else {
const url = this.createUrl(changeset);
return <LoadingDiff url={url} defaultCollapse={defaultCollapse} />;
return <LoadingDiff url={url} defaultCollapse={defaultCollapse} sideBySide={false}/>;
}
}
}

View File

@@ -0,0 +1,42 @@
#!/usr/bin/env node
/* eslint-disable no-console */

// CLI dispatcher for @scm-manager/ui-plugins: forwards a known sub-command
// (currently only "postinstall") to its implementation in ../src/commands.
// Arguments before the command name are passed to node itself, arguments
// after it are passed to the command.
const { spawnSync } = require("child_process");

const commands = ["postinstall"];

const args = process.argv.slice(2);
// index of the first argument that is a known command
const commandIndex = args.findIndex(arg => {
  return commands.includes(arg);
});
const command = commandIndex === -1 ? args[0] : args[commandIndex];
const nodeArgs = commandIndex > 0 ? args.slice(0, commandIndex) : [];

if (commands.includes(command)) {
  // run the command in a child node process, inheriting stdio so its
  // output appears directly in the build log
  const result = spawnSync(
    "node",
    nodeArgs.concat(require.resolve("../src/commands/" + command)).concat(args.slice(commandIndex + 1)),
    { stdio: "inherit" }
  );
  if (result.signal) {
    // the child was terminated by a signal instead of exiting on its own
    if (result.signal === "SIGKILL") {
      console.log(
        "The build failed because the process exited too early. " +
          "This probably means the system ran out of memory or someone called " +
          "`kill -9` on the process."
      );
    } else if (result.signal === "SIGTERM") {
      console.log(
        "The build failed because the process exited too early. " +
          "Someone might have called `kill` or `killall`, or the system could " +
          "be shutting down."
      );
    }
    process.exit(1);
  }
  // propagate the command's exit status to the caller
  process.exit(result.status);
} else {
  console.log(`Unknown script "${command}".`);
  console.log("Perhaps you need to update ui-plugins?");
  // fail explicitly: without this the script would fall through and exit
  // with status 0, making callers believe the unknown command succeeded
  process.exit(1);
}

View File

@@ -2,14 +2,27 @@
"name": "@scm-manager/ui-plugins",
"version": "2.0.0-SNAPSHOT",
"license": "BSD-3-Clause",
"bin": {
"ui-plugins": "./bin/ui-plugins.js"
},
"dependencies": {
"@scm-manager/ui-components": "^2.0.0-SNAPSHOT",
"@scm-manager/ui-extensions": "^2.0.0-SNAPSHOT",
"classnames": "^2.2.6",
"query-string": "^5.0.1",
"react": "^16.10.2",
"react-i18next": "^10.13.1",
"react-redux": "^5.0.7",
"react-router-dom": "^5.1.2",
"redux": "^4.0.0",
"styled-components": "^4.4.0"
},
"devDependencies": {
"@scm-manager/babel-preset": "^2.0.0-SNAPSHOT",
"@scm-manager/eslint-config": "^2.0.0-SNAPSHOT",
"@scm-manager/jest-preset": "^2.0.0-SNAPSHOT",
"@scm-manager/prettier-config": "^2.0.0-SNAPSHOT",
"@scm-manager/tsconfig": "^2.0.0-SNAPSHOT",
"@scm-manager/ui-components": "^2.0.0-SNAPSHOT",
"@scm-manager/ui-extensions": "^2.0.0-SNAPSHOT",
"@scm-manager/ui-scripts": "^2.0.0-SNAPSHOT",
"@scm-manager/ui-tests": "^2.0.0-SNAPSHOT",
"@scm-manager/ui-types": "^2.0.0-SNAPSHOT",
@@ -23,15 +36,7 @@
"@types/react-redux": "5.0.7",
"@types/react-router-dom": "^5.1.0",
"@types/styled-components": "^4.1.19",
"classnames": "^2.2.6",
"jest": "^24.9.0",
"query-string": "^5.0.1",
"react": "^16.10.2",
"react-i18next": "^10.13.1",
"react-redux": "^5.0.7",
"react-router-dom": "^5.1.2",
"redux": "^4.0.0",
"styled-components": "^4.4.0"
"jest": "^24.9.0"
},
"publishConfig": {
"access": "public"

View File

@@ -0,0 +1,49 @@
/* eslint-disable no-console */

// "postinstall" command: synchronizes the dependencies and devDependencies of
// the current package with the versions declared in the ui-plugins reference
// package.json, and runs `yarn install` when anything changed.
const path = require("path");
const fs = require("fs");
const { spawnSync } = require("child_process");

// package.json of the package being installed (current working directory)
const packageJsonPath = path.join(process.cwd(), "package.json");
const packageJSON = JSON.parse(fs.readFileSync(packageJsonPath, "UTF-8"));

// reference package.json shipped with ui-plugins; its versions are canonical
const reference = require("../../package.json");

// copy every entry of left[key] into right[key], logging each difference;
// returns true when at least one entry was changed
const sync = (left, right, key) => {
  if (!right[key]) {
    right[key] = {};
  }
  let changed = false;
  const keys = Object.keys(left[key]);
  keys.forEach(name => {
    if (right[key][name] !== left[key][name]) {
      console.log(name, "has changed from", right[key][name], "to", left[key][name]);
      right[key][name] = left[key][name];
      changed = true;
    }
  });
  return changed;
};

// sync both sections; deliberately evaluated into separate variables first so
// that devDependencies are still synced even when dependencies already changed
const update = () => {
  let dep = sync(reference, packageJSON, "dependencies");
  let devDep = sync(reference, packageJSON, "devDependencies");
  return dep || devDep;
};

if (update()) {
  console.log("dependencies changed, install new dependencies");
  // persist the updated package.json before installing
  fs.writeFileSync(packageJsonPath, JSON.stringify(packageJSON, null, " "), { encoding: "UTF-8" });
  const result = spawnSync("yarn", ["install"], { stdio: "inherit" });
  if (result.error) {
    // yarn binary could not be started at all
    console.log("could not start yarn command:", result.error);
    process.exit(2);
  } else if (result.status !== 0) {
    // yarn ran but reported a failure
    console.log("yarn process ends with status code:", result.status);
    process.exit(3);
  }
}

View File

@@ -1,4 +1,5 @@
#!/usr/bin/env node
/* eslint-disable no-console */
const { spawnSync } = require("child_process");
const commands = ["plugin", "plugin-watch", "publish", "version"];
@@ -15,9 +16,7 @@ const nodeArgs = commandIndex > 0 ? args.slice(0, commandIndex) : [];
if (commands.includes(command)) {
const result = spawnSync(
"node",
nodeArgs
.concat(require.resolve("../src/commands/" + command))
.concat(args.slice(commandIndex + 1)),
nodeArgs.concat(require.resolve("../src/commands/" + command)).concat(args.slice(commandIndex + 1)),
{ stdio: "inherit" }
);
if (result.signal) {
@@ -38,9 +37,6 @@ if (commands.includes(command)) {
}
process.exit(result.status);
} else {
console.log("Unknown script \"" + command + "\".");
console.log("Perhaps you need to update react-scripts?");
console.log(
"See: https://facebook.github.io/create-react-app/docs/updating-to-new-releases"
);
console.log(`Unknown script "${command}".`);
console.log("Perhaps you need to update ui-scripts?");
}

View File

@@ -813,6 +813,12 @@ form .field:not(.is-grouped) {
}
}
// radio
//overwrite bulma's default margin
.radio + .radio {
margin-left: 0;
}
// cursor
.has-cursor-pointer {
cursor: pointer;

View File

@@ -13,9 +13,11 @@ export type File = {
directory: boolean;
description?: string;
revision: string;
length: number;
lastModified?: string;
length?: number;
commitDate?: string;
subRepository?: SubRepository; // TODO
partialResult: boolean;
computationAborted: boolean;
_links: Links;
_embedded: {
children: File[] | null | undefined;

View File

@@ -42,8 +42,6 @@
"@types/styled-components": "^4.1.19",
"@types/systemjs": "^0.20.6",
"fetch-mock": "^7.5.1",
"flow-bin": "^0.109.0",
"flow-typed": "^2.6.1",
"react-test-renderer": "^16.10.2",
"redux-mock-store": "^1.5.3"
},

View File

@@ -72,7 +72,7 @@
"changesets": {
"errorTitle": "Fehler",
"errorSubtitle": "Changesets konnten nicht abgerufen werden",
"noChangesets": "Keine Changesets in diesem Branch gefunden.",
"noChangesets": "Keine Changesets in diesem Branch gefunden. Die Commits könnten gelöscht worden sein.",
"branchSelectorLabel": "Branches",
"collapseDiffs": "Auf-/Zuklappen"
},
@@ -99,9 +99,11 @@
"file-tree": {
"name": "Name",
"length": "Größe",
"lastModified": "Zuletzt bearbeitet",
"commitDate": "Commitdatum",
"description": "Beschreibung",
"branch": "Branch"
"branch": "Branch",
"notYetComputed": "Noch nicht berechnet; Der Wert wird in Kürze aktualisiert",
"computationAborted": "Die Berechnung dauert zu lange und wurde abgebrochen"
},
"content": {
"historyButton": "History",
@@ -109,7 +111,7 @@
"downloadButton": "Download",
"path": "Pfad",
"branch": "Branch",
"lastModified": "Zuletzt bearbeitet",
"commitDate": "Commitdatum",
"description": "Beschreibung",
"size": "Größe"
},

View File

@@ -72,7 +72,7 @@
"changesets": {
"errorTitle": "Error",
"errorSubtitle": "Could not fetch changesets",
"noChangesets": "No changesets found for this branch.",
"noChangesets": "No changesets found for this branch. The commits could have been removed.",
"branchSelectorLabel": "Branches",
"collapseDiffs": "Collapse"
},
@@ -99,9 +99,11 @@
"file-tree": {
"name": "Name",
"length": "Length",
"lastModified": "Last modified",
"commitDate": "Commit date",
"description": "Description",
"branch": "Branch"
"branch": "Branch",
"notYetComputed": "Not yet computed, will be updated in a short while",
"computationAborted": "The computation took too long and was aborted"
},
"content": {
"historyButton": "History",
@@ -109,7 +111,7 @@
"downloadButton": "Download",
"path": "Path",
"branch": "Branch",
"lastModified": "Last modified",
"commitDate": "Commit date",
"description": "Description",
"size": "Size"
},

View File

@@ -72,7 +72,7 @@
"changesets": {
"errorTitle": "Error",
"errorSubtitle": "No se han podido recuperar los changesets",
"noChangesets": "No se han encontrado changesets para esta rama branch.",
"noChangesets": "No se han encontrado changesets para esta rama. Los commits podrían haber sido eliminados.",
"branchSelectorLabel": "Ramas",
"collapseDiffs": "Colapso"
},
@@ -99,9 +99,11 @@
"file-tree": {
"name": "Nombre",
"length": "Longitud",
"lastModified": "Última modificación",
"commitDate": "Fecha de commit",
"description": "Descripción",
"branch": "Rama"
"branch": "Rama",
"notYetComputed": "Aún no calculado, se actualizará en poco tiempo",
"computationAborted": "El cálculo tomó demasiado tiempo y fue abortado"
},
"content": {
"historyButton": "Historia",
@@ -109,7 +111,7 @@
"downloadButton": "Descargar",
"path": "Ruta",
"branch": "Rama",
"lastModified": "Última modificación",
"commitDate": "Fecha de commit",
"description": "Descripción",
"size": "tamaño"
},

View File

@@ -7,7 +7,7 @@ import styled from "styled-components";
import { binder } from "@scm-manager/ui-extensions";
import { Repository, File } from "@scm-manager/ui-types";
import { ErrorNotification, Loading, Notification } from "@scm-manager/ui-components";
import { getFetchSourcesFailure, isFetchSourcesPending, getSources } from "../modules/sources";
import { getFetchSourcesFailure, isFetchSourcesPending, getSources, fetchSources } from "../modules/sources";
import FileTreeLeaf from "./FileTreeLeaf";
type Props = WithTranslation & {
@@ -19,10 +19,16 @@ type Props = WithTranslation & {
path: string;
baseUrl: string;
updateSources: () => void;
// context props
match: any;
};
type State = {
stoppableUpdateHandler?: number;
};
const FixedWidthTh = styled.th`
width: 16px;
`;
@@ -39,7 +45,28 @@ export function findParent(path: string) {
return "";
}
class FileTree extends React.Component<Props> {
class FileTree extends React.Component<Props, State> {
constructor(props: Props) {
super(props);
this.state = {};
}
componentDidUpdate(prevProps: Readonly<Props>, prevState: Readonly<State>): void {
if (prevState.stoppableUpdateHandler === this.state.stoppableUpdateHandler) {
const { tree, updateSources } = this.props;
if (tree?._embedded?.children && tree._embedded.children.find(c => c.partialResult)) {
const stoppableUpdateHandler = setTimeout(updateSources, 3000);
this.setState({ stoppableUpdateHandler: stoppableUpdateHandler });
}
}
}
componentWillUnmount(): void {
if (this.state.stoppableUpdateHandler) {
clearTimeout(this.state.stoppableUpdateHandler);
}
}
render() {
const { error, loading, tree } = this.props;
@@ -106,7 +133,7 @@ class FileTree extends React.Component<Props> {
<FixedWidthTh />
<th>{t("sources.file-tree.name")}</th>
<th className="is-hidden-mobile">{t("sources.file-tree.length")}</th>
<th className="is-hidden-mobile">{t("sources.file-tree.lastModified")}</th>
<th className="is-hidden-mobile">{t("sources.file-tree.commitDate")}</th>
<th className="is-hidden-touch">{t("sources.file-tree.description")}</th>
{binder.hasExtension("repos.sources.tree.row.right") && <th className="is-hidden-mobile" />}
</tr>
@@ -123,6 +150,14 @@ class FileTree extends React.Component<Props> {
}
}
const mapDispatchToProps = (dispatch: any, ownProps: Props) => {
const { repository, revision, path } = ownProps;
const updateSources = () => dispatch(fetchSources(repository, revision, path, false));
return { updateSources };
};
const mapStateToProps = (state: any, ownProps: Props) => {
const { repository, revision, path } = ownProps;
@@ -141,5 +176,8 @@ const mapStateToProps = (state: any, ownProps: Props) => {
export default compose(
withRouter,
connect(mapStateToProps)
connect(
mapStateToProps,
mapDispatchToProps
)
)(withTranslation("repos")(FileTree));

View File

@@ -4,10 +4,12 @@ import classNames from "classnames";
import styled from "styled-components";
import { binder, ExtensionPoint } from "@scm-manager/ui-extensions";
import { File } from "@scm-manager/ui-types";
import { DateFromNow, FileSize } from "@scm-manager/ui-components";
import { DateFromNow, FileSize, Tooltip } from "@scm-manager/ui-components";
import FileIcon from "./FileIcon";
import { Icon } from "@scm-manager/ui-components/src";
import { WithTranslation, withTranslation } from "react-i18next";
type Props = {
type Props = WithTranslation & {
file: File;
baseUrl: string;
};
@@ -35,7 +37,7 @@ export function createLink(base: string, file: File) {
return link;
}
export default class FileTreeLeaf extends React.Component<Props> {
class FileTreeLeaf extends React.Component<Props> {
createLink = (file: File) => {
return createLink(this.props.baseUrl, file);
};
@@ -62,20 +64,42 @@ export default class FileTreeLeaf extends React.Component<Props> {
return <Link to={this.createLink(file)}>{file.name}</Link>;
};
contentIfPresent = (file: File, attribute: string, content: (file: File) => any) => {
const { t } = this.props;
if (file.hasOwnProperty(attribute)) {
return content(file);
} else if (file.computationAborted) {
return (
<Tooltip location="top" message={t("sources.file-tree.computationAborted")}>
<Icon name={"question-circle"} />
</Tooltip>
);
} else if (file.partialResult) {
return (
<Tooltip location="top" message={t("sources.file-tree.notYetComputed")}>
<Icon name={"hourglass"} />
</Tooltip>
);
} else {
return content(file);
}
};
render() {
const { file } = this.props;
const fileSize = file.directory ? "" : <FileSize bytes={file.length} />;
const renderFileSize = (file: File) => <FileSize bytes={file.length} />;
const renderCommitDate = (file: File) => <DateFromNow date={file.commitDate} />;
return (
<tr>
<td>{this.createFileIcon(file)}</td>
<MinWidthTd className="is-word-break">{this.createFileName(file)}</MinWidthTd>
<NoWrapTd className="is-hidden-mobile">{fileSize}</NoWrapTd>
<td className="is-hidden-mobile">
<DateFromNow date={file.lastModified} />
</td>
<MinWidthTd className={classNames("is-word-break", "is-hidden-touch")}>{file.description}</MinWidthTd>
<NoWrapTd className="is-hidden-mobile">{file.directory ? "" : this.contentIfPresent(file, "length", renderFileSize)}</NoWrapTd>
<td className="is-hidden-mobile">{this.contentIfPresent(file, "commitDate", renderCommitDate)}</td>
<MinWidthTd className={classNames("is-word-break", "is-hidden-touch")}>
{this.contentIfPresent(file, "description", file => file.description)}
</MinWidthTd>
{binder.hasExtension("repos.sources.tree.row.right") && (
<td className="is-hidden-mobile">
{!file.directory && (
@@ -93,3 +117,5 @@ export default class FileTreeLeaf extends React.Component<Props> {
);
}
}
export default withTranslation("repos")(FileTreeLeaf);

View File

@@ -115,7 +115,7 @@ class Content extends React.Component<Props, State> {
showMoreInformation() {
const collapsed = this.state.collapsed;
const { file, revision, t, repository } = this.props;
const date = <DateFromNow date={file.lastModified} />;
const date = <DateFromNow date={file.commitDate} />;
const description = file.description ? (
<p>
{file.description.split("\n").map((item, key) => {
@@ -147,7 +147,7 @@ class Content extends React.Component<Props, State> {
<td>{fileSize}</td>
</tr>
<tr>
<td>{t("sources.content.lastModified")}</td>
<td>{t("sources.content.commitDate")}</td>
<td>{date}</td>
</tr>
<tr>

View File

@@ -49,10 +49,8 @@ const collection = {
name: "src",
path: "src",
directory: true,
description: "",
length: 176,
revision: "76aae4bb4ceacf0e88938eb5b6832738b7d537b4",
lastModified: "",
subRepository: undefined,
_links: {
self: {
@@ -71,7 +69,7 @@ const collection = {
description: "bump version",
length: 780,
revision: "76aae4bb4ceacf0e88938eb5b6832738b7d537b4",
lastModified: "2017-07-31T11:17:19Z",
commitDate: "2017-07-31T11:17:19Z",
subRepository: undefined,
_links: {
self: {
@@ -127,7 +125,7 @@ describe("sources fetch", () => {
{
type: FETCH_SOURCES_SUCCESS,
itemId: "scm/core/_/",
payload: collection
payload: { updatePending: false, sources: collection }
}
];
@@ -148,7 +146,7 @@ describe("sources fetch", () => {
{
type: FETCH_SOURCES_SUCCESS,
itemId: "scm/core/abc/src",
payload: collection
payload: { updatePending: false, sources: collection }
}
];
@@ -182,14 +180,14 @@ describe("reducer tests", () => {
it("should store the collection, without revision and path", () => {
const expectedState = {
"scm/core/_/": collection
"scm/core/_/": { updatePending: false, sources: collection }
};
expect(reducer({}, fetchSourcesSuccess(repository, "", "", collection))).toEqual(expectedState);
});
it("should store the collection, with revision and path", () => {
const expectedState = {
"scm/core/abc/src/main": collection
"scm/core/abc/src/main": { updatePending: false, sources: collection }
};
expect(reducer({}, fetchSourcesSuccess(repository, "abc", "src/main", collection))).toEqual(expectedState);
});
@@ -200,7 +198,7 @@ describe("selector tests", () => {
const state = {
sources: {
"scm/core/abc/src/main/package.json": {
noDirectory
sources: {noDirectory}
}
}
};
@@ -223,7 +221,9 @@ describe("selector tests", () => {
it("should return the source collection without revision and path", () => {
const state = {
sources: {
"scm/core/_/": collection
"scm/core/_/": {
sources: collection
}
}
};
expect(getSources(state, repository, "", "")).toBe(collection);
@@ -232,7 +232,9 @@ describe("selector tests", () => {
it("should return the source collection with revision and path", () => {
const state = {
sources: {
"scm/core/abc/src/main": collection
"scm/core/abc/src/main": {
sources: collection
}
}
};
expect(getSources(state, repository, "abc", "src/main")).toBe(collection);

View File

@@ -6,16 +6,29 @@ import { getFailure } from "../../../modules/failure";
export const FETCH_SOURCES = "scm/repos/FETCH_SOURCES";
export const FETCH_SOURCES_PENDING = `${FETCH_SOURCES}_${types.PENDING_SUFFIX}`;
export const FETCH_UPDATES_PENDING = `${FETCH_SOURCES}_UPDATE_PENDING`;
export const FETCH_SOURCES_SUCCESS = `${FETCH_SOURCES}_${types.SUCCESS_SUFFIX}`;
export const FETCH_SOURCES_FAILURE = `${FETCH_SOURCES}_${types.FAILURE_SUFFIX}`;
export function fetchSources(repository: Repository, revision: string, path: string) {
return function(dispatch: any) {
export function fetchSources(repository: Repository, revision: string, path: string, initialLoad = true) {
return function(dispatch: any, getState: () => any) {
const state = getState();
if (
isFetchSourcesPending(state, repository, revision, path) ||
isUpdateSourcePending(state, repository, revision, path)
) {
return;
}
if (initialLoad) {
dispatch(fetchSourcesPending(repository, revision, path));
} else {
dispatch(updateSourcesPending(repository, revision, path, getSources(state, repository, revision, path)));
}
return apiClient
.get(createUrl(repository, revision, path))
.then(response => response.json())
.then(sources => {
.then((sources: File) => {
dispatch(fetchSourcesSuccess(repository, revision, path, sources));
})
.catch(err => {
@@ -42,10 +55,23 @@ export function fetchSourcesPending(repository: Repository, revision: string, pa
};
}
export function updateSourcesPending(
repository: Repository,
revision: string,
path: string,
currentSources: any
): Action {
return {
type: FETCH_UPDATES_PENDING,
payload: { updatePending: true, sources: currentSources },
itemId: createItemId(repository, revision, path)
};
}
export function fetchSourcesSuccess(repository: Repository, revision: string, path: string, sources: File) {
return {
type: FETCH_SOURCES_SUCCESS,
payload: sources,
payload: { updatePending: false, sources },
itemId: createItemId(repository, revision, path)
};
}
@@ -72,7 +98,7 @@ export default function reducer(
type: "UNKNOWN"
}
): any {
if (action.itemId && action.type === FETCH_SOURCES_SUCCESS) {
if (action.itemId && (action.type === FETCH_SOURCES_SUCCESS || action.type === FETCH_UPDATES_PENDING)) {
return {
...state,
[action.itemId]: action.payload
@@ -99,13 +125,17 @@ export function getSources(
path: string
): File | null | undefined {
if (state.sources) {
return state.sources[createItemId(repository, revision, path)];
return state.sources[createItemId(repository, revision, path)]?.sources;
}
return null;
}
export function isFetchSourcesPending(state: any, repository: Repository, revision: string, path: string): boolean {
return isPending(state, FETCH_SOURCES, createItemId(repository, revision, path));
return state && isPending(state, FETCH_SOURCES, createItemId(repository, revision, path));
}
function isUpdateSourcePending(state: any, repository: Repository, revision: string, path: string): boolean {
return state?.sources && state.sources[createItemId(repository, revision, path)]?.updatePending;
}
export function getFetchSourcesFailure(

View File

@@ -1,6 +1,5 @@
package sonia.scm.api.v2.resources;
import com.google.common.annotations.VisibleForTesting;
import de.otto.edison.hal.Embedded;
import de.otto.edison.hal.Links;
import org.mapstruct.Context;
@@ -16,18 +15,13 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.time.Instant;
import java.util.Optional;
import java.util.OptionalLong;
@Mapper
public abstract class BrowserResultToFileObjectDtoMapper extends BaseFileObjectDtoMapper {
@Inject
private FileObjectToFileObjectDtoMapper childrenMapper;
@VisibleForTesting
void setChildrenMapper(FileObjectToFileObjectDtoMapper childrenMapper) {
this.childrenMapper = childrenMapper;
}
FileObjectDto map(BrowserResult browserResult, @Context NamespaceAndName namespaceAndName) {
FileObjectDto fileObjectDto = fileObjectToDto(browserResult.getFile(), namespaceAndName, browserResult);
fileObjectDto.setRevision(browserResult.getRevision());
@@ -36,12 +30,8 @@ public abstract class BrowserResultToFileObjectDtoMapper extends BaseFileObjectD
@Mapping(target = "attributes", ignore = true) // We do not map HAL attributes
@Mapping(target = "children", qualifiedBy = Children.class)
protected abstract FileObjectDto fileObjectToDto(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult);
// Qualified mapping method picked up by MapStruct for the "children"
// property (see @Mapping(qualifiedBy = Children.class)); delegates each
// child FileObject to the dedicated children mapper.
@Children
protected FileObjectDto childrenToDto(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult) {
return childrenMapper.map(fileObject, namespaceAndName, browserResult);
}
protected abstract FileObjectDto fileObjectToDto(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult);
@Override
void applyEnrichers(Links.Builder links, Embedded.Builder embeddedBuilder, NamespaceAndName namespaceAndName, BrowserResult browserResult, FileObject fileObject) {
@@ -52,6 +42,14 @@ public abstract class BrowserResultToFileObjectDtoMapper extends BaseFileObjectD
applyEnrichers(appender, fileObject, namespaceAndName, browserResult, browserResult.getRevision());
}
/**
 * Converts an epoch-millisecond timestamp wrapped in an {@link OptionalLong}
 * into an {@link Optional} {@link Instant}; an empty input stays empty.
 */
Optional<Instant> mapOptionalInstant(OptionalLong optionalLong) {
  return optionalLong.isPresent()
    ? Optional.of(Instant.ofEpochMilli(optionalLong.getAsLong()))
    : Optional.empty();
}
@Qualifier
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.CLASS)

View File

@@ -10,6 +10,8 @@ import lombok.Setter;
import java.time.Instant;
import java.util.List;
import java.util.Optional;
import java.util.OptionalLong;
@Getter
@Setter
@@ -19,14 +21,16 @@ public class FileObjectDto extends HalRepresentation {
private String path;
private boolean directory;
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private String description;
private long length;
private Optional<String> description;
private OptionalLong length;
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private Instant lastModified;
private Optional<Instant> commitDate;
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private SubRepositoryDto subRepository;
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private String revision;
private boolean partialResult;
private boolean computationAborted;
public FileObjectDto(Links links, Embedded embedded) {
super(links, embedded);

View File

@@ -1,22 +0,0 @@
package sonia.scm.api.v2.resources;
import de.otto.edison.hal.Embedded;
import de.otto.edison.hal.Links;
import org.mapstruct.Context;
import org.mapstruct.Mapper;
import org.mapstruct.Mapping;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.NamespaceAndName;
/**
 * MapStruct mapper converting a single {@link FileObject} into its
 * {@link FileObjectDto} representation. HAL attributes are not mapped;
 * links and embedded resources are contributed via the enricher hook
 * inherited from {@code BaseFileObjectDtoMapper}.
 */
@Mapper
public abstract class FileObjectToFileObjectDtoMapper extends BaseFileObjectDtoMapper {
// Implementation is generated by MapStruct at build time.
@Mapping(target = "attributes", ignore = true) // We do not map HAL attributes
protected abstract FileObjectDto map(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult);
// Applies registered HAL enrichers for this file object, keyed by the
// revision taken from the surrounding browser result.
@Override
void applyEnrichers(Links.Builder links, Embedded.Builder embeddedBuilder, NamespaceAndName namespaceAndName, BrowserResult browserResult, FileObject fileObject) {
applyEnrichers(new EdisonHalAppender(links, embeddedBuilder), fileObject, namespaceAndName, browserResult, browserResult.getRevision());
}
}

View File

@@ -37,7 +37,6 @@ public class MapperModule extends AbstractModule {
bind(TagToTagDtoMapper.class).to(Mappers.getMapper(TagToTagDtoMapper.class).getClass());
bind(FileObjectToFileObjectDtoMapper.class).to(Mappers.getMapper(FileObjectToFileObjectDtoMapper.class).getClass());
bind(BrowserResultToFileObjectDtoMapper.class).to(Mappers.getMapper(BrowserResultToFileObjectDtoMapper.class).getClass());
bind(ModificationsToDtoMapper.class).to(Mappers.getMapper(ModificationsToDtoMapper.class).getClass());

View File

@@ -12,6 +12,7 @@ import sonia.scm.debug.DebugModule;
import sonia.scm.filter.WebElementModule;
import sonia.scm.plugin.ExtensionProcessor;
import sonia.scm.plugin.PluginLoader;
import sonia.scm.repository.ExecutorModule;
import javax.servlet.ServletContext;
import java.util.ArrayList;
@@ -51,6 +52,7 @@ public class ApplicationModuleProvider implements ModuleProvider {
moduleList.add(new DebugModule());
}
moduleList.add(new MapperModule());
moduleList.add(new ExecutorModule());
return moduleList;
}

View File

@@ -0,0 +1,49 @@
package sonia.scm.repository;
import sonia.scm.repository.spi.SyncAsyncExecutor;
import java.time.Instant;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.ASYNCHRONOUS;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.SYNCHRONOUS;
/**
 * {@link SyncAsyncExecutor} that runs tasks synchronously on the calling
 * thread until {@code switchToAsyncTime} has passed, then hands further tasks
 * to the given {@link Executor}. Asynchronous task runtime is accumulated;
 * once it exceeds {@code maxAsyncAbortSeconds}, queued tasks are skipped and
 * their abortion fallback is invoked instead.
 */
public class DefaultSyncAsyncExecutor implements SyncAsyncExecutor {
private final Executor executor;
// Point in time after which tasks are no longer executed synchronously.
private final Instant switchToAsyncTime;
// Total async runtime budget; once exceeded, remaining tasks are aborted.
private final long maxAsyncAbortMilliseconds;
// Sum of wall-clock milliseconds spent in asynchronously executed tasks.
private AtomicLong accumulatedAsyncRuntime = new AtomicLong(0L);
// NOTE(review): written on the submitting thread, read via
// hasExecutedAllSynchronously(); not volatile — confirm callers only read it
// from the same thread that called execute().
private boolean executedAllSynchronously = true;
DefaultSyncAsyncExecutor(Executor executor, Instant switchToAsyncTime, int maxAsyncAbortSeconds) {
this.executor = executor;
this.switchToAsyncTime = switchToAsyncTime;
this.maxAsyncAbortMilliseconds = maxAsyncAbortSeconds * 1000L;
}
/**
 * Runs {@code task} synchronously before the switch time, asynchronously
 * afterwards. Returns the execution type chosen, which is also passed to the
 * task itself. After the async runtime budget is exhausted,
 * {@code abortionFallback} is run instead of the task.
 */
public ExecutionType execute(Consumer<ExecutionType> task, Runnable abortionFallback) {
if (switchToAsyncTime.isBefore(Instant.now())) {
executor.execute(() -> {
// Budget is checked at start only; a long-running task may overshoot it.
if (accumulatedAsyncRuntime.get() < maxAsyncAbortMilliseconds) {
long chunkStartTime = System.currentTimeMillis();
task.accept(ASYNCHRONOUS);
accumulatedAsyncRuntime.addAndGet(System.currentTimeMillis() - chunkStartTime);
} else {
abortionFallback.run();
}
});
executedAllSynchronously = false;
return ASYNCHRONOUS;
} else {
task.accept(SYNCHRONOUS);
return SYNCHRONOUS;
}
}
// True while no task has been handed to the asynchronous executor yet.
public boolean hasExecutedAllSynchronously() {
return executedAllSynchronously;
}
}

View File

@@ -0,0 +1,55 @@
package sonia.scm.repository;
import sonia.scm.repository.spi.SyncAsyncExecutor;
import sonia.scm.repository.spi.SyncAsyncExecutorProvider;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.io.Closeable;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Default {@link SyncAsyncExecutorProvider} backed by a fixed thread pool.
 * Pool size and the default async abort budget are configurable via the
 * system properties {@code scm.asyncThreads} and
 * {@code scm.maxAsyncAbortSeconds}. The pool is shut down on {@link #close()}.
 */
@Singleton
public class DefaultSyncAsyncExecutorProvider implements SyncAsyncExecutorProvider, Closeable {

  public static final int DEFAULT_MAX_ASYNC_ABORT_SECONDS = 60;
  public static final String MAX_ASYNC_ABORT_SECONDS_PROPERTY = "scm.maxAsyncAbortSeconds";
  public static final int DEFAULT_NUMBER_OF_THREADS = 4;
  public static final String NUMBER_OF_THREADS_PROPERTY = "scm.asyncThreads";

  private final ExecutorService executor;
  private final int defaultMaxAsyncAbortSeconds;

  @Inject
  public DefaultSyncAsyncExecutorProvider() {
    this(Executors.newFixedThreadPool(getProperty(NUMBER_OF_THREADS_PROPERTY, DEFAULT_NUMBER_OF_THREADS)));
  }

  /** Visible for tests: allows injecting a custom executor service. */
  public DefaultSyncAsyncExecutorProvider(ExecutorService executor) {
    this.executor = executor;
    this.defaultMaxAsyncAbortSeconds = getProperty(MAX_ASYNC_ABORT_SECONDS_PROPERTY, DEFAULT_MAX_ASYNC_ABORT_SECONDS);
  }

  /** Creates an executor that switches to async mode after the given timeout,
   *  using the configured default abort budget. */
  public SyncAsyncExecutor createExecutorWithSecondsToTimeout(int switchToAsyncInSeconds) {
    return createExecutorWithSecondsToTimeout(switchToAsyncInSeconds, defaultMaxAsyncAbortSeconds);
  }

  /** Creates an executor that switches to async mode after
   *  {@code switchToAsyncInSeconds} and aborts async work once
   *  {@code maxAsyncAbortSeconds} of accumulated runtime is exceeded. */
  public SyncAsyncExecutor createExecutorWithSecondsToTimeout(int switchToAsyncInSeconds, int maxAsyncAbortSeconds) {
    return new DefaultSyncAsyncExecutor(
      executor,
      Instant.now().plus(switchToAsyncInSeconds, ChronoUnit.SECONDS),
      maxAsyncAbortSeconds);
  }

  @Override
  public void close() {
    executor.shutdownNow();
  }

  private static int getProperty(String key, int defaultValue) {
    // Integer.getInteger falls back to the default when the property is unset
    // OR unparsable; the previous Integer.parseInt(System.getProperty(...))
    // threw NumberFormatException on a malformed value and would have
    // prevented the provider from being constructed at all.
    return Integer.getInteger(key, defaultValue);
  }
}

View File

@@ -0,0 +1,12 @@
package sonia.scm.repository;
import com.google.inject.AbstractModule;
import sonia.scm.lifecycle.modules.CloseableModule;
import sonia.scm.repository.spi.SyncAsyncExecutorProvider;
/**
 * Guice module wiring the {@link SyncAsyncExecutorProvider} interface to its
 * default implementation. Installed by {@code ApplicationModuleProvider}.
 */
public class ExecutorModule extends AbstractModule {
@Override
protected void configure() {
// NOTE(review): DefaultSyncAsyncExecutorProvider implements Closeable and
// CloseableModule is imported but not installed here — confirm the thread
// pool is shut down via CloseableModule elsewhere in the module graph.
bind(SyncAsyncExecutorProvider.class).to(DefaultSyncAsyncExecutorProvider.class);
}
}

View File

@@ -8,7 +8,6 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mapstruct.factory.Mappers;
import org.mockito.InjectMocks;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.NamespaceAndName;
@@ -24,9 +23,6 @@ public class BrowserResultToFileObjectDtoMapperTest {
private final URI baseUri = URI.create("http://example.com/base/");
private final ResourceLinks resourceLinks = ResourceLinksMock.createMock(baseUri);
@InjectMocks
private FileObjectToFileObjectDtoMapperImpl fileObjectToFileObjectDtoMapper;
private BrowserResultToFileObjectDtoMapper mapper;
private final Subject subject = mock(Subject.class);
@@ -34,28 +30,28 @@ public class BrowserResultToFileObjectDtoMapperTest {
private FileObject fileObject1 = new FileObject();
private FileObject fileObject2 = new FileObject();
private FileObject partialFileObject = new FileObject();
@Before
public void init() {
initMocks(this);
mapper = Mappers.getMapper(BrowserResultToFileObjectDtoMapper.class);
mapper.setChildrenMapper(fileObjectToFileObjectDtoMapper);
mapper.setResourceLinks(resourceLinks);
subjectThreadState.bind();
ThreadContext.bind(subject);
fileObject1.setName("FO 1");
fileObject1.setLength(100);
fileObject1.setLastModified(0L);
fileObject1.setLength(100L);
fileObject1.setCommitDate(0L);
fileObject1.setPath("/path/object/1");
fileObject1.setDescription("description of file object 1");
fileObject1.setDirectory(false);
fileObject2.setName("FO 2");
fileObject2.setLength(100);
fileObject2.setLastModified(101L);
fileObject2.setLength(100L);
fileObject2.setCommitDate(101L);
fileObject2.setPath("/path/object/2");
fileObject2.setDescription("description of file object 2");
fileObject2.setDirectory(true);

View File

@@ -1,121 +0,0 @@
package sonia.scm.api.v2.resources;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.subject.support.SubjectThreadState;
import org.apache.shiro.util.ThreadContext;
import org.apache.shiro.util.ThreadState;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.junit.MockitoJUnitRunner;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.NamespaceAndName;
import sonia.scm.repository.SubRepository;
import java.net.URI;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
/**
 * Tests for the MapStruct-generated {@code FileObjectToFileObjectDtoMapper}:
 * attribute mapping, self/content link construction, and HAL enricher
 * integration. A mocked Shiro subject is bound to the thread so permission
 * checks inside link builders pass.
 */
@RunWith(MockitoJUnitRunner.Silent.class)
public class FileObjectToFileObjectDtoMapperTest {
private final URI baseUri = URI.create("http://example.com/base/");
@SuppressWarnings("unused") // Is injected
private final ResourceLinks resourceLinks = ResourceLinksMock.createMock(baseUri);
@InjectMocks
private FileObjectToFileObjectDtoMapperImpl mapper;
private final Subject subject = mock(Subject.class);
private final ThreadState subjectThreadState = new SubjectThreadState(subject);
private URI expectedBaseUri;
// Binds the mocked subject to the current thread; must be undone in unbind()
// so other tests are not affected by the leaked thread state.
@Before
public void init() {
expectedBaseUri = baseUri.resolve(RepositoryRootResource.REPOSITORIES_PATH_V2 + "/");
subjectThreadState.bind();
ThreadContext.bind(subject);
}
@After
public void unbind() {
ThreadContext.unbindSubject();
}
@Test
public void shouldMapAttributesCorrectly() {
FileObject fileObject = createFileObject();
FileObjectDto dto = mapper.map(fileObject, new NamespaceAndName("namespace", "name"), new BrowserResult("revision", fileObject));
assertEqualAttributes(fileObject, dto);
}
// Directories link to the "sources" endpoint ...
@Test
public void shouldHaveCorrectSelfLinkForDirectory() {
FileObject fileObject = createDirectoryObject();
FileObjectDto dto = mapper.map(fileObject, new NamespaceAndName("namespace", "name"), new BrowserResult("revision", fileObject));
assertThat(dto.getLinks().getLinkBy("self").get().getHref()).isEqualTo(expectedBaseUri.resolve("namespace/name/sources/revision/foo/bar").toString());
}
// ... while plain files link to the "content" endpoint.
@Test
public void shouldHaveCorrectContentLink() {
FileObject fileObject = createFileObject();
fileObject.setDirectory(false);
FileObjectDto dto = mapper.map(fileObject, new NamespaceAndName("namespace", "name"), new BrowserResult("revision", fileObject));
assertThat(dto.getLinks().getLinkBy("self").get().getHref()).isEqualTo(expectedBaseUri.resolve("namespace/name/content/revision/foo/bar").toString());
}
// Registered HalEnrichers must be able to read repository, file object and
// revision from the enricher context and append additional links.
@Test
public void shouldAppendLinks() {
HalEnricherRegistry registry = new HalEnricherRegistry();
registry.register(FileObject.class, (ctx, appender) -> {
NamespaceAndName repository = ctx.oneRequireByType(NamespaceAndName.class);
FileObject fo = ctx.oneRequireByType(FileObject.class);
String rev = ctx.oneRequireByType(String.class);
appender.appendLink("hog", "http://" + repository.logString() + "/" + fo.getName() + "/" + rev);
});
mapper.setRegistry(registry);
FileObject fileObject = createFileObject();
FileObjectDto dto = mapper.map(fileObject, new NamespaceAndName("hitchhiker", "hog"), new BrowserResult("42", fileObject));
assertThat(dto.getLinks().getLinkBy("hog").get().getHref()).isEqualTo("http://hitchhiker/hog/foo/42");
}
// Fixture helper: same file object as createFileObject(), marked as directory.
private FileObject createDirectoryObject() {
FileObject fileObject = createFileObject();
fileObject.setDirectory(true);
return fileObject;
}
// Fixture helper: fully populated plain file at path foo/bar.
private FileObject createFileObject() {
FileObject fileObject = new FileObject();
fileObject.setName("foo");
fileObject.setDescription("bar");
fileObject.setPath("foo/bar");
fileObject.setDirectory(false);
fileObject.setLength(100);
fileObject.setLastModified(123L);
fileObject.setSubRepository(new SubRepository("repo.url"));
return fileObject;
}
// Compares every mapped attribute of the DTO against the source file object.
private void assertEqualAttributes(FileObject fileObject, FileObjectDto dto) {
assertThat(dto.getName()).isEqualTo(fileObject.getName());
assertThat(dto.getDescription()).isEqualTo(fileObject.getDescription());
assertThat(dto.getPath()).isEqualTo(fileObject.getPath());
assertThat(dto.isDirectory()).isEqualTo(fileObject.isDirectory());
assertThat(dto.getLength()).isEqualTo(fileObject.getLength());
assertThat(dto.getLastModified().toEpochMilli()).isEqualTo((long) fileObject.getLastModified());
assertThat(dto.getSubRepository().getBrowserUrl()).isEqualTo(fileObject.getSubRepository().getBrowserUrl());
}
}

View File

@@ -7,7 +7,6 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mapstruct.factory.Mappers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import sonia.scm.NotFoundException;
@@ -41,16 +40,12 @@ public class SourceRootResourceTest extends RepositoryTestBase {
@Mock
private BrowseCommandBuilder browseCommandBuilder;
@InjectMocks
private FileObjectToFileObjectDtoMapperImpl fileObjectToFileObjectDtoMapper;
private BrowserResultToFileObjectDtoMapper browserResultToFileObjectDtoMapper;
@Before
public void prepareEnvironment() throws Exception {
public void prepareEnvironment() {
browserResultToFileObjectDtoMapper = Mappers.getMapper(BrowserResultToFileObjectDtoMapper.class);
browserResultToFileObjectDtoMapper.setChildrenMapper(fileObjectToFileObjectDtoMapper);
browserResultToFileObjectDtoMapper.setResourceLinks(resourceLinks);
when(serviceFactory.create(new NamespaceAndName("space", "repo"))).thenReturn(service);
when(service.getBrowseCommand()).thenReturn(browseCommandBuilder);
@@ -127,7 +122,7 @@ public class SourceRootResourceTest extends RepositoryTestBase {
fileObject1.setDescription("File object 1");
fileObject1.setPath("/foo/bar/fo1");
fileObject1.setLength(1024L);
fileObject1.setLastModified(0L);
fileObject1.setCommitDate(0L);
parent.addChild(fileObject1);
FileObject fileObject2 = new FileObject();
@@ -136,7 +131,7 @@ public class SourceRootResourceTest extends RepositoryTestBase {
fileObject2.setDescription("File object 2");
fileObject2.setPath("/foo/bar/fo2");
fileObject2.setLength(4096L);
fileObject2.setLastModified(1234L);
fileObject2.setCommitDate(1234L);
parent.addChild(fileObject2);
return parent;

View File

@@ -0,0 +1,54 @@
package sonia.scm.repository;
import org.junit.jupiter.api.Test;
import sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType;
import java.time.Instant;
import static java.lang.Integer.MAX_VALUE;
import static java.time.Instant.MAX;
import static java.time.temporal.ChronoUnit.MILLIS;
import static org.assertj.core.api.Assertions.assertThat;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.ASYNCHRONOUS;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.SYNCHRONOUS;
/**
 * Unit tests for {@code DefaultSyncAsyncExecutor} using a direct
 * ({@code Runnable::run}) executor, so "asynchronous" submissions run inline
 * on the test thread and their effects are observable immediately.
 */
class DefaultSyncAsyncExecutorTest {

  ExecutionType observedType = null;
  boolean fallbackInvoked = false;

  @Test
  void shouldExecuteSynchronouslyBeforeTimeout() {
    // Switch time far in the future -> the task must run synchronously.
    DefaultSyncAsyncExecutor executor = new DefaultSyncAsyncExecutor(Runnable::run, MAX, MAX_VALUE);

    ExecutionType result = executor.execute(type -> observedType = type, () -> fallbackInvoked = true);

    assertThat(result).isEqualTo(SYNCHRONOUS);
    assertThat(observedType).isEqualTo(SYNCHRONOUS);
    assertThat(executor.hasExecutedAllSynchronously()).isTrue();
    assertThat(fallbackInvoked).isFalse();
  }

  @Test
  void shouldExecuteAsynchronouslyAfterTimeout() {
    // Switch time already in the past -> the task is handed to the executor.
    DefaultSyncAsyncExecutor executor = new DefaultSyncAsyncExecutor(Runnable::run, Instant.now().minus(1, MILLIS), MAX_VALUE);

    ExecutionType result = executor.execute(type -> observedType = type, () -> fallbackInvoked = true);

    assertThat(result).isEqualTo(ASYNCHRONOUS);
    assertThat(observedType).isEqualTo(ASYNCHRONOUS);
    assertThat(executor.hasExecutedAllSynchronously()).isFalse();
    assertThat(fallbackInvoked).isFalse();
  }

  @Test
  void shouldCallFallbackAfterAbortion() {
    // Zero async runtime budget -> the task is skipped and the fallback runs.
    DefaultSyncAsyncExecutor executor = new DefaultSyncAsyncExecutor(Runnable::run, Instant.now().minus(1, MILLIS), 0);

    ExecutionType result = executor.execute(type -> observedType = type, () -> fallbackInvoked = true);

    assertThat(result).isEqualTo(ASYNCHRONOUS);
    assertThat(observedType).isNull();
    assertThat(fallbackInvoked).isTrue();
  }
}

3153
yarn.lock

File diff suppressed because it is too large Load Diff