merge repository heads

This commit is contained in:
Sebastian Sdorra
2019-12-18 14:55:38 +01:00
35 changed files with 1012 additions and 465 deletions

View File

@@ -46,8 +46,11 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.OptionalLong;
import static java.util.Collections.unmodifiableCollection;
import static java.util.Optional.ofNullable;
/**
* The FileObject represents a file or a directory in a repository.
@@ -90,7 +93,9 @@ public class FileObject implements LastModifiedAware, Serializable
&& Objects.equal(description, other.description)
&& Objects.equal(length, other.length)
&& Objects.equal(subRepository, other.subRepository)
&& Objects.equal(lastModified, other.lastModified);
&& Objects.equal(commitDate, other.commitDate)
&& Objects.equal(partialResult, other.partialResult)
&& Objects.equal(computationAborted, other.computationAborted);
//J+
}
@@ -100,8 +105,16 @@ public class FileObject implements LastModifiedAware, Serializable
@Override
public int hashCode()
{
return Objects.hashCode(name, path, directory, description, length,
subRepository, lastModified);
return Objects.hashCode(
name,
path,
directory,
description,
length,
subRepository,
commitDate,
partialResult,
computationAborted);
}
/**
@@ -118,7 +131,9 @@ public class FileObject implements LastModifiedAware, Serializable
.add("description", description)
.add("length", length)
.add("subRepository", subRepository)
.add("lastModified", lastModified)
.add("commitDate", commitDate)
.add("partialResult", partialResult)
.add("computationAborted", computationAborted)
.toString();
//J+
}
@@ -130,35 +145,44 @@ public class FileObject implements LastModifiedAware, Serializable
* if the repository provider is not able to get the last commit for the path.
*
*
* @return last commit message
* @return Last commit message or <code>null</code>, when this value has not been computed
* (see {@link #isPartialResult()}).
*/
public String getDescription()
public Optional<String> getDescription()
{
return description;
return ofNullable(description);
}
/**
* Returns the last commit date for this file. The method will return null,
* if the repository provider is not able to get the last commit for the path.
* if the repository provider is not able to get the last commit for the path
* or it has not been computed.
*
*
* @return last commit date
*/
@Override
public Long getLastModified()
{
return lastModified;
public Long getLastModified() {
return this.isPartialResult()? null: this.commitDate;
}
/**
* Returns the length of the file.
*
*
* @return length of file
* Returns the last commit date for this file. The method will return {@link OptionalLong#empty()},
* if the repository provider is not able to get the last commit for the path or if this value has not been computed
* (see {@link #isPartialResult()} and {@link #isComputationAborted()}).
*/
public long getLength()
public OptionalLong getCommitDate()
{
return length;
return commitDate == null? OptionalLong.empty(): OptionalLong.of(commitDate);
}
/**
* Returns the length of the file or {@link OptionalLong#empty()}, when this value has not been computed
* (see {@link #isPartialResult()} and {@link #isComputationAborted()}).
*/
public OptionalLong getLength()
{
return length == null? OptionalLong.empty(): OptionalLong.of(length);
}
/**
@@ -200,7 +224,7 @@ public class FileObject implements LastModifiedAware, Serializable
}
/**
* Return sub repository informations or null if the file is not
* Return sub repository information or null if the file is not
* sub repository.
*
* @since 1.10
@@ -222,6 +246,42 @@ public class FileObject implements LastModifiedAware, Serializable
return directory;
}
/**
* Returns the children of this file.
*
* @return The children of this file if it is a directory.
*/
public Collection<FileObject> getChildren() {
return children == null? null: unmodifiableCollection(children);
}
/**
* If this is <code>true</code>, some values for this object have not been computed, yet. These values (like
* {@link #getLength()}, {@link #getDescription()} or {@link #getCommitDate()})
* will return {@link Optional#empty()} (or {@link OptionalLong#empty()} respectively), unless they are computed.
* There may be an asynchronous task running, that will set these values in the future.
*
* @since 2.0.0
*
* @return <code>true</code>, whenever some values of this object have not been computed, yet.
*/
public boolean isPartialResult() {
return partialResult;
}
/**
* If this is <code>true</code>, some values for this object have not been computed and will not be computed. These
* values (like {@link #getLength()}, {@link #getDescription()} or {@link #getCommitDate()})
* will return {@link Optional#empty()} (or {@link OptionalLong#empty()} respectively), unless they are computed.
*
* @since 2.0.0
*
* @return <code>true</code> when some values of this object will never be computed.
*/
public boolean isComputationAborted() {
return computationAborted;
}
//~--- set methods ----------------------------------------------------------
/**
@@ -247,14 +307,14 @@ public class FileObject implements LastModifiedAware, Serializable
}
/**
* Sets the last modified date of the file.
* Sets the commit date of the file.
*
*
* @param lastModified last modified date
* @param commitDate commit date
*/
public void setLastModified(Long lastModified)
public void setCommitDate(Long commitDate)
{
this.lastModified = lastModified;
this.commitDate = commitDate;
}
/**
@@ -263,7 +323,7 @@ public class FileObject implements LastModifiedAware, Serializable
*
* @param length file length
*/
public void setLength(long length)
public void setLength(Long length)
{
this.length = length;
}
@@ -302,22 +362,47 @@ public class FileObject implements LastModifiedAware, Serializable
this.subRepository = subRepository;
}
public Collection<FileObject> getChildren() {
return unmodifiableCollection(children);
/**
* Set marker, that some values for this object are not computed, yet.
*
* @since 2.0.0
*
* @param partialResult Set this to <code>true</code>, whenever some values of this object are not computed, yet.
*/
public void setPartialResult(boolean partialResult) {
this.partialResult = partialResult;
}
/**
* Set marker, that computation of some values for this object has been aborted.
*
* @since 2.0.0
*
* @param computationAborted Set this to <code>true</code>, whenever some values of this object are not computed and
* will not be computed in the future.
*/
public void setComputationAborted(boolean computationAborted) {
this.computationAborted = computationAborted;
}
/**
* Set the children for this file.
*
* @param children The new children.
*/
public void setChildren(List<FileObject> children) {
this.children = new ArrayList<>(children);
}
/**
* Adds a child to the list of children.
*
* @param child The additional child.
*/
public void addChild(FileObject child) {
this.children.add(child);
}
public boolean hasChildren() {
return !children.isEmpty();
}
//~--- fields ---------------------------------------------------------------
/** file description */
@@ -326,11 +411,11 @@ public class FileObject implements LastModifiedAware, Serializable
/** directory indicator */
private boolean directory;
/** last modified date */
private Long lastModified;
/** commit date */
private Long commitDate;
/** file length */
private long length;
private Long length;
/** filename */
private String name;
@@ -338,9 +423,16 @@ public class FileObject implements LastModifiedAware, Serializable
/** file path */
private String path;
/** Marker for partial result. */
private boolean partialResult = false;
/** Marker for aborted computation. */
private boolean computationAborted = false;
/** sub repository information */
@XmlElement(name = "subrepository")
private SubRepository subRepository;
/** Children of this file (aka directory). */
private Collection<FileObject> children = new ArrayList<>();
}

View File

@@ -300,6 +300,13 @@ public final class BrowseCommandBuilder
return this;
}
/**
* Replaces the cached {@link BrowserResult} for this repository/request pair with the
* given updated result, unless caching has been disabled for this builder.
*
* @param updatedResult the freshly computed result to store in the cache
*/
private void updateCache(BrowserResult updatedResult) {
if (!disableCache) {
// the cache key couples the repository with the concrete request parameters
CacheKey key = new CacheKey(repository, request);
cache.put(key, updatedResult);
}
}
//~--- inner classes --------------------------------------------------------
/**
@@ -416,5 +423,5 @@ public final class BrowseCommandBuilder
private final Repository repository;
/** request for the command */
private final BrowseCommandRequest request = new BrowseCommandRequest();
private final BrowseCommandRequest request = new BrowseCommandRequest(this::updateCache);
}

View File

@@ -37,6 +37,10 @@ package sonia.scm.repository.spi;
import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import sonia.scm.repository.BrowserResult;
import java.util.function.Consumer;
/**
*
* @author Sebastian Sdorra
@@ -48,6 +52,14 @@ public final class BrowseCommandRequest extends FileBaseCommandRequest
/** Field description */
private static final long serialVersionUID = 7956624623516803183L;
/**
* Creates a request without a cache updater; asynchronously computed values will not be
* written back to any cache.
*/
public BrowseCommandRequest() {
this(null);
}
/**
* Creates a request with a callback that will receive updated {@link BrowserResult}s
* once asynchronously computed values become available.
*
* @param updater consumer invoked with the updated result; may be <code>null</code>
*/
public BrowseCommandRequest(Consumer<BrowserResult> updater) {
this.updater = updater;
}
//~--- methods --------------------------------------------------------------
/**
@@ -220,6 +232,12 @@ public final class BrowseCommandRequest extends FileBaseCommandRequest
return recursive;
}
/**
* Forwards the given updated result to the registered updater (if any), so that caches
* maintained by the API layer can be refreshed with asynchronously computed values.
*
* @param update the updated browser result
*/
public void updateCache(BrowserResult update) {
if (updater != null) {
updater.accept(update);
}
}
//~--- fields ---------------------------------------------------------------
/** disable last commit */
@@ -230,4 +248,8 @@ public final class BrowseCommandRequest extends FileBaseCommandRequest
/** browse file objects recursive */
private boolean recursive = false;
// WARNING / TODO: This field creates a reverse channel from the implementation to the API. This will break
// whenever the API runs in a different process than the SPI (for example to run explicit hosts for git repositories).
private final transient Consumer<BrowserResult> updater;
}

View File

@@ -0,0 +1,83 @@
package sonia.scm.repository.spi;
import java.util.function.Consumer;
/**
* Tasks submitted to this executor will be run synchronously up to a given time, after which they will be queued and
* processed asynchronously. After a maximum amount of time consumed by these tasks, they will be skipped. Note that
* this only works for short-living tasks.
* <p>
* Get instances of this using a {@link SyncAsyncExecutorProvider}.
*/
public interface SyncAsyncExecutor {
/**
* Execute the given task (either synchronously or asynchronously). If this task is skipped due to
* timeouts, nothing will be done.
*
* @param task The {@link Runnable} to be executed.
* @return Either {@link ExecutionType#SYNCHRONOUS} when the given {@link Runnable} has been executed immediately or
* {@link ExecutionType#ASYNCHRONOUS}, when the task was queued to be executed asynchronously in the future.
*/
default ExecutionType execute(Runnable task) {
return execute(
ignored -> task.run(),
() -> {}
);
}
/**
* Execute the given <code>task</code> (either synchronously or asynchronously). If this task is
* skipped due to timeouts, the <code>abortionFallback</code> will be called.
*
* @param task The {@link Runnable} to be executed.
* @param abortionFallback This will only be run, when this and all remaining tasks are aborted. This task should
* only consume a negligible amount of time.
* @return Either {@link ExecutionType#SYNCHRONOUS} when the given {@link Runnable} has been executed immediately or
* {@link ExecutionType#ASYNCHRONOUS}, when the task was queued to be executed asynchronously in the future.
*/
default ExecutionType execute(Runnable task, Runnable abortionFallback) {
return execute(ignored -> task.run(), abortionFallback);
}
/**
* Execute the given <code>task</code> (either synchronously or asynchronously). If this task is skipped due to
* timeouts, nothing will be done.
*
* @param task The {@link Consumer} to be executed. The parameter given to this is either
* {@link ExecutionType#SYNCHRONOUS} when the given {@link Consumer} is executed immediately
* or {@link ExecutionType#ASYNCHRONOUS}, when the task had been queued and now is executed
* asynchronously.
* @return Either {@link ExecutionType#SYNCHRONOUS} when the given {@link Runnable} has been executed immediately or
* {@link ExecutionType#ASYNCHRONOUS}, when the task was queued to be executed asynchronously in the future.
*/
default ExecutionType execute(Consumer<ExecutionType> task) {
return execute(task, () -> {});
}
/**
* Execute the given <code>task</code> (either synchronously or asynchronously). If this task is
* skipped due to timeouts, the <code>abortionFallback</code> will be called.
*
* @param task The {@link Consumer} to be executed. The parameter given to this is either
* {@link ExecutionType#SYNCHRONOUS} when the given {@link Consumer} is executed immediately
* or {@link ExecutionType#ASYNCHRONOUS}, when the task had been queued and now is executed
* asynchronously.
* @param abortionFallback This will only be run, when this and all remaining tasks are aborted. This task should
* only consume a negligible amount of time.
* @return Either {@link ExecutionType#SYNCHRONOUS} when the given {@link Runnable} has been executed immediately or
* {@link ExecutionType#ASYNCHRONOUS}, when the task was queued to be executed asynchronously in the future.
*/
ExecutionType execute(Consumer<ExecutionType> task, Runnable abortionFallback);
/**
* When all submitted tasks have been executed synchronously, this will return <code>true</code>. If at least one task
* has been enqueued to be executed asynchronously, this returns <code>false</code> (even when none of the enqueued
* tasks have been run, yet).
*
* @return <code>true</code> iff every task submitted so far was executed synchronously.
*/
boolean hasExecutedAllSynchronously();
/**
* Tells a task (and the caller of {@link #execute(Runnable)}) whether it was run directly
* in the submitting thread ({@link #SYNCHRONOUS}) or from the queue ({@link #ASYNCHRONOUS}).
*/
enum ExecutionType {
SYNCHRONOUS, ASYNCHRONOUS
}
}

View File

@@ -0,0 +1,56 @@
package sonia.scm.repository.spi;
/**
* Use this provider to get {@link SyncAsyncExecutor} instances to execute a number of normally short-lived tasks, that
* should be run asynchronously (or even be skipped) whenever they take too long in summary.
* <p>
* The goal of this is a "best effort" approach: The submitted tasks are run immediately when they are submitted, unless
* a given timespan (<code>switchToAsyncInSeconds</code>) has passed. From this moment on the tasks are put into a queue to be
* processed asynchronously. If even then they take too long and their accumulated asynchronous runtime exceeds another
* limit (<code>maxAsyncAbortSeconds</code>), the tasks are skipped.
* <p>
* Note that whenever a task has been started either synchronously or asynchronously it will neither be terminated nor
* switched from foreground to background execution, so this will only work well for short-living tasks. A long running
* task can still block this for longer than the configured amount of seconds.
*/
public interface SyncAsyncExecutorProvider {
/** Default number of seconds tasks are run synchronously before switching to asynchronous execution. */
int DEFAULT_SWITCH_TO_ASYNC_IN_SECONDS = 2;
/**
* Creates a {@link SyncAsyncExecutor} that will run tasks synchronously for
* {@link #DEFAULT_SWITCH_TO_ASYNC_IN_SECONDS} seconds. The limit of asynchronous runtime is implementation dependent.
*
* @return The executor.
*/
default SyncAsyncExecutor createExecutorWithDefaultTimeout() {
return createExecutorWithSecondsToTimeout(DEFAULT_SWITCH_TO_ASYNC_IN_SECONDS);
}
/**
* Creates a {@link SyncAsyncExecutor} that will run tasks synchronously for
* <code>switchToAsyncInSeconds</code> seconds. The limit of asynchronous runtime is implementation dependent.
*
* @param switchToAsyncInSeconds The amount of seconds submitted tasks will be run synchronously. After this time,
* further tasks will be run asynchronously. To run all tasks asynchronously no matter
* what, set this to <code>0</code>.
* @return The executor.
*/
SyncAsyncExecutor createExecutorWithSecondsToTimeout(int switchToAsyncInSeconds);
/**
* Creates a {@link SyncAsyncExecutor} that will run tasks synchronously for
* <code>switchToAsyncInSeconds</code> seconds and will abort tasks after they ran
* <code>maxAsyncAbortSeconds</code> asynchronously.
*
* @param switchToAsyncInSeconds The amount of seconds submitted tasks will be run synchronously. After this time,
* further tasks will be run asynchronously. To run all tasks asynchronously no matter
* what, set this to <code>0</code>.
* @param maxAsyncAbortSeconds The amount of seconds, tasks that were started asynchronously may run in summary
* before remaining tasks will not be executed at all anymore. To abort all tasks that
* are submitted after <code>switchToAsyncInSeconds</code> immediately, set this to
* <code>0</code>.
* @return The executor.
*/
SyncAsyncExecutor createExecutorWithSecondsToTimeout(int switchToAsyncInSeconds, int maxAsyncAbortSeconds);
}

View File

@@ -745,6 +745,10 @@ public final class GitUtil
public static Optional<LfsPointer> getLfsPointer(org.eclipse.jgit.lib.Repository repo, String path, RevCommit commit, TreeWalk treeWalk) throws IOException {
Attributes attributes = LfsFactory.getAttributesForPath(repo, path, commit);
return getLfsPointer(repo, treeWalk, attributes);
}
public static Optional<LfsPointer> getLfsPointer(org.eclipse.jgit.lib.Repository repo, TreeWalk treeWalk, Attributes attributes) throws IOException {
Attribute filter = attributes.get("filter");
if (filter != null && "lfs".equals(filter.getValue())) {
ObjectId blobId = treeWalk.getObjectId(0);

View File

@@ -35,9 +35,11 @@ package sonia.scm.repository.spi;
//~--- non-JDK imports --------------------------------------------------------
import com.google.common.base.Stopwatch;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.eclipse.jgit.attributes.Attributes;
import org.eclipse.jgit.lfs.LfsPointer;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
@@ -49,6 +51,7 @@ import org.eclipse.jgit.treewalk.TreeWalk;
import org.eclipse.jgit.treewalk.filter.AndTreeFilter;
import org.eclipse.jgit.treewalk.filter.PathFilter;
import org.eclipse.jgit.treewalk.filter.TreeFilter;
import org.eclipse.jgit.util.LfsFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import sonia.scm.NotFoundException;
@@ -56,6 +59,7 @@ import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.GitSubModuleParser;
import sonia.scm.repository.GitUtil;
import sonia.scm.repository.InternalRepositoryException;
import sonia.scm.repository.Repository;
import sonia.scm.repository.SubRepository;
import sonia.scm.store.Blob;
@@ -69,10 +73,13 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import static java.util.Optional.empty;
import static java.util.Optional.of;
import static sonia.scm.ContextEntry.ContextBuilder.entity;
import static sonia.scm.NotFoundException.notFound;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.ASYNCHRONOUS;
//~--- JDK imports ------------------------------------------------------------
@@ -90,71 +97,56 @@ public class GitBrowseCommand extends AbstractGitCommand
/**
* the logger for GitBrowseCommand
*/
private static final Logger logger =
LoggerFactory.getLogger(GitBrowseCommand.class);
private static final Logger logger = LoggerFactory.getLogger(GitBrowseCommand.class);
/** sub repository cache */
private final Map<ObjectId, Map<String, SubRepository>> subrepositoryCache = Maps.newHashMap();
private final Object asyncMonitor = new Object();
private final LfsBlobStoreFactory lfsBlobStoreFactory;
//~--- constructors ---------------------------------------------------------
private final SyncAsyncExecutor executor;
/**
* Constructs ...
* @param context
* @param repository
* @param lfsBlobStoreFactory
*/
public GitBrowseCommand(GitContext context, Repository repository, LfsBlobStoreFactory lfsBlobStoreFactory)
{
private BrowserResult browserResult;
public GitBrowseCommand(GitContext context, Repository repository, LfsBlobStoreFactory lfsBlobStoreFactory, SyncAsyncExecutor executor) {
super(context, repository);
this.lfsBlobStoreFactory = lfsBlobStoreFactory;
this.executor = executor;
}
//~--- get methods ----------------------------------------------------------
@Override
@SuppressWarnings("unchecked")
public BrowserResult getBrowserResult(BrowseCommandRequest request)
throws IOException {
logger.debug("try to create browse result for {}", request);
BrowserResult result;
org.eclipse.jgit.lib.Repository repo = open();
ObjectId revId;
ObjectId revId = computeRevIdToBrowse(request, repo);
if (Util.isEmpty(request.getRevision()))
{
revId = getDefaultBranch(repo);
if (revId != null) {
browserResult = new BrowserResult(revId.getName(), request.getRevision(), getEntry(repo, request, revId));
return browserResult;
} else {
logger.warn("could not find head of repository {}, empty?", repository.getNamespaceAndName());
return new BrowserResult(Constants.HEAD, request.getRevision(), createEmptyRoot());
}
else
{
revId = GitUtil.getRevisionId(repo, request.getRevision());
}
if (revId != null)
{
result = new BrowserResult(revId.getName(), request.getRevision(), getEntry(repo, request, revId));
}
else
{
if (Util.isNotEmpty(request.getRevision()))
{
private ObjectId computeRevIdToBrowse(BrowseCommandRequest request, org.eclipse.jgit.lib.Repository repo) throws IOException {
if (Util.isEmpty(request.getRevision())) {
return getDefaultBranch(repo);
} else {
ObjectId revId = GitUtil.getRevisionId(repo, request.getRevision());
if (revId == null) {
logger.error("could not find revision {}", request.getRevision());
throw notFound(entity("Revision", request.getRevision()).in(this.repository));
}
else if (logger.isWarnEnabled())
{
logger.warn("could not find head of repository, empty?");
return revId;
}
}
result = new BrowserResult(Constants.HEAD, request.getRevision(), createEmtpyRoot());
}
return result;
}
//~--- methods --------------------------------------------------------------
private FileObject createEmtpyRoot() {
private FileObject createEmptyRoot() {
FileObject fileObject = new FileObject();
fileObject.setName("");
fileObject.setPath("");
@@ -162,18 +154,6 @@ public class GitBrowseCommand extends AbstractGitCommand
return fileObject;
}
/**
* Method description
*
* @param repo
* @param request
* @param revId
* @param treeWalk
*
* @return
*
* @throws IOException
*/
private FileObject createFileObject(org.eclipse.jgit.lib.Repository repo,
BrowseCommandRequest request, ObjectId revId, TreeWalk treeWalk)
throws IOException {
@@ -207,128 +187,63 @@ public class GitBrowseCommand extends AbstractGitCommand
// don't show message and date for directories to improve performance
if (!file.isDirectory() &&!request.isDisableLastCommit())
{
logger.trace("fetch last commit for {} at {}", path, revId.getName());
RevCommit commit = getLatestCommit(repo, revId, path);
Optional<LfsPointer> lfsPointer = commit == null? empty(): GitUtil.getLfsPointer(repo, path, commit, treeWalk);
file.setPartialResult(true);
RevCommit commit;
try (RevWalk walk = new RevWalk(repo)) {
commit = walk.parseCommit(revId);
}
Optional<LfsPointer> lfsPointer = getLfsPointer(repo, path, commit, treeWalk);
if (lfsPointer.isPresent()) {
BlobStore lfsBlobStore = lfsBlobStoreFactory.getLfsBlobStore(repository);
String oid = lfsPointer.get().getOid().getName();
Blob blob = lfsBlobStore.get(oid);
if (blob == null) {
logger.error("lfs blob for lob id {} not found in lfs store of repository {}", oid, repository.getNamespaceAndName());
file.setLength(-1);
} else {
file.setLength(blob.getSize());
}
setFileLengthFromLfsBlob(lfsPointer.get(), file);
} else {
file.setLength(loader.getSize());
}
if (commit != null)
{
file.setLastModified(GitUtil.getCommitTime(commit));
file.setDescription(commit.getShortMessage());
}
else if (logger.isWarnEnabled())
{
logger.warn("could not find latest commit for {} on {}", path,
revId);
}
executor.execute(
new CompleteFileInformation(path, revId, repo, file, request),
new AbortFileInformation(request)
);
}
}
return file;
}
//~--- get methods ----------------------------------------------------------
/**
* Method description
*
*
*
* @param repo
* @param revId
* @param path
*
* @return
*/
private RevCommit getLatestCommit(org.eclipse.jgit.lib.Repository repo,
ObjectId revId, String path)
{
RevCommit result = null;
RevWalk walk = null;
try
{
walk = new RevWalk(repo);
walk.setTreeFilter(AndTreeFilter.create(PathFilter.create(path),
TreeFilter.ANY_DIFF));
RevCommit commit = walk.parseCommit(revId);
walk.markStart(commit);
result = Util.getFirst(walk);
}
catch (IOException ex)
{
logger.error("could not parse commit for file", ex);
}
finally
{
GitUtil.release(walk);
}
return result;
private void updateCache(BrowseCommandRequest request) {
request.updateCache(browserResult);
logger.info("updated browser result for repository {}", repository.getNamespaceAndName());
}
private FileObject getEntry(org.eclipse.jgit.lib.Repository repo, BrowseCommandRequest request, ObjectId revId) throws IOException {
RevWalk revWalk = null;
TreeWalk treeWalk = null;
FileObject result;
try {
try (RevWalk revWalk = new RevWalk(repo); TreeWalk treeWalk = new TreeWalk(repo)) {
logger.debug("load repository browser for revision {}", revId.name());
treeWalk = new TreeWalk(repo);
if (!isRootRequest(request)) {
treeWalk.setFilter(PathFilter.create(request.getPath()));
}
revWalk = new RevWalk(repo);
RevTree tree = revWalk.parseTree(revId);
if (tree != null)
{
if (tree != null) {
treeWalk.addTree(tree);
}
else
{
} else {
throw new IllegalStateException("could not find tree for " + revId.name());
}
if (isRootRequest(request)) {
result = createEmtpyRoot();
FileObject result = createEmptyRoot();
findChildren(result, repo, request, revId, treeWalk);
return result;
} else {
result = findFirstMatch(repo, request, revId, treeWalk);
FileObject result = findFirstMatch(repo, request, revId, treeWalk);
if ( result.isDirectory() ) {
treeWalk.enterSubtree();
findChildren(result, repo, request, revId, treeWalk);
}
}
}
finally
{
GitUtil.release(revWalk);
GitUtil.release(treeWalk);
}
return result;
}
}
}
private boolean isRootRequest(BrowseCommandRequest request) {
return Strings.isNullOrEmpty(request.getPath()) || "/".equals(request.getPath());
@@ -384,56 +299,144 @@ public class GitBrowseCommand extends AbstractGitCommand
throw notFound(entity("File", request.getPath()).in("Revision", revId.getName()).in(this.repository));
}
@SuppressWarnings("unchecked")
private Map<String,
SubRepository> getSubRepositories(org.eclipse.jgit.lib.Repository repo,
ObjectId revision)
private Map<String, SubRepository> getSubRepositories(org.eclipse.jgit.lib.Repository repo, ObjectId revision)
throws IOException {
if (logger.isDebugEnabled())
{
logger.debug("read submodules of {} at {}", repository.getName(),
revision);
}
Map<String, SubRepository> subRepositories;
try ( ByteArrayOutputStream baos = new ByteArrayOutputStream() )
{
logger.debug("read submodules of {} at {}", repository.getName(), revision);
try ( ByteArrayOutputStream baos = new ByteArrayOutputStream() ) {
new GitCatCommand(context, repository, lfsBlobStoreFactory).getContent(repo, revision,
PATH_MODULES, baos);
subRepositories = GitSubModuleParser.parse(baos.toString());
return GitSubModuleParser.parse(baos.toString());
} catch (NotFoundException ex) {
logger.trace("could not find .gitmodules: {}", ex.getMessage());
return Collections.emptyMap();
}
catch (NotFoundException ex)
{
logger.trace("could not find .gitmodules", ex);
subRepositories = Collections.EMPTY_MAP;
}
return subRepositories;
}
private SubRepository getSubRepository(org.eclipse.jgit.lib.Repository repo,
ObjectId revId, String path)
private SubRepository getSubRepository(org.eclipse.jgit.lib.Repository repo, ObjectId revId, String path)
throws IOException {
Map<String, SubRepository> subRepositories = subrepositoryCache.get(revId);
if (subRepositories == null)
{
if (subRepositories == null) {
subRepositories = getSubRepositories(repo, revId);
subrepositoryCache.put(revId, subRepositories);
}
SubRepository sub = null;
if (subRepositories != null)
{
sub = subRepositories.get(path);
if (subRepositories != null) {
return subRepositories.get(path);
}
return null;
}
return sub;
private Optional<LfsPointer> getLfsPointer(org.eclipse.jgit.lib.Repository repo, String path, RevCommit commit, TreeWalk treeWalk) {
try {
Attributes attributes = LfsFactory.getAttributesForPath(repo, path, commit);
return GitUtil.getLfsPointer(repo, treeWalk, attributes);
} catch (IOException e) {
throw new InternalRepositoryException(repository, "could not read lfs pointer", e);
}
}
//~--- fields ---------------------------------------------------------------
private void setFileLengthFromLfsBlob(LfsPointer lfsPointer, FileObject file) {
BlobStore lfsBlobStore = lfsBlobStoreFactory.getLfsBlobStore(repository);
String oid = lfsPointer.getOid().getName();
Blob blob = lfsBlobStore.get(oid);
if (blob == null) {
logger.error("lfs blob for lob id {} not found in lfs store of repository {}", oid, repository.getNamespaceAndName());
file.setLength(null);
} else {
file.setLength(blob.getSize());
}
}
/** sub repository cache */
private final Map<ObjectId, Map<String, SubRepository>> subrepositoryCache = Maps.newHashMap();
/**
* Task that loads the latest commit touching {@link #path} and applies its metadata
* (commit date and short message) to the corresponding {@link FileObject}. Instances are
* submitted to the {@link SyncAsyncExecutor}, so this may run synchronously or asynchronously.
*/
private class CompleteFileInformation implements Consumer<SyncAsyncExecutor.ExecutionType> {
private final String path;
private final ObjectId revId;
private final org.eclipse.jgit.lib.Repository repo;
private final FileObject file;
private final BrowseCommandRequest request;
public CompleteFileInformation(String path, ObjectId revId, org.eclipse.jgit.lib.Repository repo, FileObject file, BrowseCommandRequest request) {
this.path = path;
this.revId = revId;
this.repo = repo;
this.file = file;
this.request = request;
}
@Override
public void accept(SyncAsyncExecutor.ExecutionType executionType) {
logger.trace("fetch last commit for {} at {}", path, revId.getName());
Stopwatch sw = Stopwatch.createStarted();
Optional<RevCommit> commit = getLatestCommit(repo, revId, path);
// synchronize with AbortFileInformation, which flips the partial/aborted flags
synchronized (asyncMonitor) {
file.setPartialResult(false);
if (commit.isPresent()) {
applyValuesFromCommit(executionType, commit.get());
} else {
logger.warn("could not find latest commit for {} on {}", path, revId);
}
}
logger.trace("finished loading of last commit {} of {} in {}", revId.getName(), path, sw.stop());
}
/**
* Walks the history starting at <code>revId</code>, restricted to commits touching
* <code>path</code>, and returns the first (latest) matching commit, if any.
*/
private Optional<RevCommit> getLatestCommit(org.eclipse.jgit.lib.Repository repo,
ObjectId revId, String path) {
try (RevWalk walk = new RevWalk(repo)) {
walk.setTreeFilter(AndTreeFilter.create(TreeFilter.ANY_DIFF, PathFilter.create(path)));
RevCommit commit = walk.parseCommit(revId);
walk.markStart(commit);
// Util.getFirst may return null for an empty walk; Optional.of(null) would throw an
// NPE and crash the task instead of reaching the "could not find latest commit" warning
return Optional.ofNullable(Util.getFirst(walk));
} catch (IOException ex) {
logger.error("could not parse commit for file", ex);
return empty();
}
}
private void applyValuesFromCommit(SyncAsyncExecutor.ExecutionType executionType, RevCommit commit) {
file.setCommitDate(GitUtil.getCommitTime(commit));
file.setDescription(commit.getShortMessage());
// only push an updated result into the cache when we completed asynchronously,
// i.e. after the (partial) result has already been returned to the caller
if (executionType == ASYNCHRONOUS && browserResult != null) {
updateCache(request);
}
}
}
/**
* Fallback that runs when queued tasks are aborted by the executor: marks every file that
* is still a partial result as aborted and, if anything changed, pushes the updated result
* into the cache.
*/
private class AbortFileInformation implements Runnable {
private final BrowseCommandRequest request;
public AbortFileInformation(BrowseCommandRequest request) {
this.request = request;
}
@Override
public void run() {
synchronized (asyncMonitor) {
// browserResult may not have been assigned yet when the abort fires very early
// (e.g. with a zero async timeout) — in that case there is nothing to mark or cache
if (browserResult != null && markPartialAsAborted(browserResult.getFile())) {
updateCache(request);
}
}
}
/**
* Recursively marks all partial results in the given file tree as aborted.
*
* @return <code>true</code> when at least one file was changed
*/
private boolean markPartialAsAborted(FileObject file) {
boolean changed = false;
if (file.isPartialResult()) {
file.setPartialResult(false);
file.setComputationAborted(true);
changed = true;
}
// FileObject.getChildren may return null (it mirrors an unset children collection)
if (file.getChildren() != null) {
for (FileObject child : file.getChildren()) {
changed |= markPartialAsAborted(child);
}
}
return changed;
}
}
}

View File

@@ -80,12 +80,13 @@ public class GitRepositoryServiceProvider extends RepositoryServiceProvider
//~--- constructors ---------------------------------------------------------
public GitRepositoryServiceProvider(GitRepositoryHandler handler, Repository repository, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory, HookContextFactory hookContextFactory, ScmEventBus eventBus) {
public GitRepositoryServiceProvider(GitRepositoryHandler handler, Repository repository, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory, HookContextFactory hookContextFactory, ScmEventBus eventBus, SyncAsyncExecutorProvider executorProvider) {
this.handler = handler;
this.repository = repository;
this.lfsBlobStoreFactory = lfsBlobStoreFactory;
this.hookContextFactory = hookContextFactory;
this.eventBus = eventBus;
this.executorProvider = executorProvider;
this.context = new GitContext(handler.getDirectory(repository.getId()), repository, storeProvider);
}
@@ -150,7 +151,7 @@ public class GitRepositoryServiceProvider extends RepositoryServiceProvider
@Override
public BrowseCommand getBrowseCommand()
{
return new GitBrowseCommand(context, repository, lfsBlobStoreFactory);
return new GitBrowseCommand(context, repository, lfsBlobStoreFactory, executorProvider.createExecutorWithDefaultTimeout());
}
/**
@@ -301,4 +302,6 @@ public class GitRepositoryServiceProvider extends RepositoryServiceProvider
private final HookContextFactory hookContextFactory;
private final ScmEventBus eventBus;
private final SyncAsyncExecutorProvider executorProvider;
}

View File

@@ -55,14 +55,16 @@ public class GitRepositoryServiceResolver implements RepositoryServiceResolver {
private final LfsBlobStoreFactory lfsBlobStoreFactory;
private final HookContextFactory hookContextFactory;
private final ScmEventBus eventBus;
private final SyncAsyncExecutorProvider executorProvider;
@Inject
public GitRepositoryServiceResolver(GitRepositoryHandler handler, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory, HookContextFactory hookContextFactory, ScmEventBus eventBus) {
public GitRepositoryServiceResolver(GitRepositoryHandler handler, GitRepositoryConfigStoreProvider storeProvider, LfsBlobStoreFactory lfsBlobStoreFactory, HookContextFactory hookContextFactory, ScmEventBus eventBus, SyncAsyncExecutorProvider executorProvider) {
this.handler = handler;
this.storeProvider = storeProvider;
this.lfsBlobStoreFactory = lfsBlobStoreFactory;
this.hookContextFactory = hookContextFactory;
this.eventBus = eventBus;
this.executorProvider = executorProvider;
}
@Override
@@ -70,7 +72,7 @@ public class GitRepositoryServiceResolver implements RepositoryServiceResolver {
GitRepositoryServiceProvider provider = null;
if (GitRepositoryHandler.TYPE_NAME.equalsIgnoreCase(repository.getType())) {
provider = new GitRepositoryServiceProvider(handler, repository, storeProvider, lfsBlobStoreFactory, hookContextFactory, eventBus);
provider = new GitRepositoryServiceProvider(handler, repository, storeProvider, lfsBlobStoreFactory, hookContextFactory, eventBus, executorProvider);
}
return provider;

View File

@@ -35,15 +35,21 @@ import org.junit.Test;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.GitRepositoryConfig;
import sonia.scm.repository.spi.SyncAsyncExecutors.AsyncExecutorStepper;
import java.io.IOException;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static sonia.scm.repository.spi.SyncAsyncExecutors.stepperAsynchronousExecutor;
import static sonia.scm.repository.spi.SyncAsyncExecutors.synchronousExecutor;
/**
* Unit tests for {@link GitBrowseCommand}.
@@ -102,15 +108,55 @@ public class GitBrowseCommandTest extends AbstractGitCommandTestBase {
assertFalse(a.isDirectory());
assertEquals("a.txt", a.getName());
assertEquals("a.txt", a.getPath());
assertEquals("added new line for blame", a.getDescription());
assertTrue(a.getLength() > 0);
checkDate(a.getLastModified());
assertEquals("added new line for blame", a.getDescription().get());
assertTrue(a.getLength().getAsLong() > 0);
checkDate(a.getCommitDate().getAsLong());
assertTrue(c.isDirectory());
assertEquals("c", c.getName());
assertEquals("c", c.getPath());
}
@Test
public void testAsynchronousBrowse() throws IOException {
try (AsyncExecutorStepper executor = stepperAsynchronousExecutor()) {
GitBrowseCommand command = new GitBrowseCommand(createContext(), repository, null, executor);
List<BrowserResult> updatedResults = new LinkedList<>();
BrowseCommandRequest request = new BrowseCommandRequest(updatedResults::add);
FileObject root = command.getBrowserResult(request).getFile();
assertNotNull(root);
Collection<FileObject> foList = root.getChildren();
FileObject a = findFile(foList, "a.txt");
FileObject b = findFile(foList, "b.txt");
assertTrue(a.isPartialResult());
assertFalse("expected empty name before commit could have been read", a.getDescription().isPresent());
assertFalse("expected empty date before commit could have been read", a.getCommitDate().isPresent());
assertTrue(b.isPartialResult());
assertFalse("expected empty name before commit could have been read", b.getDescription().isPresent());
assertFalse("expected empty date before commit could have been read", b.getCommitDate().isPresent());
executor.next();
assertEquals(1, updatedResults.size());
assertFalse(a.isPartialResult());
assertNotNull("expected correct name after commit could have been read", a.getDescription());
assertTrue("expected correct date after commit could have been read", a.getCommitDate().isPresent());
assertTrue(b.isPartialResult());
assertFalse("expected empty name before commit could have been read", b.getDescription().isPresent());
assertFalse("expected empty date before commit could have been read", b.getCommitDate().isPresent());
executor.next();
assertEquals(2, updatedResults.size());
assertFalse(b.isPartialResult());
assertNotNull("expected correct name after commit could have been read", b.getDescription());
assertTrue("expected correct date after commit could have been read", b.getCommitDate().isPresent());
}
}
@Test
public void testBrowseSubDirectory() throws IOException {
BrowseCommandRequest request = new BrowseCommandRequest();
@@ -129,20 +175,20 @@ public class GitBrowseCommandTest extends AbstractGitCommandTestBase {
assertFalse(d.isDirectory());
assertEquals("d.txt", d.getName());
assertEquals("c/d.txt", d.getPath());
assertEquals("added file d and e in folder c", d.getDescription());
assertTrue(d.getLength() > 0);
checkDate(d.getLastModified());
assertEquals("added file d and e in folder c", d.getDescription().get());
assertTrue(d.getLength().getAsLong() > 0);
checkDate(d.getCommitDate().getAsLong());
assertFalse(e.isDirectory());
assertEquals("e.txt", e.getName());
assertEquals("c/e.txt", e.getPath());
assertEquals("added file d and e in folder c", e.getDescription());
assertTrue(e.getLength() > 0);
checkDate(e.getLastModified());
assertEquals("added file d and e in folder c", e.getDescription().get());
assertTrue(e.getLength().getAsLong() > 0);
checkDate(e.getCommitDate().getAsLong());
}
@Test
public void testRecusive() throws IOException {
public void testRecursive() throws IOException {
BrowseCommandRequest request = new BrowseCommandRequest();
request.setRecursive(true);
@@ -171,6 +217,6 @@ public class GitBrowseCommandTest extends AbstractGitCommandTestBase {
}
private GitBrowseCommand createCommand() {
return new GitBrowseCommand(createContext(), repository, null);
return new GitBrowseCommand(createContext(), repository, null, synchronousExecutor());
}
}

View File

@@ -231,13 +231,13 @@ public class HgFileviewCommand extends AbstractCommand
file.setName(getNameFromPath(path));
file.setPath(path);
file.setDirectory(false);
file.setLength(stream.decimalIntUpTo(' '));
file.setLength((long) stream.decimalIntUpTo(' '));
DateTime timestamp = stream.dateTimeUpTo(' ');
String description = stream.textUpTo('\0');
if (!disableLastCommit) {
file.setLastModified(timestamp.getDate().getTime());
file.setCommitDate(timestamp.getDate().getTime());
file.setDescription(description);
}

View File

@@ -61,7 +61,7 @@ public class HgBrowseCommandTest extends AbstractHgCommandTestBase {
FileObject file = new HgBrowseCommand(cmdContext, repository).getBrowserResult(request).getFile();
assertEquals("a.txt", file.getName());
assertFalse(file.isDirectory());
assertTrue(file.getChildren().isEmpty());
assertTrue(file.getChildren() == null || file.getChildren().isEmpty());
}
@Test
@@ -73,9 +73,9 @@ public class HgBrowseCommandTest extends AbstractHgCommandTestBase {
assertFalse(a.isDirectory());
assertEquals("a.txt", a.getName());
assertEquals("a.txt", a.getPath());
assertEquals("added new line for blame", a.getDescription());
assertTrue(a.getLength() > 0);
checkDate(a.getLastModified());
assertEquals("added new line for blame", a.getDescription().get());
assertTrue(a.getLength().getAsLong() > 0);
checkDate(a.getCommitDate().getAsLong());
assertTrue(c.isDirectory());
assertEquals("c", c.getName());
assertEquals("c", c.getPath());
@@ -132,16 +132,16 @@ public class HgBrowseCommandTest extends AbstractHgCommandTestBase {
assertFalse(d.isDirectory());
assertEquals("d.txt", d.getName());
assertEquals("c/d.txt", d.getPath());
assertEquals("added file d and e in folder c", d.getDescription());
assertTrue(d.getLength() > 0);
checkDate(d.getLastModified());
assertEquals("added file d and e in folder c", d.getDescription().get());
assertTrue(d.getLength().getAsLong() > 0);
checkDate(d.getCommitDate().getAsLong());
assertNotNull(e);
assertFalse(e.isDirectory());
assertEquals("e.txt", e.getName());
assertEquals("c/e.txt", e.getPath());
assertEquals("added file d and e in folder c", e.getDescription());
assertTrue(e.getLength() > 0);
checkDate(e.getLastModified());
assertEquals("added file d and e in folder c", e.getDescription().get());
assertTrue(e.getLength().getAsLong() > 0);
checkDate(e.getCommitDate().getAsLong());
}
@Test
@@ -154,8 +154,8 @@ public class HgBrowseCommandTest extends AbstractHgCommandTestBase {
FileObject a = getFileObject(foList, "a.txt");
assertNull(a.getDescription());
assertNull(a.getLastModified());
assertFalse(a.getDescription().isPresent());
assertFalse(a.getCommitDate().isPresent());
}
@Test

View File

@@ -173,7 +173,7 @@ public class SvnBrowseCommand extends AbstractSvnCommand
{
if (entry.getDate() != null)
{
fileObject.setLastModified(entry.getDate().getTime());
fileObject.setCommitDate(entry.getDate().getTime());
}
fileObject.setDescription(entry.getCommitMessage());

View File

@@ -60,7 +60,7 @@ public class SvnBrowseCommandTest extends AbstractSvnCommandTestBase
FileObject file = createCommand().getBrowserResult(request).getFile();
assertEquals("a.txt", file.getName());
assertFalse(file.isDirectory());
assertTrue(file.getChildren().isEmpty());
assertTrue(file.getChildren() == null || file.getChildren().isEmpty());
}
@Test
@@ -73,9 +73,9 @@ public class SvnBrowseCommandTest extends AbstractSvnCommandTestBase
assertFalse(a.isDirectory());
assertEquals("a.txt", a.getName());
assertEquals("a.txt", a.getPath());
assertEquals("added line for blame test", a.getDescription());
assertTrue(a.getLength() > 0);
checkDate(a.getLastModified());
assertEquals("added line for blame test", a.getDescription().get());
assertTrue(a.getLength().getAsLong() > 0);
checkDate(a.getCommitDate().getAsLong());
assertTrue(c.isDirectory());
assertEquals("c", c.getName());
assertEquals("c", c.getPath());
@@ -122,16 +122,16 @@ public class SvnBrowseCommandTest extends AbstractSvnCommandTestBase
assertFalse(d.isDirectory());
assertEquals("d.txt", d.getName());
assertEquals("c/d.txt", d.getPath());
assertEquals("added d and e in folder c", d.getDescription());
assertTrue(d.getLength() > 0);
checkDate(d.getLastModified());
assertEquals("added d and e in folder c", d.getDescription().get());
assertTrue(d.getLength().getAsLong() > 0);
checkDate(d.getCommitDate().getAsLong());
assertNotNull(e);
assertFalse(e.isDirectory());
assertEquals("e.txt", e.getName());
assertEquals("c/e.txt", e.getPath());
assertEquals("added d and e in folder c", e.getDescription());
assertTrue(e.getLength() > 0);
checkDate(e.getLastModified());
assertEquals("added d and e in folder c", e.getDescription().get());
assertTrue(e.getLength().getAsLong() > 0);
checkDate(e.getCommitDate().getAsLong());
}
@Test
@@ -144,8 +144,8 @@ public class SvnBrowseCommandTest extends AbstractSvnCommandTestBase
FileObject a = getFileObject(foList, "a.txt");
assertNull(a.getDescription());
assertNull(a.getLastModified());
assertFalse(a.getDescription().isPresent());
assertFalse(a.getCommitDate().isPresent());
}
@Test

View File

@@ -0,0 +1,107 @@
package sonia.scm.repository.spi;
import java.io.Closeable;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.concurrent.Semaphore;
import java.util.function.Consumer;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.ASYNCHRONOUS;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.SYNCHRONOUS;
/**
 * Factory methods providing {@link SyncAsyncExecutor} implementations for
 * tests: a purely synchronous one, a fire-and-forget asynchronous one, and a
 * steppable asynchronous one that lets a test control task progress.
 */
public final class SyncAsyncExecutors {

  private SyncAsyncExecutors() {
    // utility class; prevent instantiation
  }

  /**
   * Returns an executor that runs each task immediately on the calling thread.
   */
  public static SyncAsyncExecutor synchronousExecutor() {
    return new SyncAsyncExecutor() {
      @Override
      public ExecutionType execute(Consumer<ExecutionType> runnable, Runnable abortionFallback) {
        runnable.accept(SYNCHRONOUS);
        return SYNCHRONOUS;
      }

      @Override
      public boolean hasExecutedAllSynchronously() {
        return true;
      }
    };
  }

  /**
   * Returns an executor that runs each task on a single background thread.
   * The abortion fallback is never invoked by this implementation.
   */
  public static SyncAsyncExecutor asynchronousExecutor() {
    Executor executor = Executors.newSingleThreadExecutor();
    return new SyncAsyncExecutor() {
      @Override
      public ExecutionType execute(Consumer<ExecutionType> runnable, Runnable abortionFallback) {
        executor.execute(() -> runnable.accept(ASYNCHRONOUS));
        return ASYNCHRONOUS;
      }

      @Override
      public boolean hasExecutedAllSynchronously() {
        // NOTE(review): reports true although tasks run asynchronously;
        // kept as-is because existing tests may depend on it — verify intent.
        return true;
      }
    };
  }

  /**
   * Returns an executor whose asynchronous tasks block until the test calls
   * {@link AsyncExecutorStepper#next()}, allowing step-by-step assertions.
   * {@link AsyncExecutorStepper#timeout()} makes all pending and future tasks
   * run their abortion fallback instead.
   */
  public static AsyncExecutorStepper stepperAsynchronousExecutor() {
    return new AsyncExecutorStepper() {
      private final Executor executor = Executors.newSingleThreadExecutor();
      // next() releases enterSemaphore to let one task start, then waits on
      // exitSemaphore until that task has finished
      private final Semaphore enterSemaphore = new Semaphore(0);
      private final Semaphore exitSemaphore = new Semaphore(0);
      // written by the test thread in timeout(), read by the executor thread;
      // volatile guarantees the executor thread sees the update
      private volatile boolean timedOut = false;

      @Override
      public void close() {
        // release "enough" permits to unblock every current and future waiter
        // (MAX_VALUE/2 avoids overflowing the semaphore's permit count)
        enterSemaphore.release(Integer.MAX_VALUE / 2);
        exitSemaphore.release(Integer.MAX_VALUE / 2);
      }

      @Override
      public ExecutionType execute(Consumer<ExecutionType> runnable, Runnable abortionFallback) {
        executor.execute(() -> {
          try {
            enterSemaphore.acquire();
          } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
          }
          if (timedOut) {
            abortionFallback.run();
          } else {
            runnable.accept(ASYNCHRONOUS);
            exitSemaphore.release();
          }
        });
        return ASYNCHRONOUS;
      }

      @Override
      public void next() {
        enterSemaphore.release();
        try {
          exitSemaphore.acquire();
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }
      }

      @Override
      public void timeout() {
        timedOut = true;
        close();
      }

      @Override
      public boolean hasExecutedAllSynchronously() {
        // NOTE(review): reports true although tasks run asynchronously;
        // kept as-is because existing tests may depend on it — verify intent.
        return true;
      }
    };
  }

  /**
   * A {@link SyncAsyncExecutor} for tests that can be advanced one task at a
   * time and be forced into a timeout state.
   */
  public interface AsyncExecutorStepper extends SyncAsyncExecutor, Closeable {
    void next();

    void timeout();
  }
}

View File

@@ -13,9 +13,11 @@ export type File = {
directory: boolean;
description?: string;
revision: string;
length: number;
lastModified?: string;
length?: number;
commitDate?: string;
subRepository?: SubRepository; // TODO
partialResult: boolean;
computationAborted: boolean;
_links: Links;
_embedded: {
children: File[] | null | undefined;

View File

@@ -101,7 +101,9 @@
"length": "Größe",
"lastModified": "Zuletzt bearbeitet",
"description": "Beschreibung",
"branch": "Branch"
"branch": "Branch",
"notYetComputed": "Noch nicht berechnet; der Wert wird in Kürze aktualisiert",
"computationAborted": "Die Berechnung dauert zu lange und wurde abgebrochen"
},
"content": {
"historyButton": "History",

View File

@@ -101,7 +101,9 @@
"length": "Length",
"lastModified": "Last modified",
"description": "Description",
"branch": "Branch"
"branch": "Branch",
"notYetComputed": "Not yet computed, will be updated in a short while",
"computationAborted": "The computation took too long and was aborted"
},
"content": {
"historyButton": "History",

View File

@@ -7,7 +7,7 @@ import styled from "styled-components";
import { binder } from "@scm-manager/ui-extensions";
import { Repository, File } from "@scm-manager/ui-types";
import { ErrorNotification, Loading, Notification } from "@scm-manager/ui-components";
import { getFetchSourcesFailure, isFetchSourcesPending, getSources } from "../modules/sources";
import { getFetchSourcesFailure, isFetchSourcesPending, getSources, fetchSources } from "../modules/sources";
import FileTreeLeaf from "./FileTreeLeaf";
type Props = WithTranslation & {
@@ -19,10 +19,16 @@ type Props = WithTranslation & {
path: string;
baseUrl: string;
updateSources: () => void;
// context props
match: any;
};
type State = {
stoppableUpdateHandler?: number;
};
const FixedWidthTh = styled.th`
width: 16px;
`;
@@ -39,7 +45,28 @@ export function findParent(path: string) {
return "";
}
class FileTree extends React.Component<Props> {
class FileTree extends React.Component<Props, State> {
constructor(props: Props) {
super(props);
this.state = {};
}
componentDidUpdate(prevProps: Readonly<Props>, prevState: Readonly<State>): void {
if (prevState.stoppableUpdateHandler === this.state.stoppableUpdateHandler) {
const { tree, updateSources } = this.props;
if (tree?._embedded?.children && tree._embedded.children.find(c => c.partialResult)) {
const stoppableUpdateHandler = setTimeout(updateSources, 3000);
this.setState({ stoppableUpdateHandler: stoppableUpdateHandler });
}
}
}
componentWillUnmount(): void {
if (this.state.stoppableUpdateHandler) {
clearTimeout(this.state.stoppableUpdateHandler);
}
}
render() {
const { error, loading, tree } = this.props;
@@ -106,7 +133,7 @@ class FileTree extends React.Component<Props> {
<FixedWidthTh />
<th>{t("sources.file-tree.name")}</th>
<th className="is-hidden-mobile">{t("sources.file-tree.length")}</th>
<th className="is-hidden-mobile">{t("sources.file-tree.lastModified")}</th>
<th className="is-hidden-mobile">{t("sources.file-tree.commitDate")}</th>
<th className="is-hidden-touch">{t("sources.file-tree.description")}</th>
{binder.hasExtension("repos.sources.tree.row.right") && <th className="is-hidden-mobile" />}
</tr>
@@ -123,6 +150,14 @@ class FileTree extends React.Component<Props> {
}
}
const mapDispatchToProps = (dispatch: any, ownProps: Props) => {
const { repository, revision, path } = ownProps;
const updateSources = () => dispatch(fetchSources(repository, revision, path, false));
return { updateSources };
};
const mapStateToProps = (state: any, ownProps: Props) => {
const { repository, revision, path } = ownProps;
@@ -141,5 +176,8 @@ const mapStateToProps = (state: any, ownProps: Props) => {
export default compose(
withRouter,
connect(mapStateToProps)
connect(
mapStateToProps,
mapDispatchToProps
)
)(withTranslation("repos")(FileTree));

View File

@@ -4,10 +4,12 @@ import classNames from "classnames";
import styled from "styled-components";
import { binder, ExtensionPoint } from "@scm-manager/ui-extensions";
import { File } from "@scm-manager/ui-types";
import { DateFromNow, FileSize } from "@scm-manager/ui-components";
import { DateFromNow, FileSize, Tooltip } from "@scm-manager/ui-components";
import FileIcon from "./FileIcon";
import { Icon } from "@scm-manager/ui-components/src";
import { WithTranslation, withTranslation } from "react-i18next";
type Props = {
type Props = WithTranslation & {
file: File;
baseUrl: string;
};
@@ -35,7 +37,7 @@ export function createLink(base: string, file: File) {
return link;
}
export default class FileTreeLeaf extends React.Component<Props> {
class FileTreeLeaf extends React.Component<Props> {
createLink = (file: File) => {
return createLink(this.props.baseUrl, file);
};
@@ -62,20 +64,42 @@ export default class FileTreeLeaf extends React.Component<Props> {
return <Link to={this.createLink(file)}>{file.name}</Link>;
};
contentIfPresent = (file: File, attribute: string, content: (file: File) => any) => {
const { t } = this.props;
if (file.hasOwnProperty(attribute)) {
return content(file);
} else if (file.computationAborted) {
return (
<Tooltip location="top" message={t("sources.file-tree.computationAborted")}>
<Icon name={"question-circle"} />
</Tooltip>
);
} else if (file.partialResult) {
return (
<Tooltip location="top" message={t("sources.file-tree.notYetComputed")}>
<Icon name={"hourglass"} />
</Tooltip>
);
} else {
return content(file);
}
};
render() {
const { file } = this.props;
const fileSize = file.directory ? "" : <FileSize bytes={file.length} />;
const renderFileSize = (file: File) => <FileSize bytes={file.length} />;
const renderCommitDate = (file: File) => <DateFromNow date={file.commitDate} />;
return (
<tr>
<td>{this.createFileIcon(file)}</td>
<MinWidthTd className="is-word-break">{this.createFileName(file)}</MinWidthTd>
<NoWrapTd className="is-hidden-mobile">{fileSize}</NoWrapTd>
<td className="is-hidden-mobile">
<DateFromNow date={file.lastModified} />
</td>
<MinWidthTd className={classNames("is-word-break", "is-hidden-touch")}>{file.description}</MinWidthTd>
<NoWrapTd className="is-hidden-mobile">{file.directory ? "" : this.contentIfPresent(file, "length", renderFileSize)}</NoWrapTd>
<td className="is-hidden-mobile">{this.contentIfPresent(file, "commitDate", renderCommitDate)}</td>
<MinWidthTd className={classNames("is-word-break", "is-hidden-touch")}>
{this.contentIfPresent(file, "description", file => file.description)}
</MinWidthTd>
{binder.hasExtension("repos.sources.tree.row.right") && (
<td className="is-hidden-mobile">
{!file.directory && (
@@ -93,3 +117,5 @@ export default class FileTreeLeaf extends React.Component<Props> {
);
}
}
export default withTranslation("repos")(FileTreeLeaf);

View File

@@ -115,7 +115,7 @@ class Content extends React.Component<Props, State> {
showMoreInformation() {
const collapsed = this.state.collapsed;
const { file, revision, t, repository } = this.props;
const date = <DateFromNow date={file.lastModified} />;
const date = <DateFromNow date={file.commitDate} />;
const description = file.description ? (
<p>
{file.description.split("\n").map((item, key) => {

View File

@@ -49,10 +49,8 @@ const collection = {
name: "src",
path: "src",
directory: true,
description: "",
length: 176,
revision: "76aae4bb4ceacf0e88938eb5b6832738b7d537b4",
lastModified: "",
subRepository: undefined,
_links: {
self: {
@@ -71,7 +69,7 @@ const collection = {
description: "bump version",
length: 780,
revision: "76aae4bb4ceacf0e88938eb5b6832738b7d537b4",
lastModified: "2017-07-31T11:17:19Z",
commitDate: "2017-07-31T11:17:19Z",
subRepository: undefined,
_links: {
self: {
@@ -127,7 +125,7 @@ describe("sources fetch", () => {
{
type: FETCH_SOURCES_SUCCESS,
itemId: "scm/core/_/",
payload: collection
payload: { updatePending: false, sources: collection }
}
];
@@ -148,7 +146,7 @@ describe("sources fetch", () => {
{
type: FETCH_SOURCES_SUCCESS,
itemId: "scm/core/abc/src",
payload: collection
payload: { updatePending: false, sources: collection }
}
];
@@ -182,14 +180,14 @@ describe("reducer tests", () => {
it("should store the collection, without revision and path", () => {
const expectedState = {
"scm/core/_/": collection
"scm/core/_/": { updatePending: false, sources: collection }
};
expect(reducer({}, fetchSourcesSuccess(repository, "", "", collection))).toEqual(expectedState);
});
it("should store the collection, with revision and path", () => {
const expectedState = {
"scm/core/abc/src/main": collection
"scm/core/abc/src/main": { updatePending: false, sources: collection }
};
expect(reducer({}, fetchSourcesSuccess(repository, "abc", "src/main", collection))).toEqual(expectedState);
});
@@ -200,7 +198,7 @@ describe("selector tests", () => {
const state = {
sources: {
"scm/core/abc/src/main/package.json": {
noDirectory
sources: {noDirectory}
}
}
};
@@ -223,7 +221,9 @@ describe("selector tests", () => {
it("should return the source collection without revision and path", () => {
const state = {
sources: {
"scm/core/_/": collection
"scm/core/_/": {
sources: collection
}
}
};
expect(getSources(state, repository, "", "")).toBe(collection);
@@ -232,7 +232,9 @@ describe("selector tests", () => {
it("should return the source collection with revision and path", () => {
const state = {
sources: {
"scm/core/abc/src/main": collection
"scm/core/abc/src/main": {
sources: collection
}
}
};
expect(getSources(state, repository, "abc", "src/main")).toBe(collection);

View File

@@ -9,13 +9,25 @@ export const FETCH_SOURCES_PENDING = `${FETCH_SOURCES}_${types.PENDING_SUFFIX}`;
export const FETCH_SOURCES_SUCCESS = `${FETCH_SOURCES}_${types.SUCCESS_SUFFIX}`;
export const FETCH_SOURCES_FAILURE = `${FETCH_SOURCES}_${types.FAILURE_SUFFIX}`;
export function fetchSources(repository: Repository, revision: string, path: string) {
return function(dispatch: any) {
export function fetchSources(repository: Repository, revision: string, path: string, initialLoad = true) {
return function(dispatch: any, getState: () => any) {
const state = getState();
if (
isFetchSourcesPending(state, repository, revision, path) ||
isUpdateSourcePending(state, repository, revision, path)
) {
return;
}
if (initialLoad) {
dispatch(fetchSourcesPending(repository, revision, path));
} else {
dispatch(updateSourcesPending(repository, revision, path, getSources(state, repository, revision, path)));
}
return apiClient
.get(createUrl(repository, revision, path))
.then(response => response.json())
.then(sources => {
.then((sources: File) => {
dispatch(fetchSourcesSuccess(repository, revision, path, sources));
})
.catch(err => {
@@ -42,10 +54,23 @@ export function fetchSourcesPending(repository: Repository, revision: string, pa
};
}
export function updateSourcesPending(
repository: Repository,
revision: string,
path: string,
currentSources: any
): Action {
return {
type: "UPDATE_PENDING",
payload: { updatePending: true, sources: currentSources },
itemId: createItemId(repository, revision, path)
};
}
export function fetchSourcesSuccess(repository: Repository, revision: string, path: string, sources: File) {
return {
type: FETCH_SOURCES_SUCCESS,
payload: sources,
payload: { updatePending: false, sources },
itemId: createItemId(repository, revision, path)
};
}
@@ -72,7 +97,7 @@ export default function reducer(
type: "UNKNOWN"
}
): any {
if (action.itemId && action.type === FETCH_SOURCES_SUCCESS) {
if (action.itemId && (action.type === FETCH_SOURCES_SUCCESS || action.type === "UPDATE_PENDING")) {
return {
...state,
[action.itemId]: action.payload
@@ -99,13 +124,17 @@ export function getSources(
path: string
): File | null | undefined {
if (state.sources) {
return state.sources[createItemId(repository, revision, path)];
return state.sources[createItemId(repository, revision, path)]?.sources;
}
return null;
}
export function isFetchSourcesPending(state: any, repository: Repository, revision: string, path: string): boolean {
return isPending(state, FETCH_SOURCES, createItemId(repository, revision, path));
return state && isPending(state, FETCH_SOURCES, createItemId(repository, revision, path));
}
function isUpdateSourcePending(state: any, repository: Repository, revision: string, path: string): boolean {
return state?.sources && state.sources[createItemId(repository, revision, path)]?.updatePending;
}
export function getFetchSourcesFailure(

View File

@@ -1,6 +1,5 @@
package sonia.scm.api.v2.resources;
import com.google.common.annotations.VisibleForTesting;
import de.otto.edison.hal.Embedded;
import de.otto.edison.hal.Links;
import org.mapstruct.Context;
@@ -16,18 +15,13 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.time.Instant;
import java.util.Optional;
import java.util.OptionalLong;
@Mapper
public abstract class BrowserResultToFileObjectDtoMapper extends BaseFileObjectDtoMapper {
@Inject
private FileObjectToFileObjectDtoMapper childrenMapper;
@VisibleForTesting
void setChildrenMapper(FileObjectToFileObjectDtoMapper childrenMapper) {
this.childrenMapper = childrenMapper;
}
FileObjectDto map(BrowserResult browserResult, @Context NamespaceAndName namespaceAndName) {
FileObjectDto fileObjectDto = fileObjectToDto(browserResult.getFile(), namespaceAndName, browserResult);
fileObjectDto.setRevision(browserResult.getRevision());
@@ -36,12 +30,8 @@ public abstract class BrowserResultToFileObjectDtoMapper extends BaseFileObjectD
@Mapping(target = "attributes", ignore = true) // We do not map HAL attributes
@Mapping(target = "children", qualifiedBy = Children.class)
protected abstract FileObjectDto fileObjectToDto(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult);
@Children
protected FileObjectDto childrenToDto(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult) {
return childrenMapper.map(fileObject, namespaceAndName, browserResult);
}
protected abstract FileObjectDto fileObjectToDto(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult);
@Override
void applyEnrichers(Links.Builder links, Embedded.Builder embeddedBuilder, NamespaceAndName namespaceAndName, BrowserResult browserResult, FileObject fileObject) {
@@ -52,6 +42,14 @@ public abstract class BrowserResultToFileObjectDtoMapper extends BaseFileObjectD
applyEnrichers(appender, fileObject, namespaceAndName, browserResult, browserResult.getRevision());
}
Optional<Instant> mapOptionalInstant(OptionalLong optionalLong) {
if (optionalLong.isPresent()) {
return Optional.of(Instant.ofEpochMilli(optionalLong.getAsLong()));
} else {
return Optional.empty();
}
}
@Qualifier
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.CLASS)

View File

@@ -10,6 +10,8 @@ import lombok.Setter;
import java.time.Instant;
import java.util.List;
import java.util.Optional;
import java.util.OptionalLong;
@Getter
@Setter
@@ -19,14 +21,16 @@ public class FileObjectDto extends HalRepresentation {
private String path;
private boolean directory;
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private String description;
private long length;
private Optional<String> description;
private OptionalLong length;
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private Instant lastModified;
private Optional<Instant> commitDate;
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private SubRepositoryDto subRepository;
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private String revision;
private boolean partialResult;
private boolean computationAborted;
public FileObjectDto(Links links, Embedded embedded) {
super(links, embedded);

View File

@@ -1,22 +0,0 @@
package sonia.scm.api.v2.resources;
import de.otto.edison.hal.Embedded;
import de.otto.edison.hal.Links;
import org.mapstruct.Context;
import org.mapstruct.Mapper;
import org.mapstruct.Mapping;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.NamespaceAndName;
@Mapper
public abstract class FileObjectToFileObjectDtoMapper extends BaseFileObjectDtoMapper {
@Mapping(target = "attributes", ignore = true) // We do not map HAL attributes
protected abstract FileObjectDto map(FileObject fileObject, @Context NamespaceAndName namespaceAndName, @Context BrowserResult browserResult);
@Override
void applyEnrichers(Links.Builder links, Embedded.Builder embeddedBuilder, NamespaceAndName namespaceAndName, BrowserResult browserResult, FileObject fileObject) {
applyEnrichers(new EdisonHalAppender(links, embeddedBuilder), fileObject, namespaceAndName, browserResult, browserResult.getRevision());
}
}

View File

@@ -37,7 +37,6 @@ public class MapperModule extends AbstractModule {
bind(TagToTagDtoMapper.class).to(Mappers.getMapper(TagToTagDtoMapper.class).getClass());
bind(FileObjectToFileObjectDtoMapper.class).to(Mappers.getMapper(FileObjectToFileObjectDtoMapper.class).getClass());
bind(BrowserResultToFileObjectDtoMapper.class).to(Mappers.getMapper(BrowserResultToFileObjectDtoMapper.class).getClass());
bind(ModificationsToDtoMapper.class).to(Mappers.getMapper(ModificationsToDtoMapper.class).getClass());

View File

@@ -12,6 +12,7 @@ import sonia.scm.debug.DebugModule;
import sonia.scm.filter.WebElementModule;
import sonia.scm.plugin.ExtensionProcessor;
import sonia.scm.plugin.PluginLoader;
import sonia.scm.repository.ExecutorModule;
import javax.servlet.ServletContext;
import java.util.ArrayList;
@@ -51,6 +52,7 @@ public class ApplicationModuleProvider implements ModuleProvider {
moduleList.add(new DebugModule());
}
moduleList.add(new MapperModule());
moduleList.add(new ExecutorModule());
return moduleList;
}

View File

@@ -0,0 +1,49 @@
package sonia.scm.repository;
import sonia.scm.repository.spi.SyncAsyncExecutor;
import java.time.Instant;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.ASYNCHRONOUS;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.SYNCHRONOUS;
/**
 * Default {@link SyncAsyncExecutor} implementation. Tasks submitted before the
 * given {@code switchToAsyncTime} are run synchronously on the calling thread;
 * tasks submitted after it are handed to the given {@link Executor}. Once the
 * asynchronously executed tasks have accumulated more than
 * {@code maxAsyncAbortSeconds} of runtime, further asynchronous tasks are no
 * longer run and their abortion fallback is invoked instead.
 */
public class DefaultSyncAsyncExecutor implements SyncAsyncExecutor {

  private final Executor executor;
  private final Instant switchToAsyncTime;
  private final long maxAsyncAbortMilliseconds;
  // Total wall-clock milliseconds spent inside asynchronously executed tasks.
  // final: the reference never changes, only the contained value does.
  private final AtomicLong accumulatedAsyncRuntime = new AtomicLong(0L);
  // Flips to false as soon as the first task is handed to the async executor.
  // volatile: the flag may be read after the executor handoff from another
  // thread than the one that wrote it.
  private volatile boolean executedAllSynchronously = true;

  DefaultSyncAsyncExecutor(Executor executor, Instant switchToAsyncTime, int maxAsyncAbortSeconds) {
    this.executor = executor;
    this.switchToAsyncTime = switchToAsyncTime;
    this.maxAsyncAbortMilliseconds = maxAsyncAbortSeconds * 1000L;
  }

  /**
   * Executes the given task either synchronously (before the switch time) or
   * asynchronously (after it). An asynchronous task that would exceed the
   * accumulated runtime budget is not run at all; {@code abortionFallback} is
   * called in its place.
   *
   * @param task             the work to execute; receives the mode it runs in
   * @param abortionFallback invoked instead of the task when the asynchronous
   *                         runtime budget is exhausted
   * @return the mode the task was (or will be) executed in
   */
  public ExecutionType execute(Consumer<ExecutionType> task, Runnable abortionFallback) {
    if (switchToAsyncTime.isBefore(Instant.now())) {
      executor.execute(() -> {
        // The budget is checked at execution time, not submission time, so
        // tasks queued behind long-running ones can still be aborted.
        if (accumulatedAsyncRuntime.get() < maxAsyncAbortMilliseconds) {
          long chunkStartTime = System.currentTimeMillis();
          task.accept(ASYNCHRONOUS);
          accumulatedAsyncRuntime.addAndGet(System.currentTimeMillis() - chunkStartTime);
        } else {
          abortionFallback.run();
        }
      });
      executedAllSynchronously = false;
      return ASYNCHRONOUS;
    } else {
      task.accept(SYNCHRONOUS);
      return SYNCHRONOUS;
    }
  }

  /**
   * @return <code>true</code> as long as no task had to be deferred to the
   *         asynchronous executor
   */
  public boolean hasExecutedAllSynchronously() {
    return executedAllSynchronously;
  }
}

View File

@@ -0,0 +1,55 @@
package sonia.scm.repository;
import sonia.scm.repository.spi.SyncAsyncExecutor;
import sonia.scm.repository.spi.SyncAsyncExecutorProvider;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.io.Closeable;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Default {@link SyncAsyncExecutorProvider} backed by a shared fixed-size
 * thread pool. The pool size and the default asynchronous abortion timeout can
 * be configured with the system properties
 * {@value #NUMBER_OF_THREADS_PROPERTY} and
 * {@value #MAX_ASYNC_ABORT_SECONDS_PROPERTY}.
 */
@Singleton
public class DefaultSyncAsyncExecutorProvider implements SyncAsyncExecutorProvider, Closeable {

  public static final int DEFAULT_MAX_ASYNC_ABORT_SECONDS = 60;
  public static final String MAX_ASYNC_ABORT_SECONDS_PROPERTY = "scm.maxAsyncAbortSeconds";
  public static final int DEFAULT_NUMBER_OF_THREADS = 4;
  public static final String NUMBER_OF_THREADS_PROPERTY = "scm.asyncThreads";

  private final ExecutorService executor;
  private final int defaultMaxAsyncAbortSeconds;

  @Inject
  public DefaultSyncAsyncExecutorProvider() {
    this(Executors.newFixedThreadPool(getProperty(NUMBER_OF_THREADS_PROPERTY, DEFAULT_NUMBER_OF_THREADS)));
  }

  public DefaultSyncAsyncExecutorProvider(ExecutorService executor) {
    this.executor = executor;
    this.defaultMaxAsyncAbortSeconds = getProperty(MAX_ASYNC_ABORT_SECONDS_PROPERTY, DEFAULT_MAX_ASYNC_ABORT_SECONDS);
  }

  /**
   * Creates an executor that switches from synchronous to asynchronous
   * execution after the given number of seconds, using the configured default
   * abortion timeout.
   */
  public SyncAsyncExecutor createExecutorWithSecondsToTimeout(int switchToAsyncInSeconds) {
    return createExecutorWithSecondsToTimeout(switchToAsyncInSeconds, defaultMaxAsyncAbortSeconds);
  }

  /**
   * Creates an executor that switches from synchronous to asynchronous
   * execution after {@code switchToAsyncInSeconds} and aborts asynchronous
   * tasks once they have accumulated {@code maxAsyncAbortSeconds} of runtime.
   */
  public SyncAsyncExecutor createExecutorWithSecondsToTimeout(int switchToAsyncInSeconds, int maxAsyncAbortSeconds) {
    return new DefaultSyncAsyncExecutor(
      executor,
      Instant.now().plus(switchToAsyncInSeconds, ChronoUnit.SECONDS),
      maxAsyncAbortSeconds);
  }

  /**
   * Shuts the underlying thread pool down immediately; queued asynchronous
   * tasks are discarded.
   */
  @Override
  public void close() {
    executor.shutdownNow();
  }

  /**
   * Reads an integer system property, falling back to {@code defaultValue}
   * when the property is unset or not a parsable integer. (Previously a
   * malformed value crashed construction with a {@link NumberFormatException};
   * a misconfigured property should not prevent startup.)
   */
  private static int getProperty(String key, int defaultValue) {
    String value = System.getProperty(key);
    if (value == null) {
      return defaultValue;
    }
    try {
      return Integer.parseInt(value.trim());
    } catch (NumberFormatException e) {
      // fall back to the safe default on misconfiguration
      return defaultValue;
    }
  }
}

View File

@@ -0,0 +1,12 @@
package sonia.scm.repository;
import com.google.inject.AbstractModule;
import sonia.scm.lifecycle.modules.CloseableModule;
import sonia.scm.repository.spi.SyncAsyncExecutorProvider;
/**
 * Guice module wiring {@link SyncAsyncExecutorProvider} to its default
 * implementation.
 */
public class ExecutorModule extends AbstractModule {
@Override
protected void configure() {
// DefaultSyncAsyncExecutorProvider is annotated @Singleton and implements
// Closeable; NOTE(review): the CloseableModule import suggests it should be
// closed on shutdown — confirm CloseableModule is installed elsewhere.
bind(SyncAsyncExecutorProvider.class).to(DefaultSyncAsyncExecutorProvider.class);
}
}

View File

@@ -8,7 +8,6 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mapstruct.factory.Mappers;
import org.mockito.InjectMocks;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.NamespaceAndName;
@@ -24,9 +23,6 @@ public class BrowserResultToFileObjectDtoMapperTest {
private final URI baseUri = URI.create("http://example.com/base/");
private final ResourceLinks resourceLinks = ResourceLinksMock.createMock(baseUri);
@InjectMocks
private FileObjectToFileObjectDtoMapperImpl fileObjectToFileObjectDtoMapper;
private BrowserResultToFileObjectDtoMapper mapper;
private final Subject subject = mock(Subject.class);
@@ -34,28 +30,28 @@ public class BrowserResultToFileObjectDtoMapperTest {
private FileObject fileObject1 = new FileObject();
private FileObject fileObject2 = new FileObject();
private FileObject partialFileObject = new FileObject();
@Before
public void init() {
initMocks(this);
mapper = Mappers.getMapper(BrowserResultToFileObjectDtoMapper.class);
mapper.setChildrenMapper(fileObjectToFileObjectDtoMapper);
mapper.setResourceLinks(resourceLinks);
subjectThreadState.bind();
ThreadContext.bind(subject);
fileObject1.setName("FO 1");
fileObject1.setLength(100);
fileObject1.setLastModified(0L);
fileObject1.setLength(100L);
fileObject1.setCommitDate(0L);
fileObject1.setPath("/path/object/1");
fileObject1.setDescription("description of file object 1");
fileObject1.setDirectory(false);
fileObject2.setName("FO 2");
fileObject2.setLength(100);
fileObject2.setLastModified(101L);
fileObject2.setLength(100L);
fileObject2.setCommitDate(101L);
fileObject2.setPath("/path/object/2");
fileObject2.setDescription("description of file object 2");
fileObject2.setDirectory(true);

View File

@@ -1,121 +0,0 @@
package sonia.scm.api.v2.resources;
import org.apache.shiro.subject.Subject;
import org.apache.shiro.subject.support.SubjectThreadState;
import org.apache.shiro.util.ThreadContext;
import org.apache.shiro.util.ThreadState;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.junit.MockitoJUnitRunner;
import sonia.scm.repository.BrowserResult;
import sonia.scm.repository.FileObject;
import sonia.scm.repository.NamespaceAndName;
import sonia.scm.repository.SubRepository;
import java.net.URI;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
/**
 * Tests the mapping of a single {@link FileObject} to a {@link FileObjectDto}:
 * plain attribute mapping, self/content link creation and application of
 * registered HAL enrichers.
 */
@RunWith(MockitoJUnitRunner.Silent.class)
public class FileObjectToFileObjectDtoMapperTest {
// Base URI the mocked ResourceLinks build their links from.
private final URI baseUri = URI.create("http://example.com/base/");
@SuppressWarnings("unused") // Is injected
private final ResourceLinks resourceLinks = ResourceLinksMock.createMock(baseUri);
@InjectMocks
private FileObjectToFileObjectDtoMapperImpl mapper;
// Shiro subject bound to the current thread; presumably required for
// permission checks during mapping — confirm against the mapper base class.
private final Subject subject = mock(Subject.class);
private final ThreadState subjectThreadState = new SubjectThreadState(subject);
private URI expectedBaseUri;
@Before
public void init() {
expectedBaseUri = baseUri.resolve(RepositoryRootResource.REPOSITORIES_PATH_V2 + "/");
subjectThreadState.bind();
ThreadContext.bind(subject);
}
@After
public void unbind() {
// Unbind the subject so no Shiro state leaks into other tests.
ThreadContext.unbindSubject();
}
@Test
public void shouldMapAttributesCorrectly() {
FileObject fileObject = createFileObject();
FileObjectDto dto = mapper.map(fileObject, new NamespaceAndName("namespace", "name"), new BrowserResult("revision", fileObject));
assertEqualAttributes(fileObject, dto);
}
@Test
public void shouldHaveCorrectSelfLinkForDirectory() {
FileObject fileObject = createDirectoryObject();
FileObjectDto dto = mapper.map(fileObject, new NamespaceAndName("namespace", "name"), new BrowserResult("revision", fileObject));
// Directories get a self link pointing at the "sources" endpoint.
assertThat(dto.getLinks().getLinkBy("self").get().getHref()).isEqualTo(expectedBaseUri.resolve("namespace/name/sources/revision/foo/bar").toString());
}
@Test
public void shouldHaveCorrectContentLink() {
FileObject fileObject = createFileObject();
fileObject.setDirectory(false);
FileObjectDto dto = mapper.map(fileObject, new NamespaceAndName("namespace", "name"), new BrowserResult("revision", fileObject));
// Plain files get a self link pointing at the "content" endpoint.
assertThat(dto.getLinks().getLinkBy("self").get().getHref()).isEqualTo(expectedBaseUri.resolve("namespace/name/content/revision/foo/bar").toString());
}
@Test
public void shouldAppendLinks() {
HalEnricherRegistry registry = new HalEnricherRegistry();
registry.register(FileObject.class, (ctx, appender) -> {
// The enricher pulls repository, file object and revision from the context.
NamespaceAndName repository = ctx.oneRequireByType(NamespaceAndName.class);
FileObject fo = ctx.oneRequireByType(FileObject.class);
String rev = ctx.oneRequireByType(String.class);
appender.appendLink("hog", "http://" + repository.logString() + "/" + fo.getName() + "/" + rev);
});
mapper.setRegistry(registry);
FileObject fileObject = createFileObject();
FileObjectDto dto = mapper.map(fileObject, new NamespaceAndName("hitchhiker", "hog"), new BrowserResult("42", fileObject));
assertThat(dto.getLinks().getLinkBy("hog").get().getHref()).isEqualTo("http://hitchhiker/hog/foo/42");
}
// Creates a file object flagged as a directory.
private FileObject createDirectoryObject() {
FileObject fileObject = createFileObject();
fileObject.setDirectory(true);
return fileObject;
}
// Creates a fully populated plain file object used as mapping input.
private FileObject createFileObject() {
FileObject fileObject = new FileObject();
fileObject.setName("foo");
fileObject.setDescription("bar");
fileObject.setPath("foo/bar");
fileObject.setDirectory(false);
fileObject.setLength(100);
fileObject.setLastModified(123L);
fileObject.setSubRepository(new SubRepository("repo.url"));
return fileObject;
}
// Asserts that all simple attributes were copied to the DTO unchanged.
private void assertEqualAttributes(FileObject fileObject, FileObjectDto dto) {
assertThat(dto.getName()).isEqualTo(fileObject.getName());
assertThat(dto.getDescription()).isEqualTo(fileObject.getDescription());
assertThat(dto.getPath()).isEqualTo(fileObject.getPath());
assertThat(dto.isDirectory()).isEqualTo(fileObject.isDirectory());
assertThat(dto.getLength()).isEqualTo(fileObject.getLength());
assertThat(dto.getLastModified().toEpochMilli()).isEqualTo((long) fileObject.getLastModified());
assertThat(dto.getSubRepository().getBrowserUrl()).isEqualTo(fileObject.getSubRepository().getBrowserUrl());
}
}

View File

@@ -7,7 +7,6 @@ import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mapstruct.factory.Mappers;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import sonia.scm.NotFoundException;
@@ -41,16 +40,12 @@ public class SourceRootResourceTest extends RepositoryTestBase {
@Mock
private BrowseCommandBuilder browseCommandBuilder;
@InjectMocks
private FileObjectToFileObjectDtoMapperImpl fileObjectToFileObjectDtoMapper;
private BrowserResultToFileObjectDtoMapper browserResultToFileObjectDtoMapper;
@Before
public void prepareEnvironment() throws Exception {
public void prepareEnvironment() {
browserResultToFileObjectDtoMapper = Mappers.getMapper(BrowserResultToFileObjectDtoMapper.class);
browserResultToFileObjectDtoMapper.setChildrenMapper(fileObjectToFileObjectDtoMapper);
browserResultToFileObjectDtoMapper.setResourceLinks(resourceLinks);
when(serviceFactory.create(new NamespaceAndName("space", "repo"))).thenReturn(service);
when(service.getBrowseCommand()).thenReturn(browseCommandBuilder);
@@ -127,7 +122,7 @@ public class SourceRootResourceTest extends RepositoryTestBase {
fileObject1.setDescription("File object 1");
fileObject1.setPath("/foo/bar/fo1");
fileObject1.setLength(1024L);
fileObject1.setLastModified(0L);
fileObject1.setCommitDate(0L);
parent.addChild(fileObject1);
FileObject fileObject2 = new FileObject();
@@ -136,7 +131,7 @@ public class SourceRootResourceTest extends RepositoryTestBase {
fileObject2.setDescription("File object 2");
fileObject2.setPath("/foo/bar/fo2");
fileObject2.setLength(4096L);
fileObject2.setLastModified(1234L);
fileObject2.setCommitDate(1234L);
parent.addChild(fileObject2);
return parent;

View File

@@ -0,0 +1,54 @@
package sonia.scm.repository;
import org.junit.jupiter.api.Test;
import sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType;
import java.time.Instant;
import static java.lang.Integer.MAX_VALUE;
import static java.time.Instant.MAX;
import static java.time.temporal.ChronoUnit.MILLIS;
import static org.assertj.core.api.Assertions.assertThat;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.ASYNCHRONOUS;
import static sonia.scm.repository.spi.SyncAsyncExecutor.ExecutionType.SYNCHRONOUS;
class DefaultSyncAsyncExecutorTest {
ExecutionType calledWithType = null;
boolean aborted = false;
@Test
void shouldExecuteSynchronouslyBeforeTimeout() {
DefaultSyncAsyncExecutor executor = new DefaultSyncAsyncExecutor(Runnable::run, MAX, MAX_VALUE);
ExecutionType result = executor.execute(type -> calledWithType = type, () -> aborted = true);
assertThat(result).isEqualTo(SYNCHRONOUS);
assertThat(calledWithType).isEqualTo(SYNCHRONOUS);
assertThat(executor.hasExecutedAllSynchronously()).isTrue();
assertThat(aborted).isFalse();
}
@Test
void shouldExecuteAsynchronouslyAfterTimeout() {
DefaultSyncAsyncExecutor executor = new DefaultSyncAsyncExecutor(Runnable::run, Instant.now().minus(1, MILLIS), MAX_VALUE);
ExecutionType result = executor.execute(type -> calledWithType = type, () -> aborted = true);
assertThat(result).isEqualTo(ASYNCHRONOUS);
assertThat(calledWithType).isEqualTo(ASYNCHRONOUS);
assertThat(executor.hasExecutedAllSynchronously()).isFalse();
assertThat(aborted).isFalse();
}
@Test
void shouldCallFallbackAfterAbortion() {
DefaultSyncAsyncExecutor executor = new DefaultSyncAsyncExecutor(Runnable::run, Instant.now().minus(1, MILLIS), 0);
ExecutionType result = executor.execute(type -> calledWithType = type, () -> aborted = true);
assertThat(result).isEqualTo(ASYNCHRONOUS);
assertThat(calledWithType).isNull();
assertThat(aborted).isTrue();
}
}