Sorted autocomplete (#1918)
Users, groups, repositories, and repository roles were sorted in the REST layer by default whenever no other sort option was given, while the layers below (the manager classes and the DAOs) kept their collections unsorted. As a consequence, the autocomplete resource, which does not sort all values beforehand, returned unsorted results. As a side effect, a direct match for an input could appear at a random position or not at all when there were enough other matches (as reported in #1695).

With this pull request the databases for users, groups, repositories, and repository roles use instances of TreeMap instead of LinkedHashMap internally, so the values are sorted implicitly (by id or name for users, groups, and repository roles, and by namespace/name for repositories). This change also allows the default sort in the REST layer to be removed.
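The effect of swapping the map implementation can be seen in isolation. The following minimal, self-contained Java sketch (not part of the patch; the class name and the sample keys are made up for illustration) shows that a TreeMap keeps its keys in natural order, while a LinkedHashMap only preserves insertion order — which is why the XML databases become implicitly sorted without any explicit sort step:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class MapOrderDemo {

  public static void main(String[] args) {
    // LinkedHashMap returns entries in insertion order, so callers such as
    // the autocomplete resource see whatever order the database stored.
    Map<String, String> insertionOrdered = new LinkedHashMap<>();

    // TreeMap keeps its keys sorted by their natural order (here: the id/name).
    Map<String, String> implicitlySorted = new TreeMap<>();

    for (String id : new String[]{"zaphod", "arthur", "trillian"}) {
      insertionOrdered.put(id, id);
      implicitlySorted.put(id, id);
    }

    System.out.println(insertionOrdered.keySet()); // [zaphod, arthur, trillian]
    System.out.println(implicitlySorted.keySet()); // [arthur, trillian, zaphod]
  }
}

Unlike ConcurrentHashMap, neither HashMap nor TreeMap is thread-safe, which is presumably why the XmlRepositoryDAO changes below also introduce a ReentrantReadWriteLock around its map accesses.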
gradle/changelog/autocomplete_sorted.yaml (new file, 2 lines)
@@ -0,0 +1,2 @@
+- type: fixed
+  description: Autocompletion has sorted suggestions ([#1918](https://github.com/scm-manager/scm-manager/pull/1918))
@@ -21,25 +21,20 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  * SOFTWARE.
  */
 
-package sonia.scm.group.xml;
-
-//~--- non-JDK imports --------------------------------------------------------
+package sonia.scm.group.xml;
 
 import sonia.scm.group.Group;
 import sonia.scm.xml.XmlDatabase;
 
-//~--- JDK imports ------------------------------------------------------------
-
-import java.util.Collection;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
+
+import java.util.Collection;
+import java.util.Map;
+import java.util.TreeMap;
 
 /**
  *
@@ -190,7 +185,7 @@ public class XmlGroupDatabase implements XmlDatabase<Group>
   /** Field description */
   @XmlJavaTypeAdapter(XmlGroupMapAdapter.class)
   @XmlElement(name = "groups")
-  private Map<String, Group> groupMap = new LinkedHashMap<>();
+  private Map<String, Group> groupMap = new TreeMap<>();
 
   /** Field description */
   private Long lastModified;
@@ -21,19 +21,14 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  * SOFTWARE.
  */
 
-package sonia.scm.group.xml;
-
-//~--- non-JDK imports --------------------------------------------------------
+package sonia.scm.group.xml;
 
 import sonia.scm.group.Group;
 
-//~--- JDK imports ------------------------------------------------------------
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-
 import javax.xml.bind.annotation.adapters.XmlAdapter;
+
+import java.util.Map;
+import java.util.TreeMap;
 
 /**
  *
@@ -72,7 +67,7 @@ public class XmlGroupMapAdapter
   @Override
   public Map<String, Group> unmarshal(XmlGroupList groups) throws Exception
   {
-    Map<String, Group> groupMap = new LinkedHashMap<>();
+    Map<String, Group> groupMap = new TreeMap<>();
 
     for (Group group : groups)
     {
@@ -41,8 +41,13 @@ import javax.inject.Inject;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
+import java.util.TreeMap;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.function.Supplier;
 
 /**
  * @author Sebastian Sdorra
@@ -50,7 +55,6 @@ import java.util.concurrent.ConcurrentHashMap;
 @Singleton
 public class XmlRepositoryDAO implements RepositoryDAO {
 
-
   private final MetadataStore metadataStore = new MetadataStore();
 
   private final PathBasedRepositoryLocationResolver repositoryLocationResolver;
@@ -59,6 +63,7 @@ public class XmlRepositoryDAO implements RepositoryDAO {
 
   private final Map<String, Repository> byId;
   private final Map<NamespaceAndName, Repository> byNamespaceAndName;
+  private final ReadWriteLock byNamespaceLock = new ReentrantReadWriteLock();
 
   @Inject
   public XmlRepositoryDAO(PathBasedRepositoryLocationResolver repositoryLocationResolver, FileSystem fileSystem, RepositoryExportingCheck repositoryExportingCheck) {
@@ -66,18 +71,20 @@ public class XmlRepositoryDAO implements RepositoryDAO {
     this.fileSystem = fileSystem;
     this.repositoryExportingCheck = repositoryExportingCheck;
 
-    this.byId = new ConcurrentHashMap<>();
-    this.byNamespaceAndName = new ConcurrentHashMap<>();
+    this.byId = new HashMap<>();
+    this.byNamespaceAndName = new TreeMap<>();
 
     init();
   }
 
   private void init() {
-    RepositoryLocationResolver.RepositoryLocationResolverInstance<Path> pathRepositoryLocationResolverInstance = repositoryLocationResolver.create(Path.class);
-    pathRepositoryLocationResolverInstance.forAllLocations((repositoryId, repositoryPath) -> {
-      Repository repository = metadataStore.read(repositoryPath);
-      byNamespaceAndName.put(repository.getNamespaceAndName(), repository);
-      byId.put(repositoryId, repository);
+    withWriteLockedMaps(() -> {
+      RepositoryLocationResolver.RepositoryLocationResolverInstance<Path> pathRepositoryLocationResolverInstance = repositoryLocationResolver.create(Path.class);
+      pathRepositoryLocationResolverInstance.forAllLocations((repositoryId, repositoryPath) -> {
+        Repository repository = metadataStore.read(repositoryPath);
+        byNamespaceAndName.put(repository.getNamespaceAndName(), repository);
+        byId.put(repositoryId, repository);
+      });
     });
   }
 
@@ -106,38 +113,40 @@ public class XmlRepositoryDAO implements RepositoryDAO {
       throw new InternalRepositoryException(repository, "failed to create filesystem", e);
     }
 
-    byId.put(repository.getId(), clone);
-    byNamespaceAndName.put(repository.getNamespaceAndName(), clone);
+    withWriteLockedMaps(() -> {
+      byId.put(repository.getId(), clone);
+      byNamespaceAndName.put(repository.getNamespaceAndName(), clone);
+    });
   }
 
   @Override
   public boolean contains(Repository repository) {
-    return byId.containsKey(repository.getId());
+    return withReadLockedMaps(() -> byId.containsKey(repository.getId()));
   }
 
   @Override
   public boolean contains(NamespaceAndName namespaceAndName) {
-    return byNamespaceAndName.containsKey(namespaceAndName);
+    return withReadLockedMaps(() -> byNamespaceAndName.containsKey(namespaceAndName));
  }
 
   @Override
   public boolean contains(String id) {
-    return byId.containsKey(id);
+    return withReadLockedMaps(() -> byId.containsKey(id));
   }
 
   @Override
   public Repository get(NamespaceAndName namespaceAndName) {
-    return byNamespaceAndName.get(namespaceAndName);
+    return withReadLockedMaps(() -> byNamespaceAndName.get(namespaceAndName));
   }
 
   @Override
   public Repository get(String id) {
-    return byId.get(id);
+    return withReadLockedMaps(() -> byId.get(id));
   }
 
   @Override
   public Collection<Repository> getAll() {
-    return ImmutableList.copyOf(byNamespaceAndName.values());
+    return withReadLockedMaps(() -> ImmutableList.copyOf(byNamespaceAndName.values()));
   }
 
   @Override
@@ -147,14 +156,14 @@ public class XmlRepositoryDAO implements RepositoryDAO {
       throw new StoreReadOnlyException(repository);
     }
 
-    synchronized (this) {
+    withWriteLockedMaps(() -> {
       // remove old namespaceAndName from map, in case of rename
       Repository prev = byId.put(clone.getId(), clone);
       if (prev != null) {
         byNamespaceAndName.remove(prev.getNamespaceAndName());
       }
       byNamespaceAndName.put(clone.getNamespaceAndName(), clone);
-    }
+    });
 
     Path repositoryPath = repositoryLocationResolver
       .create(Path.class)
@@ -164,8 +173,10 @@ public class XmlRepositoryDAO implements RepositoryDAO {
   }
 
   private boolean mustNotModifyRepository(Repository clone) {
-    return clone.isArchived() && byId.get(clone.getId()).isArchived()
-      || repositoryExportingCheck.isExporting(clone);
+    return withReadLockedMaps(() ->
+      clone.isArchived() && byId.get(clone.getId()).isArchived()
+        || repositoryExportingCheck.isExporting(clone)
+    );
   }
 
   @Override
@@ -173,14 +184,13 @@ public class XmlRepositoryDAO implements RepositoryDAO {
     if (repository.isArchived() || repositoryExportingCheck.isExporting(repository)) {
       throw new StoreReadOnlyException(repository);
     }
-    Path path;
-    synchronized (this) {
+    Path path = withWriteLockedMaps(() -> {
       Repository prev = byId.remove(repository.getId());
       if (prev != null) {
         byNamespaceAndName.remove(prev.getNamespaceAndName());
       }
-      path = repositoryLocationResolver.remove(repository.getId());
-    }
+      return repositoryLocationResolver.remove(repository.getId());
+    });
 
     try {
       fileSystem.destroy(path.toFile());
@@ -201,8 +211,40 @@ public class XmlRepositoryDAO implements RepositoryDAO {
 
   public void refresh() {
     repositoryLocationResolver.refresh();
-    byNamespaceAndName.clear();
-    byId.clear();
+    withWriteLockedMaps(() -> {
+      byNamespaceAndName.clear();
+      byId.clear();
+    });
     init();
   }
 
+  private void withWriteLockedMaps(Runnable runnable) {
+    Lock lock = byNamespaceLock.writeLock();
+    lock.lock();
+    try {
+      runnable.run();
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  private <T> T withWriteLockedMaps(Supplier<T> runnable) {
+    Lock lock = byNamespaceLock.writeLock();
+    lock.lock();
+    try {
+      return runnable.get();
+    } finally {
+      lock.unlock();
+    }
+  }
+
+  private <T> T withReadLockedMaps(Supplier<T> runnable) {
+    Lock lock = byNamespaceLock.readLock();
+    lock.lock();
+    try {
+      return runnable.get();
+    } finally {
+      lock.unlock();
+    }
+  }
 }
@@ -21,7 +21,7 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  * SOFTWARE.
  */
 
 package sonia.scm.repository.xml;
 
 import sonia.scm.repository.RepositoryRole;
@@ -33,8 +33,8 @@ import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
 import java.util.Collection;
-import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.TreeMap;
 
 @XmlRootElement(name = "user-db")
 @XmlAccessorType(XmlAccessType.FIELD)
@@ -45,7 +45,7 @@ public class XmlRepositoryRoleDatabase implements XmlDatabase<RepositoryRole> {
 
   @XmlJavaTypeAdapter(XmlRepositoryRoleMapAdapter.class)
   @XmlElement(name = "roles")
-  private Map<String, RepositoryRole> roleMap = new LinkedHashMap<>();
+  private Map<String, RepositoryRole> roleMap = new TreeMap<>();
 
   public XmlRepositoryRoleDatabase() {
     long c = System.currentTimeMillis();
@@ -21,14 +21,14 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  * SOFTWARE.
  */
 
 package sonia.scm.repository.xml;
 
 import sonia.scm.repository.RepositoryRole;
 
 import javax.xml.bind.annotation.adapters.XmlAdapter;
-import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.TreeMap;
 
 public class XmlRepositoryRoleMapAdapter
   extends XmlAdapter<XmlRepositoryRoleList, Map<String, RepositoryRole>> {
@@ -40,7 +40,7 @@ public class XmlRepositoryRoleMapAdapter
 
   @Override
   public Map<String, RepositoryRole> unmarshal(XmlRepositoryRoleList roles) {
-    Map<String, RepositoryRole> roleMap = new LinkedHashMap<>();
+    Map<String, RepositoryRole> roleMap = new TreeMap<>();
 
     for (RepositoryRole role : roles) {
       roleMap.put(role.getName(), role);
@@ -21,25 +21,20 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  * SOFTWARE.
  */
 
-package sonia.scm.user.xml;
-
-//~--- non-JDK imports --------------------------------------------------------
+package sonia.scm.user.xml;
 
 import sonia.scm.user.User;
 import sonia.scm.xml.XmlDatabase;
 
-//~--- JDK imports ------------------------------------------------------------
-
-import java.util.Collection;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlElement;
 import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
+
+import java.util.Collection;
+import java.util.Map;
+import java.util.TreeMap;
 
 /**
  *
@@ -193,5 +188,5 @@ public class XmlUserDatabase implements XmlDatabase<User>
   /** Field description */
   @XmlJavaTypeAdapter(XmlUserMapAdapter.class)
   @XmlElement(name = "users")
-  private Map<String, User> userMap = new LinkedHashMap<>();
+  private Map<String, User> userMap = new TreeMap<>();
 }
@@ -21,19 +21,14 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  * SOFTWARE.
  */
 
-package sonia.scm.user.xml;
-
-//~--- non-JDK imports --------------------------------------------------------
+package sonia.scm.user.xml;
 
 import sonia.scm.user.User;
 
-//~--- JDK imports ------------------------------------------------------------
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-
 import javax.xml.bind.annotation.adapters.XmlAdapter;
+
+import java.util.Map;
+import java.util.TreeMap;
 
 /**
  *
@@ -72,7 +67,7 @@ public class XmlUserMapAdapter
   @Override
   public Map<String, User> unmarshal(XmlUserList users) throws Exception
   {
-    Map<String, User> userMap = new LinkedHashMap<>();
+    Map<String, User> userMap = new TreeMap<>();
 
     for (User user : users)
     {
@@ -21,7 +21,7 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  * SOFTWARE.
  */
 
 package sonia.scm.xml;
 
 //~--- non-JDK imports --------------------------------------------------------
@@ -45,7 +45,7 @@ import java.util.Collection;
  * @param <T>
  */
 public abstract class AbstractXmlDAO<I extends ModelObject,
-  T extends XmlDatabase> implements GenericDAO<I>
+  T extends XmlDatabase<I>> implements GenericDAO<I>
 {
 
   /** Field description */
@@ -89,11 +89,7 @@ describe("Test repository hooks", () => {
   it("should return repositories", async () => {
     const queryClient = createInfiniteCachingClient();
     setIndexLink(queryClient, "repositories", "/repos");
-    fetchMock.get("/api/v2/repos", repositoryCollection, {
-      query: {
-        sortBy: "namespaceAndName"
-      }
-    });
+    fetchMock.get("/api/v2/repos", repositoryCollection);
 
     await expectCollection(queryClient);
   });
@@ -103,7 +99,6 @@ describe("Test repository hooks", () => {
     setIndexLink(queryClient, "repositories", "/repos");
     fetchMock.get("/api/v2/repos", repositoryCollection, {
       query: {
-        sortBy: "namespaceAndName",
         page: "42"
       }
     });
@@ -116,11 +111,7 @@ describe("Test repository hooks", () => {
   it("should use repository from namespace", async () => {
     const queryClient = createInfiniteCachingClient();
     setIndexLink(queryClient, "repositories", "/repos");
-    fetchMock.get("/api/v2/spaceships", repositoryCollection, {
-      query: {
-        sortBy: "namespaceAndName"
-      }
-    });
+    fetchMock.get("/api/v2/spaceships", repositoryCollection);
 
     await expectCollection(queryClient, {
       namespace: {
@@ -139,7 +130,6 @@ describe("Test repository hooks", () => {
     setIndexLink(queryClient, "repositories", "/repos");
     fetchMock.get("/api/v2/repos", repositoryCollection, {
       query: {
-        sortBy: "namespaceAndName",
         q: "heart"
       }
     });
@@ -152,11 +142,7 @@ describe("Test repository hooks", () => {
   it("should update repository cache", async () => {
     const queryClient = createInfiniteCachingClient();
     setIndexLink(queryClient, "repositories", "/repos");
-    fetchMock.get("/api/v2/repos", repositoryCollection, {
-      query: {
-        sortBy: "namespaceAndName"
-      }
-    });
+    fetchMock.get("/api/v2/repos", repositoryCollection);
 
     await expectCollection(queryClient);
 
@@ -30,7 +30,7 @@ import {
   Repository,
   RepositoryCollection,
   RepositoryCreation,
-  RepositoryTypeCollection,
+  RepositoryTypeCollection
 } from "@scm-manager/ui-types";
 import { useMutation, useQuery, useQueryClient } from "react-query";
 import { apiClient } from "./apiclient";
@@ -55,9 +55,7 @@ export const useRepositories = (request?: UseRepositoriesRequest): ApiResult<Rep
   const namespaceLink = (request?.namespace?._links.repositories as Link)?.href;
   const link = namespaceLink || indexLink;
 
-  const queryParams: Record<string, string> = {
-    sortBy: "namespaceAndName",
-  };
+  const queryParams: Record<string, string> = {};
   if (request?.search) {
     queryParams.q = request.search;
   }
@@ -66,7 +64,7 @@ export const useRepositories = (request?: UseRepositoriesRequest): ApiResult<Rep
   }
   return useQuery<RepositoryCollection, Error>(
     ["repositories", request?.namespace?.namespace, request?.search || "", request?.page || 0],
-    () => apiClient.get(`${link}?${createQueryString(queryParams)}`).then((response) => response.json()),
+    () => apiClient.get(`${link}?${createQueryString(queryParams)}`).then(response => response.json()),
     {
       enabled: !request?.disabled,
       onSuccess: (repositories: RepositoryCollection) => {
@@ -74,7 +72,7 @@ export const useRepositories = (request?: UseRepositoriesRequest): ApiResult<Rep
         repositories._embedded?.repositories.forEach((repository: Repository) => {
           queryClient.setQueryData(["repository", repository.namespace, repository.name], repository);
         });
-      },
+      }
     }
   );
 };
@@ -92,14 +90,14 @@ const createRepository = (link: string) => {
     }
     return apiClient
      .post(createLink, request.repository, "application/vnd.scmm-repository+json;v=2")
-      .then((response) => {
+      .then(response => {
        const location = response.headers.get("Location");
        if (!location) {
          throw new Error("Server does not return required Location header");
        }
        return apiClient.get(location);
      })
-      .then((response) => response.json());
+      .then(response => response.json());
   };
 };
 
@@ -111,10 +109,10 @@ export const useCreateRepository = () => {
   const { mutate, data, isLoading, error } = useMutation<Repository, Error, CreateRepositoryRequest>(
     createRepository(link),
     {
-      onSuccess: (repository) => {
+      onSuccess: repository => {
         queryClient.setQueryData(["repository", repository.namespace, repository.name], repository);
         return queryClient.invalidateQueries(["repositories"]);
-      },
+      }
     }
   );
   return {
@@ -123,7 +121,7 @@ export const useCreateRepository = () => {
     },
     isLoading,
     error,
-    repository: data,
+    repository: data
   };
 };
 
@@ -133,7 +131,7 @@ export const useRepositoryTypes = () => useIndexJsonResource<RepositoryTypeColle
 export const useRepository = (namespace: string, name: string): ApiResult<Repository> => {
   const link = useRequiredIndexLink("repositories");
   return useQuery<Repository, Error>(["repository", namespace, name], () =>
-    apiClient.get(concat(link, namespace, name)).then((response) => response.json())
+    apiClient.get(concat(link, namespace, name)).then(response => response.json())
   );
 };
 
@@ -144,7 +142,7 @@ export type UseDeleteRepositoryOptions = {
 export const useDeleteRepository = (options?: UseDeleteRepositoryOptions) => {
   const queryClient = useQueryClient();
   const { mutate, isLoading, error, data } = useMutation<unknown, Error, Repository>(
-    (repository) => {
+    repository => {
       const link = requiredLink(repository, "delete");
       return apiClient.delete(link);
     },
@@ -153,23 +151,23 @@ export const useDeleteRepository = (options?: UseDeleteRepositoryOptions) => {
         if (options?.onSuccess) {
           options.onSuccess(repository);
         }
-        await queryClient.removeQueries(repoQueryKey(repository));
+        queryClient.removeQueries(repoQueryKey(repository));
         await queryClient.invalidateQueries(["repositories"]);
-      },
+      }
     }
   );
   return {
     remove: (repository: Repository) => mutate(repository),
     isLoading,
     error,
-    isDeleted: !!data,
+    isDeleted: !!data
   };
 };
 
 export const useUpdateRepository = () => {
   const queryClient = useQueryClient();
   const { mutate, isLoading, error, data } = useMutation<unknown, Error, Repository>(
-    (repository) => {
+    repository => {
       const link = requiredLink(repository, "update");
       return apiClient.put(link, repository, "application/vnd.scmm-repository+json;v=2");
     },
@@ -177,21 +175,21 @@ export const useUpdateRepository = () => {
       onSuccess: async (_, repository) => {
         await queryClient.invalidateQueries(repoQueryKey(repository));
         await queryClient.invalidateQueries(["repositories"]);
-      },
+      }
     }
   );
   return {
     update: (repository: Repository) => mutate(repository),
     isLoading,
     error,
-    isUpdated: !!data,
+    isUpdated: !!data
   };
 };
 
 export const useArchiveRepository = () => {
   const queryClient = useQueryClient();
   const { mutate, isLoading, error, data } = useMutation<unknown, Error, Repository>(
-    (repository) => {
+    repository => {
       const link = requiredLink(repository, "archive");
       return apiClient.post(link);
     },
@@ -199,21 +197,21 @@ export const useArchiveRepository = () => {
      onSuccess: async (_, repository) => {
        await queryClient.invalidateQueries(repoQueryKey(repository));
        await queryClient.invalidateQueries(["repositories"]);
-      },
+      }
    }
  );
  return {
    archive: (repository: Repository) => mutate(repository),
    isLoading,
    error,
-    isArchived: !!data,
+    isArchived: !!data
  };
};

export const useUnarchiveRepository = () => {
  const queryClient = useQueryClient();
  const { mutate, isLoading, error, data } = useMutation<unknown, Error, Repository>(
-    (repository) => {
+    repository => {
      const link = requiredLink(repository, "unarchive");
      return apiClient.post(link);
    },
@@ -221,35 +219,35 @@ export const useUnarchiveRepository = () => {
       onSuccess: async (_, repository) => {
         await queryClient.invalidateQueries(repoQueryKey(repository));
         await queryClient.invalidateQueries(["repositories"]);
-      },
+      }
     }
   );
   return {
     unarchive: (repository: Repository) => mutate(repository),
     isLoading,
     error,
-    isUnarchived: !!data,
+    isUnarchived: !!data
   };
 };
 
 export const useRunHealthCheck = () => {
   const queryClient = useQueryClient();
   const { mutate, isLoading, error, data } = useMutation<unknown, Error, Repository>(
-    (repository) => {
+    repository => {
       const link = requiredLink(repository, "runHealthCheck");
       return apiClient.post(link);
     },
     {
       onSuccess: async (_, repository) => {
         await queryClient.invalidateQueries(repoQueryKey(repository));
-      },
+      }
     }
   );
   return {
     runHealthCheck: (repository: Repository) => mutate(repository),
     isLoading,
     error,
-    isRunning: !!data,
+    isRunning: !!data
   };
 };
 
@@ -258,7 +256,7 @@ export const useExportInfo = (repository: Repository): ApiResultWithFetching<Exp
   //TODO Refetch while exporting to update the page
   const { isLoading, isFetching, error, data } = useQuery<ExportInfo, Error>(
     ["repository", repository.namespace, repository.name, "exportInfo"],
-    () => apiClient.get(link).then((response) => response.json()),
+    () => apiClient.get(link).then(response => response.json()),
     {}
   );
 
@@ -266,7 +264,7 @@ export const useExportInfo = (repository: Repository): ApiResultWithFetching<Exp
     isLoading,
     isFetching,
     error: error instanceof NotFoundError ? null : error,
-    data,
+    data
   };
 };
 
@@ -309,14 +307,14 @@ export const useExportRepository = () => {
       const id = setInterval(() => {
         apiClient
           .get(infolink)
-          .then((r) => r.json())
+          .then(r => r.json())
           .then((info: ExportInfo) => {
             if (info._links.download) {
               clearInterval(id);
               resolve(info);
             }
           })
-          .catch((e) => {
+          .catch(e => {
             clearInterval(id);
             reject(e);
           });
@@ -329,21 +327,21 @@ export const useExportRepository = () => {
       onSuccess: async (_, { repository }) => {
         await queryClient.invalidateQueries(repoQueryKey(repository));
         await queryClient.invalidateQueries(["repositories"]);
-      },
+      }
     }
   );
   return {
     exportRepository: (repository: Repository, options: ExportOptions) => mutate({ repository, options }),
     isLoading,
     error,
-    data,
+    data
   };
 };
 
 export const usePaths = (repository: Repository, revision: string): ApiResult<Paths> => {
   const link = requiredLink(repository, "paths").replace("{revision}", revision);
   return useQuery<Paths, Error>(repoQueryKey(repository, "paths", revision), () =>
-    apiClient.get(link).then((response) => response.json())
+    apiClient.get(link).then(response => response.json())
   );
 };
 
@@ -364,7 +362,7 @@ export const useRenameRepository = (repository: Repository) => {
   const { mutate, isLoading, error, data } = useMutation<unknown, Error, RenameRepositoryRequest>(
     ({ name, namespace }) => apiClient.post(url, { namespace, name }, "application/vnd.scmm-repository+json;v=2"),
     {
-      onSuccess: () => queryClient.removeQueries(repoQueryKey(repository)),
+      onSuccess: () => queryClient.removeQueries(repoQueryKey(repository))
     }
   );
 
@@ -372,6 +370,6 @@ export const useRenameRepository = (repository: Repository) => {
     renameRepository: (namespace: string, name: string) => mutate({ namespace, name }),
     isLoading,
     error,
-    isRenamed: !!data,
+    isRenamed: !!data
   };
 };
@@ -35,10 +35,10 @@ describe("getProtocolLinkByType tests", () => {
         protocol: [
           {
             name: "http",
-            href: "http://scm.scm-manager.org/repo/scm/core",
-          },
-        ],
-      },
+            href: "http://scm.scm-manager.org/repo/scm/core"
+          }
+        ]
+      }
     };
 
     const link = getProtocolLinkByType(repository, "http");
@@ -54,14 +54,14 @@ describe("getProtocolLinkByType tests", () => {
         protocol: [
           {
             name: "http",
-            href: "http://scm.scm-manager.org/repo/scm/core",
+            href: "http://scm.scm-manager.org/repo/scm/core"
           },
           {
             name: "ssh",
-            href: "git@scm.scm-manager.org:scm/core",
-          },
-        ],
-      },
+            href: "git@scm.scm-manager.org:scm/core"
+          }
+        ]
+      }
     };
 
     const link = getProtocolLinkByType(repository, "http");
@@ -76,9 +76,9 @@ describe("getProtocolLinkByType tests", () => {
       _links: {
         protocol: {
           name: "http",
-          href: "http://scm.scm-manager.org/repo/scm/core",
-        },
-      },
+          href: "http://scm.scm-manager.org/repo/scm/core"
+        }
+      }
     };
 
     const link = getProtocolLinkByType(repository, "http");
@@ -94,14 +94,14 @@ describe("getProtocolLinkByType tests", () => {
         protocol: [
           {
             name: "http",
-            href: "http://scm.scm-manager.org/repo/scm/core",
+            href: "http://scm.scm-manager.org/repo/scm/core"
           },
           {
             name: "ssh",
-            href: "git@scm.scm-manager.org:scm/core",
-          },
-        ],
-      },
+            href: "git@scm.scm-manager.org:scm/core"
+          }
+        ]
+      }
     };
 
     const link = getProtocolLinkByType(repository, "awesome");
@@ -113,7 +113,7 @@ describe("getProtocolLinkByType tests", () => {
       namespace: "scm",
       name: "core",
       type: "git",
-      _links: {},
+      _links: {}
     };
 
     const link = getProtocolLinkByType(repository, "http");
@@ -31,6 +31,7 @@ import java.util.Collection;
 import java.util.Optional;
 import java.util.function.Function;
 
+import static java.util.Optional.empty;
 import static java.util.Optional.ofNullable;
 
 public abstract class GenericDisplayManager<D, T extends ReducedModelObject> implements DisplayManager<T> {
@@ -60,6 +61,9 @@ public abstract class GenericDisplayManager<D, T extends ReducedModelObject> imp
 
   @Override
   public Optional<T> get(String id) {
+    if (id == null) {
+      return empty();
+    }
     return ofNullable(dao.get(id)).map(transform);
   }
 }
@@ -21,7 +21,7 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  * SOFTWARE.
  */
 
 package sonia.scm.api.v2.resources;
 
 import de.otto.edison.hal.HalRepresentation;
@@ -77,12 +77,12 @@ class CollectionResourceManagerAdapter<MODEL_OBJECT extends ModelObject,
     AssertUtil.assertPositive(pageNumber);
     AssertUtil.assertPositive(pageSize);
 
-    if (Util.isEmpty(sortBy)) {
-      // replace with something useful
-      sortBy = "id";
+    Comparator<MODEL_OBJECT> comparator = null;
+    if (!Util.isEmpty(sortBy)) {
+      comparator = createComparator(sortBy, desc);
     }
 
-    return manager.getPage(filter, createComparator(sortBy, desc), pageNumber, pageSize);
+    return manager.getPage(filter, comparator, pageNumber, pageSize);
   }
 
   private Comparator<MODEL_OBJECT> createComparator(String sortBy, boolean desc) {
@@ -41,7 +41,6 @@ import sonia.scm.event.ScmEventBus;
 import sonia.scm.security.AuthorizationChangedEvent;
 import sonia.scm.security.KeyGenerator;
 import sonia.scm.util.AssertUtil;
-import sonia.scm.util.CollectionAppender;
 import sonia.scm.util.IOUtil;
 import sonia.scm.util.Util;
 
@@ -51,13 +50,19 @@ import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.function.BiConsumer;
+import java.util.function.BinaryOperator;
 import java.util.function.Consumer;
+import java.util.function.Function;
 import java.util.function.Predicate;
+import java.util.function.Supplier;
+import java.util.stream.Collector;
 
-import static java.util.stream.Collectors.toSet;
+import static java.util.Collections.emptySet;
 import static sonia.scm.AlreadyExistsException.alreadyExists;
 import static sonia.scm.ContextEntry.ContextBuilder.entity;
 import static sonia.scm.NotFoundException.notFound;
@@ -70,6 +75,33 @@ import static sonia.scm.NotFoundException.notFound;
 @Singleton
 public class DefaultRepositoryManager extends AbstractRepositoryManager {
 
+  @SuppressWarnings("unchecked")
+  public static final Collector<String, Object, Collection<String>> LINKED_HASH_SET_COLLECTOR = new Collector<String, Object, Collection<String>>() {
+    @Override
+    public Supplier<Object> supplier() {
+      return LinkedHashSet::new;
+    }
+
+    @Override
+    public BiConsumer<Object, String> accumulator() {
+      return (collection, value) -> ((Collection<String>) collection).add(value);
+    }
+
+    @Override
+    public BinaryOperator<Object> combiner() {
+      return (c1, c2) -> ((Collection<String>) c1).addAll((Collection<String>) c2);
+    }
+
+    @Override
+    public Function<Object, Collection<String>> finisher() {
+      return collection -> (Collection<String>) collection;
+    }
+
+    @Override
+    public Set<Characteristics> characteristics() {
+      return emptySet();
+    }
+  };
   private static final Logger logger = LoggerFactory.getLogger(DefaultRepositoryManager.class);
   private final Map<String, RepositoryHandler> handlerMap;
   private final KeyGenerator keyGenerator;
@@ -340,12 +372,9 @@ public class DefaultRepositoryManager extends AbstractRepositoryManager {
                                                  int start, int limit) {
 
     return Util.createSubCollection(repositoryDAO.getAll(), comparator,
-      new CollectionAppender<Repository>() {
-        @Override
-        public void append(Collection<Repository> collection, Repository item) {
-          if (RepositoryPermissions.read().isPermitted(item)) {
-            collection.add(postProcess(item));
-          }
+      (collection, item) -> {
+        if (RepositoryPermissions.read().isPermitted(item)) {
+          collection.add(postProcess(item));
         }
       }, start, limit);
   }
@@ -363,7 +392,7 @@ public class DefaultRepositoryManager extends AbstractRepositoryManager {
   public Collection<String> getAllNamespaces() {
     return getAll().stream()
       .map(Repository::getNamespace)
-      .collect(toSet());
+      .collect(LINKED_HASH_SET_COLLECTOR);
   }
 
   @Override
@@ -21,7 +21,7 @@
  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
  * SOFTWARE.
  */
 
 package sonia.scm.api.v2.resources;
 
 import de.otto.edison.hal.HalRepresentation;
@@ -38,6 +38,7 @@ import sonia.scm.api.rest.resources.Simple;
 import java.util.Comparator;
 import java.util.function.Predicate;
 
+import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.when;
@@ -61,11 +62,11 @@ public class CollectionResourceManagerAdapterTest {
   }
 
   @Test
-  public void shouldAcceptDefaultSortByParameter() {
+  public void shouldNotSortByDefault() {
     abstractManagerResource.getAll(0, 1, x -> true, null, true, r -> null);
 
     Comparator<Simple> comparator = comparatorCaptor.getValue();
-    assertTrue(comparator.compare(new Simple("1", null), new Simple("2", null)) > 0);
+    assertNull(comparator);
   }
 
   @Test