Commit d80ef05f authored by andrey zaytsev's avatar andrey zaytsev
Browse files

FSRecords sharding WIP

parent 49981fc4
Showing with 916 additions and 81 deletions
+916 -81
......@@ -51,15 +51,15 @@ import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.IntPredicate;
/**
* @author max
*/
@SuppressWarnings({"PointlessArithmeticExpression", "HardCodedStringLiteral"})
public class FSRecords {
public class FSRecords implements IFSRecords {
private static final Logger LOG = Logger.getInstance("#com.intellij.vfs.persistent.FSRecords");
public static final boolean weHaveContentHashes = SystemProperties.getBooleanProperty("idea.share.contents", true);
private static final boolean lazyVfsDataCleaning = SystemProperties.getBooleanProperty("idea.lazy.vfs.data.cleaning", true);
static final boolean backgroundVfsFlush = SystemProperties.getBooleanProperty("idea.background.vfs.flush", true);
private static final boolean inlineAttributes = SystemProperties.getBooleanProperty("idea.inline.vfs.attributes", true);
......@@ -68,6 +68,7 @@ public class FSRecords {
private static final boolean useSmallAttrTable = SystemProperties.getBooleanProperty("idea.use.small.attr.table.for.vfs", true);
static final String VFS_FILES_EXTENSION = System.getProperty("idea.vfs.files.extension", ".dat");
private static final boolean ourStoreRootsSeparately = SystemProperties.getBooleanProperty("idea.store.roots.separately", false);
public static boolean weHaveContentHashes = SystemProperties.getBooleanProperty("idea.share.contents", true);
private static final int VERSION = 21 + (weHaveContentHashes ? 0x10:0) + (IOUtil.ourByteBuffersUseNativeByteOrder ? 0x37:0) +
31 + (bulkAttrReadSupport ? 0x27:0) + (inlineAttributes ? 0x31 : 0) +
......@@ -118,6 +119,15 @@ public class FSRecords {
private static final int FREE_RECORD_FLAG = 0x100;
private static final int ALL_VALID_FLAGS = PersistentFS.ALL_VALID_FLAGS | FREE_RECORD_FLAG;
// Default base directory for the VFS record storage files (the IDE caches dir).
static File defaultBasePath() {
return new File(getCachesDir());
}
// Caches directory path; the "caches_dir" system property overrides the default
// "<system>/caches/" location.
static String getCachesDir() {
String dir = System.getProperty("caches_dir"); //TODO: return here
return dir == null ? PathManager.getSystemPath() + "/caches/" : dir;
}
static {
//noinspection ConstantConditions
assert HEADER_SIZE <= RECORD_SIZE;
......@@ -132,7 +142,8 @@ public class FSRecords {
w = lock.writeLock();
}
void writeAttributesToRecord(int id, int parentId, @NotNull FileAttributes attributes, @NotNull String name) {
@Override
public void writeAttributesToRecord(int id, int parentId, @NotNull FileAttributes attributes, @NotNull String name) {
w.lock();
try {
setName(id, name);
......@@ -141,10 +152,10 @@ public class FSRecords {
setLength(id, attributes.isDirectory() ? -1L : attributes.length);
setFlags(id, (attributes.isDirectory() ? PersistentFS.IS_DIRECTORY_FLAG : 0) |
(attributes.isWritable() ? 0 : PersistentFS.IS_READ_ONLY) |
(attributes.isSymLink() ? PersistentFS.IS_SYMLINK : 0) |
(attributes.isSpecial() ? PersistentFS.IS_SPECIAL : 0) |
(attributes.isHidden() ? PersistentFS.IS_HIDDEN : 0), true);
(attributes.isWritable() ? 0 : PersistentFS.IS_READ_ONLY) |
(attributes.isSymLink() ? PersistentFS.IS_SYMLINK : 0) |
(attributes.isSpecial() ? PersistentFS.IS_SPECIAL : 0) |
(attributes.isHidden() ? PersistentFS.IS_HIDDEN : 0), true);
setParent(id, parentId);
}
catch (Throwable e) {
......@@ -164,10 +175,6 @@ public class FSRecords {
return myBaseFile;
}
public static File defaultBasePath() {
return new File(getCachesDir());
}
private boolean myInitialized;
private PersistentStringEnumerator myNames;
......@@ -187,6 +194,7 @@ public class FSRecords {
private final AttrPageAwareCapacityAllocationPolicy REASONABLY_SMALL = new AttrPageAwareCapacityAllocationPolicy();
@Override
public void connect(PagedFileStorage.StorageLockContext lockContext, PersistentStringEnumerator names, FileNameCache fileNameCache) {
w.lock();
try {
......@@ -370,11 +378,6 @@ public class FSRecords {
}
}
public static String getCachesDir() {
String dir = System.getProperty("caches_dir"); //TODO: return here
return dir == null ? PathManager.getSystemPath() + "/caches/" : dir;
}
private void markDirty() {
if (!myDirty) {
myDirty = true;
......@@ -400,6 +403,7 @@ public class FSRecords {
});
}
@Override
public void force() {
w.lock();
try {
......@@ -436,6 +440,7 @@ public class FSRecords {
}
}
@Override
public boolean isDirty() {
return myDirty || myNames.isDirty() || myAttributes.isDirty() || myContents.isDirty() || myRecords.isDirty() ||
myContentHashesEnumerator != null && myContentHashesEnumerator.isDirty();
......@@ -449,6 +454,7 @@ public class FSRecords {
return recordsVersion;
}
@Override
public long getTimestamp() {
return myRecords.getLong(HEADER_TIMESTAMP_OFFSET);
}
......@@ -465,10 +471,6 @@ public class FSRecords {
myRecords.put(id * RECORD_SIZE, ZEROES, 0, RECORD_SIZE);
}
public PersistentStringEnumerator getNames() {
return myNames;
}
private void closeFiles() throws IOException {
if (myFlushingFuture != null) {
myFlushingFuture.cancel(false);
......@@ -517,7 +519,8 @@ public class FSRecords {
return myAttributesList.getId(attId) + FIRST_ATTR_ID_OFFSET;
}
void handleError(@NotNull Throwable e) throws RuntimeException, Error {
@Override
public void handleError(@NotNull Throwable e) throws RuntimeException, Error {
if (!myIsDisposed) {
// No need to forcibly mark VFS corrupted if it is already shut down
if (!myCorrupted && w.tryLock()) { // avoid deadlock if r lock is occupied by current thread
......@@ -533,6 +536,11 @@ public class FSRecords {
throw new RuntimeException(e);
}
@Override
public void handleError(int fileId, @NotNull Throwable e) throws RuntimeException, Error {
handleError(e);
}
private static class AttrPageAwareCapacityAllocationPolicy extends CapacityAllocationPolicy {
boolean myAttrPageRequested;
......@@ -542,6 +550,7 @@ public class FSRecords {
}
}
@Override
public long getCreationTimestamp() {
r.lock();
try {
......@@ -568,6 +577,11 @@ public class FSRecords {
return myAttributes;
}
@Override
public int createChildRecord(int parentId) {
return createRecord();
}
// todo: Address / capacity store in records table, size store with payload
public int createRecord() {
w.lock();
......@@ -601,6 +615,7 @@ public class FSRecords {
private int length() {
return (int)getRecords().length();
}
public int getMaxId() {
r.lock();
try {
......@@ -611,7 +626,8 @@ public class FSRecords {
}
}
void deleteRecordRecursively(int id) {
@Override
public void deleteRecordRecursively(int id) {
w.lock();
try {
incModCount(id);
......@@ -716,8 +732,9 @@ public class FSRecords {
private static final int ROOT_RECORD_ID = 1;
@Override
@NotNull
int[] listRoots() {
public int[] listRoots() {
try {
r.lock();
try {
......@@ -738,7 +755,7 @@ public class FSRecords {
return result.toNativeArray();
}
final DataInputStream input = readAttribute(ROOT_RECORD_ID, ourChildrenAttr);
final DataInputStream input = readAttributeNoLock(ROOT_RECORD_ID, ourChildrenAttr);
if (input == null) return ArrayUtil.EMPTY_INT_ARRAY;
try {
......@@ -777,7 +794,8 @@ public class FSRecords {
}
}
int findRootRecord(@NotNull String rootUrl) {
@Override
public int findRootRecord(@NotNull String rootUrl) {
w.lock();
try {
......@@ -802,9 +820,9 @@ public class FSRecords {
}
}
final int root = getNames().enumerate(rootUrl);
final int root = myNames.enumerate(rootUrl);
final DataInputStream input = readAttribute(ROOT_RECORD_ID, ourChildrenAttr);
final DataInputStream input = readAttributeNoLock(ROOT_RECORD_ID, ourChildrenAttr);
int[] names = ArrayUtil.EMPTY_INT_ARRAY;
int[] ids = ArrayUtil.EMPTY_INT_ARRAY;
......@@ -854,7 +872,8 @@ public class FSRecords {
return -1;
}
void deleteRootRecord(int id) {
@Override
public void deleteRootRecord(int id) {
w.lock();
try {
......@@ -883,7 +902,7 @@ public class FSRecords {
return;
}
final DataInputStream input = readAttribute(ROOT_RECORD_ID, ourChildrenAttr);
final DataInputStream input = readAttributeNoLock(ROOT_RECORD_ID, ourChildrenAttr);
assert input != null;
int[] names;
int[] ids;
......@@ -923,12 +942,13 @@ public class FSRecords {
}
}
@Override
@NotNull
public int[] list(int id) {
try {
r.lock();
try {
final DataInputStream input = readAttribute(id, ourChildrenAttr);
final DataInputStream input = readAttributeNoLock(id, ourChildrenAttr);
if (input == null) return ArrayUtil.EMPTY_INT_ARRAY;
final int count = DataInputOutputUtil.readINT(input);
......@@ -950,31 +970,13 @@ public class FSRecords {
}
}
public static class NameId {
@NotNull
public static final NameId[] EMPTY_ARRAY = new NameId[0];
public final int id;
public final CharSequence name;
public final int nameId;
public NameId(int id, int nameId, @NotNull CharSequence name) {
this.id = id;
this.nameId = nameId;
this.name = name;
}
@Override
public String toString() {
return name + " (" + id + ")";
}
}
@Override
@NotNull
public NameId[] listAll(int parentId) {
try {
r.lock();
try {
final DataInputStream input = readAttribute(parentId, ourChildrenAttr);
final DataInputStream input = readAttributeNoLock(parentId, ourChildrenAttr);
if (input == null) return NameId.EMPTY_ARRAY;
int count = DataInputOutputUtil.readINT(input);
......@@ -999,7 +1001,8 @@ public class FSRecords {
}
}
boolean wereChildrenAccessed(int id) {
@Override
public boolean wereChildrenAccessed(int id) {
try {
r.lock();
try {
......@@ -1014,6 +1017,7 @@ public class FSRecords {
return false;
}
@Override
public void updateList(int id, @NotNull int[] childIds) {
Arrays.sort(childIds);
w.lock();
......@@ -1057,10 +1061,12 @@ public class FSRecords {
myLocalModificationCount++;
}
int getLocalModCount() {
@Override
public int getLocalModCount() {
return myLocalModificationCount; // This is volatile, only modified under Application.runWriteAction() lock.
}
@Override
public int getModCount() {
r.lock();
try {
......@@ -1094,15 +1100,16 @@ public class FSRecords {
}
// returns id, parent(id), parent(parent(id)), ... (already cached id or rootId)
@Override
@NotNull
public TIntArrayList getParents(int id, @NotNull ConcurrentIntObjectMap<?> idCache) {
public TIntArrayList getParents(int id, @NotNull IntPredicate cached) {
TIntArrayList result = new TIntArrayList(10);
r.lock();
try {
int parentId;
do {
result.add(id);
if (idCache.containsKey(id)) {
if (cached.test(id)) {
break;
}
parentId = getRecordInt(id, PARENT_OFFSET);
......@@ -1122,6 +1129,7 @@ public class FSRecords {
return result;
}
@Override
public void setParent(int id, int parentId) {
if (id == parentId) {
LOG.error("Cyclic parent/child relations");
......@@ -1141,6 +1149,7 @@ public class FSRecords {
}
}
@Override
public int getNameId(int id) {
try {
r.lock();
......@@ -1157,11 +1166,12 @@ public class FSRecords {
return -1;
}
@Override
public int getNameId(String name) {
try {
r.lock();
try {
return getNames().enumerate(name);
return myNames.enumerate(name);
}
finally {
r.unlock();
......@@ -1173,10 +1183,12 @@ public class FSRecords {
return -1;
}
@Override
public String getName(int id) {
return getNameSequence(id).toString();
}
@Override
@NotNull
public CharSequence getNameSequence(int id) {
try {
......@@ -1195,11 +1207,12 @@ public class FSRecords {
}
}
@Override
public void setName(int id, @NotNull String name) {
w.lock();
try {
incModCount(id);
int nameId = getNames().enumerate(name);
int nameId = myNames.enumerate(name);
putRecordInt(id, NAME_OFFSET, nameId);
}
catch (Throwable e) {
......@@ -1210,6 +1223,7 @@ public class FSRecords {
}
}
@Override
public int getFlags(int id) {
r.lock();
try {
......@@ -1220,6 +1234,7 @@ public class FSRecords {
}
}
@Override
public void setFlags(int id, int flags, final boolean markAsChange) {
w.lock();
try {
......@@ -1236,6 +1251,7 @@ public class FSRecords {
}
}
@Override
public long getLength(int id) {
r.lock();
try {
......@@ -1246,6 +1262,7 @@ public class FSRecords {
}
}
@Override
public void setLength(int id, long len) {
w.lock();
try {
......@@ -1264,6 +1281,7 @@ public class FSRecords {
}
}
@Override
public long getTimestamp(int id) {
r.lock();
try {
......@@ -1274,6 +1292,7 @@ public class FSRecords {
}
}
@Override
public void setTimestamp(int id, long value) {
w.lock();
try {
......@@ -1292,7 +1311,8 @@ public class FSRecords {
}
}
int getModCount(int id) {
@Override
public int getModCount(int id) {
r.lock();
try {
return getRecordInt(id, MOD_COUNT_OFFSET);
......@@ -1334,6 +1354,7 @@ public class FSRecords {
return id * RECORD_SIZE + offset;
}
@Override
@Nullable
public DataInputStream readContent(int fileId) {
try {
......@@ -1356,8 +1377,9 @@ public class FSRecords {
return null;
}
@Override
@Nullable
DataInputStream readContentById(int contentId) {
public DataInputStream readContentById(int contentId) {
try {
return doReadContentById(contentId);
}
......@@ -1377,12 +1399,13 @@ public class FSRecords {
return stream;
}
@Override
@Nullable
public DataInputStream readAttributeWithLock(int fileId, FileAttribute att) {
public DataInputStream readAttribute(int fileId, FileAttribute att) {
try {
r.lock();
try {
DataInputStream stream = readAttribute(fileId, att);
DataInputStream stream = readAttributeNoLock(fileId, att);
if (stream != null && att.isVersioned()) {
try {
int actualVersion = DataInputOutputUtil.readINT(stream);
......@@ -1410,7 +1433,7 @@ public class FSRecords {
// should be called under r or w lock
@Nullable
private DataInputStream readAttribute(int fileId, FileAttribute attribute) throws IOException {
private DataInputStream readAttributeNoLock(int fileId, FileAttribute attribute) throws IOException {
checkFileIsValid(fileId);
int recordId = getAttributeRecordId(fileId);
......@@ -1543,7 +1566,8 @@ public class FSRecords {
}
}
int acquireFileContent(int fileId) {
@Override
public int acquireFileContent(int fileId) {
w.lock();
try {
int record = getContentRecordId(fileId);
......@@ -1559,7 +1583,8 @@ public class FSRecords {
return -1;
}
void releaseContent(int contentId) {
@Override
public void releaseContent(int contentId) {
w.lock();
try {
RefCountingStorage contentStorage = getContentStorage();
......@@ -1577,6 +1602,7 @@ public class FSRecords {
}
}
@Override
public int getContentId(int fileId) {
try {
r.lock();
......@@ -1593,14 +1619,16 @@ public class FSRecords {
return -1;
}
@Override
@NotNull
DataOutputStream writeContent(int fileId, boolean readOnly) {
public DataOutputStream writeContent(int fileId, boolean readOnly) {
return new ContentOutputStream(fileId, readOnly);
}
private static final MessageDigest myDigest = ContentHashesUtil.createHashDigest();
void writeContent(int fileId, ByteSequence bytes, boolean readOnly) {
@Override
public void writeContent(int fileId, ByteSequence bytes, boolean readOnly) {
try {
writeBytes(fileId, bytes, readOnly);
}
......@@ -1609,7 +1637,8 @@ public class FSRecords {
}
}
int storeUnlinkedContent(byte[] bytes) {
@Override
public int storeUnlinkedContent(byte[] bytes) {
w.lock();
try {
int recordId;
......@@ -1635,6 +1664,7 @@ public class FSRecords {
return -1;
}
@Override
@NotNull
public DataOutputStream writeAttribute(final int fileId, @NotNull FileAttribute att) {
DataOutputStream stream = new AttributeOutputStream(fileId, att);
......@@ -1649,6 +1679,7 @@ public class FSRecords {
return stream;
}
@Override
public void writeBytes(int fileId, ByteSequence bytes, boolean preferFixedSize) throws IOException {
RefCountingStorage contentStorage = getContentStorage();
w.lock();
......@@ -1939,6 +1970,7 @@ public class FSRecords {
}
}
@Override
public void dispose() {
w.lock();
try {
......@@ -1954,6 +1986,7 @@ public class FSRecords {
}
}
@Override
public void invalidateCaches() {
createBrokenMarkerFile(null);
}
......@@ -1990,7 +2023,7 @@ public class FSRecords {
}
private void checkRecordSanity(final int id, final int recordCount, final IntArrayList usedAttributeRecordIds,
final IntArrayList validAttributeIds) {
final IntArrayList validAttributeIds) {
int parentId = getParent(id);
assert parentId >= 0 && parentId < recordCount;
if (parentId > 0 && getParent(parentId) > 0) {
......@@ -2032,7 +2065,7 @@ public class FSRecords {
}
private void checkAttributesSanity(final int attributeRecordId, final IntArrayList usedAttributeRecordIds,
final IntArrayList validAttributeIds) throws IOException {
final IntArrayList validAttributeIds) throws IOException {
assert !usedAttributeRecordIds.contains(attributeRecordId);
usedAttributeRecordIds.add(attributeRecordId);
......
package com.intellij.openapi.vfs.newvfs.persistent;
import com.intellij.openapi.util.io.ByteSequence;
import com.intellij.openapi.util.io.FileAttributes;
import com.intellij.openapi.vfs.newvfs.FileAttribute;
import com.intellij.openapi.vfs.newvfs.impl.FileNameCache;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.DataOutputStream;
import com.intellij.util.io.PagedFileStorage;
import com.intellij.util.io.PersistentStringEnumerator;
import gnu.trove.TIntArrayList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.function.IntPredicate;
public class FSRecordsShard implements IFSRecords {
private final int myShardId;
private final IFSRecords myDelegate;
public FSRecordsShard(int shardId, IFSRecords delegate) {
myShardId = shardId;
myDelegate = delegate;
}
private static int removeShardId(int id) {
return id >> 8;
}
private int addShardId(int id) {
return myShardId & (id << 8);
}
private int[] addShardId(int[] ids) {
int[] res = new int[ids.length];
for (int i = 0; i < ids.length; i++) {
res[i] = addShardId(ids[i]);
}
return res;
}
private static int[] removeShardId(int[] ids) {
int[] res = new int[ids.length];
for (int i = 0; i < ids.length; i++) {
res[i] = removeShardId(ids[i]);
}
return res;
}
@Override
public void writeAttributesToRecord(int id, int parentId, @NotNull FileAttributes attributes, @NotNull String name) {
myDelegate.writeAttributesToRecord(removeShardId(id), removeShardId(parentId), attributes, name);
}
@Override
public void connect(PagedFileStorage.StorageLockContext lockContext, PersistentStringEnumerator names, FileNameCache fileNameCache) {
myDelegate.connect(lockContext, names, fileNameCache);
}
@Override
public void force() {
myDelegate.force();
}
@Override
public boolean isDirty() {
return myDelegate.isDirty();
}
@Override
public long getTimestamp() {
return myDelegate.getTimestamp();
}
@Override
public void handleError(@NotNull Throwable e) throws RuntimeException, Error {
myDelegate.handleError(e);
}
@Override
public void handleError(int fileId, @NotNull Throwable e) throws RuntimeException, Error {
myDelegate.handleError(removeShardId(fileId), e);
}
@Override
public long getCreationTimestamp() {
return myDelegate.getCreationTimestamp();
}
@Override
public int createChildRecord(int parentId) {
return addShardId(myDelegate.createChildRecord(removeShardId(parentId)));
}
@Override
public void deleteRecordRecursively(int id) {
myDelegate.deleteRecordRecursively(removeShardId(id));
}
@NotNull
@Override
public int[] listRoots() {
return addShardId(myDelegate.listRoots());
}
@Override
public int findRootRecord(@NotNull String rootUrl) {
return addShardId(myDelegate.findRootRecord(rootUrl));
}
@Override
public void deleteRootRecord(int id) {
myDelegate.deleteRootRecord(removeShardId(id));
}
@NotNull
@Override
public int[] list(int id) {
return addShardId(myDelegate.list(removeShardId(id)));
}
@NotNull
@Override
public NameId[] listAll(int parentId) {
return ContainerUtil.map2Array(myDelegate.listAll(removeShardId(parentId)), NameId.class, nameId -> nameId.withId(addShardId(nameId.id)));
}
@Override
public boolean wereChildrenAccessed(int id) {
return myDelegate.wereChildrenAccessed(removeShardId(id));
}
@Override
public void updateList(int id, @NotNull int[] childIds) {
myDelegate.updateList(removeShardId(id), removeShardId(childIds));
}
@Override
public int getLocalModCount() {
return myDelegate.getLocalModCount();
}
@Override
public int getModCount() {
return myDelegate.getModCount();
}
@NotNull
@Override
public TIntArrayList getParents(int id, @NotNull IntPredicate cached) {
TIntArrayList parents = myDelegate.getParents(removeShardId(id), i -> cached.test(addShardId(i)));
TIntArrayList res = new TIntArrayList(parents.size());
for (int i = 0; i < parents.size(); ++i) {
res.set(i, parents.getQuick(i));
}
return res;
}
@Override
public void setParent(int id, int parentId) {
myDelegate.setParent(removeShardId(id), removeShardId(parentId));
}
@Override
public int getNameId(int id) {
return myDelegate.getNameId(removeShardId(id));
}
@Override
public int getNameId(String name) {
return myDelegate.getNameId(name);
}
@Override
public String getName(int id) {
return myDelegate.getName(removeShardId(id));
}
@NotNull
@Override
public CharSequence getNameSequence(int id) {
return myDelegate.getNameSequence(removeShardId(id));
}
@Override
public void setName(int id, @NotNull String name) {
myDelegate.setName(removeShardId(id), name);
}
@Override
public int getFlags(int id) {
return myDelegate.getFlags(removeShardId(id));
}
@Override
public void setFlags(int id, int flags, boolean markAsChange) {
myDelegate.setFlags(removeShardId(id), flags, markAsChange);
}
@Override
public long getLength(int id) {
return myDelegate.getLength(removeShardId(id));
}
@Override
public void setLength(int id, long len) {
myDelegate.setLength(removeShardId(id), len);
}
@Override
public long getTimestamp(int id) {
return myDelegate.getTimestamp(removeShardId(id));
}
@Override
public void setTimestamp(int id, long value) {
myDelegate.setTimestamp(removeShardId(id), value);
}
@Override
public int getModCount(int id) {
return myDelegate.getModCount(removeShardId(id));
}
@Nullable
@Override
public DataInputStream readContent(int fileId) {
return myDelegate.readContent(removeShardId(fileId));
}
@Nullable
@Override
public DataInputStream readContentById(int contentId) {
return myDelegate.readContentById(removeShardId(contentId));
}
@Nullable
@Override
public DataInputStream readAttribute(int fileId, FileAttribute att) {
return myDelegate.readAttribute(removeShardId(fileId), att);
}
@Override
public int acquireFileContent(int fileId) {
return addShardId(myDelegate.acquireFileContent(removeShardId(fileId)));
}
@Override
public void releaseContent(int contentId) {
myDelegate.releaseContent(removeShardId(contentId));
}
@Override
public int getContentId(int fileId) {
return addShardId(myDelegate.getContentId(removeShardId(fileId)));
}
@NotNull
@Override
public DataOutputStream writeContent(int fileId, boolean fixedSize) {
return myDelegate.writeContent(removeShardId(fileId), fixedSize);
}
@Override
public void writeContent(int fileId, ByteSequence bytes, boolean fixedSize) {
myDelegate.writeContent(removeShardId(fileId), bytes, fixedSize);
}
@Override
public int storeUnlinkedContent(byte[] bytes) {
return addShardId(myDelegate.storeUnlinkedContent(bytes));
}
@NotNull
@Override
public DataOutputStream writeAttribute(int fileId, @NotNull FileAttribute att) {
return myDelegate.writeAttribute(removeShardId(fileId), att);
}
@Override
public void writeBytes(int fileId, ByteSequence bytes, boolean preferFixedSize) throws IOException {
myDelegate.writeBytes(removeShardId(fileId), bytes, preferFixedSize);
}
@Override
public void dispose() {
myDelegate.dispose();
}
@Override
public void invalidateCaches() {
myDelegate.invalidateCaches();
}
}
\ No newline at end of file
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vfs.newvfs.persistent;
import com.intellij.openapi.util.io.ByteSequence;
import com.intellij.openapi.util.io.FileAttributes;
import com.intellij.openapi.vfs.newvfs.FileAttribute;
import com.intellij.openapi.vfs.newvfs.impl.FileNameCache;
import com.intellij.util.io.DataOutputStream;
import com.intellij.util.io.PagedFileStorage;
import com.intellij.util.io.PersistentStringEnumerator;
import gnu.trove.TIntArrayList;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.function.IntPredicate;
/**
 * Abstraction over the persistent VFS record storage, extracted from {@code FSRecords} so
 * that sharded implementations ({@code FSRecordsShard}, {@code ShardingFSRecords}) can be
 * substituted. Ids are opaque ints; implementations may encode extra information in them.
 */
public interface IFSRecords {
// Writes name, length, flags and parent link for the given record from the file attributes.
void writeAttributesToRecord(int id, int parentId, @NotNull FileAttributes attributes, @NotNull String name);
// Initializes the storage; must be called before any other method.
void connect(PagedFileStorage.StorageLockContext lockContext, PersistentStringEnumerator names, FileNameCache fileNameCache);
// Flushes pending changes to disk.
void force();
boolean isDirty();
// Timestamp stored in the records header.
long getTimestamp();
// Reports a storage error; implementations are expected to throw (see FSRecords).
void handleError(@NotNull Throwable e) throws RuntimeException, Error;
// Variant carrying the file id, so sharded implementations can route to the right shard.
void handleError(int fileId, @NotNull Throwable e) throws RuntimeException, Error;
long getCreationTimestamp();
// todo: Address / capacity store in records table, size store with payload
// Allocates a new record; parentId lets sharded implementations co-locate children.
int createChildRecord(int parentId);
void deleteRecordRecursively(int id);
@NotNull
int[] listRoots();
// Resolves (or registers) the record id for a root URL.
int findRootRecord(@NotNull String rootUrl);
void deleteRootRecord(int id);
// Child record ids of the given directory record.
@NotNull
int[] list(int id);
// Children together with their names and name ids.
@NotNull
NameId[] listAll(int parentId);
boolean wereChildrenAccessed(int id);
// Replaces the child list; implementations may sort childIds.
void updateList(int id, @NotNull int[] childIds);
int getLocalModCount();
int getModCount();
// returns id, parent(id), parent(parent(id)), ... (already cached id or rootId)
@NotNull
TIntArrayList getParents(int id, @NotNull IntPredicate cached);
void setParent(int id, int parentId);
// Name id of the record's name, or enumerates a name into an id.
int getNameId(int id);
int getNameId(String name);
String getName(int id);
@NotNull
CharSequence getNameSequence(int id);
void setName(int id, @NotNull String name);
int getFlags(int id);
void setFlags(int id, int flags, boolean markAsChange);
long getLength(int id);
void setLength(int id, long len);
long getTimestamp(int id);
void setTimestamp(int id, long value);
int getModCount(int id);
// Content access: null when the record has no stored content.
@Nullable
DataInputStream readContent(int fileId);
@Nullable
DataInputStream readContentById(int contentId);
@Nullable
DataInputStream readAttribute(int fileId, FileAttribute att);
// Ref-counted content handles.
int acquireFileContent(int fileId);
void releaseContent(int contentId);
int getContentId(int fileId);
@NotNull
DataOutputStream writeContent(int fileId, boolean fixedSize);
void writeContent(int fileId, ByteSequence bytes, boolean fixedSize);
// Stores content not (yet) linked to any record; returns its content id.
int storeUnlinkedContent(byte[] bytes);
@NotNull
DataOutputStream writeAttribute(int fileId, @NotNull FileAttribute att);
void writeBytes(int fileId, ByteSequence bytes, boolean preferFixedSize) throws IOException;
void dispose();
void invalidateCaches();
// Immutable (record id, name id, name) triple returned by listAll().
class NameId {
@NotNull
public static final NameId[] EMPTY_ARRAY = new NameId[0];
public final int id;
public final CharSequence name;
public final int nameId;
public NameId(int id, int nameId, @NotNull CharSequence name) {
this.id = id;
this.nameId = nameId;
this.name = name;
}
// Copy with a different record id; used by sharding wrappers to re-tag ids.
public NameId withId(int id) {
return new NameId(id, nameId, name);
}
@Override
public String toString() {
return name + " (" + id + ")";
}
}
}
......@@ -72,7 +72,7 @@ public class PersistentFSImpl extends PersistentFS implements ApplicationCompone
private final AtomicBoolean myShutDown = new AtomicBoolean(false);
private volatile int myStructureModificationCount;
private FSRecords myRecords;
private IFSRecords myRecords;
private PersistentStringEnumerator myNames;
private FileNameCache myNamesCache;
......@@ -81,7 +81,7 @@ public class PersistentFSImpl extends PersistentFS implements ApplicationCompone
}
@TestOnly
public FSRecords getRecords() {
public IFSRecords getRecords() {
return myRecords;
}
......@@ -253,7 +253,7 @@ public class PersistentFSImpl extends PersistentFS implements ApplicationCompone
@Nullable
public DataInputStream readAttribute(@NotNull final VirtualFile file, @NotNull final FileAttribute att) {
System.out.println("readAttribute file = [" + file + "], att = [" + att + "]");
return myRecords.readAttributeWithLock(getFileId(file), att);
return myRecords.readAttribute(getFileId(file), att);
}
@Override
......@@ -575,7 +575,7 @@ public class PersistentFSImpl extends PersistentFS implements ApplicationCompone
return FileUtil.loadBytes(contentStream, (int)length);
}
catch (IOException e) {
myRecords.handleError(e);
myRecords.handleError(fileId, e);
return ArrayUtil.EMPTY_BYTE_ARRAY;
}
}
......@@ -829,7 +829,7 @@ public class PersistentFSImpl extends PersistentFS implements ApplicationCompone
@Override
public DataInputStream readAttributeById(int fileId, FileAttribute attr) {
return myRecords.readAttributeWithLock(fileId, attr);
return myRecords.readAttribute(fileId, attr);
}
@Override
......@@ -1016,7 +1016,8 @@ public class PersistentFSImpl extends PersistentFS implements ApplicationCompone
VirtualFileSystemEntry cached = myIdToDirCache.get(id);
if (cached != null) return cached;
TIntArrayList parents = myRecords.getParents(id, myIdToDirCache); //TODO: ids are global in myIdToDirCache
TIntArrayList parents = myRecords.getParents(id, i -> myIdToDirCache.containsKey(i));
// the last element of the parents is either a root or already cached element
int parentId = parents.get(parents.size() - 1);
VirtualFileSystemEntry result = myIdToDirCache.get(parentId);
......@@ -1145,10 +1146,10 @@ public class PersistentFSImpl extends PersistentFS implements ApplicationCompone
}
private int createAndFillRecord(@NotNull NewVirtualFileSystem delegateSystem,
@NotNull VirtualFile delegateFile,
int parentId,
@NotNull FileAttributes attributes) {
final int childId = myRecords.createRecord();
@NotNull VirtualFile delegateFile,
int parentId,
@NotNull FileAttributes attributes) {
final int childId = myRecords.createChildRecord(parentId);
writeAttributesToRecord(childId, parentId, delegateFile, delegateSystem, attributes);
return childId;
}
......
package com.intellij.openapi.vfs.newvfs.persistent;
import com.intellij.openapi.util.io.ByteSequence;
import com.intellij.openapi.util.io.FileAttributes;
import com.intellij.openapi.vfs.newvfs.FileAttribute;
import com.intellij.openapi.vfs.newvfs.impl.FileNameCache;
import com.intellij.util.Function;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.DataOutputStream;
import com.intellij.util.io.PagedFileStorage;
import com.intellij.util.io.PersistentStringEnumerator;
import gnu.trove.TIntArrayList;
import gnu.trove.TIntObjectHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.Collection;
import java.util.function.IntFunction;
import java.util.function.IntPredicate;
public class ShardingFSRecords implements IFSRecords {
private final IntFunction<IFSRecords> myFactory;
private PagedFileStorage.StorageLockContext myContext;
private PersistentStringEnumerator myNames;
private FileNameCache myCache;
private final Object lock = new Object();
private TIntObjectHashMap<IFSRecords> myShards = new TIntObjectHashMap<>();
public ShardingFSRecords(IntFunction<IFSRecords> shardFactory) {
myFactory = shardFactory;
}
/**
 * Returns the shard owning {@code recordId}, lazily creating and connecting it on first
 * use. The shard id lives in the low 8 bits of the record id (see FSRecordsShard).
 */
private IFSRecords getShard(int recordId) {
  // FIX: was "(recordId << 24) >> 24", which sign-extends: any shard id >= 128 came out
  // negative and could never match the non-negative key the factory shard was stored under.
  int shardId = recordId & 0xFF;
  // FIX: TIntObjectHashMap is not thread-safe, so the former unsynchronized fast-path
  // lookup raced with the locked put(); all access now happens under the same lock.
  synchronized (lock) {
    IFSRecords shard = myShards.get(shardId);
    if (shard == null) {
      shard = myFactory.apply(shardId);
      shard.connect(myContext, myNames, myCache);
      myShards.put(shardId, shard);
    }
    return shard;
  }
}
// Snapshot of all shards created so far.
// NOTE(review): reads myShards without holding "lock" while getShard() mutates it under
// the lock — looks like a data race; confirm against call sites.
private Collection<IFSRecords> getShards() {
return ContainerUtil.map2List(myShards.getValues(), Function.ID);
}
// Any shard serves for shard-agnostic queries; shard 1 is created on demand if absent.
private IFSRecords getAnyShard() {
return getShard(1);
}
@Override
public void writeAttributesToRecord(int id, int parentId, @NotNull FileAttributes attributes, @NotNull String name) {
  // Route to the shard that owns the record; the shard wrapper untags the ids itself.
  IFSRecords shard = getShard(id);
  shard.writeAttributesToRecord(id, parentId, attributes, name);
}
@Override
public void connect(PagedFileStorage.StorageLockContext lockContext, PersistentStringEnumerator names, FileNameCache fileNameCache) {
myContext = lockContext;
myNames = names;
myCache = fileNameCache;
}
@Override
public void force() {
for (IFSRecords records : getShards()) {
records.force();
}
}
@Override
public boolean isDirty() {
for (IFSRecords records : getShards()) {
if (records.isDirty()) {
return true;
}
}
return false;
}
@Override
public long getTimestamp() {
return getCreationTimestamp();
}
@Override
public void handleError(@NotNull Throwable e) throws RuntimeException, Error {
}
@Override
public void handleError(int fileId, @NotNull Throwable e) throws RuntimeException, Error {
getShard(fileId).handleError(fileId, e);
}
@Override
public long getCreationTimestamp() {
// TODO: not thread safe!!!
long ts = Long.MAX_VALUE;
for (IFSRecords records : getShards()) {
ts = Math.min(records.getCreationTimestamp(), ts);
}
return ts;
}
@Override
public int createChildRecord(int parentId) {
return getShard(parentId).createChildRecord(parentId);
}
@Override
public void deleteRecordRecursively(int id) {
getShard(id).deleteRecordRecursively(id);
}
@NotNull
@Override
public int[] listRoots() {
TIntArrayList l = new TIntArrayList();
for (IFSRecords records : getShards()) {
l.add(records.listRoots());
}
return l.toNativeArray();
}
@Override
public int findRootRecord(@NotNull String rootUrl) {
for (IFSRecords records : getShards()) {
int record = records.findRootRecord(rootUrl);
if (record > 0) {
return record;
}
}
return -1;
}
@Override
public void deleteRootRecord(int id) {
getShard(id).deleteRootRecord(id);
}
@NotNull
@Override
public int[] list(int id) {
return getShard(id).list(id);
}
@NotNull
@Override
public NameId[] listAll(int parentId) {
return getShard(parentId).listAll(parentId);
}
@Override
public boolean wereChildrenAccessed(int id) {
return getShard(id).wereChildrenAccessed(id);
}
@Override
public void updateList(int id, @NotNull int[] childIds) {
getShard(id).updateList(id, childIds);
}
@Override
public int getLocalModCount() {
int res = 0;
for (IFSRecords records : getShards()) {
res += records.getLocalModCount();
}
return res;
}
@Override
public int getModCount() {
int res = 0;
for (IFSRecords records : getShards()) {
res += records.getModCount();
}
return res;
}
@NotNull
@Override
public TIntArrayList getParents(int id, @NotNull IntPredicate cached) {
return getShard(id).getParents(id, cached);
}
@Override
public void setParent(int id, int parentId) {
getShard(id).setParent(id, parentId);
}
@Override
public int getNameId(int id) {
return getShard(id).getNameId(id);
}
@Override
public int getNameId(String name) {
try {
return myNames.enumerate(name);
}
catch (Throwable e) {
handleError(e);
}
return -1;
}
@Override
public String getName(int id) {
return getShard(id).getName(id);
}
@NotNull
@Override
public CharSequence getNameSequence(int id) {
return getShard(id).getNameSequence(id);
}
@Override
public void setName(int id, @NotNull String name) {
getShard(id).setName(id, name);
}
@Override
public int getFlags(int id) {
return getShard(id).getFlags(id);
}
@Override
public void setFlags(int id, int flags, boolean markAsChange) {
getShard(id).setFlags(id, flags, markAsChange);
}
@Override
public long getLength(int id) {
return getShard(id).getLength(id);
}
@Override
public void setLength(int id, long len) {
getShard(id).setLength(id, len);
}
@Override
public long getTimestamp(int id) {
return getShard(id).getTimestamp(id);
}
@Override
public void setTimestamp(int id, long value) {
getShard(id).setTimestamp(id, value);
}
@Override
public int getModCount(int id) {
return getShard(id).getModCount(id);
}
@Nullable
@Override
public DataInputStream readContent(int fileId) {
return getShard(fileId).readContent(fileId);
}
@Nullable
@Override
public DataInputStream readContentById(int contentId) {
return getShard(contentId).readContentById(contentId);
}
@Nullable
@Override
public DataInputStream readAttribute(int fileId, FileAttribute att) {
return getShard(fileId).readAttribute(fileId, att);
}
@Override
public int acquireFileContent(int fileId) {
return getShard(fileId).acquireFileContent(fileId);
}
@Override
public void releaseContent(int contentId) {
getShard(contentId).releaseContent(contentId);
}
@Override
public int getContentId(int fileId) {
return getShard(fileId).getContentId(fileId);
}
@NotNull
@Override
public DataOutputStream writeContent(int fileId, boolean readOnly) {
return getShard(fileId).writeContent(fileId, readOnly);
}
@Override
public void writeContent(int fileId, ByteSequence bytes, boolean readOnly) {
getShard(fileId).writeContent(fileId, bytes, readOnly);
}
@Override
public int storeUnlinkedContent(byte[] bytes) {
return getAnyShard().storeUnlinkedContent(bytes);
}
@NotNull
@Override
public DataOutputStream writeAttribute(int fileId, @NotNull FileAttribute att) {
return getShard(fileId).writeAttribute(fileId, att);
}
@Override
public void writeBytes(int fileId, ByteSequence bytes, boolean preferFixedSize) throws IOException {
getShard(fileId).writeBytes(fileId, bytes, preferFixedSize);
}
@Override
public void dispose() {
for (IFSRecords records : getShards()) {
records.dispose();
}
}
@Override
public void invalidateCaches() {
for (IFSRecords records : getShards()) {
records.invalidateCaches();
}
}
}
\ No newline at end of file
......@@ -26,7 +26,7 @@ import com.intellij.openapi.vfs.ex.temp.TempFileSystem;
import com.intellij.openapi.vfs.newvfs.FileAttribute;
import com.intellij.openapi.vfs.newvfs.ManagingFS;
import com.intellij.openapi.vfs.newvfs.NewVirtualFile;
import com.intellij.openapi.vfs.newvfs.persistent.FSRecords;
import com.intellij.openapi.vfs.newvfs.persistent.IFSRecords;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFS;
import com.intellij.openapi.vfs.newvfs.persistent.PersistentFSImpl;
import com.intellij.testFramework.LoggedErrorProcessor;
......@@ -45,7 +45,7 @@ import java.util.jar.JarFile;
public class PersistentFsTest extends PlatformTestCase {
private PersistentFS myFs;
private LocalFileSystem myLocalFs;
private FSRecords myRecords;
private IFSRecords myRecords;
@Override
public void setUp() throws Exception {
......
......@@ -18,7 +18,6 @@ package com.intellij.util.indexing.impl;
import com.intellij.util.io.DataExternalizer;
import com.intellij.util.io.KeyDescriptor;
import com.intellij.util.io.PersistentHashMap;
import com.intellij.util.io.PersistentMap;
import org.jetbrains.annotations.NotNull;
import java.io.*;
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment