
Commit a03dc89
removed old MySQL dependencies
porterbot committed Mar 10, 2021
1 parent bf82ae9 commit a03dc89
Showing 5 changed files with 2 additions and 100 deletions.
2 changes: 1 addition & 1 deletion build.gradle
@@ -7,7 +7,7 @@ allprojects {
     apply plugin: 'maven-publish'
     apply plugin: 'idea'
     group = 'org.janelia.jacs-model'
-    version = '2.41.2'
+    version = '2.42'
 }
 
 subprojects {
3 changes: 0 additions & 3 deletions TmNeuronMetadataDao.java
@@ -20,12 +20,9 @@ public interface TmNeuronMetadataDao extends DomainObjectDao<TmNeuronMetadata> {
     List<TmNeuronMetadata> getTmNeuronMetadataByWorkspaceId(TmWorkspace workspace, String subjectKey, long offset, int length);
     Iterable<TmNeuronMetadata> streamWorkspaceNeurons(TmWorkspace workspace, String subjectKey, long offset, int length);
     List<TmNeuronMetadata> getTmNeuronMetadataByNeuronIds(TmWorkspace workspace, List<Long> neuronList);
-    List<Pair<TmNeuronMetadata, InputStream>> getTmNeuronsMetadataWithPointStreamsByWorkspaceId(
-            TmWorkspace workspace, String subjectKey, long offset, int length);
     boolean removeTmNeuron(Long neuronId, boolean isLarge, TmWorkspace workspace, String subjectKey);
     void updateNeuronStyles(BulkNeuronStyleUpdate bulkNeuronStyleUpdate, TmWorkspace workspace, String subjectKey);
     void removeEmptyNeuronsInWorkspace(TmWorkspace workspace, String subjectKey);
-    void bulkMigrateNeuronsInWorkspace(TmWorkspace workspace, Collection<TmNeuronMetadata> neurons, String subjectKey);
     void updateNeuronTagsForNeurons(TmWorkspace workspace, List<Long> neuronIds, List<String> tags, boolean tagState, String subjectKey);
     Long getNeuronCountsForWorkspace(TmWorkspace workspace, String subjectKey);
     TmNeuronMetadata saveNeuronMetadata(TmWorkspace workspace, TmNeuronMetadata neuron, String subjectKey);
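Both deletions above remove the last traces of the MySQL-era point store: neuron point data no longer arrives as a separate InputStream paired with each TmNeuronMetadata, and workspaces no longer need a one-shot bulk migration. A minimal caller sketch, assuming hypothetical dao, workspace, and subjectKey variables in scope, of paging neurons through the surviving streamWorkspaceNeurons method:

// Sketch only: dao, workspace, and subjectKey are assumed to be in scope.
// Point data now travels inside each TmNeuronMetadata (as TmNeuronData),
// so no separate stream lookup is required.
Iterable<TmNeuronMetadata> neurons =
        dao.streamWorkspaceNeurons(workspace, subjectKey, 0, 1000);
for (TmNeuronMetadata neuron : neurons) {
    TmNeuronData pointData = neuron.getNeuronData(); // embedded point data
    // ... process pointData ...
}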
84 changes: 0 additions & 84 deletions (Mongo implementation of TmNeuronMetadataDao)
@@ -22,12 +22,9 @@
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.time.StopWatch;
-import org.apache.commons.lang3.tuple.ImmutablePair;
-import org.apache.commons.lang3.tuple.Pair;
 import org.bson.conversions.Bson;
 import org.janelia.model.access.domain.DomainDAO;
 import org.janelia.model.access.domain.dao.*;
-import org.janelia.model.domain.DomainUtils;
 import org.janelia.model.domain.Reference;
 import org.janelia.model.domain.tiledMicroscope.*;
 import org.janelia.model.access.domain.TimebasedIdentifierGenerator;
@@ -357,70 +354,6 @@ public void removeEmptyNeuronsInWorkspace(TmWorkspace workspace, String subjectKey) {
         ));
     }
 
-    @Override
-    public void bulkMigrateNeuronsInWorkspace(TmWorkspace workspace, Collection<TmNeuronMetadata> neurons, String subjectKey) {
-        String collectionName = MongoDaoHelper.findOrCreateCappedCollection(this, mongoDatabase,
-                "tmNeuron", 20000000000L, TmNeuronMetadata.class);
-        try {
-            workspace.setNeuronCollection(collectionName);
-            workspace = domainDao.save(subjectKey, workspace);
-        } catch (Exception e) {
-            LOG.error("ERROR SAVING WORKSPACE {} - Issue with Neuron Collection Key", workspace.getId(), e);
-            throw new RuntimeException("ERROR SAVING WORKSPACE" + workspace.getId() + " - Issue with Neuron Collection Key");
-        }
-
-        MongoCollection<TmNeuronMetadata> copyLoc = getNeuronCollection(collectionName);
-        if (neurons==null || neurons.isEmpty())
-            return;
-        List<TmNeuronMetadata> neuronList = new ArrayList<>(neurons);
-        try {
-            int prevCount = 0;
-            for (int i=0; i<neuronList.size(); i=i+1000) {
-                if (neuronList.size()>(i+1000)) {
-                    copyLoc.insertMany(neuronList.subList(i, i + 1000));
-                    LOG.info("Inserted {} neurons", i+1000);
-                }
-                else {
-                    copyLoc.insertMany(neuronList.subList(i, neuronList.size()));
-                    LOG.info("Inserted {} neurons", neuronList.size());
-                }
-            }
-            LOG.info("Finished workspace {}", workspace.getId());
-        } catch (org.bson.BsonMaximumSizeExceededException e) {
-            // one or more of the documents is too big
-            // save them to gridfs and then try again
-            int count = 0;
-            List<Long> largeNeurons = new ArrayList<>();
-            List<TmNeuronMetadata> truncatedList = new ArrayList<>();
-            boolean hitFirstLarge = false;
-            for (TmNeuronMetadata neuron: neuronList) {
-                if (hitFirstLarge)
-                    truncatedList.add(neuron);
-                // remove records that were saved before batch failed
-                MongoDaoHelper.deleteMatchingRecords(copyLoc,
-                        Filters.and(MongoDaoHelper.createFilterById(neuron.getId()),
-                                permissionsHelper.createWritePermissionFilterForSubjectKey(subjectKey)));
-                boolean isLarge = checkLargeNeuron(neuron);
-                TmNeuronData pointData = neuron.getNeuronData();
-                if (isLarge) {
-                    hitFirstLarge = true;
-                    largeNeurons.add(neuron.getId());
-                    neuron.setNeuronData(null);
-                    saveLargeNeuronPointData(neuron.getId(), pointData);
-                    neuron.setLargeNeuron(isLarge);
-                }
-            }
-
-            try {
-                LOG.info("Large neurons in this batch are {}", largeNeurons);
-                if (!truncatedList.isEmpty())
-                    copyLoc.insertMany(truncatedList);
-            } catch (org.bson.BsonMaximumSizeExceededException ee) {
-                LOG.error("ERROR PROCESSING {} - Issue with Large Neurons", workspace.getId(), e);
-            }
-        }
-    }
-
     @Override
     public void updateNeuronTagsForNeurons(TmWorkspace workspace, List<Long> neuronIds, List<String> tags, boolean tagState,
                                            String subjectKey) {
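The deleted bulkMigrateNeuronsInWorkspace method is a textbook workaround for Mongo's 16 MB BSON document limit: insert in chunks of 1000, and when insertMany fails with BsonMaximumSizeExceededException, clean up the partially written chunk (insertMany is not atomic), offload oversized point data to a side store (GridFS, per the inline comment), and reinsert the remainder. A stripped-down sketch of that pattern, with hypothetical isTooLarge and offload helpers standing in for checkLargeNeuron and saveLargeNeuronPointData:

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.Filters;
import org.bson.Document;
import java.util.List;

// Sketch, not the DAO's code: isTooLarge and offload are hypothetical.
static void insertInChunks(MongoCollection<Document> collection, List<Document> docs) {
    for (int i = 0; i < docs.size(); i += 1000) {
        List<Document> chunk = docs.subList(i, Math.min(i + 1000, docs.size()));
        try {
            collection.insertMany(chunk);
        } catch (org.bson.BsonMaximumSizeExceededException e) {
            // insertMany is not atomic: part of the chunk may already be stored,
            // so delete before retrying one document at a time
            for (Document doc : chunk) {
                collection.deleteOne(Filters.eq("_id", doc.get("_id")));
                if (isTooLarge(doc)) {
                    offload(doc); // shrink the document, e.g. move bulky fields to GridFS
                }
                collection.insertOne(doc);
            }
        }
    }
}

static boolean isTooLarge(Document doc) { /* hypothetical size check */ return false; }
static void offload(Document doc) { /* hypothetical, e.g. GridFS upload */ }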
@@ -459,21 +392,4 @@ public Long getNeuronCountsForWorkspace(TmWorkspace workspace, String subjectKey) {
     private MongoCollection<TmNeuronMetadata> getNeuronCollection(String collectionName) {
         return mongoDatabase.getCollection(collectionName, TmNeuronMetadata.class);
     }
-
-    @Override
-    public List<Pair<TmNeuronMetadata, InputStream>> getTmNeuronsMetadataWithPointStreamsByWorkspaceId(TmWorkspace workspace,
-                                                                                                       String subjectKey,
-                                                                                                       long offset, int length) {
-        workspace.setNeuronCollection("tmNeuron");
-        List<TmNeuronMetadata> workspaceNeurons = getTmNeuronMetadataByWorkspaceId(workspace, subjectKey, offset, length);
-        if (workspaceNeurons.isEmpty()) {
-            return Collections.emptyList();
-        }
-        Map<Long, TmNeuronMetadata> indexedWorkspaceNeurons = DomainUtils.getMapById(workspaceNeurons);
-        Map<Long, InputStream> neuronsPointStreams = tmNeuronBufferDao.streamNeuronPointsByWorkspaceId(indexedWorkspaceNeurons.keySet(), workspace.getId());
-
-        return workspaceNeurons.stream()
-                .map(neuron -> ImmutablePair.of(neuron, neuronsPointStreams.get(neuron.getId())))
-                .collect(Collectors.toList());
-    }
 }
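The deleted method above follows a common join shape: page the metadata, index it by id, fetch the related blobs keyed by those ids (here from tmNeuronBufferDao), and zip the two. A generic sketch of the same shape, with a hypothetical loadPointStreams lookup in place of the buffer DAO:

import java.io.InputStream;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.Pair;

// Sketch of the id-keyed join the deleted method performed.
static List<Pair<TmNeuronMetadata, InputStream>> joinWithStreams(List<TmNeuronMetadata> neurons) {
    Map<Long, TmNeuronMetadata> byId = neurons.stream()
            .collect(Collectors.toMap(TmNeuronMetadata::getId, Function.identity()));
    Map<Long, InputStream> streamsById = loadPointStreams(byId.keySet()); // hypothetical
    return neurons.stream()
            .map(n -> Pair.of(n, streamsById.get(n.getId())))
            .collect(Collectors.toList());
}

static Map<Long, InputStream> loadPointStreams(Set<Long> ids) { /* hypothetical */ return Map.of(); }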
12 changes: 0 additions & 12 deletions (delegating wrapper for TmNeuronMetadataDao)
@@ -77,12 +77,6 @@ public void removeEmptyNeuronsInWorkspace(TmWorkspace workspace, String subjectKey) {
         tmNeuronMetadataDao.removeEmptyNeuronsInWorkspace(workspace, subjectKey);
     }
 
-    @Override
-    public void bulkMigrateNeuronsInWorkspace(TmWorkspace workspace, Collection<TmNeuronMetadata> neurons,
-                                              String subjectKey) {
-        tmNeuronMetadataDao.bulkMigrateNeuronsInWorkspace(workspace, neurons, subjectKey);
-    }
-
     @Override
     public void updateNeuronTagsForNeurons(TmWorkspace workspace, List<Long> neuronIds, List<String> tags, boolean tagState,
                                            String subjectKey) {
@@ -109,10 +103,4 @@ public List<TmOperation> getOperations(Long workspaceId, Long neuronId, Date startDate, Date endDate) {
         return tmNeuronMetadataDao.getOperations(workspaceId, neuronId, startDate, endDate);
     }
 
-    @Override
-    public List<Pair<TmNeuronMetadata, InputStream>> getTmNeuronsMetadataWithPointStreamsByWorkspaceId(TmWorkspace workspace,
-                                                                                                       String subjectKey, long offset, int length) {
-        return tmNeuronMetadataDao.getTmNeuronsMetadataWithPointStreamsByWorkspaceId(workspace, subjectKey, offset, length);
-    }
-
 }
1 change: 1 addition & 0 deletions (tiled microscope neuron domain class)
@@ -192,6 +192,7 @@ public void initNeuronData() {
         for (Long geoId : neuronData.getGeoAnnotationMap().keySet()) {
             TmGeoAnnotation geoAnnotation = neuronData.getGeoAnnotationMap().get(geoId);
             geoAnnotation.setNeuronId(getId());
+            geoAnnotation.setModificationDate(geoAnnotation.getCreationDate());
         }
 
     }
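For context on the single addition in this commit: initNeuronData() re-links deserialized geo annotations to their parent neuron, and now also seeds each annotation's modification date from its creation date so loaded geometry always carries a usable timestamp. A hypothetical spot check of the resulting invariant, assuming a neuron variable and the matching getters exist:

// Hypothetical check of the invariant initNeuronData() now establishes;
// "neuron" and the getters are assumed.
TmGeoAnnotation a = neuron.getNeuronData().getGeoAnnotationMap().values().iterator().next();
assert a.getNeuronId().equals(neuron.getId());
assert a.getModificationDate().equals(a.getCreationDate());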
