Mercury-IM/core-old/src/main/java/org/mercury_im/messenger/core/stores/EntityCapsStore.java

package org.mercury_im.messenger.core.stores;

import org.jivesoftware.smack.util.PacketParserUtils;
import org.jivesoftware.smack.xml.XmlPullParser;
import org.jivesoftware.smackx.caps.cache.EntityCapsPersistentCache;
import org.jivesoftware.smackx.disco.packet.DiscoverInfo;
import org.mercury_im.messenger.data.model.EntityCapsModel;
import org.mercury_im.messenger.data.repository.XmppEntityCapsRepository;
import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.inject.Inject;
import io.reactivex.disposables.CompositeDisposable;

public class EntityCapsStore implements EntityCapsPersistentCache {

    private static final Logger LOGGER = Logger.getLogger(EntityCapsStore.class.getName());

    private final XmppEntityCapsRepository entityCapsRepository;
    private final Map<String, DiscoverInfo> discoverInfoMap = new HashMap<>();
    private final CompositeDisposable disposable = new CompositeDisposable();

    @Inject
    public EntityCapsStore(XmppEntityCapsRepository entityCapsRepository) {
        this.entityCapsRepository = entityCapsRepository;
        populateFromDatabase();
    }

    /*
     * Populate the in-memory cache from the database. Since nodeVers are - if ever - only
     * deleted all at once, but added one by one and never modified, we can simply determine
     * the set of newly added nodeVers on every emission, parse those and add them to the
     * in-memory map.
     */
    private void populateFromDatabase() {
        disposable.add(entityCapsRepository.getAll()
                .subscribe(
                        entityCapsModels -> {
                            Map<String, EntityCapsModel> nextEntityCaps =
                                    entityCapsModels.toMap(EntityCapsModel.NODE_VER);
                            // New set of nodeVers
                            Set<String> nextKeys = nextEntityCaps.keySet();
                            // Old set of nodeVers
                            Set<String> previousKeys = discoverInfoMap.keySet();
                            // Keep only the newly added nodeVers
                            nextKeys.removeAll(previousKeys);
                            for (String key : nextKeys) {
                                // Only add new items. Items themselves cannot change, so we
                                // don't have to deal with modified items.
                                EntityCapsModel addedModel = nextEntityCaps.get(key);
                                try {
                                    XmlPullParser parser = PacketParserUtils.getParserFor(
                                            new StringReader(addedModel.getXml()));
                                    DiscoverInfo info = (DiscoverInfo) PacketParserUtils.parseIQ(parser);
                                    discoverInfoMap.put(addedModel.getNodeVer(), info);
                                } catch (Exception e) {
                                    LOGGER.log(Level.SEVERE, "Error parsing EntityCaps: ", e);
                                }
                            }
                        },
                        error -> LOGGER.log(Level.WARNING,
                                "An error occurred while updating the EntityCaps cache.", error)));
    }

    @Override
    public void addDiscoverInfoByNodePersistent(String nodeVer, DiscoverInfo info) {
        EntityCapsModel model = new EntityCapsModel();
        model.setNodeVer(nodeVer);
        model.setXml(info.toXML().toString());
        disposable.add(entityCapsRepository.upsert(model).subscribe(
                success -> LOGGER.log(Level.FINE, "Upserted EntityCaps model " + success),
                error -> LOGGER.log(Level.WARNING, "An error occurred upserting EntityCaps model", error)
        ));
    }
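
    /*
     * Illustration (an assumption, not taken from the original sources): the nodeVer key follows
     * the XEP-0115 "node#ver" convention used by Smack's entity caps support, e.g.
     * "https://example.org/client#<base64 ver hash>", and the persisted XML is the serialized
     * disco#info result produced by info.toXML() above, so populateFromDatabase() can re-parse
     * it on the next start.
     */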

    @Override
    public DiscoverInfo lookup(String nodeVer) {
        LOGGER.log(Level.FINE, "Looking up caps for " + nodeVer + " in cache...");
        DiscoverInfo info = discoverInfoMap.get(nodeVer);
        LOGGER.log(Level.FINE, "Entry found: " + (info != null ? info.toXML().toString() : "null"));
        return info;
    }

    @Override
    public void emptyCache() {
        disposable.add(entityCapsRepository.deleteAll().subscribe(
                success -> {
                    // Also drop the in-memory copies so lookups no longer return deleted entries.
                    discoverInfoMap.clear();
                    LOGGER.log(Level.FINE, "EntityCaps table cleared successfully.");
                },
                error -> LOGGER.log(Level.WARNING, "An error occurred while clearing EntityCaps table.", error)
        ));
    }
}
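
/*
 * Usage sketch (an assumption, not part of the original file): because EntityCapsStore implements
 * Smack's EntityCapsPersistentCache, it can be registered globally via
 * EntityCapsManager.setPersistentCache(...) so that cached caps survive application restarts.
 * The wiring class below is hypothetical and only illustrates that registration.
 */
class EntityCapsStoreUsageSketch {

    static void installPersistentCache(XmppEntityCapsRepository repository) {
        // Build the store; in the real application it is provided via dependency injection (@Inject).
        EntityCapsStore store = new EntityCapsStore(repository);

        // Tell Smack's entity caps subsystem to use the store as its persistent cache.
        org.jivesoftware.smackx.caps.EntityCapsManager.setPersistentCache(store);
    }
}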