package org.mercury_im.messenger.core.stores;

import org.jivesoftware.smack.util.PacketParserUtils;
import org.jivesoftware.smack.xml.XmlPullParser;
import org.jivesoftware.smackx.caps.cache.EntityCapsPersistentCache;
import org.jivesoftware.smackx.disco.packet.DiscoverInfo;
import org.mercury_im.messenger.xmpp.model.EntityCapsModel;
import org.mercury_im.messenger.xmpp.repository.EntityCapsRepository;

import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.inject.Inject;

import io.reactivex.disposables.CompositeDisposable;

/**
 * Persistent Entity Capabilities (XEP-0115) cache that keeps parsed {@link DiscoverInfo}
 * results in memory and mirrors them to the database through an {@link EntityCapsRepository}.
 */
public class EntityCapsStore implements EntityCapsPersistentCache {

    private static final Logger LOGGER = Logger.getLogger(EntityCapsStore.class.getName());

    private final EntityCapsRepository entityCapsRepository;
    private final Map<String, DiscoverInfo> discoverInfoMap = new HashMap<>();
    private final CompositeDisposable disposable = new CompositeDisposable();

    @Inject
    public EntityCapsStore(EntityCapsRepository entityCapsRepository) {
        this.entityCapsRepository = entityCapsRepository;
        populateFromDatabase();
    }

    /*
     * Since nodeVers are - if ever - only deleted all at once, but added one by one and never
     * modified, we can simply determine the set of newly added nodeVers, process those and add
     * them to the in-memory cache.
     */
    private void populateFromDatabase() {
        disposable.add(entityCapsRepository.getAll()
                .subscribe(
                        entityCapsModels -> {
                            Map<String, EntityCapsModel> nextEntityCaps =
                                    entityCapsModels.toMap(EntityCapsModel.NODE_VER);

                            // New set of nodeVers
                            Set<String> nextKeys = nextEntityCaps.keySet();
                            // Old set of nodeVers
                            Set<String> previousKeys = discoverInfoMap.keySet();
                            // Keep only the newly added nodeVers. Removing keys through the
                            // key-set view also drops those entries from nextEntityCaps, which
                            // is fine since only the remaining keys are read below.
                            nextKeys.removeAll(previousKeys);

                            for (String key : nextKeys) {
                                // Only add new items. Items themselves cannot change, so we
                                // don't have to deal with changed items.
                                EntityCapsModel addedModel = nextEntityCaps.get(key);
                                DiscoverInfo info;
                                try {
                                    XmlPullParser parser = PacketParserUtils.getParserFor(
                                            new StringReader(addedModel.getXml()));
                                    info = (DiscoverInfo) PacketParserUtils.parseIQ(parser);
                                    discoverInfoMap.put(addedModel.getNodeVer(), info);
                                } catch (Exception e) {
                                    LOGGER.log(Level.SEVERE, "Error parsing EntityCaps: ", e);
                                }
                            }
                        },
                        error -> LOGGER.log(Level.WARNING,
                                "An error occurred while updating the EntityCaps cache.", error)));
    }

    @Override
    public void addDiscoverInfoByNodePersistent(String nodeVer, DiscoverInfo info) {
        EntityCapsModel model = new EntityCapsModel();
        model.setNodeVer(nodeVer);
        model.setXml(info.toXML().toString());

        disposable.add(entityCapsRepository.upsert(model).subscribe(
                success -> LOGGER.log(Level.FINE, "Upserted EntityCaps model " + success),
                error -> LOGGER.log(Level.WARNING, "An error occurred upserting EntityCaps model", error)
        ));
    }

    @Override
    public DiscoverInfo lookup(String nodeVer) {
        LOGGER.log(Level.FINE, "Looking up caps for " + nodeVer + " in cache...");
        DiscoverInfo info = discoverInfoMap.get(nodeVer);
        LOGGER.log(Level.FINE, "Entry found: " + (info != null ? info.toXML().toString() : "null"));
        return info;
    }

    @Override
    public void emptyCache() {
        // Clear the in-memory view as well, so lookups stay consistent with the emptied table.
        discoverInfoMap.clear();
        disposable.add(entityCapsRepository.deleteAll().subscribe(
                success -> LOGGER.log(Level.FINE, "EntityCaps table cleared successfully."),
                error -> LOGGER.log(Level.WARNING,
                        "An error occurred while clearing EntityCaps table.", error)
        ));
    }
}
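
/*
 * Usage sketch (illustrative, not part of the original file): because EntityCapsStore
 * implements Smack's EntityCapsPersistentCache, it can be registered as the process-wide
 * persistent cache before connections are established. The repository instance is assumed
 * to come from the application's dependency injection graph, and the class and method
 * names below are hypothetical.
 */
class EntityCapsStoreUsageExample {

    static void registerPersistentCache(EntityCapsRepository repository) {
        // Construct the store; its constructor eagerly populates the in-memory map
        // from the database.
        EntityCapsStore store = new EntityCapsStore(repository);

        // Hand the store to Smack so parsed disco#info results survive application restarts.
        org.jivesoftware.smackx.caps.EntityCapsManager.setPersistentCache(store);
    }
}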