From da9c85a42fe6eed7191bd69c45ca9aaabbf6d702 Mon Sep 17 00:00:00 2001
From: Nathaniel Davidson
Date: Tue, 10 Sep 2024 12:00:26 -0700
Subject: [PATCH] show logs

---
 build.gradle                                  |   2 +-
 .../com/nucleodb/library/NodeFilterTest.java  | 186 ++++++++++++++++++
 .../library/database/lock/LockManager.java    |   2 +-
 3 files changed, 188 insertions(+), 2 deletions(-)
 create mode 100644 src/integrationTest/java/com/nucleodb/library/NodeFilterTest.java

diff --git a/build.gradle b/build.gradle
index 3645888..1a6c02b 100644
--- a/build.gradle
+++ b/build.gradle
@@ -7,7 +7,7 @@ plugins {
 }
 
 group = 'com.nucleodb'
-version = '1.18.0'
+version = '1.18.1'
 repositories {
     mavenCentral()
 }
diff --git a/src/integrationTest/java/com/nucleodb/library/NodeFilterTest.java b/src/integrationTest/java/com/nucleodb/library/NodeFilterTest.java
new file mode 100644
index 0000000..0956cba
--- /dev/null
+++ b/src/integrationTest/java/com/nucleodb/library/NodeFilterTest.java
@@ -0,0 +1,186 @@
+package com.nucleodb.library;
+
+import com.nucleodb.library.database.modifications.*;
+import com.nucleodb.library.database.tables.connection.Connection;
+import com.nucleodb.library.database.tables.connection.ConnectionHandler;
+import com.nucleodb.library.database.tables.table.DataEntry;
+import com.nucleodb.library.database.tables.table.DataTable;
+import com.nucleodb.library.database.tables.table.NodeFilter;
+import com.nucleodb.library.database.utils.InvalidConnectionException;
+import com.nucleodb.library.database.utils.exceptions.IncorrectDataEntryClassException;
+import com.nucleodb.library.database.utils.exceptions.IncorrectDataEntryObjectException;
+import com.nucleodb.library.database.utils.exceptions.MissingDataEntryConstructorsException;
+import com.nucleodb.library.database.utils.exceptions.ObjectNotSavedException;
+import com.nucleodb.library.models.*;
+import com.nucleodb.library.mqs.kafka.KafkaConfiguration;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import java.beans.IntrospectionException;
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+public class NodeFilterTest {
+  NucleoDB nucleoDB;
+  DataTable authorTable;
+  DataTable bookTable;
+  ConnectionHandler wroteConnections;
+
+  @BeforeEach
+  public void createLocalDB() throws IncorrectDataEntryClassException, MissingDataEntryConstructorsException, IntrospectionException, InvocationTargetException, NoSuchMethodException, InstantiationException, IllegalAccessException, IncorrectDataEntryObjectException, InterruptedException, InvalidConnectionException {
+    nucleoDB = new NucleoDB(
+        NucleoDB.DBType.ALL,
+        c -> {
+          c.getConnectionConfig().setMqsConfiguration(new KafkaConfiguration());
+          c.getConnectionConfig().setLoadSaved(true);
+          c.getConnectionConfig().setJsonExport(true);
+          c.getConnectionConfig().setSaveChanges(true);
+          c.getConnectionConfig().setConnectionFileName("./data/"+ c.getConnectionConfig().getLabel()+".dat");
+          c.getConnectionConfig().setExportInterval(50);
+          c.getConnectionConfig().setSaveInterval(50);
+          c.getConnectionConfig().setNodeFilter(new com.nucleodb.library.database.tables.connection.NodeFilter(){
+            List accept = Arrays.asList("test1", "test2");
+            @Override
+            public boolean create(ConnectionCreate c) {
+              return accept.contains(c.getUuid());
+            }
+
+            @Override
+            public boolean delete(ConnectionDelete d, C existing) {
+              return accept.contains(d.getUuid());
+            }
+
+            @Override
+            public boolean update(ConnectionUpdate u, C existing) {
+              return accept.contains(u.getUuid());
+            }
+
+            @Override
+            public boolean accept(String key) {
+              return accept.contains(key);
+            }
+          });
+        },
+        c -> {
+          c.getDataTableConfig().setMqsConfiguration(new KafkaConfiguration());
+          c.getDataTableConfig().setLoadSave(true);
+          c.getDataTableConfig().setSaveChanges(true);
+          c.getDataTableConfig().setJsonExport(true);
+          c.getDataTableConfig().setTableFileName("./data/"+ c.getDataTableConfig().getTable()+".dat");
+          c.getDataTableConfig().setExportInterval(50);
+          c.getDataTableConfig().setSaveInterval(50);
+          c.getDataTableConfig().setNodeFilter(new NodeFilter(){
+            List accept = Arrays.asList("test1", "test2");
+            @Override
+            public boolean create(Create c) {
+              return accept.contains(c.getKey());
+            }
+
+            @Override
+            public boolean delete(Delete d, T existing) {
+              return accept.contains(d.getKey());
+            }
+
+            @Override
+            public boolean update(Update u, T existing) {
+              return accept.contains(u.getKey());
+            }
+
+            @Override
+            public boolean accept(String key) {
+              return accept.contains(key);
+            }
+          });
+        },
+        c -> {
+          c.setMqsConfiguration(new KafkaConfiguration());
+        },
+        "com.nucleodb.library.models"
+    );
+    authorTable = nucleoDB.getTable(Author.class);
+    bookTable = nucleoDB.getTable(Book.class);
+    wroteConnections = nucleoDB.getConnectionHandler(WroteConnection.class);
+  }
+
+  @AfterEach
+  public void deleteEntries() {
+    try {
+      Thread.sleep(2000);
+    } catch (InterruptedException e) {
+      throw new RuntimeException(e);
+    }
+    authorTable
+        .getEntries()
+        .stream()
+        .map(author -> {
+          try {
+            return author.copy(AuthorDE.class, true);
+          } catch (ObjectNotSavedException e) {
+            throw new RuntimeException(e);
+          }
+        })
+        .collect(Collectors.toSet()).forEach(author -> {
+          try {
+            authorTable.deleteSync(author);
+          } catch (InterruptedException e) {
+            throw new RuntimeException(e);
+          }
+        });
+    bookTable
+        .getEntries()
+        .stream()
+        .map(book -> {
+          try {
+            return book.copy(BookDE.class, true);
+          } catch (ObjectNotSavedException e) {
+            throw new RuntimeException(e);
+          }
+        })
+        .collect(Collectors.toSet()).forEach(book -> {
+          try {
+            bookTable.deleteSync(book);
+          } catch (InterruptedException e) {
+            throw new RuntimeException(e);
+          }
+        });
+    wroteConnections
+        .getAllConnections()
+        .stream()
+        .map(c->c.copy(WroteConnection.class,true))
+        .collect(Collectors.toSet()).forEach(c -> {
+          try {
+            wroteConnections.deleteSync(c);
+          } catch (InterruptedException e) {
+            throw new RuntimeException(e);
+          } catch (IOException e) {
+            throw new RuntimeException(e);
+          }
+        });
+    try {
+      Thread.sleep(2000);
+    } catch (InterruptedException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Test
+  public void checkSaving() throws IncorrectDataEntryObjectException, InterruptedException {
+    AuthorDE edgarAllenPoe = new AuthorDE(new Author("Edgar Allen Poe", "fiction"));
+    edgarAllenPoe.setKey("test1");
+    authorTable.saveSync(edgarAllenPoe);
+    assertEquals(
+        1,
+        authorTable.get(
+            "id",
+            edgarAllenPoe.getKey(),
+            null
+        ).size()
+    );
+    assertEquals(2, authorTable.getEntries().size());
+  }
+
+}
diff --git a/src/main/java/com/nucleodb/library/database/lock/LockManager.java b/src/main/java/com/nucleodb/library/database/lock/LockManager.java
index 11e7f28..d65396e 100644
--- a/src/main/java/com/nucleodb/library/database/lock/LockManager.java
+++ b/src/main/java/com/nucleodb/library/database/lock/LockManager.java
@@ -236,7 +236,7 @@ public void lockAction(LockReference lockReference) {
   }
 
   void log(String key, String msg){
-    logger.log(Level.FINE,String.format("%s: %s", key, msg));
+    logger.log(Level.ALL, String.format("%s: %s", key, msg));
   }
 
   public ConcurrentMap getActiveLocks() {
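
Note on the LockManager change above: in java.util.logging, the level passed to logger.log() is the level of the record itself, and a record is published only if it passes both the logger's level and each handler's level. Level.ALL has the lowest possible value, so messages logged at Level.ALL appear only when the logger and its handlers are themselves opened to ALL (the default console handler stops at INFO). Below is a minimal sketch of such a configuration; the logger name is assumed to follow the LockManager class name, which is a guess and not taken from the patch.

    import java.util.logging.ConsoleHandler;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    // Hypothetical test/bootstrap setup: open the logger and a console handler
    // all the way down so records logged at Level.ALL (and FINE) are printed.
    public class LockLoggingSetup {
      public static void main(String[] args) {
        Logger logger = Logger.getLogger("com.nucleodb.library.database.lock.LockManager");
        logger.setLevel(Level.ALL);          // accept every record level on the logger

        ConsoleHandler handler = new ConsoleHandler();
        handler.setLevel(Level.ALL);         // handlers filter too; open them as well
        logger.addHandler(handler);

        // Mirrors the call shape used in LockManager.log(key, msg).
        logger.log(Level.ALL, String.format("%s: %s", "some-key", "lock acquired"));
      }
    }

Without a configuration like this (or an equivalent logging.properties), a record logged at Level.ALL is filtered out in more configurations than one logged at Level.FINE, since ALL sits below FINE in the level ordering.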