Skip to content

Commit

Permalink
#237 Modernize codebase towards modern Java (#240)
Browse files Browse the repository at this point in the history
- applies several migration aids to modernize existing code towards Java 11
- fixes C-style array declarations
- declares several fields as `final` where applicable
- collapses / merges identical catch blocks
- reduces use of deprecated APIs (mostly Hibernate)
- reduces use of generic classes without type parameters
- removes unnecessary String conversions
- removes redundant initializers
- removes unnecessary modifiers of methods declared in interfaces
- removes unnecessary semicolons
- removes unnecessary toString() invocations
  • Loading branch information
mawiesne authored Oct 26, 2023
1 parent c648b2b commit b52e4a9
Show file tree
Hide file tree
Showing 246 changed files with 2,079 additions and 2,671 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.junit.Assert.fail;

public class PerformanceIT implements WikiConstants {

private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,15 +37,15 @@ class PerformanceTest {

private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

private Wikipedia wiki;
private final Wikipedia wiki;

private final Set<Integer> pageIDs;
private List<List<Integer>> randomIdList;
private List<List<String>> randomTitleList;

// determines how many sample pageCycles are run for averaging results
private int maxiCycles;
private int pageCycles;
private final int maxiCycles;
private final int pageCycles;

PerformanceTest(Wikipedia pWiki, int maxiCycles, int pageCycles) throws WikiApiException {
this.wiki = pWiki;
Expand Down Expand Up @@ -112,8 +112,8 @@ void loadPagesAndAccessFieldsTest(String mode) throws WikiApiException {
for (int i=0; i<pageCycles; i++) {

Set<Integer> page = GraphUtilities.getRandomPageSubset(pageIDs, 1);
Iterator it = page.iterator();
int pageID = (Integer) it.next();
Iterator<Integer> it = page.iterator();
int pageID = it.next();
long id = wiki.__getPageHibernateId(pageID);

double startTime = System.currentTimeMillis();
Expand Down Expand Up @@ -172,7 +172,7 @@ private void loadPageAndAccessFields_extern(long id) throws WikiApiException {
* certain number of pages in order to gain efficiency.
* We get the same number of pages from a Wikipedia using
* different buffer sizes and return the performance.
*
* <p>
* For an unbuffered iterator set bufferSize to 1.
*/
void loadPageAndIterate(int numberOfPages, int bufferSize, Wikipedia wiki) {
Expand Down
44 changes: 23 additions & 21 deletions dkpro-jwpl-api/src/main/java/org/dkpro/jwpl/api/Category.java
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,9 @@

public class Category implements WikiConstants {

private CategoryDAO catDAO;
private final CategoryDAO catDAO;
private org.dkpro.jwpl.api.hibernate.Category hibernateCategory;
private Wikipedia wiki;
private final Wikipedia wiki;


/**
Expand Down Expand Up @@ -105,13 +105,13 @@ private void createCategory(Title title) throws WikiPageNotFoundException {
Session session = this.wiki.__getHibernateSession();
session.beginTransaction();

Object returnValue;
Integer returnValue;

String query = "select cat.pageId from Category as cat where cat.name = :name";
if(wiki.getDatabaseConfiguration().supportsCollation()) {
query += Wikipedia.SQL_COLLATION;
}
returnValue = session.createNativeQuery(query)
returnValue = session.createNativeQuery(query, Integer.class)
.setParameter("name", name, StandardBasicTypes.STRING)
.uniqueResult();
session.getTransaction().commit();
Expand All @@ -122,8 +122,7 @@ private void createCategory(Title title) throws WikiPageNotFoundException {
throw new WikiPageNotFoundException("No category with name " + name + " was found.");
}
else {
// now cast it into an integer
int pageID = (Integer) returnValue;
int pageID = returnValue;
createCategory( pageID);
}
}
Expand Down Expand Up @@ -164,10 +163,10 @@ public Set<Category> getParents() {
Session session = this.wiki.__getHibernateSession();
session.beginTransaction();
session.lock(hibernateCategory, LockMode.NONE);
Set<Integer> tmpSet = new HashSet<Integer>(hibernateCategory.getInLinks());
Set<Integer> tmpSet = new HashSet<>(hibernateCategory.getInLinks());
session.getTransaction().commit();

Set<Category> categories = new HashSet<Category>();
Set<Category> categories = new HashSet<>();
for (int pageID : tmpSet) {
categories.add(this.wiki.getCategory(pageID));
}
Expand All @@ -184,13 +183,14 @@ public int getNumberOfParents() {
long id = this.__getId();
Session session = this.wiki.__getHibernateSession();
session.beginTransaction();
Object returnValue = session.createNativeQuery("select count(inLinks) from category_inlinks where id = :id")
String sql = "select count(inLinks) from category_inlinks where id = :id";
Long returnValue = session.createNativeQuery(sql, Long.class)
.setParameter("id", id, StandardBasicTypes.LONG)
.uniqueResult();
session.getTransaction().commit();

if (returnValue != null) {
nrOfInlinks = ((Long) returnValue).intValue();
nrOfInlinks = returnValue.intValue();
}
return nrOfInlinks;
}
Expand All @@ -202,7 +202,7 @@ public Set<Integer> getParentIDs() {
Session session = this.wiki.__getHibernateSession();
session.beginTransaction();
session.lock(hibernateCategory, LockMode.NONE);
Set<Integer> tmpSet = new HashSet<Integer>(hibernateCategory.getInLinks());
Set<Integer> tmpSet = new HashSet<>(hibernateCategory.getInLinks());
session.getTransaction().commit();
return tmpSet;
}
Expand All @@ -214,10 +214,10 @@ public Set<Category> getChildren() {
Session session = this.wiki.__getHibernateSession();
session.beginTransaction();
session.lock(hibernateCategory, LockMode.NONE);
Set<Integer> tmpSet = new HashSet<Integer>(hibernateCategory.getOutLinks());
Set<Integer> tmpSet = new HashSet<>(hibernateCategory.getOutLinks());
session.getTransaction().commit();

Set<Category> categories = new HashSet<Category>();
Set<Category> categories = new HashSet<>();
for (int pageID : tmpSet) {
categories.add(this.wiki.getCategory(pageID));
}
Expand All @@ -234,13 +234,14 @@ public int getNumberOfChildren() {
long id = this.__getId();
Session session = this.wiki.__getHibernateSession();
session.beginTransaction();
Object returnValue = session.createNativeQuery("select count(outLinks) from category_outlinks where id = :id")
String sql = "select count(outLinks) from category_outlinks where id = :id";
Long returnValue = session.createNativeQuery(sql, Long.class)
.setParameter("id", id, StandardBasicTypes.LONG)
.uniqueResult();
session.getTransaction().commit();

if (returnValue != null) {
nrOfOutlinks = ((Long) returnValue).intValue();
nrOfOutlinks = returnValue.intValue();
}
return nrOfOutlinks;
}
Expand All @@ -252,7 +253,7 @@ public Set<Integer> getChildrenIDs() {
Session session = this.wiki.__getHibernateSession();
session.beginTransaction();
session.lock(hibernateCategory, LockMode.NONE);
Set<Integer> tmpSet = new HashSet<Integer>(hibernateCategory.getOutLinks());
Set<Integer> tmpSet = new HashSet<>(hibernateCategory.getOutLinks());
session.getTransaction().commit();
return tmpSet;
}
Expand All @@ -277,7 +278,7 @@ public Title getTitle() throws WikiTitleParsingException {
*/
public Set<Page> getArticles() throws WikiApiException {
Set<Integer> tmpSet = getArticleIds();
Set<Page> pages = new HashSet<Page>();
Set<Page> pages = new HashSet<>();
for (int pageID : tmpSet) {
pages.add(this.wiki.getPage(pageID));
}
Expand All @@ -291,7 +292,7 @@ public Set<Integer> getArticleIds() {
Session session = this.wiki.__getHibernateSession();
session.beginTransaction();
session.lock(hibernateCategory, LockMode.NONE);
Set<Integer> tmpSet = new HashSet<Integer>(hibernateCategory.getPages());
Set<Integer> tmpSet = new HashSet<>(hibernateCategory.getPages());
session.getTransaction().commit();

return tmpSet;
Expand All @@ -307,13 +308,14 @@ public int getNumberOfPages() {
long id = this.__getId();
Session session = this.wiki.__getHibernateSession();
session.beginTransaction();
Object returnValue = session.createNativeQuery("select count(pages) from category_pages where id = :id")
String sql = "select count(pages) from category_pages where id = :id";
Long returnValue = session.createNativeQuery(sql, Long.class)
.setParameter("id", id, StandardBasicTypes.LONG)
.uniqueResult();
session.getTransaction().commit();

if (returnValue != null) {
nrOfPages = ((Long) returnValue).intValue();
nrOfPages = returnValue.intValue();
}
return nrOfPages;
}
Expand Down Expand Up @@ -352,7 +354,7 @@ protected Iterable <Category> getDescendants(int bufferSize) {
* @return Returns the siblings of this category or null, if there are none.
*/
public Set<Category> getSiblings() {
Set<Category> siblings = new HashSet<Category>();
Set<Category> siblings = new HashSet<>();

// add siblings
for (Category parent : this.getParents()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,8 @@
*/
public class CategoryDescendantsIterable implements Iterable<Category> {

private Wikipedia wiki;
private Category startCategory;
private final Wikipedia wiki;
private final Category startCategory;

/**
* The size of the page buffer.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,26 +37,26 @@ public class CategoryDescendantsIterator implements Iterator<Category> {

private final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

private Wikipedia wiki;
private final Wikipedia wiki;

private CategoryBuffer buffer;
private final CategoryBuffer buffer;

/** Contains all category ids that have not been expanded, yet. */
private Set<Integer> notExpandedCategories;
private final Set<Integer> notExpandedCategories;

/** As we do not inspect the whole graph at once now, we need a way to check whether a node was already expanded, to avoid infinite loops. */
private Set<Integer> expandedCategoryIds;
private final Set<Integer> expandedCategoryIds;

public CategoryDescendantsIterator(Wikipedia wiki, int bufferSize, Category startCategory) {
this.wiki = wiki;
buffer = new CategoryBuffer(bufferSize);
notExpandedCategories = new HashSet<Integer>();
notExpandedCategories = new HashSet<>();
// initialize with children of start category
for (Category catItem : startCategory.getChildren()) {
notExpandedCategories.add(catItem.getPageId());
}

expandedCategoryIds = new HashSet<Integer>();
expandedCategoryIds = new HashSet<>();
}

public boolean hasNext(){
Expand All @@ -78,15 +78,15 @@ public void remove() {
*/
class CategoryBuffer{

private List<Category> buffer;
private int maxBufferSize; // the number of pages to be buffered after a query to the database.
private final List<Category> buffer;
private final int maxBufferSize; // the number of pages to be buffered after a query to the database.
private int bufferFillSize; // even a 500 slot buffer can be filled with only 5 elements
private int bufferOffset; // the offset in the buffer
private int dataOffset; // the overall offset in the data

public CategoryBuffer(int bufferSize){
this.maxBufferSize = bufferSize;
this.buffer = new ArrayList<Category>();
this.buffer = new ArrayList<>();
this.bufferFillSize = 0;
this.bufferOffset = 0;
this.dataOffset = 0;
Expand Down Expand Up @@ -142,7 +142,7 @@ private boolean fillBuffer() {
bufferOffset = 0;
bufferFillSize = 0;

List<Integer> queue = new LinkedList<Integer>();
List<Integer> queue = new LinkedList<>();

// add not expanded categories to queue
queue.addAll(notExpandedCategories);
Expand Down
Loading

0 comments on commit b52e4a9

Please sign in to comment.