
I'm writing a small program that generates a file containing an adjacency matrix, then reads that file, constructs a graph, and performs something like a parallel depth-first search (DFS) on it.

How can I make the DFS part faster? Right now it seems to be doing very little actual work (each thread just sets a node's parent and marks the node as visited).

Is this implementation even ok?

I'm open to any implementation changes and ideas.
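For reference, the adjacency-matrix file format the program writes (constructGraphFile) and reads (constructGraphFromFile) is: the first line holds the node count, and each following line is one row of space-separated 0/1 values. A small example (the values are arbitrary):

4
0 1 0 1
1 0 1 0
0 1 0 0
1 0 0 0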

import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.RecursiveAction;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
public class Graph {
/**
 * A map from node number to a flag indicating whether the node has been visited.
 */
private ConcurrentHashMap<Integer, AtomicBoolean> visited = new ConcurrentHashMap<>();
/**
 * A map containing a list of neighbors for each node. 
 */
private ConcurrentHashMap<Integer, List<Integer>> adjacencyList = new ConcurrentHashMap<>();
/**
 * A map containing a node number and a node object.
 */
private ConcurrentHashMap<Integer,Node> treeNodes = new ConcurrentHashMap<>();
public static void main(String[] args) {
 Options options = new Options();
 setOptions(options); 
 CommandLineParser parser = new DefaultParser();
 CommandLine cmd = null;
 try {
 cmd = parser.parse( options, args);
 } catch (ParseException e) {
 System.exit(-1);
 }
 determinePrintLocation(cmd);
 PrintStream originalStream = System.out;
 Graph graph = new Graph();
 determineGraphCreation(cmd, graph);
 doExecution(cmd, graph);
 shouldWriteTreeToFile(cmd, graph);
}
/**
 * Determines if the tree information from the program execution should be written to a file.
 * @param cmd
 * @param graph
 */
protected static void shouldWriteTreeToFile(CommandLine cmd, Graph graph) {
 if(cmd.hasOption("gf")) {
 String outputFileName = cmd.getOptionValue("gf");
 graph.writeToFile(outputFileName);
 }
}
/**
 * Determines how many threads the algorithm should be executed with.
 * @param cmd
 * @param graph
 */
protected static void doExecution(CommandLine cmd, Graph graph) {
 if(cmd.hasOption("t")) {
 String threadCountString = cmd.getOptionValue("t");
 Integer threadCount = Integer.valueOf(threadCountString);
 graph.execute(threadCount);
 } else {
 graph.execute(1);
 }
}
/**
 * Determines whether the graph should be constructed from a file or generated on the fly.
 * @param cmd
 * @param graph
 */
protected static void determineGraphCreation(CommandLine cmd, Graph graph) {
 if(cmd.hasOption("n")) {
 String nodeCountString = cmd.getOptionValue("n", "1");
 Integer nodeCount = Integer.valueOf(nodeCountString);
 graph.generateGraph(nodeCount);
 } else if (cmd.hasOption("i")) {
 String fileName = cmd.getOptionValue("i");
 graph.constructGraphFromFile(fileName);
 }
}
/**
 * Determines the location of the print file in which the 
 * thread execution time and data should be written.
 * @param cmd
 */
protected static void determinePrintLocation(CommandLine cmd) {
 if(cmd.hasOption("q")) {
 System.setOut(new NullPrintStream());
 } else if(cmd.hasOption("o")) {
 String fileName = cmd.getOptionValue("o", "output_file.txt");
 try {
 System.setOut(new PrintStream(new BufferedOutputStream(new FileOutputStream(fileName)), true));
 } catch (FileNotFoundException e) {
 Logger.getLogger(Graph.class.getName()).log(Level.SEVERE, "Failed to create output stream.", e);
 }
 }
}
/**
 * Sets the command line options.
 * @param options
 */
protected static void setOptions(Options options) {
 options.addOption("n", true, "The number of nodes of the graph.");
 options.addOption("i", true, "The input file for the graph.");
 options.addOption("o", true, "The output file for the application");
 options.addOption("t", true, "The amount of threads to be used");
 options.addOption("q", false, "Determine if there should be output");
 options.addOption("gf",true, "File to which to write the graph information");
}
/**
 * Executes the parallel DFS.
 */
private void execute(int threadCount) { 
 ForkJoinPool pool = new ForkJoinPool(threadCount);
 // check each node, because the graph may not be connected
 System.out.println("Algorithm execution started with " + threadCount + " threads");
 long startTime = System.nanoTime();
 for(Integer node : visited.keySet()) {
 if(visited.get(node).get()) {
 continue;
 }
 DFS dfs = new DFS(node);
 pool.invoke(dfs);
 }
 long endTime = System.nanoTime();
 long elapsedTime = endTime - startTime;
 System.out.println("Algorithm execution took " + elapsedTime + " nanoseconds");
}
/**
 * Writes the graph data to a file.
 * @param fileName
 */
public void writeToFile(String fileName) {
 File file = new File(fileName);
 try(BufferedWriter bwr = new BufferedWriter(new FileWriter(file))) {
 for(Node node : treeNodes.values()) {
 bwr.write("Node number: " + node.getNode());
 bwr.newLine();
 if(node.getParent() != null){
 bwr.write("Node parent: " + node.getParent().getNode());
 bwr.newLine();
 } 
 }
 } catch (IOException ex) {
 Logger.getLogger(Graph.class.getName()).log(Level.SEVERE, "Failed to create output stream for the graph data.", ex);
 }
}
/**
 * Generates a graph in memory.
 * @param nodeCount
 */
private void generateGraph(int nodeCount) {
 ThreadLocalRandom random = ThreadLocalRandom.current();
 visited = new ConcurrentHashMap<>(nodeCount);
 System.out.println("Started graph creation.");
 for(int i=0 ; i < nodeCount ; i++) {
 List<Integer> neighbours = new LinkedList<>();
 for(int j=0; j < nodeCount ; j++) {
 int choice = random.nextInt(2);
 if(choice == 1) {
 neighbours.add(j);
 } 
 }
 adjacencyList.put(i, neighbours);
 visited.put(i, new AtomicBoolean(false));
 treeNodes.put(i, new Node(i));
 }
 System.out.println("Graph created.");
}
/**
 * Construct a graph from an adjacency matrix in a file.
 * @param fileName
 */
public void constructGraphFromFile(String fileName) {
 System.out.println("Construction of graph from file started");
 File file = new File(fileName);
 try(BufferedReader br = new BufferedReader(new FileReader(file))) {
 String numberLine = br.readLine();
 int nodeCount = Integer.valueOf(numberLine);
 visited = new ConcurrentHashMap<>(nodeCount);
 int count = 0;
 for(String line; (line = br.readLine()) != null; ) {
 String[] data = line.split("\\s+");
 List<Integer> neighbours = new LinkedList<>();
 for(int i = 0; i < data.length ; i++) {
 if(data[i].equals("1")) {
 neighbours.add(i);
 }
 }
 adjacencyList.put(count, neighbours);
 treeNodes.put(count, new Node(count));
 visited.put(count, new AtomicBoolean(false));
 count++;
 }
 } catch (IOException ex) {
 Logger.getLogger(Graph.class.getName()).log(Level.SEVERE, "Failed to construct graph from file.", ex);
 return;
 }
 System.out.println("Construction of graph from file finished");
}
private void constructGraphFile(String fileName, int size) {
 System.out.println("Construction of graph file started.");
 File file = new File(fileName);
 try {
 file.createNewFile();
 } catch (IOException ex) {
 Logger.getLogger(Graph.class.getName()).log(Level.SEVERE, "Failed to create file with name: " + fileName, ex);
 }
 try(BufferedWriter bwr = new BufferedWriter(new FileWriter(file))){
 bwr.write(String.valueOf(size));
 bwr.newLine();
 ThreadLocalRandom random = ThreadLocalRandom.current();
 for(int i=0; i < size; i++) {
 StringBuilder builder = new StringBuilder();
 for(int j=0; j < size; j++) {
 builder.append(random.nextInt(2));
 builder.append(" ");
 }
 String trimmed = builder.toString().trim();
 bwr.write(trimmed);
 bwr.newLine(); 
 } 
 } catch (IOException ex) {
 Logger.getLogger(Graph.class.getName()).log(Level.SEVERE, "Failed to construct adjacency matrix file.", ex);
 return;
 }
 System.out.println("Construction of graph file finished.");
}
/**
 * Class extending {@link RecursiveAction} which does the actual computation.
 * @author aleksandar
 *
 */
private class DFS extends RecursiveAction{
 int node;
 public DFS(int node) {
 this.node = node;
 }
 @Override
 protected void compute() {
 String threadName = Thread.currentThread().getName(); 
 AtomicBoolean isVisited = visited.get(node); 
 if(isVisited.getAndSet(true)) {
 return;
 }
 //System.out.println("Executing thread " + threadName + ". For node " + node);
 List<Integer> adjList = adjacencyList.get(node);
 Node parent = treeNodes.get(node);
 for(Integer neighbour : adjList) {
 Node child = treeNodes.get(neighbour);
 Integer childNode = child.getNode();
 if(visited.get(childNode).get()) {
 continue;
 }
 child.setParent(parent);
 DFS dfs = new DFS(neighbour);
 dfs.fork();
 }
 // System.out.println("Thread " + threadName + ". Finished work on node " + node + " and returned to thread pool"); 
 }
}
/**
 * Class representing a node in the graph.
 * @author aleksandar
 *
 */
private class Node {
 private Integer node;
 private Node parent;
 public Node(int node) {
 this.node = node;
 } 
 @Override
 public int hashCode() {
 int hash = 5;
 hash = 79 * hash + Objects.hashCode(this.node);
 return hash;
 }
 @Override
 public boolean equals(Object obj) {
 if (this == obj) {
 return true;
 }
 if (obj == null) {
 return false;
 }
 if (getClass() != obj.getClass()) {
 return false;
 }
 final Node other = (Node) obj;
 if (!Objects.equals(this.node, other.node)) {
 return false;
 }
 return true;
 } 
 public Integer getNode() {
 return node;
 }
 public Node getParent() {
 return parent;
 }
 public void setParent(Node parent) {
 this.parent = parent;
 }
}
}
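For reference, with the options defined in setOptions (and commons-cli on the classpath) a run looks something like this; the file names are just placeholders:

java Graph -i graph.txt -t 4 -o timing.txt -gf tree.txt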

PS: Updated with the latest version. It works with command-line arguments, but for testing you can also call the methods directly. I think the compute method and the whole synchronisation idea can be improved.
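For example, one direction I've been wondering about is forking the child tasks and joining them before returning, so the top-level invoke only comes back once a whole component has been traversed. A rough, untested sketch (same fields as above; the constructor gains a parent parameter, so execute would call new DFS(node, null)):

private class DFS extends RecursiveAction {
    private final int node;
    private final Node parent; // null for the root of a component

    DFS(int node, Node parent) {
        this.node = node;
        this.parent = parent;
    }

    @Override
    protected void compute() {
        // Only the task that flips the flag from false to true owns this node.
        if (visited.get(node).getAndSet(true)) {
            return;
        }
        Node current = treeNodes.get(node);
        if (parent != null) {
            current.setParent(parent);
        }
        List<DFS> subTasks = new LinkedList<>();
        for (Integer neighbour : adjacencyList.get(node)) {
            // Cheap pre-check; the getAndSet above is the real guard.
            if (!visited.get(neighbour).get()) {
                DFS task = new DFS(neighbour, current);
                task.fork();
                subTasks.add(task);
            }
        }
        for (DFS task : subTasks) {
            task.join();
        }
    }
}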

The helper class, if needed:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;
/**
 * Class for redirecting a stream to nowhere.<br>
 * (Something like /dev/null on UNIX)
 */
public class NullPrintStream extends PrintStream {
public NullPrintStream() {
 super(new NullByteArrayOutputStream());
}
private static class NullByteArrayOutputStream extends ByteArrayOutputStream {
@Override
public void write(int b) {
 // do nothing
}
@Override
public void write(byte[] b, int off, int len) {
 // do nothing
}
@Override
public void writeTo(OutputStream out) throws IOException {
 // do nothing
}
}
}
Comments:

  • Is this implementation even OK? Does it work as intended? If it doesn't, you're asking in the wrong place.
  • @Mast Yes, it works correctly: when printing each node and its parent everything is fine, and I even tested it with some visualisation. By "implementation" I meant more along the lines of "is the idea of doing it like this sensible or not?"
